57 changes: 34 additions & 23 deletions api/app.py
@@ -2,8 +2,35 @@
import eventlet
eventlet.monkey_patch()

# CRITICAL: Configure logging BEFORE any other imports to prevent auto-basicConfig
import os
import sys
import logging

# Set up logging to a user-writable directory
LOG_DIR = os.path.join(os.getenv('DATA_DIR', os.path.expanduser("~")), "PrintQueData")
os.makedirs(LOG_DIR, exist_ok=True)
LOG_FILE = os.path.join(LOG_DIR, "app.log")

# Clear any auto-configured handlers from the root logger
root_logger = logging.getLogger()
root_logger.handlers.clear()
root_logger.setLevel(logging.DEBUG) # Allow all through, handlers decide

# Set up logging with file handler at DEBUG (captures everything)
file_handler = logging.FileHandler(LOG_FILE)
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
root_logger.addHandler(file_handler)

# Create console handler - will be updated with the saved level once the logger module is imported
# Start with INFO as the safe default to prevent DEBUG spam during import
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.INFO)
console_handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
root_logger.addHandler(console_handler)

# NOW import other modules (logging is already configured)
import webbrowser
import threading
from flask import Flask, send_from_directory, send_file
@@ -14,34 +41,17 @@
from services.printer_manager import start_background_tasks, close_connection_pool
from utils.config import Config
import asyncio
import logging
import time
import atexit
from utils.console_capture import console_capture

# Set up logging to a user-writable directory
LOG_DIR = os.path.join(os.getenv('DATA_DIR', os.path.expanduser("~")), "PrintQueData")
os.makedirs(LOG_DIR, exist_ok=True)
LOG_FILE = os.path.join(LOG_DIR, "app.log")

# Import log level configuration from logger module
from utils.logger import get_console_log_level, LOG_LEVELS
# Import log level configuration and update console handler with saved level
from utils.logger import get_console_log_level, LOG_LEVELS, DEFAULT_CONSOLE_LEVEL

# Set up logging with file handler at DEBUG (captures everything) and console at configured level
file_handler = logging.FileHandler(LOG_FILE)
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))

# Create app's console handler using saved log level
app_console_handler = logging.StreamHandler()
app_console_handler.setLevel(LOG_LEVELS.get(get_console_log_level(), logging.INFO))
app_console_handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))

# Configure root logger - handlers filter by their own levels
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG) # Allow all through, handlers decide
root_logger.addHandler(file_handler)
root_logger.addHandler(app_console_handler)
# Update console handler with the saved log level (default INFO)
saved_level = LOG_LEVELS.get(get_console_log_level(), LOG_LEVELS[DEFAULT_CONSOLE_LEVEL])
console_handler.setLevel(saved_level)
logging.info(f"Console log level set to: {get_console_log_level()}")

# Initialize the app with static and templates folders
# Handle both development and packaged (PyInstaller) environments
@@ -59,6 +69,7 @@

app = Flask(__name__, static_folder=static_folder, static_url_path='/static', template_folder=template_folder)
app.config['SECRET_KEY'] = Config.SECRET_KEY
app.config['APP_VERSION'] = Config.APP_VERSION # From api/__version__.py (updated by CI)
app.config['UPLOAD_FOLDER'] = os.path.join(LOG_DIR, "uploads") # Writable upload folder
app.config['LOG_DIR'] = LOG_DIR
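
Because the logging change is split across two hunks, the intent can be hard to see in diff form: handlers are installed before any project imports (console pinned to a safe INFO), and the console level is only adjusted once `utils.logger` can be imported. A minimal standalone sketch of that two-phase pattern, with the persisted-setting lookup stubbed out (the `get_console_log_level` / `LOG_LEVELS` stubs below are placeholders, not the real helpers):

```python
import logging
import os

# Phase 1: install handlers before importing any app modules, so nothing can
# sneak in a logging.basicConfig() with its own defaults.
LOG_FILE = os.path.join(os.path.expanduser("~"), "app.log")

root = logging.getLogger()
root.handlers.clear()
root.setLevel(logging.DEBUG)            # root passes everything; handlers filter

file_handler = logging.FileHandler(LOG_FILE)
file_handler.setLevel(logging.DEBUG)    # file log always captures full detail
root.addHandler(file_handler)

console = logging.StreamHandler()
console.setLevel(logging.INFO)          # safe default while modules import
root.addHandler(console)

# Phase 2: after the app modules are importable, apply the user's saved level.
LOG_LEVELS = {"DEBUG": logging.DEBUG, "INFO": logging.INFO, "WARNING": logging.WARNING}

def get_console_log_level():
    return "DEBUG"                      # stand-in for the persisted setting

console.setLevel(LOG_LEVELS.get(get_console_log_level(), logging.INFO))
logging.info("Console log level set to: %s", get_console_log_level())
```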

34 changes: 28 additions & 6 deletions api/routes/__init__.py
@@ -197,8 +197,8 @@ def api_system_info():
import time
import psutil

# Get app version (you can update this as needed)
version = app.config.get('APP_VERSION', '1.0.0')
# Get app version from api/__version__.py (single source of truth, updated by CI)
version = app.config.get('APP_VERSION', '0.0.0')

# Get uptime - use process start time
process = psutil.Process()
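
`APP_VERSION` now has a single source. The actual `api/__version__.py` and `Config` wiring sit outside this diff, so the following is only a plausible sketch of how they might connect (file names are taken from the comments above; the version string and attribute layout are assumptions):

```python
# utils/config.py -- illustrative sketch; only the APP_VERSION piece is shown.
# api/__version__.py is expected to hold a single line such as
#     __version__ = "1.4.2"
# which the release workflow rewrites on each tagged build.
from api.__version__ import __version__


class Config:
    APP_VERSION = __version__   # surfaced to the UI via app.config['APP_VERSION']
```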
@@ -390,6 +390,17 @@ def api_add_printer():
PRINTERS.append(new_printer)
save_data(PRINTERS_FILE, PRINTERS)

# Try to connect Bambu printers immediately (same as form-based add)
if printer_type == 'bambu':
try:
from services.bambu_handler import connect_bambu_printer
if connect_bambu_printer(new_printer):
logging.info(f"Bambu printer {name} connected successfully")
else:
logging.warning(f"Bambu printer {name} added but MQTT connection failed. Will retry automatically.")
except Exception as e:
logging.error(f"Error connecting Bambu printer {name}: {str(e)}")

return jsonify({'success': True, 'message': f'Printer {name} added successfully'})
except Exception as e:
logging.error(f"Error in api_add_printer: {str(e)}")
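
The immediate-connect attempt only changes what happens right after a JSON add: a failed MQTT handshake is logged and left to the automatic retry rather than failing the request. A hedged client-side sketch of that call (the endpoint URL and field names are assumptions, not taken from this diff):

```python
import requests

# Assumed endpoint and payload shape -- adjust to the real route and serializer.
resp = requests.post(
    "http://localhost:5000/api/printers",
    json={
        "name": "Bambu-X1-01",
        "type": "bambu",            # assumed field name; drives the printer_type branch
        "ip": "192.168.1.50",
        "access_code": "12345678",
        "serial": "00M00A000000000",
    },
    timeout=10,
)
# The add succeeds even if the MQTT connect fails; the warning lands in the log instead.
print(resp.json())   # e.g. {'success': True, 'message': 'Printer Bambu-X1-01 added successfully'}
```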
@@ -446,7 +457,7 @@ def api_mark_printer_ready(printer_name):
with WriteLock(printers_rwlock):
for printer in PRINTERS:
if printer['name'] == printer_name:
if printer['state'] in ['FINISHED', 'EJECTING']:
if printer['state'] in ['FINISHED', 'EJECTING', 'COOLING']:
printer['state'] = 'READY'
printer['status'] = 'Ready'
printer['manually_set'] = True
@@ -455,11 +466,14 @@
printer['file'] = None
printer['job_id'] = None
printer['order_id'] = None
# Clear cooldown state if skipping cooldown
printer['cooldown_target_temp'] = None
printer['cooldown_order_id'] = None
save_data(PRINTERS_FILE, PRINTERS)
start_background_distribution(socketio, app)
return jsonify({'success': True})
else:
return jsonify({'error': 'Printer is not in FINISHED or EJECTING state'}), 400
return jsonify({'error': 'Printer is not in FINISHED, EJECTING, or COOLING state'}), 400
return jsonify({'error': 'Printer not found'}), 404
except Exception as e:
return jsonify({'error': str(e)}), 500
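
Marking a printer ready now also applies while it is COOLING, which doubles as a "skip cooldown" action since the cooldown fields are cleared before distribution restarts. A hedged sketch of the call (the route path is an assumption):

```python
import requests

printer = "Prusa-MK4-02"
# Assumed URL shape -- the actual route registration is not visible in this diff.
resp = requests.post(f"http://localhost:5000/api/printers/{printer}/ready", timeout=10)

# FINISHED, EJECTING, and now COOLING all return {'success': True};
# any other state yields the 400 message shown above.
print(resp.status_code, resp.json())
```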
@@ -506,7 +520,7 @@ def api_create_order():
if not any(filename.lower().endswith(ext) for ext in valid_extensions):
return jsonify({'error': 'Invalid file type. Must be .gcode, .3mf, or .stl'}), 400

quantity = int(request.form.get('quantity', 1))
quantity = int(request.form.get('quantity', 0))

# Handle optional order name
order_name = request.form.get('name', '').strip()
@@ -597,9 +611,13 @@ def api_create_order():
# Trigger distribution
start_background_distribution(socketio, app)

message = (
'Order added to library (set quantity to start printing)' if quantity == 0
else f'Order created for {quantity} print(s) of {filename}'
)
return jsonify({
'success': True,
'message': f'Order created for {quantity} print(s) of {filename}',
'message': message,
'order_id': order_id
})
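
With the default quantity now 0, an upload that omits the quantity becomes a library entry: it is stored, but nothing prints until a quantity is set later (see the update sketch after the next hunk). A hedged sketch of the create step (endpoint URL and file name are assumptions):

```python
import requests

# Assumed endpoint -- the route path itself is not shown in this diff.
with open("bracket.gcode", "rb") as fh:
    resp = requests.post(
        "http://localhost:5000/api/orders",
        files={"file": fh},
        data={"quantity": "0", "name": "Bracket v2"},   # 0 => library entry, nothing prints yet
        timeout=30,
    )

print(resp.json()["message"])   # 'Order added to library (set quantity to start printing)'
```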

@@ -624,16 +642,20 @@
"""API: Update an order"""
try:
data = request.get_json()
quantity_updated = False
with SafeLock(orders_lock):
for order in ORDERS:
if order.get('id') == order_id:
if 'quantity' in data:
order['quantity'] = int(data['quantity'])
quantity_updated = True
if 'groups' in data:
order['groups'] = data['groups']
if 'name' in data:
order['name'] = data['name'].strip() if data['name'] else None
save_data(ORDERS_FILE, ORDERS)
if quantity_updated and order.get('quantity', 0) > 0:
start_background_distribution(socketio, app)
return jsonify({'success': True})
return jsonify({'error': 'Order not found'}), 404
except Exception as e:
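
Raising the quantity later is what actually starts printing: `api_update_order` now re-runs `start_background_distribution`, but only when the updated quantity is above zero. Continuing the hedged sketch from the create step (HTTP method, path, and order id are assumptions):

```python
import requests

order_id = "a1b2c3"   # hypothetical id returned by the create call
# Assumed method and path -- the real route registration is outside this diff.
resp = requests.put(
    f"http://localhost:5000/api/orders/{order_id}",
    json={"quantity": 2},   # 0 -> 2 sets quantity_updated and triggers distribution
    timeout=10,
)
print(resp.json())   # {'success': True}
```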
23 changes: 16 additions & 7 deletions api/routes/orders.py
@@ -8,7 +8,7 @@
ORDERS_FILE,
validate_gcode_file, sanitize_group_name
)
from services.printer_manager import extract_filament_from_file, start_background_distribution
from services.printer_manager import extract_filament_from_file, start_background_distribution, prepare_printer_data_for_broadcast
from services.default_settings import load_default_settings, save_default_settings
from utils.logger import debug_log

@@ -41,7 +41,7 @@ def start_print():
flash(message)
return redirect(url_for('index'))

quantity = request.form.get('quantity', type=int, default=1)
quantity = request.form.get('quantity', type=int, default=0)
# Updated to handle text-based groups with sanitization
groups = [sanitize_group_name(g) for g in request.form.getlist('groups') if g.strip()]
if not groups:
@@ -109,7 +109,10 @@ def start_print():
logging.info(f"Created order {order_id}: {filename}, qty={quantity}")
debug_log('cooldown', f"Order {order_id} created with cooldown_temp={cooldown_temp}")

flash(f"✅ Order for {quantity} print(s) of {filename} added successfully")
flash(
"✅ Order added to library (set quantity to start printing)" if quantity == 0
else f"✅ Order for {quantity} print(s) of {filename} added successfully"
)

start_background_distribution(socketio, app)

@@ -202,7 +205,8 @@ def move_order_up():
total_filament = TOTAL_FILAMENT_CONSUMPTION / 1000
orders_data = ORDERS.copy()
with ReadLock(printers_rwlock):
socketio.emit('status_update', {'printers': PRINTERS, 'total_filament': total_filament, 'orders': orders_data})
printers_copy = prepare_printer_data_for_broadcast(PRINTERS)
socketio.emit('status_update', {'printers': printers_copy, 'total_filament': total_filament, 'orders': orders_data})
return '', 200
logging.error(f"Failed to move order {order_id} up: not found or already at top")
return 'Order not found or already at top', 400
@@ -222,7 +226,8 @@ def move_order_down():
total_filament = TOTAL_FILAMENT_CONSUMPTION / 1000
orders_data = ORDERS.copy()
with ReadLock(printers_rwlock):
socketio.emit('status_update', {'printers': PRINTERS, 'total_filament': total_filament, 'orders': orders_data})
printers_copy = prepare_printer_data_for_broadcast(PRINTERS)
socketio.emit('status_update', {'printers': printers_copy, 'total_filament': total_filament, 'orders': orders_data})
return '', 200
logging.error(f"Failed to move order {order_id} down: not found or already at bottom")
return 'Order not found or already at bottom', 400
@@ -244,7 +249,8 @@ def delete_order(order_id):
total_filament = TOTAL_FILAMENT_CONSUMPTION / 1000
orders_data = ORDERS.copy()
with ReadLock(printers_rwlock):
socketio.emit('status_update', {'printers': PRINTERS, 'total_filament': total_filament, 'orders': orders_data})
printers_copy = prepare_printer_data_for_broadcast(PRINTERS)
socketio.emit('status_update', {'printers': printers_copy, 'total_filament': total_filament, 'orders': orders_data})

flash(f"✅ Order {order_id} permanently deleted")
return redirect(url_for('index'))
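
Every `status_update` emit in this file now passes the printer list through `prepare_printer_data_for_broadcast` instead of sending `PRINTERS` directly. Its implementation is not part of this diff; the usual job of such a helper is to hand Socket.IO a JSON-safe copy with runtime-only objects stripped, roughly along these lines (purely illustrative, not the project's code):

```python
import copy

def prepare_printer_data_for_broadcast(printers):
    """Illustrative sketch only: deep-copy each printer dict, keeping just the
    values that can be JSON-serialised (drops client handles, locks, MQTT objects)."""
    safe = []
    for printer in printers:
        serialisable = {k: v for k, v in printer.items()
                        if isinstance(v, (str, int, float, bool, list, dict, type(None)))}
        safe.append(copy.deepcopy(serialisable))
    return safe
```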
@@ -302,14 +308,17 @@ def update_order_quantity(order_id):
with SafeLock(filament_lock):
total_filament = TOTAL_FILAMENT_CONSUMPTION / 1000
with ReadLock(printers_rwlock):
printers_data = PRINTERS.copy()
printers_data = prepare_printer_data_for_broadcast(PRINTERS)

socketio.emit('status_update', {
'printers': printers_data,
'total_filament': total_filament,
'orders': ORDERS.copy()
})

if new_quantity > 0:
start_background_distribution(socketio, app)

return jsonify({
'success': True,
'order_id': order_id,