From 818d6573c08fab7ad3805ddbb8d988a70a39a4dd Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Thu, 8 Jan 2026 01:00:24 -0900 Subject: [PATCH 01/46] Added Click to Move --- UI/focus_overlay.py | 1 + UI/overlays/interactive_camera_overlay.py | 737 ++++++++++++++++++++++ UI/styles.py | 11 +- UI/ui_layout.py | 20 +- 4 files changed, 758 insertions(+), 11 deletions(-) create mode 100644 UI/overlays/interactive_camera_overlay.py diff --git a/UI/focus_overlay.py b/UI/focus_overlay.py index e40e3f9..4067f3c 100644 --- a/UI/focus_overlay.py +++ b/UI/focus_overlay.py @@ -54,6 +54,7 @@ def __init__( self.invalid_fill = pygame.Color(255, 0, 0, invalid_alpha_fill) self.invalid_border = pygame.Color(255, 0, 0, invalid_alpha_border) self.invalid_border_w = invalid_border_w + self.mouse_passthrough = True # Edge margin overlays (translucent red) self.draw_edge_margins = True diff --git a/UI/overlays/interactive_camera_overlay.py b/UI/overlays/interactive_camera_overlay.py new file mode 100644 index 0000000..5ee96dd --- /dev/null +++ b/UI/overlays/interactive_camera_overlay.py @@ -0,0 +1,737 @@ +import pygame +import numpy as np +import cv2 +import time +from typing import Optional, Tuple + +from UI.frame import Frame +from UI.camera_view import CameraView +from UI.styles import ( + CROSSHAIR_COLOR, + CROSSHAIR_LENGTH, + CROSSHAIR_THICKNESS, + CROSSHAIR_GAP, +) + + +class InteractiveCameraOverlay(Frame): + """ + UI overlay that renders a crosshair in the center of the camera view. + Supports click-to-move and camera calibration using phase correlation. + Only displays when the camera is initialized. 
+ """ + def __init__( + self, + camera_view: CameraView, + controller, # AutomatedPrinter instance + visible: bool = True, + + # Crosshair visual properties (defaults from styles.py) + crosshair_color: Optional[pygame.Color] = None, + crosshair_length: Optional[int] = None, + crosshair_thickness: Optional[int] = None, + crosshair_gap: Optional[int] = None, + + # Calibration parameters (world units in 0.01mm) + cal_move_x_ticks: int = 100, # 1.00mm in 0.01mm units + cal_move_y_ticks: int = 100, # 1.00mm in 0.01mm units (increased from 75 for better correlation) + ): + super().__init__( + parent=camera_view, + x=0, y=0, + width=1, height=1, + x_is_percent=True, y_is_percent=True, + width_is_percent=True, height_is_percent=True, + z_index=camera_view.z_index + 1, + background_color=None + ) + + self.camera_view = camera_view + self.controller = controller + self.visible = visible + + # Enable click handling - CRITICAL for receiving mouse events + self.mouse_passthrough = False + + # Crosshair properties (use styles.py defaults if not provided) + self.crosshair_color = crosshair_color if crosshair_color is not None else CROSSHAIR_COLOR + self.crosshair_length = crosshair_length if crosshair_length is not None else CROSSHAIR_LENGTH + self.crosshair_thickness = crosshair_thickness if crosshair_thickness is not None else CROSSHAIR_THICKNESS + self.crosshair_gap = crosshair_gap if crosshair_gap is not None else CROSSHAIR_GAP + + # Cache overlay surface + self._overlay = None + self._overlay_size = None + + # Calibration state + self.M_est = None # 2x2 estimated mapping matrix (pixels = M * world_delta) + self.M_inv = None # Inverse mapping (world_delta = M_inv * pixel_delta) + self._cal_move_x = cal_move_x_ticks + self._cal_move_y = cal_move_y_ticks + self._calibrating = False + self._cal_ref_pos = None # Position where calibration was performed (camera center reference) + self._cal_start_x_mm = None # Starting X position for absolute moves + self._cal_start_y_mm = None # 
Starting Y position for absolute moves + + # Store calibration data during process + self._cal_base_pos = None + self._cal_edges_base = None + self._dp1 = None + self._dp2 = None + + def toggle_overlay(self) -> None: + """Toggle visibility of the crosshair overlay.""" + self.visible = not self.visible + + def set_visible(self, value: bool) -> None: + """Set visibility of the crosshair overlay.""" + self.visible = bool(value) + + def set_crosshair_color(self, color: pygame.Color) -> None: + """Update the crosshair color.""" + self.crosshair_color = color + + def set_crosshair_properties( + self, + length: Optional[int] = None, + thickness: Optional[int] = None, + gap: Optional[int] = None + ) -> None: + """Update crosshair geometry properties.""" + if length is not None: + self.crosshair_length = length + if thickness is not None: + self.crosshair_thickness = thickness + if gap is not None: + self.crosshair_gap = gap + + def _get_overlay(self, surface_size: tuple[int, int]) -> pygame.Surface: + """Return an RGBA overlay the size of the target surface (recreate on resize).""" + if self._overlay is None or self._overlay_size != surface_size: + self._overlay_size = surface_size + self._overlay = pygame.Surface(surface_size, flags=pygame.SRCALPHA) + else: + # Clear with fully transparent color + self._overlay.fill((0, 0, 0, 0)) + return self._overlay + + # ==================== Calibration Methods ==================== + + def _surface_to_gray_cv(self, arr: np.ndarray) -> np.ndarray: + """Convert RGB numpy array to grayscale for OpenCV.""" + if arr.ndim == 2: + return arr + gray = cv2.cvtColor(arr, cv2.COLOR_RGB2GRAY) + return gray + + def _edges_canny(self, gray_u8: np.ndarray) -> np.ndarray: + """Compute normalized Canny edges.""" + g = cv2.GaussianBlur(gray_u8, (5, 5), 0) + e = cv2.Canny(g, 60, 180) + ef = e.astype(np.float32) + ef -= ef.mean() + ef /= (ef.std() + 1e-6) + return ef + + def _phase_corr_shift(self, img_a_f32: np.ndarray, img_b_f32: np.ndarray) -> 
Tuple[float, float, float]: + """Compute phase correlation shift between two images.""" + win = cv2.createHanningWindow((img_a_f32.shape[1], img_a_f32.shape[0]), cv2.CV_32F) + (dx, dy), response = cv2.phaseCorrelate(img_a_f32, img_b_f32, win) + return float(dx), float(dy), float(response) + + def _capture_and_process_edges(self) -> Optional[np.ndarray]: + """Capture a still image and return its edge map.""" + try: + # Capture still + self.camera_view.camera.capture_image() + while self.camera_view.camera.is_taking_image: + time.sleep(0.01) + + # Get frame as numpy array + arr = self.camera_view.camera.get_last_frame(prefer="still", wait_for_still=False) + if arr is None: + return None + + # Store the calibration image resolution (the actual image we're correlating) + if not hasattr(self, '_cal_image_height'): + self._cal_image_height = arr.shape[0] + self._cal_image_width = arr.shape[1] + + # Also get the current display frame size + fr = self.camera_view.get_frame_rect() + if fr: + _, _, display_w, display_h = fr + self._cal_display_width = display_w + self._cal_display_height = display_h + + self.controller.status( + f"Calibration image: {self._cal_image_width}x{self._cal_image_height}, " + f"Display frame: {display_w:.0f}x{display_h:.0f}", + True + ) + else: + self.controller.status( + f"Calibration using image resolution: {self._cal_image_width}x{self._cal_image_height}", + True + ) + + # Convert to grayscale and compute edges + gray = self._surface_to_gray_cv(arr) + edges = self._edges_canny(gray) + return edges + except Exception as e: + self.controller.status(f"Edge capture failed: {e}", True) + return None + + def run_calibration(self) -> None: + """ + Run the calibration routine to determine the mapping between screen pixels and world coordinates. + This will move the printer to two positions and use phase correlation to determine the transformation. + Similar to start_autofocus in automated_controller.py. 
+ """ + if self._calibrating: + self.controller.status("Calibration already in progress.", True) + return + + self._calibrating = True + self.controller.status("Starting camera calibration...", True) + + # Reset calibration state + self.M_est = None + self.M_inv = None + self._dp1 = None + self._dp2 = None + + # Store the starting position BEFORE building the macro + # This ensures we know where we started from + start_pos = self.controller.get_position() + self._cal_start_x_mm = start_pos.x / 100.0 + self._cal_start_y_mm = start_pos.y / 100.0 + + # Build calibration macro using ABSOLUTE positioning + steps = [] + + # Step 1: Capture base image + steps.append(self.controller.create_cmd( + kind="CALIBRATION_BASE", + value="", + message="Capturing base calibration image...", + log=True + )) + + # Step 2: Move to X+ offset (absolute positioning) + move_x_mm = self._cal_move_x / 100.0 + target_x_mm = self._cal_start_x_mm + move_x_mm + steps.append(self.controller.printer_cmd( + f"G0 X{target_x_mm:.2f}", + message=f"Moving +X {move_x_mm:.2f}mm for calibration...", + log=True + )) + + # Step 3: Capture moved image 1 + steps.append(self.controller.create_cmd( + kind="CALIBRATION_MOVE1", + value="", + message="Capturing X-moved calibration image...", + log=True + )) + + # Step 4: Return to base X + steps.append(self.controller.printer_cmd( + f"G0 X{self._cal_start_x_mm:.2f}", + message="Returning to base X position...", + log=True + )) + + # Step 5: Move to Y+ offset (absolute positioning) + move_y_mm = self._cal_move_y / 100.0 + target_y_mm = self._cal_start_y_mm + move_y_mm + steps.append(self.controller.printer_cmd( + f"G0 Y{target_y_mm:.2f}", + message=f"Moving +Y {move_y_mm:.2f}mm for calibration...", + log=True + )) + + # Step 6: Capture moved image 2 + steps.append(self.controller.create_cmd( + kind="CALIBRATION_MOVE2", + value="", + message="Capturing Y-moved calibration image...", + log=True + )) + + # Step 7: Return to base Y + 
steps.append(self.controller.printer_cmd( + f"G0 Y{self._cal_start_y_mm:.2f}", + message="Returning to base position...", + log=True + )) + + # Step 8: Finish calibration + steps.append(self.controller.create_cmd( + kind="CALIBRATION_FINISH", + value="", + message="Computing calibration matrix...", + log=True + )) + + # Register handlers for calibration commands + self.controller.register_handler("CALIBRATION_BASE", self._handle_calibration_base) + self.controller.register_handler("CALIBRATION_MOVE1", self._handle_calibration_move1) + self.controller.register_handler("CALIBRATION_MOVE2", self._handle_calibration_move2) + self.controller.register_handler("CALIBRATION_FINISH", self._handle_calibration_finish) + + # Create and enqueue macro + macro = self.controller.macro_cmd( + steps, + wait_printer=True, + message="Camera calibration routine", + log=True + ) + self.controller.enqueue_cmd(macro) + + def _handle_calibration_base(self, cmd) -> None: + """Handler: Capture base image.""" + time.sleep(0.5) # Allow printer to fully settle (increased for Y-axis stability) + self._cal_base_pos = self.controller.get_position() + self._cal_edges_base = self._capture_and_process_edges() + if self._cal_edges_base is None: + self.controller.status("Failed to capture base calibration image.", True) + self._calibrating = False + + def _handle_calibration_move1(self, cmd) -> None: + """Handler: Capture first moved image and compute shift.""" + time.sleep(0.5) # Allow printer to fully settle (increased for Y-axis stability) + edges = self._capture_and_process_edges() + if edges is None or self._cal_edges_base is None: + self.controller.status("Failed to capture first calibration image.", True) + self._calibrating = False + return + + dx, dy, response = self._phase_corr_shift(self._cal_edges_base, edges) + self._dp1 = np.array([dx, dy], dtype=np.float64) + self._response1 = response + self.controller.status(f"X-move shift: dx={dx:.2f}, dy={dy:.2f}, response={response:.3f}", True) + + 
# Warn if correlation confidence is low + if response < 0.3: + self.controller.status("WARNING: Low phase correlation confidence for X-move. Calibration may be inaccurate.", True) + + def _handle_calibration_move2(self, cmd) -> None: + """Handler: Capture second moved image and compute shift.""" + time.sleep(0.5) # Allow printer to fully settle (increased for Y-axis stability) + edges = self._capture_and_process_edges() + if edges is None or self._cal_edges_base is None: + self.controller.status("Failed to capture second calibration image.", True) + self._calibrating = False + return + + dx, dy, response = self._phase_corr_shift(self._cal_edges_base, edges) + self._dp2 = np.array([dx, dy], dtype=np.float64) + self._response2 = response + self.controller.status(f"Y-move shift: dx={dx:.2f}, dy={dy:.2f}, response={response:.3f}", True) + + # Warn if correlation confidence is low + if response < 0.3: + self.controller.status("WARNING: Low phase correlation confidence for Y-move. Calibration may be inaccurate.", True) + + def _handle_calibration_finish(self, cmd) -> None: + """Handler: Compute final calibration matrix.""" + if self._dp1 is None or self._dp2 is None: + self.controller.status("Calibration failed: missing measurements.", True) + self._calibrating = False + return + + # World deltas (in 0.01mm units) - these are what we commanded + DW1 = np.array([self._cal_move_x, 0.0], dtype=np.float64) + DW2 = np.array([0.0, self._cal_move_y], dtype=np.float64) + + # Pixel deltas - these are what we measured + # Phase correlation: positive dx/dy means second image shifted right/down + # + # For a standard camera (not inverted): + # Stage +X → Image shifts LEFT (negative dx) + # Stage +Y → Image shifts DOWN (positive dy) + # + # But camera orientation varies! 
We need to check the SIGN of the correlation: + # - If stage +X causes image +dx → camera X follows stage (don't negate) + # - If stage +X causes image -dx → camera X opposes stage (negate) + # - Same logic for Y + # + # From your calibration: + # X-move: dx=+452.95 → camera X follows stage → use as-is + # Y-move: dy=-341.63 → camera Y opposes stage → negate + + # Detect orientation by checking correlation signs + x_inverted = (self._dp1[0] < 0) # True if camera X opposes stage X + y_inverted = (self._dp2[1] < 0) # True if camera Y opposes stage Y + + dp1_corrected = self._dp1.copy() + dp2_corrected = self._dp2.copy() + + if x_inverted: + dp1_corrected[0] = -dp1_corrected[0] + if y_inverted: + dp2_corrected[1] = -dp2_corrected[1] + + # Debug: show raw and corrected deltas + self.controller.status( + f"Raw pixel deltas: X-move=[{self._dp1[0]:.2f}, {self._dp1[1]:.2f}], " + f"Y-move=[{self._dp2[0]:.2f}, {self._dp2[1]:.2f}]", + True + ) + self.controller.status( + f"Corrected deltas: X-move=[{dp1_corrected[0]:.2f}, {dp1_corrected[1]:.2f}], " + f"Y-move=[{dp2_corrected[0]:.2f}, {dp2_corrected[1]:.2f}]", + True + ) + + # Report detected orientation + orient_msg = f"Camera orientation: X={'inverted' if x_inverted else 'normal'}, Y={'inverted' if y_inverted else 'normal'}" + self.controller.status(orient_msg, True) + + # Build matrices: columns are the basis vectors + # DP: pixel space basis (each column is pixel response to a world move) + # DW: world space basis (each column is a world delta) + DP = np.column_stack([dp1_corrected, dp2_corrected]) # 2x2 + DW = np.column_stack([DW1, DW2]) # 2x2 + + # Check if DW is invertible + det_dw = np.linalg.det(DW) + if abs(det_dw) < 1e-9: + self.controller.status("Calibration failed: DW matrix singular.", True) + self._calibrating = False + return + + # The relationship is: dp = M @ dw + # Therefore: M = DP @ inv(DW) + # This maps world deltas (in 0.01mm units) to pixel deltas + self.M_est = DP @ np.linalg.inv(DW) + + # Check if M 
is invertible + detM = np.linalg.det(self.M_est) + if abs(detM) < 1e-9: + self.controller.status("Calibration failed: M not invertible.", True) + self.M_inv = None + self._calibrating = False + return + + # Invert to get world = M_inv @ pixel + self.M_inv = np.linalg.inv(self.M_est) + + # Debug: show M_inv for understanding click-to-world mapping + self.controller.status( + f"M_inv (pixel→world) = [[{self.M_inv[0,0]:.6f}, {self.M_inv[0,1]:.6f}], " + f"[{self.M_inv[1,0]:.6f}, {self.M_inv[1,1]:.6f}]]", + True + ) + + # Store the calibration reference position (where camera center corresponds to) + # This is the position at the END of calibration (after returning to base) + self._cal_ref_pos = self.controller.get_position() + + # Calculate DPI for full resolution (2592x1944) + # Extract pixels per 0.01mm unit from M_est + # M_est[0,0] is dx_pixels per 1.00 unit of world X (which is 0.01mm) + # M_est[1,1] is dy_pixels per 1.00 unit of world Y (which is 0.01mm) + px_per_0p01mm_x = abs(self.M_est[0, 0]) + px_per_0p01mm_y = abs(self.M_est[1, 1]) + + # Convert to pixels per mm + px_per_mm_x = px_per_0p01mm_x * 100.0 + px_per_mm_y = px_per_0p01mm_y * 100.0 + px_per_mm_avg = (px_per_mm_x + px_per_mm_y) / 2 + + # DPI = pixels per mm * mm per inch (25.4) + dpi = px_per_mm_avg * 25.4 + + # Log results + self.controller.status( + f"Calibration complete! M = [[{self.M_est[0,0]:.3f}, {self.M_est[0,1]:.3f}], " + f"[{self.M_est[1,0]:.3f}, {self.M_est[1,1]:.3f}]]", + True + ) + self.controller.status( + f"Estimated full-res DPI: {dpi:.1f} (avg {px_per_mm_avg:.2f} px/mm, " + f"X: {px_per_mm_x:.2f} px/mm, Y: {px_per_mm_y:.2f} px/mm)", + True + ) + + self._calibrating = False + + # ==================== Click-to-Move ==================== + + def go_to_calibration_pattern(self) -> None: + """ + Move the printer to the overlay calibration pattern position. + First moves Z up to 33.12mm, then moves to X=226.24mm, Y=187.08mm. 
+ """ + self.controller.reset_after_stop() + + # Move Z up first (safe height) + self.controller.enqueue_printer( + "G0 Z33.12", + message="Moving to calibration height Z=33.12mm", + log=True + ) + + # Then move to XY position + self.controller.enqueue_printer( + "G0 X226.08 Y186.90", + message="Moving to calibration pattern at X=226.24mm, Y=187.08mm", + log=True + ) + + def _click_to_world_delta(self, screen_x: int, screen_y: int) -> Optional[Tuple[float, float]]: + """ + Convert a screen click position to world delta (in 0.01mm units). + Returns None if calibration hasn't been run yet. + """ + if self.M_inv is None: + return None + + # Get frame rectangle + fr = self.camera_view.get_frame_rect() + if not fr: + return None + + fx, fy, fw, fh = fr + + # Calculate center of camera frame + center_x = fx + fw / 2 + center_y = fy + fh / 2 + + # Pixel delta from center (in DISPLAY coordinates) + pixel_delta = np.array([screen_x - center_x, screen_y - center_y], dtype=np.float64) + + # CRITICAL: Scale pixel delta to match calibration image coordinates + # During calibration, we measured pixel shifts in the calibration image resolution + # But clicks are measured in the displayed frame resolution + # We need to scale clicks by (calibration_image_size / displayed_frame_size) + if hasattr(self, '_cal_image_width') and hasattr(self, '_cal_image_height'): + # Scale factor = calibration image pixels / display frame pixels + scale_x = self._cal_image_width / fw + scale_y = self._cal_image_height / fh + + pixel_delta_scaled = np.array([ + pixel_delta[0] * scale_x, + pixel_delta[1] * scale_y + ], dtype=np.float64) + + # Debug: show scaling (only once) + if not hasattr(self, '_scaling_reported'): + self._scaling_reported = True + self.controller.status( + f"Click scaling: Display {fw:.0f}x{fh:.0f} → Calibration {self._cal_image_width}x{self._cal_image_height} " + f"(scale X={scale_x:.3f}, Y={scale_y:.3f})", + True + ) + else: + pixel_delta_scaled = pixel_delta + + # Convert to 
world delta using inverse mapping + dw = self.M_inv @ pixel_delta_scaled + + # Store for debug output (will be printed in on_click) + self._last_pixel_delta = pixel_delta + self._last_pixel_delta_scaled = pixel_delta_scaled + self._last_world_delta = dw + + return float(dw[0]), float(dw[1]) + + def on_click(self, button=None) -> None: + """Handle click events to move printer to clicked position.""" + if not self.camera_view.camera.initialized: + return + + if self.M_inv is None: + self.controller.status("Cannot move: run calibration first (call run_calibration())", True) + return + + # Get mouse position + mouse_x, mouse_y = pygame.mouse.get_pos() + + # Convert to world delta from the CURRENT printer position + # The calibration matrix M_inv tells us how pixel deltas map to world deltas + result = self._click_to_world_delta(mouse_x, mouse_y) + if result is None: + return + + dx_ticks, dy_ticks = result + + # CRITICAL FIX: Negate X-axis because screen X and stage X are opposite + # When you click right (positive screen X), stage should move right (positive stage X) + # But empirically, clicking right moves left, so we need to flip it + dx_ticks = -dx_ticks + + # Get CURRENT position (where the camera actually is now) + current_pos = self.controller.get_position() + + # Debug: show pixel delta, world delta (before X flip), and world delta (after X flip) + if hasattr(self, '_last_pixel_delta') and hasattr(self, '_last_world_delta'): + if hasattr(self, '_last_pixel_delta_scaled'): + self.controller.status( + f"Pixel Δ (display): [{self._last_pixel_delta[0]:.1f}, {self._last_pixel_delta[1]:.1f}] → " + f"Pixel Δ (scaled): [{self._last_pixel_delta_scaled[0]:.1f}, {self._last_pixel_delta_scaled[1]:.1f}] → " + f"World (raw): [{self._last_world_delta[0]:.1f}, {self._last_world_delta[1]:.1f}] → " + f"World: [{dx_ticks:.1f}, {dy_ticks:.1f}] ticks", + True + ) + else: + self.controller.status( + f"Pixel delta: [{self._last_pixel_delta[0]:.1f}, {self._last_pixel_delta[1]:.1f}] → 
" + f"World (raw): [{self._last_world_delta[0]:.1f}, {self._last_world_delta[1]:.1f}] → " + f"World: [{dx_ticks:.1f}, {dy_ticks:.1f}] ticks", + True + ) + + # Calculate new position relative to CURRENT position + # The delta tells us how far from center we clicked + new_x_ticks = current_pos.x + int(round(dx_ticks)) + new_y_ticks = current_pos.y + int(round(dy_ticks)) + + # Convert to mm for G-code + new_x_mm = new_x_ticks / 100.0 + new_y_mm = new_y_ticks / 100.0 + + # Bounds check + max_x = self.controller.get_max_x() + max_y = self.controller.get_max_y() + + if not (0 <= new_x_mm <= max_x and 0 <= new_y_mm <= max_y): + self.controller.status( + f"Click position out of bounds: ({new_x_mm:.2f}, {new_y_mm:.2f})", + True + ) + return + + # Send move command + self.controller.enqueue_printer( + f"G0 X{new_x_mm:.2f} Y{new_y_mm:.2f}", + message=f"Moving to clicked position: X={new_x_mm:.2f}, Y={new_y_mm:.2f}", + log=True + ) + + def on_wheel(self, dx: int, dy: int, px: int, py: int) -> bool: + """ + Handle mousewheel events to adjust Z-axis position when hovering over camera view. + Uses the printer's minimum step size (0.04mm) for precise control. 
+ + Args: + dx: Horizontal wheel movement (unused for Z-axis) + dy: Vertical wheel movement (positive = wheel up = Z up) + px: Mouse X position + py: Mouse Y position + + Returns: + True if the event was handled, False otherwise + """ + if not self.camera_view.camera.initialized: + return False + + # Check if mouse is over the camera frame + fr = self.camera_view.get_frame_rect() + if not fr: + return False + + fx, fy, fw, fh = fr + if not (fx <= px <= fx + fw and fy <= py <= fy + fh): + return False + + # Get current position + current_pos = self.controller.get_position() + + # Use minimum step size: 4 ticks = 0.04mm (printer's minimum step) + MIN_STEP_TICKS = 4 + + # Calculate Z change (positive dy = wheel up = move Z up) + # Each wheel tick moves by the minimum step size + dz_ticks = dy * MIN_STEP_TICKS + + # Calculate new Z position + new_z_ticks = current_pos.z + int(round(dz_ticks)) + new_z_mm = new_z_ticks / 100.0 + + # Bounds check + max_z = self.controller.get_max_z() + if not (0 <= new_z_mm <= max_z): + self.controller.status( + f"Z position out of bounds: {new_z_mm:.2f}mm (max: {max_z}mm)", + False # Don't log to console, just status + ) + return True # Still handled, just rejected + + # Send move command + self.controller.enqueue_printer( + f"G0 Z{new_z_mm:.2f}", + message=f"Z: {new_z_mm:.2f}mm", + log=False # Don't clutter the log with every wheel movement + ) + + return True # Event was handled + + # ==================== Drawing ==================== + + def draw(self, surface: pygame.Surface) -> None: + """Draw the crosshair overlay if visible and camera is initialized.""" + if not self.visible: + return + + # Only draw if camera is initialized + if not self.camera_view.camera.initialized: + return + + # Get the frame rectangle from camera view + fr = self.camera_view.get_frame_rect() + if not fr: + return + + fx, fy, fw, fh = fr + + # Build/resize overlay and clear it + overlay = self._get_overlay(surface.get_size()) + overlay.fill((0, 0, 0, 0)) + + 
# Calculate center point of the camera frame + center_x = fx + fw // 2 + center_y = fy + fh // 2 + + # Draw crosshair lines + # Horizontal line (left and right segments with gap in middle) + # Left segment + pygame.draw.line( + overlay, + self.crosshair_color, + (center_x - self.crosshair_gap - self.crosshair_length, center_y), + (center_x - self.crosshair_gap, center_y), + self.crosshair_thickness + ) + # Right segment + pygame.draw.line( + overlay, + self.crosshair_color, + (center_x + self.crosshair_gap, center_y), + (center_x + self.crosshair_gap + self.crosshair_length, center_y), + self.crosshair_thickness + ) + + # Vertical line (top and bottom segments with gap in middle) + # Top segment + pygame.draw.line( + overlay, + self.crosshair_color, + (center_x, center_y - self.crosshair_gap - self.crosshair_length), + (center_x, center_y - self.crosshair_gap), + self.crosshair_thickness + ) + # Bottom segment + pygame.draw.line( + overlay, + self.crosshair_color, + (center_x, center_y + self.crosshair_gap), + (center_x, center_y + self.crosshair_gap + self.crosshair_length), + self.crosshair_thickness + ) + + # Composite overlay onto the actual screen surface + surface.blit(overlay, (0, 0)) \ No newline at end of file diff --git a/UI/styles.py b/UI/styles.py index bb4a065..b816942 100644 --- a/UI/styles.py +++ b/UI/styles.py @@ -47,4 +47,13 @@ def make_settings_text_style() -> TextStyle: color=pygame.Color("#5a5a5a"), hover_color=pygame.Color("#5a5a5a"), disabled_color=pygame.Color("#5a5a5a"), -) \ No newline at end of file +) + + +# ---- Crosshair Styling ----------------------------------------------------- + +# Crosshair visual properties +CROSSHAIR_COLOR = pygame.Color(64, 64, 64, 200) # Dark grey with alpha +CROSSHAIR_LENGTH = 20 +CROSSHAIR_THICKNESS = 2 +CROSSHAIR_GAP = 5 \ No newline at end of file diff --git a/UI/ui_layout.py b/UI/ui_layout.py index faadc01..1c4f4bf 100644 --- a/UI/ui_layout.py +++ b/UI/ui_layout.py @@ -17,6 +17,8 @@ from UI.list_frame 
import ListFrame from UI.flex_frame import FlexFrame +from UI.overlays.interactive_camera_overlay import InteractiveCameraOverlay + from UI.input.text_field import TextField from UI.input.button import Button, ButtonShape, ButtonColors from UI.input.button_icon import ButtonIcon @@ -82,7 +84,7 @@ def create_control_panel( right_margin_px=RIGHT_PANEL_WIDTH # reserve space for the control panel ) machine_vision_overlay = FocusOverlay(camera_view, movementSystem.machine_vision) - + interactive_overlay = InteractiveCameraOverlay(camera_view, movementSystem) # --- Control Box --- control_box = Section( @@ -104,7 +106,7 @@ def create_control_panel( ) automation_settings_modal = Modal(parent=root_frame, title="Automation Settings", overlay=False, width=500, height=445) build_automation_settings_modal(automation_settings_modal, movementSystem) - _build_automation_control(automation_box, movementSystem, machine_vision_overlay, automation_settings_modal) + _build_automation_control(automation_box, movementSystem, machine_vision_overlay, interactive_overlay, automation_settings_modal) # --- Camera Settings Modal --- camera_settings_modal = Modal(parent=root_frame, title="Camera Settings", overlay=False, width=308, height=660) @@ -118,7 +120,7 @@ def create_control_panel( x=0, y=0, width=1.0, height=163, width_is_percent=True ) - _build_camera_control(camera_control, movementSystem, camera, camera_settings_modal) + _build_camera_control(camera_control, movementSystem, camera, interactive_overlay, camera_settings_modal) # --- Sample Box --- sample_box = Section( @@ -322,7 +324,7 @@ def on_state_changed(state: bool, btn: ToggleButton): return go_to_sample_button, decrement_button, increment_button, sample_label#, pos1_display, pos2_display -def _build_camera_control(camera_control, movementSystem: AutomatedPrinter, camera, camera_settings_modal): +def _build_camera_control(camera_control, movementSystem: AutomatedPrinter, camera, interactive_overlay, camera_settings_modal): # 
Header Settings Button settings = Button(lambda: camera_settings_modal.open(), x=0, y=0, @@ -364,6 +366,7 @@ def on_set_path(): Button(lambda: movementSystem.start_autofocus(), 10, 85, 117, 40, "Autofocus", parent=camera_control, text_style=make_button_text_style()) Button(lambda: movementSystem.start_fine_autofocus(), 132, 85, 167, 40, "Fine Autofocus", parent=camera_control, text_style=make_button_text_style()) + Button(lambda: interactive_overlay.go_to_calibration_pattern(), 132+167+5, 85, 80, 40, "Cal Pat", parent=camera_control, text_style=make_button_text_style()) def open_capture_folder(): """Open the capture folder in the system's default file explorer.""" @@ -386,7 +389,7 @@ def open_capture_folder(): Button(open_capture_folder,x=254, y=10, width=117, height=40, text="Open Path", parent=camera_control, text_style=make_button_text_style()) -def _build_automation_control(automation_box, movementSystem, machine_vision_overlay, automation_settings_modal): +def _build_automation_control(automation_box, movementSystem, machine_vision_overlay, interactive_overlay, automation_settings_modal): settings = Button(lambda: automation_settings_modal.open(), x=0, y=0, width=automation_box.header.height, @@ -429,8 +432,5 @@ def toggle_overlay(): Button(toggle_overlay,x=132, y=60, width=212, height=40, text="MV Hot Pixel Filter", parent=automation_box, text_style=make_button_text_style()) - - - - - + + Button(interactive_overlay.run_calibration,x=132+212+5, y=60, width=30, height=40, text="C", parent=automation_box, text_style=make_button_text_style()) \ No newline at end of file From 740d77996bf7dedb843d3fcbbcc8290d6b9c1d5b Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Thu, 8 Jan 2026 01:45:46 -0900 Subject: [PATCH 02/46] refactored automated controller --- printer/automated_controller.py | 843 +------------------------- printer/automation/autofocus_mixin.py | 709 ++++++++++++++++++++++ 2 files changed, 714 insertions(+), 838 deletions(-) create mode 100644 
printer/automation/autofocus_mixin.py diff --git a/printer/automated_controller.py b/printer/automated_controller.py index bd54b46..92a41c2 100644 --- a/printer/automated_controller.py +++ b/printer/automated_controller.py @@ -5,6 +5,7 @@ from .models import Position, FocusScore from .base_controller import BasePrinterController +from .automation.autofocus_mixin import AutofocusMixin from image_processing.machine_vision import MachineVision from UI.list_frame import ListFrame @@ -27,153 +28,7 @@ from .automation_config import AutomationSettings, AutomationSettingsManager -def _scan_bounds_plotter(proc_queue, y_min: float, y_max: float): - """ - Messages accepted: - ("data", y, r, g, b, ylum) # add color sample - ("focus", y, hard_count, soft_count) # add focus counts - ("break",) # insert NaN gap in both graphs - ("title", text) - ("done",) # leave windows open (blocking show) - ("close",) # close immediately - """ - import time, math, matplotlib - - # ---- pick a GUI backend that exists ---- - import tkinter # noqa: F401 - backend = "TkAgg" - - if backend is None: - # No GUI: drain queue until done/close and exit quietly - t0 = time.time() - while True: - try: - msg = proc_queue.get(timeout=0.2) - if isinstance(msg, tuple) and msg and msg[0] in ("done", "close"): - break - except Exception: - if time.time() - t0 > 2.0: - break - return - - try: - matplotlib.use(backend, force=True) - except Exception: - return - - import matplotlib.pyplot as plt - - # ---- Figure 1: Color vs Y ---- - plt.ion() - fig1 = plt.figure(figsize=(8, 5), dpi=120) - ax1 = fig1.add_subplot(111) - base_title_1 = "Average Color vs Y (live)" - ax1.set_title(base_title_1) - ax1.set_xlabel("Y position (mm)") - ax1.set_ylabel("Value") - ax1.grid(True, alpha=0.3) - ax1.set_xlim(y_min, y_max) - ys, rs, gs, bs, yls = [], [], [], [], [] - (l_r,) = ax1.plot([], [], label="R") - (l_g,) = ax1.plot([], [], label="G") - (l_b,) = ax1.plot([], [], label="B") - (l_y,) = ax1.plot([], [], label="Y (luminance)") 
- ax1.legend(loc="best") - fig1.canvas.draw_idle() - try: plt.show(block=False) - except Exception: pass - - # ---- Figure 2: Focus counts vs Y ---- - fig2 = plt.figure(figsize=(8, 5), dpi=120) - ax2 = fig2.add_subplot(111) - base_title_2 = "Focus Tiles vs Y (live)" - ax2.set_title(base_title_2) - ax2.set_xlabel("Y position (mm)") - ax2.set_ylabel("Count") - ax2.grid(True, alpha=0.3) - ax2.set_xlim(y_min, y_max) - ys_f, hard_counts, soft_counts = [], [], [] - (l_hard,) = ax2.plot([], [], label="Hard (>= min_score)") - (l_soft,) = ax2.plot([], [], label="Soft (>= soft_min_score)") - ax2.legend(loc="best") - fig2.canvas.draw_idle() - try: plt.show(block=False) - except Exception: pass - - last_elapsed = None # seconds (float) - - running = True - while running: - try: - msg = proc_queue.get(timeout=0.05) - except Exception: - msg = None - - if msg: - tag = msg[0] - if tag == "data": - _, y, r, g, b, ylum = msg - ys.append(float(y)); rs.append(float(r)); gs.append(float(g)); bs.append(float(b)); yls.append(float(ylum)) - l_r.set_data(ys, rs); l_g.set_data(ys, gs); l_b.set_data(ys, bs); l_y.set_data(ys, yls) - ax1.relim(); ax1.autoscale_view(scalex=False, scaley=True) - - elif tag == "focus": - _, y, h, s = msg - ys_f.append(float(y)); hard_counts.append(int(h)); soft_counts.append(int(s)) - l_hard.set_data(ys_f, hard_counts); l_soft.set_data(ys_f, soft_counts) - ax2.relim(); ax2.autoscale_view(scalex=False, scaley=True) - - elif tag == "break": - # Insert NaNs to create a visual gap (no connecting line) - nan = math.nan - ys.append(nan); rs.append(nan); gs.append(nan); bs.append(nan); yls.append(nan) - ys_f.append(nan); hard_counts.append(nan); soft_counts.append(nan) - - elif tag == "title": - # allow setting a new base title text for fig1 if you want - base_title_1 = str(msg[1]) or base_title_1 - # re-apply elapsed if we have one - if last_elapsed is not None: - ax1.set_title(f"{base_title_1} (t={last_elapsed:.1f}s)") - else: - ax1.set_title(base_title_1) - - elif 
tag == "elapsed": - # NEW: show elapsed seconds on both figure titles - _, secs = msg - last_elapsed = float(secs) - ax1.set_title(f"{base_title_1} (t={last_elapsed:.1f}s)") - ax2.set_title(f"{base_title_2} (t={last_elapsed:.1f}s)") - elif tag == "close": - running = False - - elif tag == "done": - try: - plt.ioff() - plt.show() - except Exception: - pass - running = False - - # draw frames - try: - fig1.canvas.draw_idle() - fig2.canvas.draw_idle() - plt.pause(0.01) - except Exception: - break - - try: - plt.close(fig1); plt.close(fig2) - except Exception: - pass - - -_AFTPM = 100 # ticks/mm (0.01 mm units) -_AFSTEP = 4 # 0.04 mm (printer min step) -_AF_ZFLOOR = 0 # 0.00 mm -> 0 ticks - -class AutomatedPrinter(BasePrinterController): +class AutomatedPrinter(AutofocusMixin, BasePrinterController): """Extended printer controller with automation capabilities""" AUTOMATION_CONFIG_SUBDIR = "" def __init__(self, forgeConfig: ForgeSettings, camera): @@ -212,10 +67,8 @@ def __init__(self, forgeConfig: ForgeSettings, camera): self.current_sample_index = 1 self.live_plots_enabled: bool = False - # Autofocus - self.register_handler("AUTOFOCUS_DESCENT", self.autofocus_descent_macro) - self.register_handler("AUTOFOCUS", self.autofocus_macro) - self.register_handler("FINE_AUTOFOCUS", self.fine_autofocus) + # Initialize autofocus handlers from mixin + self._init_autofocus_handlers() # Automation Routines @@ -368,616 +221,6 @@ def get_enabled_samples(self) -> List[Tuple[int, str]]: def status(self, msg: str, log: bool = True) -> None: self._handle_status(self.status_cmd(msg), log) - def _af_quantize(self, z_ticks: int) -> int: - return int(round(z_ticks / _AFSTEP) * _AFSTEP) - - def _af_move_to_ticks(self, z_ticks: int) -> None: - z_ticks = max(z_ticks, _AF_ZFLOOR) - z_mm = z_ticks / _AFTPM - self._exec_gcode(f"G0 Z{z_mm:.2f}", wait=True) - - # Score frames - def _af_score_still(self) -> float: - """Capture a STILL and return its focus score (or -inf if unusable).""" - 
self._exec_gcode("M400", wait=True) - time.sleep(0.1) # vibration settle for stills - self.camera.capture_image() - while self.camera.is_taking_image: - time.sleep(0.01) - if self.machine_vision.is_black(source="still"): - return float("-inf") - try: - img = self.camera.get_last_frame(prefer="still", wait_for_still=False) - res = self.machine_vision.analyze_focus() - return float(getattr(res, "focus_score", float("-inf"))) - except Exception: - return float("-inf") - - def _af_score_preview(self) -> float: - """Score the live preview/stream (no still capture). Much faster.""" - self._exec_gcode("M400", wait=True) - time.sleep(0.05) # tiny settle is enough for stream - if self.machine_vision.is_black(source="stream"): - return float("-inf") - try: - img = self.camera.get_last_frame(prefer="stream", wait_for_still=False) - res = self.machine_vision.analyze_focus() - return float(getattr(res, "focus_score", float("-inf"))) - except Exception: - return float("-inf") - - def _af_score_at( - self, - zt: int, - cache: dict[int, float], - bounds_ok: Optional[Callable[[int], bool]] = None, - scorer: Optional[Callable[[], float]] = None, - ) -> float: - """ - Quantize → bounds → cache → move → score using the provided scorer. - Defaults to STILL scorer if not provided. 
- """ - scorer = scorer or self._af_score_still - zt = self._af_quantize(zt) - if zt < _AF_ZFLOOR: - return float("-inf") - if bounds_ok and not bounds_ok(zt): - return float("-inf") - if zt in cache: - return cache[zt] - self._af_move_to_ticks(zt) - s = scorer(zt, cache, bounds_ok) - cache[zt] = s - return s - - def _af_climb_fine( - self, - start: int, - step_ticks: int, - cache: dict[int, float], - bounds_ok: Optional[Callable[[int], bool]] = None, - no_improve_limit: int = 2, - scorer: Optional[Callable[[], float]] = None, - baseline: Optional[float] = None, - ) -> tuple[int, float]: - scorer = scorer or self._af_score_still - zt = start - best_z = start - best_s = cache.get(start, self._af_score_at(start, cache, bounds_ok, scorer)) - no_imp = 0 - while True: - nxt = self._af_quantize(zt + step_ticks) - if nxt < _AF_ZFLOOR or (bounds_ok and not bounds_ok(nxt)): - break - s = self._af_score_at(nxt, cache, bounds_ok, scorer) - delta = f" Δbase={s - baseline:+.1f}" if baseline is not None else "" - self.status( - f"[AF-Fine] {step_ticks/_AFTPM:.2f}mm step {'up' if step_ticks>0 else 'down'}: " - f"Z={nxt / _AFTPM:.2f} score={s:.1f}{delta}", - False - ) - if s > best_s + 1e-6: - best_z, best_s = nxt, s - zt = nxt - no_imp = 0 - else: - no_imp += 1 - zt = nxt - if no_imp >= no_improve_limit: - break - return best_z, best_s - - def _af_refine_around( - self, - center: int, - cache: dict[int, float], - bounds_ok: Optional[Callable[[int], bool]] = None, - fine_step_ticks: int = _AFSTEP, - no_improve_limit: int = 2, - scorer: Optional[Callable[[], float]] = None, - baseline: Optional[float] = None, - ) -> tuple[int, float]: - scorer = scorer or self._af_score_still - up_z, up_s = self._af_climb_fine(center, fine_step_ticks, cache, bounds_ok, no_improve_limit, scorer, baseline) - down_z, down_s = self._af_climb_fine(center, -fine_step_ticks, cache, bounds_ok, no_improve_limit, scorer, baseline) - return (up_z, up_s) if up_s >= down_s else (down_z, down_s) - - # Autofocus 
- def autofocus_descent_macro(self, cmd: command) -> None: - """ - Descent-only autofocus with configurable envelope, step sizes, and scoring. - Coarse: fixed downward march from the start position toward Z floor. - Refine: fine polish around the best coarse Z. - - Behavior mirrors the 'tunables' style of `autofocus_macro` and `fine_autofocus`. - """ - - # =========================== TUNABLES (easy to tweak) =========================== - # Focus/strategy thresholds - FOCUS_PREVIEW_THRESHOLD = 90000.0 # if baseline STILL < this → use PREVIEW during coarse - Z_FLOOR_MM = 0.00 # hard lower bound to protect hardware - - # Step sizes (mm) - COARSE_STEP_MM = 0.20 # coarse, fixed downward step - FINE_STEP_MM = 0.04 # fine polish - MAX_OFFSET_MM = 5.60 # max explore distance downward from start - - # Early-stop behavior (relative to baseline and local peak) - DROP_STOP_PEAK = 5000.0 # stop if drop from local peak exceeds this - DROP_STOP_BASE = 3000.0 # early stop if below baseline by this amount with no better peak - - # Settling (seconds) – only used inside the scoring helpers - SETTLE_STILL_S = 0.4 - SETTLE_PREVIEW_S = 0.4 - - # Fine search behavior - FINE_NO_IMPROVE_LIMIT = 2 # stop after this many non-improving steps per direction - FINE_ALLOW_PREVIEW = False # if True, allow PREVIEW fine search when baseline is weak (like fine_autofocus) - - # Messaging - LOG_VERBOSE = True - # ============================================================================== - - # ---- derived constants (ticks) ---- - _AFTPM = 100 - _AF_ZFLOOR = int(round(Z_FLOOR_MM * _AFTPM)) - COARSE_STEP = int(round(COARSE_STEP_MM * _AFTPM)) - _AFSTEP = int(round(FINE_STEP_MM * _AFTPM)) - MAX_OFFSET = int(round(MAX_OFFSET_MM * _AFTPM)) - - def quantize(zt: int) -> int: - # keep multiples of printer min step (0.04 mm = 4 ticks) - step = 4 - return (zt // step) * step - - # Envelope: allow [start - MAX_OFFSET, start], clamped to floor - def within_env(zt: int) -> bool: - return (start - MAX_OFFSET) <= 
zt <= start and zt >= _AF_ZFLOOR - - # ---- scorers (wrapped to match _af_score_at's current call style) ---- - def score_still_lambda(_z, _c, _b) -> float: - self._exec_gcode("M400", wait=True) - if SETTLE_STILL_S > 0: time.sleep(SETTLE_STILL_S) - self.camera.capture_image() - while self.camera.is_taking_image: - time.sleep(0.01) - if self.machine_vision.is_black(source="still"): - return float("-inf") - try: - img = self.camera.get_last_frame(prefer="still", wait_for_still=False) - res = self.machine_vision.analyze_focus() - return float(res.focus_score) - except Exception: - return float("-inf") - - def score_preview_lambda(_z, _c, _b) -> float: - self._exec_gcode("M400", wait=True) - if SETTLE_PREVIEW_S > 0: time.sleep(SETTLE_PREVIEW_S) - if self.machine_vision.is_black(source="stream"): - return float("-inf") - try: - img = self.camera.get_last_frame(prefer="stream", wait_for_still=False) - res = self.machine_vision.analyze_focus() - return float(res.focus_score) - except Exception: - return float("-inf") - - # ---- start ---- - self.status(cmd.message or "Autofocus (descent) starting…", cmd.log) - if self.pause_point(): return - - pos = self.get_position() - start = quantize(int(round(getattr(pos, "z", 1600)))) - self.status(f"Start @ Z={start / _AFTPM:.2f} mm (descent expected)", cmd.log) - - scores: dict[int, float] = {} - - # Baseline STILL (reliable for Δbase & scorer choice) - self._af_move_to_ticks(start) - baseline = self._af_score_at( - start, scores, within_env, - scorer=score_still_lambda - ) - scores[start] = baseline - best_z = start - best_s = baseline - self.status(f"[AF-Descent] Baseline Z={start / _AFTPM:.2f} score={baseline:.1f}", LOG_VERBOSE) - - # Choose coarse scorer based on baseline (like autofocus_macro) - coarse_scorer = score_preview_lambda if (baseline < FOCUS_PREVIEW_THRESHOLD) else score_still_lambda - self.status(f"[AF-Descent] Coarse scorer: " - f"{'PREVIEW' if coarse_scorer is score_preview_lambda else 'STILL'} " - 
f"(baseline={baseline:.1f} < thresh={FOCUS_PREVIEW_THRESHOLD:.1f})", LOG_VERBOSE) - - # -------- Coarse descent-only march -------- - peak_s = baseline - peak_z = start - steps = min(MAX_OFFSET // COARSE_STEP, (start - _AF_ZFLOOR) // COARSE_STEP) - - for k in range(1, steps + 1): - if self.pause_point(): - self.status("Autofocus paused/stopped.", True); return - - target = quantize(start - k * COARSE_STEP) - if target <= _AF_ZFLOOR: - target = _AF_ZFLOOR - - s = self._af_score_at(target, scores, within_env, scorer=coarse_scorer) - d_base = s - baseline - self.status( - f"[AF-Descent] ↓{COARSE_STEP_MM:.2f}mm Z={target / _AFTPM:.2f}" - f"{' (FLOOR)' if target == _AF_ZFLOOR else ''} score={s:.1f} Δbase={d_base:+.1f}", - LOG_VERBOSE - ) - - if s > best_s: best_s, best_z = s, target - if s > peak_s: peak_s, peak_z = s, target - - if best_z == start and (baseline - s) >= DROP_STOP_BASE: - self.status("[AF-Descent] Early stop (baseline-drop)", LOG_VERBOSE) - break - if (peak_s - s) >= DROP_STOP_PEAK: - self.status("[AF-Descent] Early stop (peak-drop)", LOG_VERBOSE) - break - if target == _AF_ZFLOOR: - break - - # -------- Fine polish around best -------- - if self.pause_point(): - self.status("Autofocus paused/stopped.", True); return - - # Optionally allow preview during fine if baseline is weak (like fine_autofocus) - if FINE_ALLOW_PREVIEW and baseline < FOCUS_PREVIEW_THRESHOLD: - fine_scorer = score_preview_lambda - scorer_name = "PREVIEW" - else: - fine_scorer = score_still_lambda - scorer_name = "STILL" - - self.status(f"[AF-Descent] Fine search using {scorer_name} (step={FINE_STEP_MM:.2f}mm)", LOG_VERBOSE) - - local_z, local_s = self._af_refine_around( - center=best_z, - cache=scores, - bounds_ok=within_env, - fine_step_ticks=_AFSTEP, - no_improve_limit=FINE_NO_IMPROVE_LIMIT, - scorer=fine_scorer, - baseline=baseline - ) - if local_s > best_s: - best_z, best_s = local_z, local_s - - if self.pause_point(): return - self._af_move_to_ticks(best_z) - self.status( - 
f"Autofocus (descent) complete: Best Z={best_z / _AFTPM:.2f} mm " - f"Score={best_s:.1f} Δbase={(best_s - baseline):+.1f} " - f"(coarse={'PREVIEW' if coarse_scorer is score_preview_lambda else 'STILL'}, " - f"fine={scorer_name}, step={FINE_STEP_MM:.2f}mm, max_offset={MAX_OFFSET_MM:.2f}mm)", - True - ) - - def fine_autofocus(self, cmd: command) -> None: - """ - Fine autofocus around current Z with configurable window, step, and scoring. - Behavior mirrors the 'tunables' style of `autofocus_macro`. - """ - - # =========================== TUNABLES (easy to tweak) =========================== - # Search window & step sizes (mm) - WINDOW_MM = 0.16 # half-range; searches center ± WINDOW_MM - FINE_STEP_MM = 0.04 # printer min step by default - - # Stopping behavior - NO_IMPROVE_LIMIT = 1 # stop after this many non-improving fine steps per direction - - # Scoring strategy - USE_PREVIEW_IF_BELOW = False # allow faster preview scoring if baseline is weak - FOCUS_PREVIEW_THRESHOLD= 90000.0 # if baseline STILL < this → use PREVIEW for the fine search - - # Messaging - LOG_VERBOSE = True - # ============================================================================== - - # ---- derived constants (ticks) ---- - _AFTPM = 100 # ticks per mm (0.01 mm units) - _AF_ZFLOOR = 0 - FINE_STEP_TICKS = int(round(FINE_STEP_MM * _AFTPM)) - WINDOW_TICKS = int(round(WINDOW_MM * _AFTPM)) - - # ---- local helpers that honor tunables ---- - def within_window(zt: int, center: int) -> bool: - return (center - WINDOW_TICKS) <= zt <= (center + WINDOW_TICKS) and zt >= _AF_ZFLOOR - - # ---- start ---- - self.status(cmd.message or "Fine autofocus…", cmd.log) - - pos = self.get_position() - center = self._af_quantize(int(round(getattr(pos, "z", 1600)))) # fallback 16.00 mm - self.status(f"[AF-Fine] Center Z={center / _AFTPM:.2f} mm Window=±{WINDOW_MM:.2f} mm Step={FINE_STEP_MM:.2f} mm", LOG_VERBOSE) - - scores: dict[int, float] = {} - - # Baseline with STILL (for reliable Δbase and scorer decision). 
- baseline = self._af_score_at(center, scores, lambda z: within_window(z, center), scorer=lambda _z, _c, _b: self._af_score_still()) - - # Choose scorer for the *search* (preview if baseline is weak and allowed) - if USE_PREVIEW_IF_BELOW and baseline < FOCUS_PREVIEW_THRESHOLD: - fine_scorer = lambda _z, _c, _b: self._af_score_preview() - scorer_name = "PREVIEW" - else: - fine_scorer = lambda _z, _c, _b: self._af_score_still() - scorer_name = "STILL" - - self.status(f"[AF-Fine] Using {scorer_name} scorer for search (baseline={baseline:.1f} thresh={FOCUS_PREVIEW_THRESHOLD:.1f})", LOG_VERBOSE) - - # Perform the fine search around center using the chosen scorer. - if self.pause_point(): # graceful stop - return - - best_z, best_s = self._af_refine_around( - center=center, - cache=scores, - bounds_ok=lambda z: within_window(z, center), - fine_step_ticks=FINE_STEP_TICKS, - no_improve_limit=NO_IMPROVE_LIMIT, - scorer=fine_scorer, - baseline=baseline - ) - - if self.pause_point(): # graceful stop - return - - # Move to best and report Δbase. - self._af_move_to_ticks(best_z) - self.status( - f"[AF-Fine] Best Z={best_z / _AFTPM:.2f} mm " - f"Score={best_s:.1f} Δbase={(best_s - baseline):+.1f} " - f"(search={scorer_name}, step={FINE_STEP_MM:.2f}mm, window=±{WINDOW_MM:.2f}mm, " - f"no_improve_limit={NO_IMPROVE_LIMIT})", - True - ) - - def autofocus_macro(self, cmd: command) -> None: - """ - Coarse (0.40 mm) alternating with bias → 0.20 mm refine march → 0.04 mm fine polish. - Coarse uses PREVIEW if a quick baseline STILL focus is below the configured threshold; - fine stage always uses STILLs. 
- """ - - # =========================== TUNABLES (easy to tweak) =========================== - # Focus/strategy thresholds - FOCUS_PREVIEW_THRESHOLD = 90000.0 # if baseline STILL < this → use PREVIEW during coarse/refine - COARSE_IMPROVE_THRESH = 1000.0 # improvement vs baseline that triggers biasing a side - COARSE_DROP_STOP_PEAK = 2000.0 # stop a biased march if drop from local peak exceeds this - COARSE_DROP_STOP_BASE = 3000.0 # early stop if below baseline by this amount with no better peak - Z_FLOOR_MM = 0.00 # hard lower bound to protect hardware - - # Step sizes (mm) - COARSE_STEP_MM = 0.20 # coarse alternating outward step - REFINE_COARSE_MM = 0.12 # directionally consistent refine march - FINE_STEP_MM = 0.04 # fine polish - MAX_OFFSET_MM = 5.60 # max explore distance from start - - # Settling (seconds) - SETTLE_STILL_S = 0.4 # wait before scoring a still - SETTLE_PREVIEW_S = 0.4 # small settle for preview scoring - - # Fine search behavior - FINE_NO_IMPROVE_LIMIT = 2 # stop after this many non-improving fine steps per direction - - # Messaging - LOG_VERBOSE = True # set False to quiet step-by-step logs - # ============================================================================== - - # ---- derived constants (ticks) ---- - _AFTPM = 100 # ticks per mm (0.01 mm units) – keep consistent with your code - _AF_ZFLOOR = int(round(Z_FLOOR_MM * _AFTPM)) - COARSE_STEP = int(round(COARSE_STEP_MM * _AFTPM)) - REFINE_COARSE = int(round(REFINE_COARSE_MM * _AFTPM)) - _AFSTEP = int(round(FINE_STEP_MM * _AFTPM)) - MAX_OFFSET = int(round(MAX_OFFSET_MM * _AFTPM)) - - # ---- local helpers that honor tunables ---- - def quantize(zt: int) -> int: - # ensure multiples of printer min step (0.04 mm = 4 ticks) - step = 4 - return (zt // step) * step - - def within_env(zt: int) -> bool: - return (start - MAX_OFFSET) <= zt <= (start + MAX_OFFSET) and zt >= _AF_ZFLOOR - - def score_still() -> float: - self._exec_gcode("M400", wait=True) - if SETTLE_STILL_S > 0: 
time.sleep(SETTLE_STILL_S) - self.camera.capture_image() - while self.camera.is_taking_image: - time.sleep(0.01) - if self.machine_vision.is_black(source="still"): - return float("-inf") - img = self.camera.get_last_frame(prefer="still", wait_for_still=False) - res = self.machine_vision.analyze_focus() - return float(res.focus_score) - - def score_preview() -> float: - self._exec_gcode("M400", wait=True) - if SETTLE_PREVIEW_S > 0: time.sleep(SETTLE_PREVIEW_S) - if self.machine_vision.is_black(source="stream"): - return float("-inf") - img = self.camera.get_last_frame(prefer="stream", wait_for_still=False) - res = self.machine_vision.analyze_focus() - return float(res.focus_score) - - def score_at(zt: int, cache: dict, scorer) -> float: - zt = quantize(zt) - if zt < _AF_ZFLOOR or not within_env(zt): - return float("-inf") - if zt in cache: - return cache[zt] - self._af_move_to_ticks(zt) - s = scorer() - cache[zt] = s - return s - - # ---- start ---- - self.status(cmd.message or "Autofocus starting…", cmd.log) - if self.pause_point(): return - - pos = self.get_position() - start = quantize(int(round(getattr(pos, "z", 1600)))) - self.status(f"Start @ Z={start / _AFTPM:.2f} mm", cmd.log) - - scores: dict[int, float] = {} - - # Baseline STILL and choose coarse scorer - self._af_move_to_ticks(start) - baseline = score_still() - scores[start] = baseline - best_z = start - best_s = baseline - self.status(f"[AF] Baseline Z={start / _AFTPM:.2f} score={baseline:.1f}", LOG_VERBOSE) - - coarse_scorer = score_preview if (baseline < FOCUS_PREVIEW_THRESHOLD) else score_still - self.status(f"[AF] Coarse scorer: " - f"{'PREVIEW' if coarse_scorer is score_preview else 'STILL'} " - f"(baseline={baseline:.1f} < thresh={FOCUS_PREVIEW_THRESHOLD:.1f})", - LOG_VERBOSE) - - # -------- Coarse alternating with bias -------- - k_right = 1; k_left = 1 - max_k = MAX_OFFSET // COARSE_STEP - left_max_safe = min(max_k, (start - _AF_ZFLOOR) // COARSE_STEP) - right_max_safe = max_k - bias_side = None 
- last_side = None - peak_on_bias = baseline - - while True: - if self.pause_point(): - self.status("Autofocus paused/stopped.", True); return - - right_has = k_right <= right_max_safe - left_has = k_left <= left_max_safe - if not right_has and not left_has: - break - - # choose side (alternate until bias is set) - if bias_side: - if bias_side == 'right' and right_has: - side = 'right' - elif bias_side == 'left' and left_has: - side = 'left' - else: - side = 'right' if right_has else 'left' - else: - if last_side == 'left' and right_has: side = 'right' - elif last_side == 'right' and left_has: side = 'left' - elif right_has: side = 'right' - else: side = 'left' - - target = quantize(start + (k_right * COARSE_STEP if side == 'right' else -k_left * COARSE_STEP)) - if side == 'left' and target < _AF_ZFLOOR: - self.status("[AF-Coarse] Reached Z floor; stop left.", LOG_VERBOSE) - k_left = left_max_safe + 1 - last_side = side - continue - - s = score_at(target, scores, coarse_scorer) - if s > best_s: best_s, best_z = s, target - - improv = s - baseline - self.status(f"[AF-Coarse] side={side:<5} Z={target / _AFTPM:.2f} score={s:.1f} Δbase={improv:+.1f}", LOG_VERBOSE) - - if best_z == start and (baseline - s) >= COARSE_DROP_STOP_BASE: - self.status("[AF-Coarse] Early stop (baseline-drop)", LOG_VERBOSE) - break - - if not bias_side and improv >= COARSE_IMPROVE_THRESH: - bias_side = side - peak_on_bias = s - self.status(f"[AF-Coarse] Bias → {bias_side.upper()} (≥+{COARSE_IMPROVE_THRESH:.0f})", LOG_VERBOSE) - - if bias_side and side == bias_side: - if s > peak_on_bias: - peak_on_bias = s - elif (peak_on_bias - s) >= COARSE_DROP_STOP_PEAK: - self.status("[AF-Coarse] Early stop (peak-drop)", LOG_VERBOSE) - break - - if side == 'right': k_right += 1 - else: k_left += 1 - last_side = side - - if bias_side and ((bias_side == 'right' and not (k_right <= max_k)) or - (bias_side == 'left' and not (k_left <= max_k))): - break - - # -------- 0.20 mm refine march (uses same 
coarse_scorer) -------- - if self.pause_point(): - self.status("Autofocus paused/stopped.", True); return - - up_zt = quantize(best_z + REFINE_COARSE) - down_zt = quantize(best_z - REFINE_COARSE) - up_s = score_at(up_zt, scores, coarse_scorer) - down_s = score_at(down_zt, scores, coarse_scorer) - dir1, z1, s1 = (('up', up_zt, up_s) if up_s >= down_s else ('down', down_zt, down_s)) - self.status(f"[AF-Refine] Probe {REFINE_COARSE_MM:.2f}mm {dir1}: Z={z1 / _AFTPM:.2f} score={s1:.1f}", LOG_VERBOSE) - if s1 > best_s: best_s, best_z = s1, z1 - - current, prev = z1, s1 - while True: - if self.pause_point(): - self.status("Autofocus paused/stopped.", True); return - step = REFINE_COARSE if dir1 == 'up' else -REFINE_COARSE - nxt = quantize(current + step) - if nxt < _AF_ZFLOOR or not within_env(nxt): - break - s = score_at(nxt, scores, coarse_scorer) - self.status(f"[AF-Refine] {REFINE_COARSE_MM:.2f}mm step {dir1}: Z={nxt / _AFTPM:.2f} score={s:.1f}", LOG_VERBOSE) - if s > best_s: best_s, best_z = s, nxt - if s + 1e-6 >= prev: - current, prev = nxt, s - else: - break - - # -------- Fine polish (ALWAYS STILLs) -------- - def climb_fine(start_zt: int, step_ticks: int) -> tuple[int, float]: - zt = start_zt - best_local_z = start_zt - best_local_s = scores.get(start_zt, score_at(start_zt, scores, score_still)) - no_imp = 0 - while True: - nxt = quantize(zt + step_ticks) - if nxt < _AF_ZFLOOR or not within_env(nxt): - break - s = score_at(nxt, scores, score_still) - self.status(f"[AF-Fine] {FINE_STEP_MM:.2f}mm step {'up' if step_ticks>0 else 'down'}: Z={nxt / _AFTPM:.2f} score={s:.1f}", LOG_VERBOSE) - if s > best_local_s + 1e-6: - best_local_z, best_local_s = nxt, s - zt = nxt - no_imp = 0 - else: - no_imp += 1 - zt = nxt - if no_imp >= FINE_NO_IMPROVE_LIMIT: - break - return best_local_z, best_local_s - - up_z, up_s = climb_fine(best_z, _AFSTEP) - down_z, down_s = climb_fine(best_z, -_AFSTEP) - if (up_s, up_z) >= (down_s, down_z): - local_z, local_s = up_z, up_s - else: - 
local_z, local_s = down_z, down_s - if local_s > best_s: - best_z, best_s = local_z, local_s - - if self.pause_point(): return - self._af_move_to_ticks(best_z) - self.status(f"Autofocus complete: Best Z={best_z / _AFTPM:.2f} mm Score={best_s:.1f}", True) - - # Automation # --- Handler -------------------------------------------------------------- def scan_sample_bounds(self, cmd: command) -> None: @@ -990,38 +233,6 @@ def report(msg: str, log: bool = True) -> None: # Folder name to save images into (from command.value, fallback to current index) sample_folder = str(cmd.value).strip() if (cmd and getattr(cmd, "value", "")) else f"sample_{self.current_sample_index}" - - # --- start plotter process (spawn-safe) --- - plot_ok = [False] - plot_queue = None - if self.live_plots_enabled: - try: - import multiprocessing as mp - ctx = mp.get_context("spawn") - plot_queue = ctx.Queue() - plot_proc = ctx.Process(target=_scan_bounds_plotter, args=(plot_queue, 0, Y_MAX_MM), daemon=True) - plot_proc.start() - plot_ok[0] = True - plot_queue.put(("title", "Average Color vs Y (live)")) - except Exception as e: - report(f"[SCAN_SAMPLE_BOUNDS] Live plot process unavailable: {e}") - - def send_data(y_now: float, r: float, g: float, b: float, ylum: float, hard_ct: int, soft_ct: int) -> None: - if plot_ok[0] and plot_queue is not None: - try: - plot_queue.put(("data", float(y_now), float(r), float(g), float(b), float(ylum))) - plot_queue.put(("focus", float(y_now), int(hard_ct), int(soft_ct))) - except Exception: - plot_ok[0] = False - - - def send_break(): - if plot_ok[0] and plot_queue is not None: - try: - plot_queue.put(("break",)) - except Exception: - plot_ok[0] = False - # --- capture start Y --- start_y = float(self.position.y) / 100 start_z = float(self.position.z) / 100 @@ -1035,13 +246,6 @@ def send_break(): self.autofocus_descent_macro(cmd) self.pause_point() - def send_elapsed(): - if plot_ok[0] and plot_queue is not None: - try: - plot_queue.put(("elapsed", time.time() - 
start_time)) - except Exception: - pass - # --- measurement helper: color + focus counts --- def refine_and_measure(y_now: float) -> None: """ @@ -1092,10 +296,6 @@ def refine_and_measure(y_now: float) -> None: f"{'(fine AF skipped)' if not run_fine else ''}" ) - # Stream to both graphs - send_data(y_now, r, g, b, ylum, hard_tiles, soft_tiles) - send_elapsed() - except Exception as e: report(f"[SCAN_SAMPLE_BOUNDS] Y={y_now:.3f} → measurement failed: {e}", True) @@ -1112,15 +312,14 @@ def refine_and_measure(y_now: float) -> None: # 3) Return to start **without drawing a connecting line** if abs(y - start_y) > 1e-9: - send_break() # prevents the line from connecting the last +Y point to start self._exec_gcode(f"G0 Y{start_y:.3f} Z{start_z:.3f}") self.pause_point() report("[SCAN_SAMPLE_BOUNDS] Running autofocus_macro at start position…") self.autofocus_descent_macro(cmd) + # Measure at start after autofocus_macro (with skip logic inside refine_and_measure) refine_and_measure(start_y) - # 4) Sweep -Y until sample end or 0 y = start_y sample_done = False @@ -1147,13 +346,7 @@ def refine_and_measure(y_now: float) -> None: total_time = time.time() - start_time report(f"[SCAN_SAMPLE_BOUNDS] Scan complete. 
Total time: {total_time:.2f} seconds") - send_elapsed() # final elapsed push so titles show the final time - if plot_ok[0] and plot_queue is not None: - try: - plot_queue.put(("done",)) - except Exception: - pass def start_scan_sample_bounds(self, folder_name: str | None = None) -> None: """ @@ -1168,32 +361,6 @@ def start_scan_sample_bounds(self, folder_name: str | None = None) -> None: )) - def start_autofocus(self) -> None: - """Start the automation process""" - - self.reset_after_stop() - - # Enqueue the macro like any other command - self.enqueue_cmd(command( - kind="AUTOFOCUS", - value="", - message= "Beginning Autofocus Macro", - log=True - )) - - def start_fine_autofocus(self) -> None: - """Start the automation process""" - - self.reset_after_stop() - - # Enqueue the macro like any other command - self.enqueue_cmd(command( - kind="FINE_AUTOFOCUS", - value="", - message= "Beginning Fine Autofocus Macro", - log=True - )) - def start_automation(self) -> None: """Home, then iterate enabled samples and scan each with progress messaging.""" self.reset_after_stop() diff --git a/printer/automation/autofocus_mixin.py b/printer/automation/autofocus_mixin.py new file mode 100644 index 0000000..a654beb --- /dev/null +++ b/printer/automation/autofocus_mixin.py @@ -0,0 +1,709 @@ +""" +Autofocus functionality for automated 3D printer control. + +This module contains all autofocus-related methods that can be mixed into +the main AutomatedPrinter controller class. +""" + +import time +from typing import Optional, Callable + +from printer.base_controller import command + + +# Autofocus constants (ticks per mm = 100, meaning 0.01 mm units) +_AFTPM = 100 # ticks/mm (0.01 mm units) +_AFSTEP = 4 # 0.04 mm (printer min step) +_AF_ZFLOOR = 0 # 0.00 mm -> 0 ticks + + +class AutofocusMixin: + """ + Mixin class containing autofocus functionality. 
+ + This class assumes it will be mixed into a controller that has: + - self.machine_vision (MachineVision instance) + - self.camera (camera instance with capture_image, get_last_frame, is_taking_image) + - self._exec_gcode(gcode, wait=False) method + - self.status(message, log=True) method + - self.pause_point() method that returns True if stopped + - self.register_handler(kind, function) method + """ + + def _init_autofocus_handlers(self): + """Register autofocus command handlers. Call this from __init__.""" + self.register_handler("AUTOFOCUS_DESCENT", self.autofocus_descent_macro) + self.register_handler("AUTOFOCUS", self.autofocus_macro) + self.register_handler("FINE_AUTOFOCUS", self.fine_autofocus) + + # ======================================================================== + # Core autofocus helper methods + # ======================================================================== + + def _af_quantize(self, z_ticks: int) -> int: + """Quantize Z position to printer's minimum step size.""" + return int(round(z_ticks / _AFSTEP) * _AFSTEP) + + def _af_move_to_ticks(self, z_ticks: int) -> None: + """Move Z axis to specified position in ticks.""" + z_ticks = max(z_ticks, _AF_ZFLOOR) + z_mm = z_ticks / _AFTPM + self._exec_gcode(f"G0 Z{z_mm:.2f}", wait=True) + + def _af_score_still(self) -> float: + """Capture a STILL image and return its focus score.""" + self._exec_gcode("M400", wait=True) + time.sleep(0.1) # vibration settle for stills + self.camera.capture_image() + while self.camera.is_taking_image: + time.sleep(0.01) + if self.machine_vision.is_black(source="still"): + return float("-inf") + try: + img = self.camera.get_last_frame(prefer="still", wait_for_still=False) + res = self.machine_vision.analyze_focus() + return float(getattr(res, "focus_score", float("-inf"))) + except Exception: + return float("-inf") + + def _af_score_preview(self) -> float: + """Score the live preview/stream (no still capture). 
Much faster.""" + self._exec_gcode("M400", wait=True) + time.sleep(0.05) # tiny settle is enough for stream + if self.machine_vision.is_black(source="stream"): + return float("-inf") + try: + img = self.camera.get_last_frame(prefer="stream", wait_for_still=False) + res = self.machine_vision.analyze_focus() + return float(getattr(res, "focus_score", float("-inf"))) + except Exception: + return float("-inf") + + def _af_score_at( + self, + zt: int, + cache: dict[int, float], + bounds_ok: Optional[Callable[[int], bool]] = None, + scorer: Optional[Callable[[], float]] = None, + ) -> float: + """ + Quantize, check bounds, check cache, move, and score using the provided scorer. + Defaults to STILL scorer if not provided. + """ + scorer = scorer or self._af_score_still + zt = self._af_quantize(zt) + if zt < _AF_ZFLOOR: + return float("-inf") + if bounds_ok and not bounds_ok(zt): + return float("-inf") + if zt in cache: + return cache[zt] + self._af_move_to_ticks(zt) + s = scorer(zt, cache, bounds_ok) + cache[zt] = s + return s + + def _af_climb_fine( + self, + start: int, + step_ticks: int, + cache: dict[int, float], + bounds_ok: Optional[Callable[[int], bool]] = None, + no_improve_limit: int = 2, + scorer: Optional[Callable[[], float]] = None, + baseline: Optional[float] = None, + ) -> tuple[int, float]: + """ + Climb in one direction with fine steps until no improvement is found. + Returns (best_z, best_score). 
+ """ + scorer = scorer or self._af_score_still + zt = start + best_z = start + best_s = cache.get(start, self._af_score_at(start, cache, bounds_ok, scorer)) + no_imp = 0 + + while True: + nxt = self._af_quantize(zt + step_ticks) + if nxt < _AF_ZFLOOR or (bounds_ok and not bounds_ok(nxt)): + break + s = self._af_score_at(nxt, cache, bounds_ok, scorer) + delta = f" Δbase={s - baseline:+.1f}" if baseline is not None else "" + self.status( + f"[AF-Fine] {step_ticks/_AFTPM:.2f}mm step {'up' if step_ticks>0 else 'down'}: " + f"Z={nxt / _AFTPM:.2f} score={s:.1f}{delta}", + False + ) + if s > best_s + 1e-6: + best_z, best_s = nxt, s + zt = nxt + no_imp = 0 + else: + no_imp += 1 + zt = nxt + if no_imp >= no_improve_limit: + break + return best_z, best_s + + def _af_refine_around( + self, + center: int, + cache: dict[int, float], + bounds_ok: Optional[Callable[[int], bool]] = None, + fine_step_ticks: int = _AFSTEP, + no_improve_limit: int = 2, + scorer: Optional[Callable[[], float]] = None, + baseline: Optional[float] = None, + ) -> tuple[int, float]: + """ + Refine focus by climbing both up and down from center position. + Returns (best_z, best_score) from both directions. + """ + scorer = scorer or self._af_score_still + up_z, up_s = self._af_climb_fine( + center, fine_step_ticks, cache, bounds_ok, no_improve_limit, scorer, baseline + ) + down_z, down_s = self._af_climb_fine( + center, -fine_step_ticks, cache, bounds_ok, no_improve_limit, scorer, baseline + ) + return (up_z, up_s) if up_s >= down_s else (down_z, down_s) + + # ======================================================================== + # Main autofocus macros + # ======================================================================== + + def autofocus_descent_macro(self, cmd: command) -> None: + """ + Descent-only autofocus with configurable envelope, step sizes, and scoring. + Coarse: fixed downward march from the start position toward Z floor. + Refine: fine polish around the best coarse Z. 
+ """ + # Tunables + FOCUS_PREVIEW_THRESHOLD = 90000.0 + Z_FLOOR_MM = 0.00 + COARSE_STEP_MM = 0.20 + FINE_STEP_MM = 0.04 + MAX_OFFSET_MM = 5.60 + DROP_STOP_PEAK = 5000.0 + DROP_STOP_BASE = 3000.0 + SETTLE_STILL_S = 0.4 + SETTLE_PREVIEW_S = 0.4 + FINE_NO_IMPROVE_LIMIT = 2 + FINE_ALLOW_PREVIEW = False + LOG_VERBOSE = True + + # Derived constants + _AF_ZFLOOR = int(round(Z_FLOOR_MM * _AFTPM)) + COARSE_STEP = int(round(COARSE_STEP_MM * _AFTPM)) + _AFSTEP = int(round(FINE_STEP_MM * _AFTPM)) + MAX_OFFSET = int(round(MAX_OFFSET_MM * _AFTPM)) + + def quantize(zt: int) -> int: + step = 4 + return (zt // step) * step + + def within_env(zt: int) -> bool: + return (start - MAX_OFFSET) <= zt <= start and zt >= _AF_ZFLOOR + + # Scorers + def score_still_lambda(_z, _c, _b) -> float: + self._exec_gcode("M400", wait=True) + if SETTLE_STILL_S > 0: + time.sleep(SETTLE_STILL_S) + self.camera.capture_image() + while self.camera.is_taking_image: + time.sleep(0.01) + if self.machine_vision.is_black(source="still"): + return float("-inf") + try: + img = self.camera.get_last_frame(prefer="still", wait_for_still=False) + res = self.machine_vision.analyze_focus() + return float(res.focus_score) + except Exception: + return float("-inf") + + def score_preview_lambda(_z, _c, _b) -> float: + self._exec_gcode("M400", wait=True) + if SETTLE_PREVIEW_S > 0: + time.sleep(SETTLE_PREVIEW_S) + if self.machine_vision.is_black(source="stream"): + return float("-inf") + try: + img = self.camera.get_last_frame(prefer="stream", wait_for_still=False) + res = self.machine_vision.analyze_focus() + return float(res.focus_score) + except Exception: + return float("-inf") + + # Start + self.status(cmd.message or "Autofocus (descent) starting...", cmd.log) + if self.pause_point(): + return + + pos = self.get_position() + start = quantize(int(round(getattr(pos, "z", 1600)))) + self.status(f"Start @ Z={start / _AFTPM:.2f} mm (descent expected)", cmd.log) + + scores: dict[int, float] = {} + + # Baseline STILL + 
self._af_move_to_ticks(start) + baseline = self._af_score_at(start, scores, within_env, scorer=score_still_lambda) + scores[start] = baseline + best_z = start + best_s = baseline + self.status(f"[AF-Descent] Baseline Z={start / _AFTPM:.2f} score={baseline:.1f}", LOG_VERBOSE) + + # Choose coarse scorer + coarse_scorer = ( + score_preview_lambda if (baseline < FOCUS_PREVIEW_THRESHOLD) else score_still_lambda + ) + self.status( + f"[AF-Descent] Coarse scorer: " + f"{'PREVIEW' if coarse_scorer is score_preview_lambda else 'STILL'} " + f"(baseline={baseline:.1f} < thresh={FOCUS_PREVIEW_THRESHOLD:.1f})", + LOG_VERBOSE + ) + + # Coarse descent + peak_s = baseline + peak_z = start + steps = min(MAX_OFFSET // COARSE_STEP, (start - _AF_ZFLOOR) // COARSE_STEP) + + for k in range(1, steps + 1): + if self.pause_point(): + self.status("Autofocus paused/stopped.", True) + return + + target = quantize(start - k * COARSE_STEP) + if target <= _AF_ZFLOOR: + target = _AF_ZFLOOR + + s = self._af_score_at(target, scores, within_env, scorer=coarse_scorer) + d_base = s - baseline + self.status( + f"[AF-Descent] ↓{COARSE_STEP_MM:.2f}mm Z={target / _AFTPM:.2f}" + f"{' (FLOOR)' if target == _AF_ZFLOOR else ''} score={s:.1f} Δbase={d_base:+.1f}", + LOG_VERBOSE + ) + + if s > best_s: + best_s, best_z = s, target + if s > peak_s: + peak_s, peak_z = s, target + + if best_z == start and (baseline - s) >= DROP_STOP_BASE: + self.status("[AF-Descent] Early stop (baseline-drop)", LOG_VERBOSE) + break + if (peak_s - s) >= DROP_STOP_PEAK: + self.status("[AF-Descent] Early stop (peak-drop)", LOG_VERBOSE) + break + if target == _AF_ZFLOOR: + break + + # Fine polish + if self.pause_point(): + self.status("Autofocus paused/stopped.", True) + return + + if FINE_ALLOW_PREVIEW and baseline < FOCUS_PREVIEW_THRESHOLD: + fine_scorer = score_preview_lambda + scorer_name = "PREVIEW" + else: + fine_scorer = score_still_lambda + scorer_name = "STILL" + + self.status( + f"[AF-Descent] Fine search using {scorer_name} 
(step={FINE_STEP_MM:.2f}mm)", + LOG_VERBOSE + ) + + local_z, local_s = self._af_refine_around( + center=best_z, + cache=scores, + bounds_ok=within_env, + fine_step_ticks=_AFSTEP, + no_improve_limit=FINE_NO_IMPROVE_LIMIT, + scorer=fine_scorer, + baseline=baseline + ) + if local_s > best_s: + best_z, best_s = local_z, local_s + + if self.pause_point(): + return + self._af_move_to_ticks(best_z) + self.status( + f"Autofocus (descent) complete: Best Z={best_z / _AFTPM:.2f} mm " + f"Score={best_s:.1f} Δbase={(best_s - baseline):+.1f} " + f"(coarse={'PREVIEW' if coarse_scorer is score_preview_lambda else 'STILL'}, " + f"fine={scorer_name}, step={FINE_STEP_MM:.2f}mm, max_offset={MAX_OFFSET_MM:.2f}mm)", + True + ) + + def fine_autofocus(self, cmd: command) -> None: + """ + Fine autofocus around current Z with configurable window, step, and scoring. + """ + # Tunables + WINDOW_MM = 0.16 + FINE_STEP_MM = 0.04 + NO_IMPROVE_LIMIT = 1 + USE_PREVIEW_IF_BELOW = False + FOCUS_PREVIEW_THRESHOLD = 90000.0 + LOG_VERBOSE = True + + # Derived constants + _AF_ZFLOOR = 0 + FINE_STEP_TICKS = int(round(FINE_STEP_MM * _AFTPM)) + WINDOW_TICKS = int(round(WINDOW_MM * _AFTPM)) + + def within_window(zt: int, center: int) -> bool: + return (center - WINDOW_TICKS) <= zt <= (center + WINDOW_TICKS) and zt >= _AF_ZFLOOR + + # Start + self.status(cmd.message or "Fine autofocus...", cmd.log) + + pos = self.get_position() + center = self._af_quantize(int(round(getattr(pos, "z", 1600)))) + self.status( + f"[AF-Fine] Center Z={center / _AFTPM:.2f} mm Window=±{WINDOW_MM:.2f} mm " + f"Step={FINE_STEP_MM:.2f} mm", + LOG_VERBOSE + ) + + scores: dict[int, float] = {} + + # Baseline with STILL + baseline = self._af_score_at( + center, scores, lambda z: within_window(z, center), + scorer=lambda _z, _c, _b: self._af_score_still() + ) + + # Choose scorer + if USE_PREVIEW_IF_BELOW and baseline < FOCUS_PREVIEW_THRESHOLD: + fine_scorer = lambda _z, _c, _b: self._af_score_preview() + scorer_name = "PREVIEW" + else: + 
fine_scorer = lambda _z, _c, _b: self._af_score_still() + scorer_name = "STILL" + + self.status( + f"[AF-Fine] Using {scorer_name} scorer for search " + f"(baseline={baseline:.1f} thresh={FOCUS_PREVIEW_THRESHOLD:.1f})", + LOG_VERBOSE + ) + + # Fine search + if self.pause_point(): + return + + best_z, best_s = self._af_refine_around( + center=center, + cache=scores, + bounds_ok=lambda z: within_window(z, center), + fine_step_ticks=FINE_STEP_TICKS, + no_improve_limit=NO_IMPROVE_LIMIT, + scorer=fine_scorer, + baseline=baseline + ) + + if self.pause_point(): + return + + self._af_move_to_ticks(best_z) + self.status( + f"[AF-Fine] Best Z={best_z / _AFTPM:.2f} mm " + f"Score={best_s:.1f} Δbase={(best_s - baseline):+.1f} " + f"(search={scorer_name}, step={FINE_STEP_MM:.2f}mm, window=±{WINDOW_MM:.2f}mm, " + f"no_improve_limit={NO_IMPROVE_LIMIT})", + True + ) + + def autofocus_macro(self, cmd: command) -> None: + """ + Coarse (0.40 mm) alternating with bias, then 0.20 mm refine march, + then 0.04 mm fine polish. 
+ """ + # Tunables + FOCUS_PREVIEW_THRESHOLD = 90000.0 + COARSE_IMPROVE_THRESH = 1000.0 + COARSE_DROP_STOP_PEAK = 2000.0 + COARSE_DROP_STOP_BASE = 3000.0 + Z_FLOOR_MM = 0.00 + COARSE_STEP_MM = 0.20 + REFINE_COARSE_MM = 0.12 + FINE_STEP_MM = 0.04 + MAX_OFFSET_MM = 5.60 + SETTLE_STILL_S = 0.4 + SETTLE_PREVIEW_S = 0.4 + FINE_NO_IMPROVE_LIMIT = 2 + LOG_VERBOSE = True + + # Derived constants + _AF_ZFLOOR = int(round(Z_FLOOR_MM * _AFTPM)) + COARSE_STEP = int(round(COARSE_STEP_MM * _AFTPM)) + REFINE_COARSE = int(round(REFINE_COARSE_MM * _AFTPM)) + _AFSTEP = int(round(FINE_STEP_MM * _AFTPM)) + MAX_OFFSET = int(round(MAX_OFFSET_MM * _AFTPM)) + + def quantize(zt: int) -> int: + step = 4 + return (zt // step) * step + + def within_env(zt: int) -> bool: + return (start - MAX_OFFSET) <= zt <= (start + MAX_OFFSET) and zt >= _AF_ZFLOOR + + def score_still() -> float: + self._exec_gcode("M400", wait=True) + if SETTLE_STILL_S > 0: + time.sleep(SETTLE_STILL_S) + self.camera.capture_image() + while self.camera.is_taking_image: + time.sleep(0.01) + if self.machine_vision.is_black(source="still"): + return float("-inf") + img = self.camera.get_last_frame(prefer="still", wait_for_still=False) + res = self.machine_vision.analyze_focus() + return float(res.focus_score) + + def score_preview() -> float: + self._exec_gcode("M400", wait=True) + if SETTLE_PREVIEW_S > 0: + time.sleep(SETTLE_PREVIEW_S) + if self.machine_vision.is_black(source="stream"): + return float("-inf") + img = self.camera.get_last_frame(prefer="stream", wait_for_still=False) + res = self.machine_vision.analyze_focus() + return float(res.focus_score) + + def score_at(zt: int, cache: dict, scorer) -> float: + zt = quantize(zt) + if zt < _AF_ZFLOOR or not within_env(zt): + return float("-inf") + if zt in cache: + return cache[zt] + self._af_move_to_ticks(zt) + s = scorer() + cache[zt] = s + return s + + # Start + self.status(cmd.message or "Autofocus starting...", cmd.log) + if self.pause_point(): + return + + pos = 
self.get_position() + start = quantize(int(round(getattr(pos, "z", 1600)))) + self.status(f"Start @ Z={start / _AFTPM:.2f} mm", cmd.log) + + scores: dict[int, float] = {} + + # Baseline STILL + self._af_move_to_ticks(start) + baseline = score_still() + scores[start] = baseline + best_z = start + best_s = baseline + self.status(f"[AF] Baseline Z={start / _AFTPM:.2f} score={baseline:.1f}", LOG_VERBOSE) + + coarse_scorer = score_preview if (baseline < FOCUS_PREVIEW_THRESHOLD) else score_still + self.status( + f"[AF] Coarse scorer: " + f"{'PREVIEW' if coarse_scorer is score_preview else 'STILL'} " + f"(baseline={baseline:.1f} < thresh={FOCUS_PREVIEW_THRESHOLD:.1f})", + LOG_VERBOSE + ) + + # Coarse alternating with bias + k_right = 1 + k_left = 1 + max_k = MAX_OFFSET // COARSE_STEP + left_max_safe = min(max_k, (start - _AF_ZFLOOR) // COARSE_STEP) + right_max_safe = max_k + bias_side = None + last_side = None + peak_on_bias = baseline + + while True: + if self.pause_point(): + self.status("Autofocus paused/stopped.", True) + return + + right_has = k_right <= right_max_safe + left_has = k_left <= left_max_safe + if not right_has and not left_has: + break + + # Choose side + if bias_side: + if bias_side == 'right' and right_has: + side = 'right' + elif bias_side == 'left' and left_has: + side = 'left' + else: + side = 'right' if right_has else 'left' + else: + if last_side == 'left' and right_has: + side = 'right' + elif last_side == 'right' and left_has: + side = 'left' + elif right_has: + side = 'right' + else: + side = 'left' + + target = quantize( + start + (k_right * COARSE_STEP if side == 'right' else -k_left * COARSE_STEP) + ) + if side == 'left' and target < _AF_ZFLOOR: + self.status("[AF-Coarse] Reached Z floor; stop left.", LOG_VERBOSE) + k_left = left_max_safe + 1 + last_side = side + continue + + s = score_at(target, scores, coarse_scorer) + if s > best_s: + best_s, best_z = s, target + + improv = s - baseline + self.status( + f"[AF-Coarse] side={side:<5} 
Z={target / _AFTPM:.2f} " + f"score={s:.1f} Δbase={improv:+.1f}", + LOG_VERBOSE + ) + + if best_z == start and (baseline - s) >= COARSE_DROP_STOP_BASE: + self.status("[AF-Coarse] Early stop (baseline-drop)", LOG_VERBOSE) + break + + if not bias_side and improv >= COARSE_IMPROVE_THRESH: + bias_side = side + peak_on_bias = s + self.status( + f"[AF-Coarse] Bias → {bias_side.upper()} (≥+{COARSE_IMPROVE_THRESH:.0f})", + LOG_VERBOSE + ) + + if bias_side and side == bias_side: + if s > peak_on_bias: + peak_on_bias = s + elif (peak_on_bias - s) >= COARSE_DROP_STOP_PEAK: + self.status("[AF-Coarse] Early stop (peak-drop)", LOG_VERBOSE) + break + + if side == 'right': + k_right += 1 + else: + k_left += 1 + last_side = side + + if bias_side and ((bias_side == 'right' and not (k_right <= max_k)) or + (bias_side == 'left' and not (k_left <= max_k))): + break + + # Refine march (0.20 mm) + if self.pause_point(): + self.status("Autofocus paused/stopped.", True) + return + + up_zt = quantize(best_z + REFINE_COARSE) + down_zt = quantize(best_z - REFINE_COARSE) + up_s = score_at(up_zt, scores, coarse_scorer) + down_s = score_at(down_zt, scores, coarse_scorer) + dir1, z1, s1 = (('up', up_zt, up_s) if up_s >= down_s else ('down', down_zt, down_s)) + self.status( + f"[AF-Refine] Probe {REFINE_COARSE_MM:.2f}mm {dir1}: Z={z1 / _AFTPM:.2f} score={s1:.1f}", + LOG_VERBOSE + ) + if s1 > best_s: + best_s, best_z = s1, z1 + + current, prev = z1, s1 + while True: + if self.pause_point(): + self.status("Autofocus paused/stopped.", True) + return + step = REFINE_COARSE if dir1 == 'up' else -REFINE_COARSE + nxt = quantize(current + step) + if nxt < _AF_ZFLOOR or not within_env(nxt): + break + s = score_at(nxt, scores, coarse_scorer) + self.status( + f"[AF-Refine] {REFINE_COARSE_MM:.2f}mm step {dir1}: Z={nxt / _AFTPM:.2f} score={s:.1f}", + LOG_VERBOSE + ) + if s > best_s: + best_s, best_z = s, nxt + if s + 1e-6 >= prev: + current, prev = nxt, s + else: + break + + # Fine polish (ALWAYS STILLs) + def 
climb_fine(start_zt: int, step_ticks: int) -> tuple[int, float]: + zt = start_zt + best_local_z = start_zt + best_local_s = scores.get(start_zt, score_at(start_zt, scores, score_still)) + no_imp = 0 + while True: + nxt = quantize(zt + step_ticks) + if nxt < _AF_ZFLOOR or not within_env(nxt): + break + s = score_at(nxt, scores, score_still) + self.status( + f"[AF-Fine] {FINE_STEP_MM:.2f}mm step {'up' if step_ticks>0 else 'down'}: " + f"Z={nxt / _AFTPM:.2f} score={s:.1f}", + LOG_VERBOSE + ) + if s > best_local_s + 1e-6: + best_local_z, best_local_s = nxt, s + zt = nxt + no_imp = 0 + else: + no_imp += 1 + zt = nxt + if no_imp >= FINE_NO_IMPROVE_LIMIT: + break + return best_local_z, best_local_s + + up_z, up_s = climb_fine(best_z, _AFSTEP) + down_z, down_s = climb_fine(best_z, -_AFSTEP) + if (up_s, up_z) >= (down_s, down_z): + local_z, local_s = up_z, up_s + else: + local_z, local_s = down_z, down_s + if local_s > best_s: + best_z, best_s = local_z, local_s + + if self.pause_point(): + return + self._af_move_to_ticks(best_z) + self.status( + f"Autofocus complete: Best Z={best_z / _AFTPM:.2f} mm Score={best_s:.1f}", + True + ) + + # ======================================================================== + # Public convenience methods + # ======================================================================== + + def start_autofocus(self) -> None: + """Start the autofocus macro.""" + self.reset_after_stop() + self.enqueue_cmd(command( + kind="AUTOFOCUS", + value="", + message="Beginning Autofocus Macro", + log=True + )) + + def start_fine_autofocus(self) -> None: + """Start the fine autofocus macro.""" + self.reset_after_stop() + self.enqueue_cmd(command( + kind="FINE_AUTOFOCUS", + value="", + message="Beginning Fine Autofocus Macro", + log=True + )) \ No newline at end of file From 66208e1e92f2078b45eb0f100e10d4c6414d35e5 Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Thu, 8 Jan 2026 22:22:53 -0900 Subject: [PATCH 03/46] refactored the camera calibration --- 
UI/overlays/interactive_camera_overlay.py | 654 +++--------------- UI/ui_layout.py | 41 +- printer/automated_controller.py | 17 +- .../automation/camera_calibration_mixin.py | 589 ++++++++++++++++ printer/printerConfig.py | 4 + 5 files changed, 723 insertions(+), 582 deletions(-) create mode 100644 printer/automation/camera_calibration_mixin.py diff --git a/UI/overlays/interactive_camera_overlay.py b/UI/overlays/interactive_camera_overlay.py index 5ee96dd..78f4127 100644 --- a/UI/overlays/interactive_camera_overlay.py +++ b/UI/overlays/interactive_camera_overlay.py @@ -1,40 +1,35 @@ +""" +Interactive camera overlay UI component. + +This overlay provides a crosshair for visual reference and handles user +interactions (click-to-move, wheel-to-zoom) by delegating to the controller's +calibration and movement system. +""" + import pygame -import numpy as np -import cv2 -import time -from typing import Optional, Tuple +from typing import Optional from UI.frame import Frame from UI.camera_view import CameraView -from UI.styles import ( - CROSSHAIR_COLOR, - CROSSHAIR_LENGTH, - CROSSHAIR_THICKNESS, - CROSSHAIR_GAP, -) - +from UI.styles import CROSSHAIR_COLOR class InteractiveCameraOverlay(Frame): """ UI overlay that renders a crosshair in the center of the camera view. - Supports click-to-move and camera calibration using phase correlation. - Only displays when the camera is initialized. + Supports click-to-move and mousewheel Z-axis control. + Delegates calibration and movement logic to the controller. 
""" def __init__( self, camera_view: CameraView, - controller, # AutomatedPrinter instance + controller, # AutomatedPrinter instance with CameraCalibrationMixin visible: bool = True, - # Crosshair visual properties (defaults from styles.py) + # Crosshair visual properties crosshair_color: Optional[pygame.Color] = None, - crosshair_length: Optional[int] = None, - crosshair_thickness: Optional[int] = None, - crosshair_gap: Optional[int] = None, - - # Calibration parameters (world units in 0.01mm) - cal_move_x_ticks: int = 100, # 1.00mm in 0.01mm units - cal_move_y_ticks: int = 100, # 1.00mm in 0.01mm units (increased from 75 for better correlation) + crosshair_length: int = 20, + crosshair_thickness: int = 2, + crosshair_gap: int = 5, ): super().__init__( parent=camera_view, @@ -50,35 +45,21 @@ def __init__( self.controller = controller self.visible = visible - # Enable click handling - CRITICAL for receiving mouse events + # Enable click handling self.mouse_passthrough = False - # Crosshair properties (use styles.py defaults if not provided) - self.crosshair_color = crosshair_color if crosshair_color is not None else CROSSHAIR_COLOR - self.crosshair_length = crosshair_length if crosshair_length is not None else CROSSHAIR_LENGTH - self.crosshair_thickness = crosshair_thickness if crosshair_thickness is not None else CROSSHAIR_THICKNESS - self.crosshair_gap = crosshair_gap if crosshair_gap is not None else CROSSHAIR_GAP + # Crosshair properties + self.crosshair_color = crosshair_color or CROSSHAIR_COLOR + self.crosshair_length = crosshair_length + self.crosshair_thickness = crosshair_thickness + self.crosshair_gap = crosshair_gap # Cache overlay surface self._overlay = None self._overlay_size = None - - # Calibration state - self.M_est = None # 2x2 estimated mapping matrix (pixels = M * world_delta) - self.M_inv = None # Inverse mapping (world_delta = M_inv * pixel_delta) - self._cal_move_x = cal_move_x_ticks - self._cal_move_y = cal_move_y_ticks - self._calibrating = 
False - self._cal_ref_pos = None # Position where calibration was performed (camera center reference) - self._cal_start_x_mm = None # Starting X position for absolute moves - self._cal_start_y_mm = None # Starting Y position for absolute moves - - # Store calibration data during process - self._cal_base_pos = None - self._cal_edges_base = None - self._dp1 = None - self._dp2 = None + # ==================== Visibility Control ==================== + def toggle_overlay(self) -> None: """Toggle visibility of the crosshair overlay.""" self.visible = not self.visible @@ -105,520 +86,110 @@ def set_crosshair_properties( if gap is not None: self.crosshair_gap = gap - def _get_overlay(self, surface_size: tuple[int, int]) -> pygame.Surface: - """Return an RGBA overlay the size of the target surface (recreate on resize).""" - if self._overlay is None or self._overlay_size != surface_size: - self._overlay_size = surface_size - self._overlay = pygame.Surface(surface_size, flags=pygame.SRCALPHA) - else: - # Clear with fully transparent color - self._overlay.fill((0, 0, 0, 0)) - return self._overlay - - # ==================== Calibration Methods ==================== + # ==================== Calibration Helpers ==================== - def _surface_to_gray_cv(self, arr: np.ndarray) -> np.ndarray: - """Convert RGB numpy array to grayscale for OpenCV.""" - if arr.ndim == 2: - return arr - gray = cv2.cvtColor(arr, cv2.COLOR_RGB2GRAY) - return gray - - def _edges_canny(self, gray_u8: np.ndarray) -> np.ndarray: - """Compute normalized Canny edges.""" - g = cv2.GaussianBlur(gray_u8, (5, 5), 0) - e = cv2.Canny(g, 60, 180) - ef = e.astype(np.float32) - ef -= ef.mean() - ef /= (ef.std() + 1e-6) - return ef - - def _phase_corr_shift(self, img_a_f32: np.ndarray, img_b_f32: np.ndarray) -> Tuple[float, float, float]: - """Compute phase correlation shift between two images.""" - win = cv2.createHanningWindow((img_a_f32.shape[1], img_a_f32.shape[0]), cv2.CV_32F) - (dx, dy), response = 
cv2.phaseCorrelate(img_a_f32, img_b_f32, win) - return float(dx), float(dy), float(response) - - def _capture_and_process_edges(self) -> Optional[np.ndarray]: - """Capture a still image and return its edge map.""" - try: - # Capture still - self.camera_view.camera.capture_image() - while self.camera_view.camera.is_taking_image: - time.sleep(0.01) - - # Get frame as numpy array - arr = self.camera_view.camera.get_last_frame(prefer="still", wait_for_still=False) - if arr is None: - return None - - # Store the calibration image resolution (the actual image we're correlating) - if not hasattr(self, '_cal_image_height'): - self._cal_image_height = arr.shape[0] - self._cal_image_width = arr.shape[1] - - # Also get the current display frame size - fr = self.camera_view.get_frame_rect() - if fr: - _, _, display_w, display_h = fr - self._cal_display_width = display_w - self._cal_display_height = display_h - - self.controller.status( - f"Calibration image: {self._cal_image_width}x{self._cal_image_height}, " - f"Display frame: {display_w:.0f}x{display_h:.0f}", - True - ) - else: - self.controller.status( - f"Calibration using image resolution: {self._cal_image_width}x{self._cal_image_height}", - True - ) - - # Convert to grayscale and compute edges - gray = self._surface_to_gray_cv(arr) - edges = self._edges_canny(gray) - return edges - except Exception as e: - self.controller.status(f"Edge capture failed: {e}", True) - return None - def run_calibration(self) -> None: """ - Run the calibration routine to determine the mapping between screen pixels and world coordinates. - This will move the printer to two positions and use phase correlation to determine the transformation. - Similar to start_autofocus in automated_controller.py. + Trigger camera calibration through the controller. + This will move the printer and determine the pixel-to-world mapping. 
""" - if self._calibrating: - self.controller.status("Calibration already in progress.", True) - return - - self._calibrating = True - self.controller.status("Starting camera calibration...", True) - - # Reset calibration state - self.M_est = None - self.M_inv = None - self._dp1 = None - self._dp2 = None - - # Store the starting position BEFORE building the macro - # This ensures we know where we started from - start_pos = self.controller.get_position() - self._cal_start_x_mm = start_pos.x / 100.0 - self._cal_start_y_mm = start_pos.y / 100.0 - - # Build calibration macro using ABSOLUTE positioning - steps = [] - - # Step 1: Capture base image - steps.append(self.controller.create_cmd( - kind="CALIBRATION_BASE", - value="", - message="Capturing base calibration image...", - log=True - )) - - # Step 2: Move to X+ offset (absolute positioning) - move_x_mm = self._cal_move_x / 100.0 - target_x_mm = self._cal_start_x_mm + move_x_mm - steps.append(self.controller.printer_cmd( - f"G0 X{target_x_mm:.2f}", - message=f"Moving +X {move_x_mm:.2f}mm for calibration...", - log=True - )) - - # Step 3: Capture moved image 1 - steps.append(self.controller.create_cmd( - kind="CALIBRATION_MOVE1", - value="", - message="Capturing X-moved calibration image...", - log=True - )) - - # Step 4: Return to base X - steps.append(self.controller.printer_cmd( - f"G0 X{self._cal_start_x_mm:.2f}", - message="Returning to base X position...", - log=True - )) - - # Step 5: Move to Y+ offset (absolute positioning) - move_y_mm = self._cal_move_y / 100.0 - target_y_mm = self._cal_start_y_mm + move_y_mm - steps.append(self.controller.printer_cmd( - f"G0 Y{target_y_mm:.2f}", - message=f"Moving +Y {move_y_mm:.2f}mm for calibration...", - log=True - )) - - # Step 6: Capture moved image 2 - steps.append(self.controller.create_cmd( - kind="CALIBRATION_MOVE2", - value="", - message="Capturing Y-moved calibration image...", - log=True - )) - - # Step 7: Return to base Y - 
steps.append(self.controller.printer_cmd( - f"G0 Y{self._cal_start_y_mm:.2f}", - message="Returning to base position...", - log=True - )) - - # Step 8: Finish calibration - steps.append(self.controller.create_cmd( - kind="CALIBRATION_FINISH", - value="", - message="Computing calibration matrix...", - log=True - )) - - # Register handlers for calibration commands - self.controller.register_handler("CALIBRATION_BASE", self._handle_calibration_base) - self.controller.register_handler("CALIBRATION_MOVE1", self._handle_calibration_move1) - self.controller.register_handler("CALIBRATION_MOVE2", self._handle_calibration_move2) - self.controller.register_handler("CALIBRATION_FINISH", self._handle_calibration_finish) - - # Create and enqueue macro - macro = self.controller.macro_cmd( - steps, - wait_printer=True, - message="Camera calibration routine", - log=True - ) - self.controller.enqueue_cmd(macro) - - def _handle_calibration_base(self, cmd) -> None: - """Handler: Capture base image.""" - time.sleep(0.5) # Allow printer to fully settle (increased for Y-axis stability) - self._cal_base_pos = self.controller.get_position() - self._cal_edges_base = self._capture_and_process_edges() - if self._cal_edges_base is None: - self.controller.status("Failed to capture base calibration image.", True) - self._calibrating = False - - def _handle_calibration_move1(self, cmd) -> None: - """Handler: Capture first moved image and compute shift.""" - time.sleep(0.5) # Allow printer to fully settle (increased for Y-axis stability) - edges = self._capture_and_process_edges() - if edges is None or self._cal_edges_base is None: - self.controller.status("Failed to capture first calibration image.", True) - self._calibrating = False - return - - dx, dy, response = self._phase_corr_shift(self._cal_edges_base, edges) - self._dp1 = np.array([dx, dy], dtype=np.float64) - self._response1 = response - self.controller.status(f"X-move shift: dx={dx:.2f}, dy={dy:.2f}, response={response:.3f}", True) - - 
# Warn if correlation confidence is low - if response < 0.3: - self.controller.status("WARNING: Low phase correlation confidence for X-move. Calibration may be inaccurate.", True) - - def _handle_calibration_move2(self, cmd) -> None: - """Handler: Capture second moved image and compute shift.""" - time.sleep(0.5) # Allow printer to fully settle (increased for Y-axis stability) - edges = self._capture_and_process_edges() - if edges is None or self._cal_edges_base is None: - self.controller.status("Failed to capture second calibration image.", True) - self._calibrating = False - return - - dx, dy, response = self._phase_corr_shift(self._cal_edges_base, edges) - self._dp2 = np.array([dx, dy], dtype=np.float64) - self._response2 = response - self.controller.status(f"Y-move shift: dx={dx:.2f}, dy={dy:.2f}, response={response:.3f}", True) - - # Warn if correlation confidence is low - if response < 0.3: - self.controller.status("WARNING: Low phase correlation confidence for Y-move. Calibration may be inaccurate.", True) - - def _handle_calibration_finish(self, cmd) -> None: - """Handler: Compute final calibration matrix.""" - if self._dp1 is None or self._dp2 is None: - self.controller.status("Calibration failed: missing measurements.", True) - self._calibrating = False - return - - # World deltas (in 0.01mm units) - these are what we commanded - DW1 = np.array([self._cal_move_x, 0.0], dtype=np.float64) - DW2 = np.array([0.0, self._cal_move_y], dtype=np.float64) - - # Pixel deltas - these are what we measured - # Phase correlation: positive dx/dy means second image shifted right/down - # - # For a standard camera (not inverted): - # Stage +X → Image shifts LEFT (negative dx) - # Stage +Y → Image shifts DOWN (positive dy) - # - # But camera orientation varies! 
We need to check the SIGN of the correlation: - # - If stage +X causes image +dx → camera X follows stage (don't negate) - # - If stage +X causes image -dx → camera X opposes stage (negate) - # - Same logic for Y - # - # From your calibration: - # X-move: dx=+452.95 → camera X follows stage → use as-is - # Y-move: dy=-341.63 → camera Y opposes stage → negate - - # Detect orientation by checking correlation signs - x_inverted = (self._dp1[0] < 0) # True if camera X opposes stage X - y_inverted = (self._dp2[1] < 0) # True if camera Y opposes stage Y - - dp1_corrected = self._dp1.copy() - dp2_corrected = self._dp2.copy() - - if x_inverted: - dp1_corrected[0] = -dp1_corrected[0] - if y_inverted: - dp2_corrected[1] = -dp2_corrected[1] - - # Debug: show raw and corrected deltas - self.controller.status( - f"Raw pixel deltas: X-move=[{self._dp1[0]:.2f}, {self._dp1[1]:.2f}], " - f"Y-move=[{self._dp2[0]:.2f}, {self._dp2[1]:.2f}]", - True - ) - self.controller.status( - f"Corrected deltas: X-move=[{dp1_corrected[0]:.2f}, {dp1_corrected[1]:.2f}], " - f"Y-move=[{dp2_corrected[0]:.2f}, {dp2_corrected[1]:.2f}]", - True - ) - - # Report detected orientation - orient_msg = f"Camera orientation: X={'inverted' if x_inverted else 'normal'}, Y={'inverted' if y_inverted else 'normal'}" - self.controller.status(orient_msg, True) - - # Build matrices: columns are the basis vectors - # DP: pixel space basis (each column is pixel response to a world move) - # DW: world space basis (each column is a world delta) - DP = np.column_stack([dp1_corrected, dp2_corrected]) # 2x2 - DW = np.column_stack([DW1, DW2]) # 2x2 - - # Check if DW is invertible - det_dw = np.linalg.det(DW) - if abs(det_dw) < 1e-9: - self.controller.status("Calibration failed: DW matrix singular.", True) - self._calibrating = False - return - - # The relationship is: dp = M @ dw - # Therefore: M = DP @ inv(DW) - # This maps world deltas (in 0.01mm units) to pixel deltas - self.M_est = DP @ np.linalg.inv(DW) - - # Check if M 
is invertible - detM = np.linalg.det(self.M_est) - if abs(detM) < 1e-9: - self.controller.status("Calibration failed: M not invertible.", True) - self.M_inv = None - self._calibrating = False + if not hasattr(self.controller, 'start_camera_calibration'): + self.controller.status( + "Controller does not support camera calibration", + True + ) return - # Invert to get world = M_inv @ pixel - self.M_inv = np.linalg.inv(self.M_est) - - # Debug: show M_inv for understanding click-to-world mapping - self.controller.status( - f"M_inv (pixel→world) = [[{self.M_inv[0,0]:.6f}, {self.M_inv[0,1]:.6f}], " - f"[{self.M_inv[1,0]:.6f}, {self.M_inv[1,1]:.6f}]]", - True - ) - - # Store the calibration reference position (where camera center corresponds to) - # This is the position at the END of calibration (after returning to base) - self._cal_ref_pos = self.controller.get_position() - - # Calculate DPI for full resolution (2592x1944) - # Extract pixels per 0.01mm unit from M_est - # M_est[0,0] is dx_pixels per 1.00 unit of world X (which is 0.01mm) - # M_est[1,1] is dy_pixels per 1.00 unit of world Y (which is 0.01mm) - px_per_0p01mm_x = abs(self.M_est[0, 0]) - px_per_0p01mm_y = abs(self.M_est[1, 1]) - - # Convert to pixels per mm - px_per_mm_x = px_per_0p01mm_x * 100.0 - px_per_mm_y = px_per_0p01mm_y * 100.0 - px_per_mm_avg = (px_per_mm_x + px_per_mm_y) / 2 - - # DPI = pixels per mm * mm per inch (25.4) - dpi = px_per_mm_avg * 25.4 - - # Log results - self.controller.status( - f"Calibration complete! 
M = [[{self.M_est[0,0]:.3f}, {self.M_est[0,1]:.3f}], " - f"[{self.M_est[1,0]:.3f}, {self.M_est[1,1]:.3f}]]", - True - ) - self.controller.status( - f"Estimated full-res DPI: {dpi:.1f} (avg {px_per_mm_avg:.2f} px/mm, " - f"X: {px_per_mm_x:.2f} px/mm, Y: {px_per_mm_y:.2f} px/mm)", - True - ) - - self._calibrating = False - - # ==================== Click-to-Move ==================== - - def go_to_calibration_pattern(self) -> None: - """ - Move the printer to the overlay calibration pattern position. - First moves Z up to 33.12mm, then moves to X=226.24mm, Y=187.08mm. - """ - self.controller.reset_after_stop() - - # Move Z up first (safe height) - self.controller.enqueue_printer( - "G0 Z33.12", - message="Moving to calibration height Z=33.12mm", - log=True - ) - - # Then move to XY position - self.controller.enqueue_printer( - "G0 X226.08 Y186.90", - message="Moving to calibration pattern at X=226.24mm, Y=187.08mm", - log=True - ) + self.controller.start_camera_calibration() - def _click_to_world_delta(self, screen_x: int, screen_y: int) -> Optional[Tuple[float, float]]: - """ - Convert a screen click position to world delta (in 0.01mm units). - Returns None if calibration hasn't been run yet. 
- """ - if self.M_inv is None: - return None - - # Get frame rectangle - fr = self.camera_view.get_frame_rect() - if not fr: - return None - - fx, fy, fw, fh = fr - - # Calculate center of camera frame - center_x = fx + fw / 2 - center_y = fy + fh / 2 - - # Pixel delta from center (in DISPLAY coordinates) - pixel_delta = np.array([screen_x - center_x, screen_y - center_y], dtype=np.float64) - - # CRITICAL: Scale pixel delta to match calibration image coordinates - # During calibration, we measured pixel shifts in the calibration image resolution - # But clicks are measured in the displayed frame resolution - # We need to scale clicks by (calibration_image_size / displayed_frame_size) - if hasattr(self, '_cal_image_width') and hasattr(self, '_cal_image_height'): - # Scale factor = calibration image pixels / display frame pixels - scale_x = self._cal_image_width / fw - scale_y = self._cal_image_height / fh - - pixel_delta_scaled = np.array([ - pixel_delta[0] * scale_x, - pixel_delta[1] * scale_y - ], dtype=np.float64) - - # Debug: show scaling (only once) - if not hasattr(self, '_scaling_reported'): - self._scaling_reported = True - self.controller.status( - f"Click scaling: Display {fw:.0f}x{fh:.0f} → Calibration {self._cal_image_width}x{self._cal_image_height} " - f"(scale X={scale_x:.3f}, Y={scale_y:.3f})", - True - ) - else: - pixel_delta_scaled = pixel_delta - - # Convert to world delta using inverse mapping - dw = self.M_inv @ pixel_delta_scaled - - # Store for debug output (will be printed in on_click) - self._last_pixel_delta = pixel_delta - self._last_pixel_delta_scaled = pixel_delta_scaled - self._last_world_delta = dw - - return float(dw[0]), float(dw[1]) + def is_calibrated(self) -> bool: + """Check if camera calibration is available.""" + if not hasattr(self.controller, 'M_inv'): + return False + return self.controller.M_inv is not None + # ==================== Mouse Event Handlers ==================== + def on_click(self, button=None) -> None: """Handle 
click events to move printer to clicked position.""" if not self.camera_view.camera.initialized: return - if self.M_inv is None: - self.controller.status("Cannot move: run calibration first (call run_calibration())", True) + if not self.is_calibrated(): + self.controller.status( + "Cannot move: run calibration first (call run_calibration())", + True + ) return # Get mouse position mouse_x, mouse_y = pygame.mouse.get_pos() - # Convert to world delta from the CURRENT printer position - # The calibration matrix M_inv tells us how pixel deltas map to world deltas - result = self._click_to_world_delta(mouse_x, mouse_y) - if result is None: + # Get the camera frame rectangle + fr = self.camera_view.get_frame_rect() + if not fr: return - dx_ticks, dy_ticks = result - - # CRITICAL FIX: Negate X-axis because screen X and stage X are opposite - # When you click right (positive screen X), stage should move right (positive stage X) - # But empirically, clicking right moves left, so we need to flip it - dx_ticks = -dx_ticks - - # Get CURRENT position (where the camera actually is now) - current_pos = self.controller.get_position() + fx, fy, fw, fh = fr - # Debug: show pixel delta, world delta (before X flip), and world delta (after X flip) - if hasattr(self, '_last_pixel_delta') and hasattr(self, '_last_world_delta'): - if hasattr(self, '_last_pixel_delta_scaled'): - self.controller.status( - f"Pixel Δ (display): [{self._last_pixel_delta[0]:.1f}, {self._last_pixel_delta[1]:.1f}] → " - f"Pixel Δ (scaled): [{self._last_pixel_delta_scaled[0]:.1f}, {self._last_pixel_delta_scaled[1]:.1f}] → " - f"World (raw): [{self._last_world_delta[0]:.1f}, {self._last_world_delta[1]:.1f}] → " - f"World: [{dx_ticks:.1f}, {dy_ticks:.1f}] ticks", - True - ) - else: - self.controller.status( - f"Pixel delta: [{self._last_pixel_delta[0]:.1f}, {self._last_pixel_delta[1]:.1f}] → " - f"World (raw): [{self._last_world_delta[0]:.1f}, {self._last_world_delta[1]:.1f}] → " - f"World: [{dx_ticks:.1f}, 
{dy_ticks:.1f}] ticks", - True - ) + # Check if click is within camera frame + if not (fx <= mouse_x <= fx + fw and fy <= mouse_y <= fy + fh): + return - # Calculate new position relative to CURRENT position - # The delta tells us how far from center we clicked - new_x_ticks = current_pos.x + int(round(dx_ticks)) - new_y_ticks = current_pos.y + int(round(dy_ticks)) + # Convert display coordinates to image coordinates + # The camera_view may scale/letterbox the image, so we need to account for that + rel_x = mouse_x - fx + rel_y = mouse_y - fy - # Convert to mm for G-code - new_x_mm = new_x_ticks / 100.0 - new_y_mm = new_y_ticks / 100.0 + # Get calibration image dimensions from controller + cal_status = self.controller.get_calibration_status() + img_w = cal_status.get('image_width') + img_h = cal_status.get('image_height') - # Bounds check - max_x = self.controller.get_max_x() - max_y = self.controller.get_max_y() + if img_w is None or img_h is None: + self.controller.status("Calibration image dimensions not available", True) + return - if not (0 <= new_x_mm <= max_x and 0 <= new_y_mm <= max_y): - self.controller.status( - f"Click position out of bounds: ({new_x_mm:.2f}, {new_y_mm:.2f})", - True - ) + # Calculate scaling factor (camera_view letterboxes to fit) + # The displayed image maintains aspect ratio within the frame + img_aspect = img_w / img_h if img_h > 0 else 1.0 + frame_aspect = fw / fh if fh > 0 else 1.0 + + if img_aspect > frame_aspect: + # Image is wider - letterbox top/bottom + display_w = fw + display_h = fw / img_aspect + offset_x = 0 + offset_y = (fh - display_h) / 2 + else: + # Image is taller - letterbox left/right + display_w = fh * img_aspect + display_h = fh + offset_x = (fw - display_w) / 2 + offset_y = 0 + + # Adjust for letterboxing + adj_x = rel_x - offset_x + adj_y = rel_y - offset_y + + # Check if click is in the actual image area + if not (0 <= adj_x <= display_w and 0 <= adj_y <= display_h): + self.controller.status("Click outside 
image area", False) return - # Send move command - self.controller.enqueue_printer( - f"G0 X{new_x_mm:.2f} Y{new_y_mm:.2f}", - message=f"Moving to clicked position: X={new_x_mm:.2f}, Y={new_y_mm:.2f}", - log=True - ) + # Scale to image coordinates + pixel_x = (adj_x / display_w) * img_w + pixel_y = (adj_y / display_h) * img_h + + # Use controller's vision movement (relative to current position) + self.controller.move_to_vision_point(pixel_x, pixel_y, relative=True) def on_wheel(self, dx: int, dy: int, px: int, py: int) -> bool: """ Handle mousewheel events to adjust Z-axis position when hovering over camera view. - Uses the printer's minimum step size (0.04mm) for precise control. Args: - dx: Horizontal wheel movement (unused for Z-axis) + dx: Horizontal wheel movement (unused) dy: Vertical wheel movement (positive = wheel up = Z up) px: Mouse X position py: Mouse Y position @@ -645,7 +216,6 @@ def on_wheel(self, dx: int, dy: int, px: int, py: int) -> bool: MIN_STEP_TICKS = 4 # Calculate Z change (positive dy = wheel up = move Z up) - # Each wheel tick moves by the minimum step size dz_ticks = dy * MIN_STEP_TICKS # Calculate new Z position @@ -657,31 +227,38 @@ def on_wheel(self, dx: int, dy: int, px: int, py: int) -> bool: if not (0 <= new_z_mm <= max_z): self.controller.status( f"Z position out of bounds: {new_z_mm:.2f}mm (max: {max_z}mm)", - False # Don't log to console, just status + False ) - return True # Still handled, just rejected + return True # Send move command self.controller.enqueue_printer( f"G0 Z{new_z_mm:.2f}", message=f"Z: {new_z_mm:.2f}mm", - log=False # Don't clutter the log with every wheel movement + log=False ) - return True # Event was handled + return True # ==================== Drawing ==================== + def _get_overlay(self, surface_size: tuple[int, int]) -> pygame.Surface: + """Return an RGBA overlay the size of the target surface (recreate on resize).""" + if self._overlay is None or self._overlay_size != surface_size: + 
self._overlay_size = surface_size + self._overlay = pygame.Surface(surface_size, flags=pygame.SRCALPHA) + else: + self._overlay.fill((0, 0, 0, 0)) + return self._overlay + def draw(self, surface: pygame.Surface) -> None: """Draw the crosshair overlay if visible and camera is initialized.""" if not self.visible: return - # Only draw if camera is initialized if not self.camera_view.camera.initialized: return - # Get the frame rectangle from camera view fr = self.camera_view.get_frame_rect() if not fr: return @@ -696,9 +273,9 @@ def draw(self, surface: pygame.Surface) -> None: center_x = fx + fw // 2 center_y = fy + fh // 2 - # Draw crosshair lines - # Horizontal line (left and right segments with gap in middle) - # Left segment + # Draw crosshair lines with gap in the middle + + # Horizontal line (left and right segments) pygame.draw.line( overlay, self.crosshair_color, @@ -706,7 +283,6 @@ def draw(self, surface: pygame.Surface) -> None: (center_x - self.crosshair_gap, center_y), self.crosshair_thickness ) - # Right segment pygame.draw.line( overlay, self.crosshair_color, @@ -715,8 +291,7 @@ def draw(self, surface: pygame.Surface) -> None: self.crosshair_thickness ) - # Vertical line (top and bottom segments with gap in middle) - # Top segment + # Vertical line (top and bottom segments) pygame.draw.line( overlay, self.crosshair_color, @@ -724,7 +299,6 @@ def draw(self, surface: pygame.Surface) -> None: (center_x, center_y - self.crosshair_gap), self.crosshair_thickness ) - # Bottom segment pygame.draw.line( overlay, self.crosshair_color, @@ -733,5 +307,5 @@ def draw(self, surface: pygame.Surface) -> None: self.crosshair_thickness ) - # Composite overlay onto the actual screen surface + # Composite overlay onto the screen surface surface.blit(overlay, (0, 0)) \ No newline at end of file diff --git a/UI/ui_layout.py b/UI/ui_layout.py index 1c4f4bf..c43c755 100644 --- a/UI/ui_layout.py +++ b/UI/ui_layout.py @@ -106,7 +106,7 @@ def create_control_panel( ) 
automation_settings_modal = Modal(parent=root_frame, title="Automation Settings", overlay=False, width=500, height=445) build_automation_settings_modal(automation_settings_modal, movementSystem) - _build_automation_control(automation_box, movementSystem, machine_vision_overlay, interactive_overlay, automation_settings_modal) + _build_automation_control(automation_box, movementSystem, machine_vision_overlay, automation_settings_modal) # --- Camera Settings Modal --- camera_settings_modal = Modal(parent=root_frame, title="Camera Settings", overlay=False, width=308, height=660) @@ -117,7 +117,7 @@ def create_control_panel( parent=control_frame, title="Camera Control", collapsible=True, - x=0, y=0, width=1.0, height=163, + x=0, y=0, width=1.0, height=258, width_is_percent=True ) _build_camera_control(camera_control, movementSystem, camera, interactive_overlay, camera_settings_modal) @@ -259,29 +259,8 @@ def _build_sample_box(sample_box, movementSystem, camera, current_sample_index): increment_button = Button(None, parent=sample_box, x=330, y=10, width=40, height=button_height, text="+", text_style=make_button_text_style()) - + # 2nd Row - """ - Button(movementSystem.setPosition1, 10 , 60, 150, button_height, "Set Position 1", parent=sample_box, text_style=make_button_text_style()) - - pos1_display = Text( - text=f"X: {movementSystem.automation_config.x_start/100:.2f} Y: {movementSystem.automation_config.y_start/100:.2f} Z: {movementSystem.automation_config.z_start/100:.2f}", - parent=sample_box, - x=170, y=75, - style=make_display_text_style() - ) - - # 3rd Row - Button(movementSystem.setPosition2, 10, 110, 150, button_height, "Set Position 2", parent=sample_box, text_style=make_button_text_style()) - - pos2_display = Text( - text=f"X: {movementSystem.automation_config.x_end/100:.2f} Y: {movementSystem.automation_config.y_end/100:.2f} Z: {movementSystem.automation_config.z_end/100:.2f}", - parent=sample_box, - x=170, y=125, - style=make_display_text_style() - ) - """ - 
# 4th Row def build_row(i: int, parent: Frame) -> None: on_overrides = ToggledColors( background=pygame.Color("#7ed957"), @@ -366,7 +345,6 @@ def on_set_path(): Button(lambda: movementSystem.start_autofocus(), 10, 85, 117, 40, "Autofocus", parent=camera_control, text_style=make_button_text_style()) Button(lambda: movementSystem.start_fine_autofocus(), 132, 85, 167, 40, "Fine Autofocus", parent=camera_control, text_style=make_button_text_style()) - Button(lambda: interactive_overlay.go_to_calibration_pattern(), 132+167+5, 85, 80, 40, "Cal Pat", parent=camera_control, text_style=make_button_text_style()) def open_capture_folder(): """Open the capture folder in the system's default file explorer.""" @@ -387,9 +365,15 @@ def open_capture_folder(): print("Opened Image Output Folder") Button(open_capture_folder,x=254, y=10, width=117, height=40, text="Open Path", parent=camera_control, text_style=make_button_text_style()) + #3rd Row + Button(lambda: movementSystem.go_to_calibration_pattern(), 10, 130, 117, 40, "Go to Slide", parent=camera_control, text_style=make_button_text_style()) + Button(lambda: movementSystem.start_camera_calibration(), 132, 130, 207, 40, "Calibrate Movement", parent=camera_control, text_style=make_button_text_style()) + #4th row + Button(lambda: movementSystem.start_autofocus(), 10, 175, 127, 40, "Sample Cal.", parent=camera_control, text_style=make_button_text_style()) + Button(lambda: movementSystem.start_autofocus(), 142, 175, 167, 40, "Sample Settings", parent=camera_control, text_style=make_button_text_style()) -def _build_automation_control(automation_box, movementSystem, machine_vision_overlay, interactive_overlay, automation_settings_modal): +def _build_automation_control(automation_box, movementSystem, machine_vision_overlay, automation_settings_modal): settings = Button(lambda: automation_settings_modal.open(), x=0, y=0, width=automation_box.header.height, @@ -430,7 +414,4 @@ def toggle_overlay(): count = 
machine_vision_overlay.build_hot_pixel_map(include_soft=True) print(f"Marked {count} hot tiles invalid") - Button(toggle_overlay,x=132, y=60, width=212, height=40, text="MV Hot Pixel Filter", parent=automation_box, text_style=make_button_text_style()) - - - Button(interactive_overlay.run_calibration,x=132+212+5, y=60, width=30, height=40, text="C", parent=automation_box, text_style=make_button_text_style()) \ No newline at end of file + Button(toggle_overlay,x=132, y=60, width=212, height=40, text="MV Hot Pixel Filter", parent=automation_box, text_style=make_button_text_style()) \ No newline at end of file diff --git a/printer/automated_controller.py b/printer/automated_controller.py index 92a41c2..c5276d3 100644 --- a/printer/automated_controller.py +++ b/printer/automated_controller.py @@ -6,6 +6,7 @@ from .models import Position, FocusScore from .base_controller import BasePrinterController from .automation.autofocus_mixin import AutofocusMixin +from .automation.camera_calibration_mixin import CameraCalibrationMixin from image_processing.machine_vision import MachineVision from UI.list_frame import ListFrame @@ -28,7 +29,7 @@ from .automation_config import AutomationSettings, AutomationSettingsManager -class AutomatedPrinter(AutofocusMixin, BasePrinterController): +class AutomatedPrinter(CameraCalibrationMixin, AutofocusMixin, BasePrinterController): """Extended printer controller with automation capabilities""" AUTOMATION_CONFIG_SUBDIR = "" def __init__(self, forgeConfig: ForgeSettings, camera): @@ -69,6 +70,9 @@ def __init__(self, forgeConfig: ForgeSettings, camera): # Initialize autofocus handlers from mixin self._init_autofocus_handlers() + + # Initialize camera calibration handlers from mixin + self._init_camera_calibration_handlers() # Automation Routines @@ -419,17 +423,6 @@ def start_automation(self) -> None: # 4) Enqueue the macro self.enqueue_cmd(macro) - ''' - def setPosition1(self) -> None: - self.automation_config.x_start = self.position.x - 
self.automation_config.y_start = self.position.y - self.automation_config.z_start = self.position.z - - def setPosition2(self) -> None: - self.automation_config.x_end = self.position.x - self.automation_config.y_end = self.position.y - self.automation_config.z_end = self.position.z - ''' def _get_range(self, start: int, end: int, step: int) -> range: """Get appropriate range based on start and end positions""" if start < end: diff --git a/printer/automation/camera_calibration_mixin.py b/printer/automation/camera_calibration_mixin.py new file mode 100644 index 0000000..3d1c0ad --- /dev/null +++ b/printer/automation/camera_calibration_mixin.py @@ -0,0 +1,589 @@ +""" +Camera calibration and vision-guided movement for automated 3D printer control. + +This module contains camera calibration and vision-based positioning methods +that can be mixed into the main AutomatedPrinter controller class. +""" + +import time +import numpy as np +import cv2 +from typing import Optional, Tuple + +from printer.base_controller import command +from printer.models import Position + + +class CameraCalibrationMixin: + """ + Mixin class containing camera calibration and vision-guided movement functionality. + + This class assumes it will be mixed into a controller that has: + - self.camera (camera instance with capture_image, get_last_frame, is_taking_image) + - self._exec_gcode(gcode, wait=False) method + - self.status(message, log=True) method + - self.pause_point() method that returns True if stopped + - self.register_handler(kind, function) method + - self.get_position() -> Position method + - self.get_max_x/y/z() methods + - self.enqueue_cmd(command) method + """ + + def _init_camera_calibration_handlers(self): + """Register camera calibration command handlers. 
Call this from __init__.""" + self.register_handler("CAMERA_CALIBRATE", self._handle_camera_calibrate) + self.register_handler("MOVE_TO_VISION_POINT", self._handle_move_to_vision_point) + + # Initialize calibration state + self.M_est = None # 2x2 estimated mapping matrix (pixels = M * world_delta) + self.M_inv = None # Inverse mapping (world_delta = M_inv * pixel_delta) + self._cal_ref_pos = None # Position where calibration was performed + self._cal_image_width = None # Image width used during calibration + self._cal_image_height = None # Image height used during calibration + + # Calibration parameters (can be overridden) + self._cal_move_x_ticks = 100 # 1.00mm in 0.01mm units + self._cal_move_y_ticks = 100 # 1.00mm in 0.01mm units + + # Try to load saved calibration + self._load_camera_calibration() + + # ======================================================================== + # Save/Load calibration methods + # ======================================================================== + + def _save_camera_calibration(self) -> None: + """Save the current calibration matrix to printer config.""" + if self.M_est is None or self.M_inv is None: + return + + calibration_data = { + 'M_est': self.M_est.tolist(), + 'M_inv': self.M_inv.tolist(), + 'ref_pos_x': int(self._cal_ref_pos.x) if self._cal_ref_pos else None, + 'ref_pos_y': int(self._cal_ref_pos.y) if self._cal_ref_pos else None, + 'ref_pos_z': int(self._cal_ref_pos.z) if self._cal_ref_pos else None, + 'image_width': self._cal_image_width, + 'image_height': self._cal_image_height, + 'move_x_ticks': self._cal_move_x_ticks, + 'move_y_ticks': self._cal_move_y_ticks, + } + + # Save to printer config + self.config.camera_calibration = calibration_data + + # Persist to disk using the PrinterSettingsManager + from printer.printerConfig import PrinterSettingsManager + PrinterSettingsManager.save(self.CONFIG_SUBDIR, self.config) + + self.status("Camera calibration saved to config", True) + + def 
_load_camera_calibration(self) -> bool: + """ + Load saved calibration from printer config. + Returns True if calibration was loaded successfully. + """ + if not hasattr(self.config, 'camera_calibration'): + return False + + cal_data = self.config.camera_calibration + if not cal_data or not isinstance(cal_data, dict): + return False + + try: + # Load matrices + M_est_list = cal_data.get('M_est') + M_inv_list = cal_data.get('M_inv') + + if M_est_list is None or M_inv_list is None: + return False + + self.M_est = np.array(M_est_list, dtype=np.float64) + self.M_inv = np.array(M_inv_list, dtype=np.float64) + + # Load reference position + ref_x = cal_data.get('ref_pos_x') + ref_y = cal_data.get('ref_pos_y') + ref_z = cal_data.get('ref_pos_z') + + if ref_x is not None and ref_y is not None and ref_z is not None: + self._cal_ref_pos = Position(x=ref_x, y=ref_y, z=ref_z) + + # Load image dimensions + self._cal_image_width = cal_data.get('image_width') + self._cal_image_height = cal_data.get('image_height') + + # Load calibration parameters + self._cal_move_x_ticks = cal_data.get('move_x_ticks', 100) + self._cal_move_y_ticks = cal_data.get('move_y_ticks', 100) + + self.status("Camera calibration loaded from config", True) + return True + + except Exception as e: + self.status(f"Failed to load camera calibration: {e}", True) + self.M_est = None + self.M_inv = None + self._cal_ref_pos = None + self._cal_image_width = None + self._cal_image_height = None + return False + + def clear_camera_calibration(self) -> None: + """Clear the saved camera calibration from config and memory.""" + self.M_est = None + self.M_inv = None + self._cal_ref_pos = None + self._cal_image_width = None + self._cal_image_height = None + + # Clear from config + self.config.camera_calibration = {} + + # Persist to disk + from printer.printerConfig import PrinterSettingsManager + PrinterSettingsManager.save(self.CONFIG_SUBDIR, self.config) + + self.status("Camera calibration cleared", True) + + # 
======================================================================== + # Camera calibration helper methods + # ======================================================================== + + def _surface_to_gray_cv(self, arr: np.ndarray) -> np.ndarray: + """Convert RGB numpy array to grayscale for OpenCV.""" + if arr.ndim == 2: + return arr + gray = cv2.cvtColor(arr, cv2.COLOR_RGB2GRAY) + return gray + + def _edges_canny(self, gray_u8: np.ndarray) -> np.ndarray: + """Compute normalized Canny edges.""" + g = cv2.GaussianBlur(gray_u8, (5, 5), 0) + e = cv2.Canny(g, 60, 180) + ef = e.astype(np.float32) + ef -= ef.mean() + ef /= (ef.std() + 1e-6) + return ef + + def _phase_corr_shift( + self, + img_a_f32: np.ndarray, + img_b_f32: np.ndarray + ) -> Tuple[float, float, float]: + """Compute phase correlation shift between two images.""" + win = cv2.createHanningWindow( + (img_a_f32.shape[1], img_a_f32.shape[0]), + cv2.CV_32F + ) + (dx, dy), response = cv2.phaseCorrelate(img_a_f32, img_b_f32, win) + return float(dx), float(dy), float(response) + + def _capture_and_process_edges(self) -> Optional[np.ndarray]: + """Capture a still image and return its edge map.""" + try: + # Capture still + self.camera.capture_image() + while self.camera.is_taking_image: + time.sleep(0.01) + + # Get frame as numpy array + arr = self.camera.get_last_frame(prefer="still", wait_for_still=False) + if arr is None: + return None + + # Store the calibration image resolution + if self._cal_image_height is None: + self._cal_image_height = arr.shape[0] + self._cal_image_width = arr.shape[1] + self.status( + f"Calibration using image resolution: " + f"{self._cal_image_width}x{self._cal_image_height}", + True + ) + + # Convert to grayscale and compute edges + gray = self._surface_to_gray_cv(arr) + edges = self._edges_canny(gray) + return edges + except Exception as e: + self.status(f"Edge capture failed: {e}", True) + return None + + # 
======================================================================== + # Main calibration routine + # ======================================================================== + + def _handle_camera_calibrate(self, cmd: command) -> None: + """ + Run the calibration routine to determine the mapping between + image pixels and world coordinates using phase correlation. + """ + self.status("Starting camera calibration...", True) + + # Reset calibration state + self.M_est = None + self.M_inv = None + self._cal_image_width = None + self._cal_image_height = None + + # Allow pausing/stopping + if self.pause_point(): + self.status("Calibration cancelled.", True) + return + + # Step 1: Capture base image at current position + self.status("Capturing base image...", True) + self._exec_gcode("M400", wait=True) + time.sleep(0.3) # Settle time + + cal_base_pos = self.get_position() + edges_base = self._capture_and_process_edges() + + if edges_base is None: + self.status("Failed to capture base image.", True) + return + + if self.pause_point(): + self.status("Calibration cancelled.", True) + return + + # Step 2: Move +X and capture + dx_mm = self._cal_move_x_ticks / 100.0 + self.status(f"Moving +X by {dx_mm:.2f}mm...", True) + self._exec_gcode(f"G91", wait=True) # Relative mode + self._exec_gcode(f"G0 X{dx_mm:.2f}", wait=True) + self._exec_gcode(f"G90", wait=True) # Back to absolute + time.sleep(0.3) + + edges_x = self._capture_and_process_edges() + if edges_x is None: + self.status("Failed to capture +X image.", True) + return + + # Compute phase correlation for +X move + dpx_x, dpy_x, resp_x = self._phase_corr_shift(edges_base, edges_x) + self.status( + f"+X move: pixel shift=({dpx_x:.2f}, {dpy_x:.2f}), " + f"response={resp_x:.3f}", + True + ) + + if self.pause_point(): + self.status("Calibration cancelled.", True) + return + + # Step 3: Return to base, then move +Y and capture + self.status("Returning to base position...", True) + self._exec_gcode( + f"G0 
X{cal_base_pos.x/100:.2f} Y{cal_base_pos.y/100:.2f}", + wait=True + ) + time.sleep(0.3) + + dy_mm = self._cal_move_y_ticks / 100.0 + self.status(f"Moving +Y by {dy_mm:.2f}mm...", True) + self._exec_gcode(f"G91", wait=True) + self._exec_gcode(f"G0 Y{dy_mm:.2f}", wait=True) + self._exec_gcode(f"G90", wait=True) + time.sleep(0.3) + + edges_y = self._capture_and_process_edges() + if edges_y is None: + self.status("Failed to capture +Y image.", True) + return + + # Compute phase correlation for +Y move + dpx_y, dpy_y, resp_y = self._phase_corr_shift(edges_base, edges_y) + self.status( + f"+Y move: pixel shift=({dpx_y:.2f}, {dpy_y:.2f}), " + f"response={resp_y:.3f}", + True + ) + + # Step 4: Return to base + self.status("Returning to base position...", True) + self._exec_gcode( + f"G0 X{cal_base_pos.x/100:.2f} Y{cal_base_pos.y/100:.2f}", + wait=True + ) + + # Step 5: Build calibration matrix + # M * [dx_world, dy_world] = [dpx_pixel, dpy_pixel] + # We have two observations: + # M * [dx_ticks, 0] = [dpx_x, dpy_x] + # M * [0, dy_ticks] = [dpx_y, dpy_y] + + world_x = np.array([[self._cal_move_x_ticks], [0.0]]) + world_y = np.array([[0.0], [self._cal_move_y_ticks]]) + pixel_x = np.array([[dpx_x], [dpy_x]]) + pixel_y = np.array([[dpx_y], [dpy_y]]) + + # M = [pixel_x, pixel_y] * [world_x, world_y]^-1 + world_mat = np.hstack([world_x, world_y]) + pixel_mat = np.hstack([pixel_x, pixel_y]) + + try: + world_inv = np.linalg.inv(world_mat) + self.M_est = pixel_mat @ world_inv + self.M_inv = np.linalg.inv(self.M_est) + + # Store calibration reference position (center of image at cal_base_pos) + self._cal_ref_pos = cal_base_pos + + self.status( + f"Calibration complete. 
Matrix M_est:\n{self.M_est}\n" + f"Inverse M_inv:\n{self.M_inv}", + True + ) + + # Save calibration to config + self._save_camera_calibration() + + except np.linalg.LinAlgError: + self.status("Calibration failed: singular matrix.", True) + return + + # ======================================================================== + # Vision-guided movement + # ======================================================================== + + def _pixel_to_world_delta( + self, + pixel_x: float, + pixel_y: float, + image_center_x: Optional[float] = None, + image_center_y: Optional[float] = None + ) -> Optional[Tuple[float, float]]: + """ + Convert pixel coordinates to world coordinate delta from calibration reference. + + Args: + pixel_x: X coordinate in image pixels + pixel_y: Y coordinate in image pixels + image_center_x: Center X of image (defaults to _cal_image_width/2) + image_center_y: Center Y of image (defaults to _cal_image_height/2) + + Returns: + (dx_ticks, dy_ticks) relative to calibration reference position, + or None if calibration is not available + """ + if self.M_inv is None: + return None + + if image_center_x is None: + image_center_x = (self._cal_image_width or 0) / 2.0 + if image_center_y is None: + image_center_y = (self._cal_image_height or 0) / 2.0 + + # Pixel delta from image center + pixel_delta = np.array([[pixel_x - image_center_x], [pixel_y - image_center_y]]) + + # Convert to world delta + world_delta = self.M_inv @ pixel_delta + + # NOTE: We negate both X and Y to convert from image coordinates to stage coordinates. + # Image coords: origin at top-left, X increases right, Y increases down + # Stage coords: X and Y both increase in positive directions + # The calibration matrix M is built to map stage deltas to pixel deltas, + # so M_inv maps pixel deltas to stage deltas, but we need to flip signs + # to account for the image coordinate system. 
+ dx_ticks = -float(world_delta[0, 0]) + dy_ticks = -float(world_delta[1, 0]) + + return dx_ticks, dy_ticks + + def _handle_move_to_vision_point(self, cmd: command) -> None: + """ + Move to a position specified by vision coordinates. + + cmd.value should be a dict with: + - 'pixel_x': X coordinate in image + - 'pixel_y': Y coordinate in image + - 'relative': bool, if True move relative to current position, + if False move relative to calibration reference + """ + if self.M_inv is None: + self.status("Cannot move: calibration required first", True) + return + + try: + params = cmd.value + pixel_x = float(params['pixel_x']) + pixel_y = float(params['pixel_y']) + relative = params.get('relative', True) + except (TypeError, KeyError, ValueError) as e: + self.status(f"Invalid MOVE_TO_VISION_POINT parameters: {e}", True) + return + + # Convert pixel coords to world delta + result = self._pixel_to_world_delta(pixel_x, pixel_y) + if result is None: + self.status("Pixel-to-world conversion failed", True) + return + + dx_ticks, dy_ticks = result + + # Determine target position + if relative: + # Move relative to current position + current_pos = self.get_position() + new_x_ticks = current_pos.x + int(round(dx_ticks)) + new_y_ticks = current_pos.y + int(round(dy_ticks)) + else: + # Move relative to calibration reference + if self._cal_ref_pos is None: + self.status("No calibration reference position available", True) + return + new_x_ticks = self._cal_ref_pos.x + int(round(dx_ticks)) + new_y_ticks = self._cal_ref_pos.y + int(round(dy_ticks)) + + # Convert to mm + new_x_mm = new_x_ticks / 100.0 + new_y_mm = new_y_ticks / 100.0 + + # Bounds check + max_x = self.get_max_x() + max_y = self.get_max_y() + + if not (0 <= new_x_mm <= max_x and 0 <= new_y_mm <= max_y): + self.status( + f"Vision target out of bounds: ({new_x_mm:.2f}, {new_y_mm:.2f})", + True + ) + return + + # Execute move + self._exec_gcode( + f"G0 X{new_x_mm:.2f} Y{new_y_mm:.2f}", + wait=True, + message=f"Moving to 
vision point: X={new_x_mm:.2f}, Y={new_y_mm:.2f}", + log=True + ) + + # ======================================================================== + # Public convenience methods + # ======================================================================== + + def set_calibration_moves(self, x_ticks: int, y_ticks: int) -> None: + """ + Set the calibration move distances in ticks (0.01mm units). + + Args: + x_ticks: Distance to move in X during calibration + y_ticks: Distance to move in Y during calibration + """ + self._cal_move_x_ticks = x_ticks + self._cal_move_y_ticks = y_ticks + + def start_camera_calibration(self) -> None: + """Enqueue a camera calibration command.""" + self.reset_after_stop() + self.enqueue_cmd(command( + kind="CAMERA_CALIBRATE", + value="", + message="Starting camera calibration", + log=True + )) + + def go_to_calibration_pattern(self, position: Optional[Position] = None) -> None: + """ + Move to a known calibration pattern position. + + This is useful for setting up before running calibration, allowing you to + position the camera over a calibration target (e.g., a grid or known feature). + + Args: + position: Position to move to (in 0.01mm ticks). + If None, looks for 'calibration_pattern_position' in printer config. 
+ """ + if position is None: + # Try to load from printer config (same pattern as get_sample_position) + if hasattr(self.config, 'calibration_pattern_position'): + try: + entry = self.config.calibration_pattern_position + x_mm = float(entry["x"]) + y_mm = float(entry["y"]) + z_mm = float(entry["z"]) + position = Position( + x=int(x_mm * 100), + y=int(y_mm * 100), + z=int(z_mm * 100), + ) + except (KeyError, ValueError, TypeError) as e: + self.status( + f"Invalid calibration_pattern_position in printer config: {e}", + True + ) + return + else: + self.status( + "No calibration pattern position provided or configured in printer config", + True + ) + return + + # Move to the position + x_mm = position.x / 100.0 + y_mm = position.y / 100.0 + z_mm = position.z / 100.0 + + self.enqueue_printer( + f"G0 X{x_mm:.2f} Y{y_mm:.2f} Z{z_mm:.2f}", + message=f"Moving to calibration pattern at X={x_mm:.2f}, Y={y_mm:.2f}, Z={z_mm:.2f}", + log=True + ) + + def move_to_vision_point( + self, + pixel_x: float, + pixel_y: float, + relative: bool = True + ) -> None: + """ + Move to a point identified by vision coordinates. + + Args: + pixel_x: X coordinate in image pixels + pixel_y: Y coordinate in image pixels + relative: If True, move relative to current position; + if False, move relative to calibration reference + """ + self.enqueue_cmd(command( + kind="MOVE_TO_VISION_POINT", + value={ + 'pixel_x': pixel_x, + 'pixel_y': pixel_y, + 'relative': relative + }, + message=f"Moving to vision point ({pixel_x:.1f}, {pixel_y:.1f})", + log=True + )) + + def get_calibration_status(self) -> dict: + """ + Get current calibration status. 
+ + Returns: + Dict with calibration state information + """ + return { + 'calibrated': self.M_inv is not None, + 'image_width': self._cal_image_width, + 'image_height': self._cal_image_height, + 'reference_position': self._cal_ref_pos, + 'matrix_M': self.M_est.tolist() if self.M_est is not None else None, + 'matrix_M_inv': self.M_inv.tolist() if self.M_inv is not None else None, + } + + def is_calibrated(self) -> bool: + """ + Check if camera is calibrated and ready for vision-guided movement. + + Returns: + True if calibration matrices are loaded and valid + """ + return (self.M_est is not None and + self.M_inv is not None and + self._cal_ref_pos is not None) + \ No newline at end of file diff --git a/printer/printerConfig.py b/printer/printerConfig.py index 12fcd2a..5bd1479 100644 --- a/printer/printerConfig.py +++ b/printer/printerConfig.py @@ -14,6 +14,10 @@ class PrinterSettings(): max_z: int = 6000 # Maximum Z dimension in steps step_size: int = 4 # minimum distance that can be moved in 0.01mm sample_positions: dict[int, dict[str, float]] = field(default_factory=dict) + calibration_pattern_position: dict[str, float] = field(default_factory=dict) # X, Y, Z in mm + + # Camera calibration data + camera_calibration: dict[str, any] = field(default_factory=dict) # Stores M_est, M_inv, reference position, etc. 
"""
Sample Settings Modal - Manual configuration of sample positions for the printer.
"""

import pygame
from typing import List, Tuple, Callable

from UI.frame import Frame
from UI.modal import Modal
from UI.text import Text, TextStyle
from UI.input.button import Button, ButtonColors
from UI.input.text_field import TextField
from UI.input.scroll_frame import ScrollFrame
from UI.list_frame import ListFrame
from UI.styles import make_button_text_style, make_display_text_style

from printer.automated_controller import AutomatedPrinter


def build_sample_settings_modal(modal: Modal, controller: AutomatedPrinter) -> None:
    """Populate *modal* with controls for editing the printer's sample positions.

    Lays out, top to bottom: a shared camera height, a shared Y start offset,
    a scrollable per-sample X offset list, and Save/Reset buttons. Values are
    loaded from ``controller.config`` on build and written back on Save.

    Args:
        modal: The Modal frame to populate
        controller: The printer controller instance
    """
    content = modal.body
    cursor_y = 10

    # --- Camera height -------------------------------------------------
    Text(
        text="Camera Height (mm):",
        parent=content,
        x=10, y=cursor_y,
        style=make_display_text_style(16)
    )
    camera_height_field = TextField(
        parent=content,
        x=10, y=cursor_y + 25,
        width=200, height=30,
        placeholder="0.00",
        border_color=pygame.Color("#b3b4b6"),
        text_color=pygame.Color("#5a5a5a")
    )

    def set_camera_height_from_position():
        # Copy the stage's current Z (ticks -> mm) into the field.
        camera_height_field.set_text(f"{controller.position.z / 100.0:.2f}")

    Button(
        set_camera_height_from_position,
        x=220, y=cursor_y + 25,
        width=175, height=30,
        text="Set from Current",
        parent=content,
        text_style=make_button_text_style()
    )
    # Hint for choosing a safe camera height.
    Text(
        text="Must be above and out of focus of all samples",
        parent=content,
        x=10, y=cursor_y + 60,
        style=TextStyle(
            font_size=12,
            color=pygame.Color("#7a7a7a"),
            font_name="assets/fonts/SofiaSans-Regular.ttf"
        )
    )
    cursor_y += 90

    # --- Y start offset ------------------------------------------------
    Text(
        text="Y Start Offset (mm):",
        parent=content,
        x=10, y=cursor_y,
        style=make_display_text_style(16)
    )
    y_start_field = TextField(
        parent=content,
        x=10, y=cursor_y + 25,
        width=200, height=30,
        placeholder="0.00",
        border_color=pygame.Color("#b3b4b6"),
        text_color=pygame.Color("#5a5a5a")
    )

    def set_y_start_from_position():
        # Copy the stage's current Y (ticks -> mm) into the field.
        y_start_field.set_text(f"{controller.position.y / 100.0:.2f}")

    Button(
        set_y_start_from_position,
        x=220, y=cursor_y + 25,
        width=175, height=30,
        text="Set from Current",
        parent=content,
        text_style=make_button_text_style()
    )
    cursor_y += 70

    # --- Per-sample X offsets ------------------------------------------
    Text(
        text="Sample X Offsets:",
        parent=content,
        x=10, y=cursor_y,
        style=make_display_text_style(16)
    )
    cursor_y += 30

    scroll_height = 340
    scroll_frame = ScrollFrame(
        parent=content,
        x=10, y=cursor_y,
        width=445, height=scroll_height,
        background_color=pygame.Color("#f5f5f5"),
        scrollbar_width=12
    )

    # (config_index, field) pairs; config keys are 1-based.
    sample_fields: List[Tuple[int, TextField]] = []

    def build_sample_row(i: int, parent: Frame) -> None:
        # One row per sample: label, editable X offset, "set from current" button.
        sample_index = i + 1  # 1-based for both the label and the config key

        Text(
            text=f"Sample {sample_index}:",
            parent=parent,
            x=5, y=15,
            style=make_display_text_style(14)
        )

        x_field = TextField(
            parent=parent,
            x=100, y=5,
            width=150, height=30,
            placeholder="0.00",
            border_color=pygame.Color("#b3b4b6"),
            text_color=pygame.Color("#5a5a5a")
        )
        sample_fields.append((sample_index, x_field))

        def set_x_from_current():
            # Only X is taken from the stage; Y/Z are shared fields above.
            x_field.set_text(f"{controller.position.x / 100.0:.2f}")

        Button(
            set_x_from_current,
            x=260, y=5,
            width=160, height=30,
            text="Set X from Current",
            parent=parent,
            text_style=TextStyle(font_size=14, color=pygame.Color("#5a5a5a"))
        )

    num_samples = controller.get_num_slots()
    ListFrame(
        parent=scroll_frame,
        x=0, y=0,
        width=1.0, height=num_samples * 35,
        width_is_percent=True,
        row_height=35,
        count=num_samples,
        row_builder=build_sample_row
    )

    cursor_y += scroll_height + 10

    # --- Config <-> UI plumbing ----------------------------------------
    def load_values_from_config():
        """Copy the values held in controller.config into the UI fields."""
        try:
            if not hasattr(controller.config, 'sample_positions'):
                print("No sample_positions found in config")
                return

            positions = controller.config.sample_positions
            if not isinstance(positions, dict):
                print(f"sample_positions is not a dict: {type(controller.config.sample_positions)}")
                return

            if len(positions) == 0:
                print("sample_positions dict is empty")
                return

            # Shared camera height / Y start come from sample #1
            # (keys are 1-based integers; all samples share Y and Z).
            if 1 in positions:
                first_sample = positions[1]
                if isinstance(first_sample, dict):
                    camera_height_field.set_text(f"{first_sample.get('z', 0.0):.2f}")
                    y_start_field.set_text(f"{first_sample.get('y', 0.0):.2f}")
                else:
                    print(f"First sample is not a dict: {type(first_sample)}")
                    return

            # Per-sample X offsets.
            for sample_index, field in sample_fields:
                if sample_index in positions:
                    sample_pos = positions[sample_index]
                    if isinstance(sample_pos, dict):
                        field.set_text(f"{sample_pos.get('x', 0.0):.2f}")
                    else:
                        print(f"Sample {sample_index} is not a dict: {type(sample_pos)}")
        except Exception as e:
            import traceback
            print(f"Error loading sample settings: {e}")
            print(traceback.format_exc())

    # --- Bottom buttons -------------------------------------------------
    button_y = cursor_y + 5

    def save_settings():
        """Write the field values back into controller.config and persist them."""
        try:
            camera_z = float(camera_height_field.text or "0.0")
            y_start = float(y_start_field.text or "0.0")

            if not isinstance(controller.config.sample_positions, dict):
                controller.config.sample_positions = {}

            # Every sample shares y/z; x is per-row.
            for sample_index, field in sample_fields:
                x_offset = float(field.text or "0.0")
                if sample_index not in controller.config.sample_positions:
                    controller.config.sample_positions[sample_index] = {}
                entry = controller.config.sample_positions[sample_index]
                entry["x"] = x_offset
                entry["y"] = y_start
                entry["z"] = camera_z

            from printer.printerConfig import PrinterSettingsManager
            PrinterSettingsManager.save(controller.CONFIG_SUBDIR, controller.config)

            print("Sample settings saved successfully")
            modal.close()

        except ValueError as e:
            print(f"Error parsing values: {e}")
        except Exception as e:
            import traceback
            print(f"Error saving sample settings: {e}")
            print(traceback.format_exc())

    def reset_settings():
        """Throw away edits by reloading the config from disk."""
        try:
            from printer.printerConfig import PrinterSettingsManager
            controller.config = PrinterSettingsManager.load(controller.CONFIG_SUBDIR)
            load_values_from_config()
            print("Sample settings reloaded from config")
        except Exception as e:
            print(f"Error reloading sample settings: {e}")

    Button(
        save_settings,
        x=10, y=button_y,
        width=150, height=40,
        text="Save",
        parent=content,
        text_style=make_button_text_style()
    )
    Button(
        reset_settings,
        x=170, y=button_y,
        width=150, height=40,
        text="Reset",
        parent=content,
        text_style=make_button_text_style()
    )

    # Populate the fields as soon as the modal is built.
    load_values_from_config()
Row Button(lambda: movementSystem.go_to_calibration_pattern(), 10, 130, 117, 40, "Go to Slide", parent=camera_control, text_style=make_button_text_style()) Button(lambda: movementSystem.start_camera_calibration(), 132, 130, 207, 40, "Calibrate Movement", parent=camera_control, text_style=make_button_text_style()) + #4th row + # Create sample settings modal + sample_settings_modal = Modal( + parent=camera_control.parent.parent.parent, # Attach to root (camera_control -> FlexFrame -> Frame -> root) + title="Sample Position Settings", + overlay=False, + width=465, + height=640 + ) + build_sample_settings_modal(sample_settings_modal, movementSystem) + Button(lambda: movementSystem.start_autofocus(), 10, 175, 127, 40, "Sample Cal.", parent=camera_control, text_style=make_button_text_style()) - Button(lambda: movementSystem.start_autofocus(), 142, 175, 167, 40, "Sample Settings", parent=camera_control, text_style=make_button_text_style()) + Button(lambda: sample_settings_modal.open(), 142, 175, 167, 40, "Sample Settings", parent=camera_control, text_style=make_button_text_style()) def _build_automation_control(automation_box, movementSystem, machine_vision_overlay, automation_settings_modal): diff --git a/config/printers/Ender3/default_settings.yaml b/config/printers/Ender3/default_settings.yaml index 039dea8..e162f28 100644 --- a/config/printers/Ender3/default_settings.yaml +++ b/config/printers/Ender3/default_settings.yaml @@ -25,4 +25,8 @@ sample_positions: 17: { x: 202.04, y: 200.00, z: 29.00 } 18: { x: 213.36, y: 200.00, z: 29.00 } 19: { x: 224.88, y: 200.00, z: 29.00 } - 20: { x: 224.88, y: 200.00, z: 29.00 } \ No newline at end of file + 20: { x: 224.88, y: 200.00, z: 29.00 } +calibration_pattern_position: + x: 226.08 + y: 186.9 + z: 33.2 \ No newline at end of file From 12344b2e6f1b0528019010c00494c90fe6047962 Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Fri, 9 Jan 2026 01:52:33 -0900 Subject: [PATCH 05/46] Calibration related changes --- 
UI/modals/sample_settings_modal.py | 140 +++++- UI/overlays/red_detection_mark_overlay.py | 493 ++++++++++++++++++++++ UI/ui_layout.py | 13 +- printer/printerConfig.py | 4 + 4 files changed, 630 insertions(+), 20 deletions(-) create mode 100644 UI/overlays/red_detection_mark_overlay.py diff --git a/UI/modals/sample_settings_modal.py b/UI/modals/sample_settings_modal.py index 9b88f73..5ef3164 100644 --- a/UI/modals/sample_settings_modal.py +++ b/UI/modals/sample_settings_modal.py @@ -76,6 +76,72 @@ def set_camera_height_from_position(): y_offset += 90 + # ========== Calibration Y Position Section ========== + Text( + text="Calibration Y Position (mm):", + parent=content, + x=10, y=y_offset, + style=make_display_text_style(16) + ) + + calibration_y_field = TextField( + parent=content, + x=10, y=y_offset + 25, + width=200, height=30, + placeholder="220.00", + border_color=pygame.Color("#b3b4b6"), + text_color=pygame.Color("#5a5a5a") + ) + + def set_calibration_y_from_position(): + """Set calibration Y field to current Y position""" + y_mm = controller.position.y / 100.0 + calibration_y_field.set_text(f"{y_mm:.2f}") + + Button( + set_calibration_y_from_position, + x=220, y=y_offset + 25, + width=175, height=30, + text="Set from Current", + parent=content, + text_style=make_button_text_style() + ) + + y_offset += 70 + + # ========== Calibration Z Position Section ========== + Text( + text="Calibration Z Position (mm):", + parent=content, + x=10, y=y_offset, + style=make_display_text_style(16) + ) + + calibration_z_field = TextField( + parent=content, + x=10, y=y_offset + 25, + width=200, height=30, + placeholder="26.00", + border_color=pygame.Color("#b3b4b6"), + text_color=pygame.Color("#5a5a5a") + ) + + def set_calibration_z_from_position(): + """Set calibration Z field to current Z position""" + z_mm = controller.position.z / 100.0 + calibration_z_field.set_text(f"{z_mm:.2f}") + + Button( + set_calibration_z_from_position, + x=220, y=y_offset + 25, + width=175, 
height=30, + text="Set from Current", + parent=content, + text_style=make_button_text_style() + ) + + y_offset += 70 + # ========== Y Start Offset Section ========== Text( text="Y Start Offset (mm):", @@ -145,11 +211,40 @@ def build_sample_row(i: int, parent: Frame) -> None: style=make_display_text_style(14) ) + # Go To button (C) + def go_to_sample(): + """Move to this sample's X position using calibration Y and Z""" + try: + x_mm = float(x_field.text or "0.0") + cal_y_mm = float(calibration_y_field.text or "220.0") + cal_z_mm = float(calibration_z_field.text or "26.0") + + # Convert to ticks (0.01 mm units) + x_ticks = int(x_mm * 100) + y_ticks = int(cal_y_mm * 100) + z_ticks = int(cal_z_mm * 100) + + from printer.models import Position + target_pos = Position(x=x_ticks, y=y_ticks, z=z_ticks) + controller.move_to_position(target_pos) + + except ValueError as e: + print(f"Error parsing position values: {e}") + + Button( + go_to_sample, + x=100, y=5, + width=30, height=30, + text="C", + parent=parent, + text_style=TextStyle(font_size=14, color=pygame.Color("#5a5a5a")) + ) + # X offset field x_field = TextField( parent=parent, - x=100, y=5, - width=150, height=30, + x=135, y=5, + width=115, height=30, placeholder="0.00", border_color=pygame.Color("#b3b4b6"), text_color=pygame.Color("#5a5a5a") @@ -188,6 +283,13 @@ def set_x_from_current(): def load_values_from_config(): """Load current values from printer config""" try: + # Load calibration positions from proper fields + calibration_y = getattr(controller.config, 'calibration_y', 220.0) + calibration_z = getattr(controller.config, 'calibration_z', 26.0) + + calibration_y_field.set_text(f"{calibration_y:.2f}") + calibration_z_field.set_text(f"{calibration_z:.2f}") + # Check if sample_positions exists and is a dict if not hasattr(controller.config, 'sample_positions'): print("No sample_positions found in config") @@ -236,32 +338,38 @@ def load_values_from_config(): def save_settings(): """Save settings to printer 
config""" try: - # Parse camera height and Y start + # Parse camera height, Y start, and calibration positions camera_z = float(camera_height_field.text or "0.0") y_start = float(y_start_field.text or "0.0") + calibration_y = float(calibration_y_field.text or "220.0") + calibration_z = float(calibration_z_field.text or "26.0") - # Ensure sample_positions is a dict - if not isinstance(controller.config.sample_positions, dict): - controller.config.sample_positions = {} + # Save calibration positions to proper fields + controller.config.calibration_y = calibration_y + controller.config.calibration_z = calibration_z + + # Create a fresh sample_positions dict to prevent accumulation of extra entries + new_sample_positions = {} - # Update each sample position + # Update each sample position from the UI fields for sample_index, field in sample_fields: x_offset = float(field.text or "0.0") - # Ensure the sample position entry exists - if sample_index not in controller.config.sample_positions: - controller.config.sample_positions[sample_index] = {} - - # Update the sample position - controller.config.sample_positions[sample_index]["x"] = x_offset - controller.config.sample_positions[sample_index]["y"] = y_start - controller.config.sample_positions[sample_index]["z"] = camera_z + # Create the sample position entry + new_sample_positions[sample_index] = { + "x": x_offset, + "y": y_start, + "z": camera_z + } + + # Replace the entire sample_positions dict with our clean version + controller.config.sample_positions = new_sample_positions # Save the config from printer.printerConfig import PrinterSettingsManager PrinterSettingsManager.save(controller.CONFIG_SUBDIR, controller.config) - print("Sample settings saved successfully") + print(f"Sample settings saved successfully ({len(new_sample_positions)} sample positions)") modal.close() except ValueError as e: diff --git a/UI/overlays/red_detection_mark_overlay.py b/UI/overlays/red_detection_mark_overlay.py new file mode 100644 
"""
Red mark detection overlay for camera view.

This overlay detects red registration marks in real-time from the camera feed
and displays the detection results with center line and distance from center.
"""

import pygame
import numpy as np
import cv2
from typing import Optional, List, Tuple

from UI.frame import Frame
from UI.camera_view import CameraView
from UI.text import Text, TextStyle


class RedMarkDetectionOverlay(Frame):
    """
    Real-time red mark detection overlay that shows detected registration marks
    and displays distance from image center.
    """
    def __init__(
        self,
        camera_view: CameraView,
        visible: bool = False,

        # Detection parameters
        min_area: int = 50,
        max_area: int = 10000,
        max_aspect_ratio: float = 3.0,
        red_threshold_percentile: int = 75,

        # Stabilization / smoothing parameters
        smoothing_alpha: float = 0.25,
        deadband_px: float = 2.0,
        max_step_px_per_frame: float = 30.0,

        # Orientation switching: if marks are clustered on one side, draw a horizontal line
        side_cluster_fraction: float = 0.85,
        side_cluster_margin: float = 0.18,

        # Visual parameters. FIX: these were mutable pygame.Color default
        # arguments shared across every instance; None sentinels now stand in
        # for the same defaults, constructed per instance below (matching the
        # Optional-color convention used by InteractiveCameraOverlay).
        valid_mark_color: Optional[pygame.Color] = None,
        filtered_mark_color: Optional[pygame.Color] = None,
        center_line_color: Optional[pygame.Color] = None,
        text_color: Optional[pygame.Color] = None,
        text_bg_color: Optional[pygame.Color] = None,
    ):
        super().__init__(
            parent=camera_view,
            x=0, y=0,
            width=1, height=1,
            x_is_percent=True, y_is_percent=True,
            width_is_percent=True, height_is_percent=True,
            z_index=camera_view.z_index + 2,
            background_color=None
        )

        self.camera_view = camera_view
        self.visible = visible
        # Purely visual overlay: let clicks fall through to the camera view.
        self.mouse_passthrough = True

        # Detection parameters
        self.min_area = min_area
        self.max_area = max_area
        self.max_aspect_ratio = max_aspect_ratio
        self.red_threshold_percentile = red_threshold_percentile

        # Stabilization parameters
        self.smoothing_alpha = float(smoothing_alpha)
        self.deadband_px = float(deadband_px)
        self.max_step_px_per_frame = float(max_step_px_per_frame)

        # Orientation switching parameters
        self.side_cluster_fraction = float(side_cluster_fraction)
        self.side_cluster_margin = float(side_cluster_margin)

        # Visual parameters (per-instance Color objects)
        self.valid_mark_color = valid_mark_color if valid_mark_color is not None else pygame.Color(0, 255, 0)            # Green
        self.filtered_mark_color = filtered_mark_color if filtered_mark_color is not None else pygame.Color(255, 0, 0)   # Red
        self.center_line_color = center_line_color if center_line_color is not None else pygame.Color(255, 255, 0)       # Yellow
        self.text_color = text_color if text_color is not None else pygame.Color(255, 255, 255)                          # White
        self.text_bg_color = text_bg_color if text_bg_color is not None else pygame.Color(0, 0, 0, 180)                  # Semi-transparent black

        # Detection results (updated each frame)
        self.valid_centers: List[Tuple[float, float]] = []
        self.filtered_centers: List[Tuple[float, float]] = []
        # Raw per-frame measurements
        self.mean_x_raw: Optional[float] = None
        self.mean_y_raw: Optional[float] = None

        # Stabilized (displayed) measurements
        self.mean_x: Optional[float] = None
        self.mean_y: Optional[float] = None
        self.distance_from_center: Optional[float] = None
        self.image_width: Optional[int] = None
        self.image_height: Optional[int] = None

        # Jump threshold: if raw value is this many pixels away, jump instantly
        self.jump_threshold_px: float = 50.0

        # Cached overlay surface
        self._overlay = None
        self._overlay_size = None

        # Filter state
        self._smoothed_mean_x: Optional[float] = None
        self._smoothed_mean_y: Optional[float] = None

        # Line orientation: 'vertical' (default) or 'horizontal'
        self._line_orientation: str = 'vertical'

        # Hysteresis counter to prevent rapid flipping
        self._cluster_frames: int = 0

    # ==================== Public API ====================

    def toggle(self) -> None:
        """Toggle visibility of the detection overlay."""
        self.visible = not self.visible

    def set_visible(self, value: bool) -> None:
        """Set visibility of the detection overlay."""
        self.visible = bool(value)
def is_visible(self) -> bool: + """Check if overlay is currently visible.""" + return self.visible + + def set_jump_threshold(self, threshold_px: float) -> None: + """ + Set the jump threshold in pixels. + When the raw measurement differs from the smoothed value by more than this + threshold, the smoothed value will jump directly to the raw value instead + of gradually moving towards it. + + Args: + threshold_px: Jump threshold in pixels (default: 50.0) + """ + self.jump_threshold_px = float(threshold_px) + + # ==================== Detection ==================== + + def _detect_red_marks(self, img_rgb: np.ndarray) -> Tuple[List[Tuple[float, float]], List[Tuple[float, float]]]: + """ + Detect red registration marks in an RGB image. + + Args: + img_rgb: RGB image as numpy array (H, W, 3) + + Returns: + Tuple of (valid_centers, filtered_centers) + """ + # Extract channels + r = img_rgb[:, :, 0].astype(float) + g = img_rgb[:, :, 1].astype(float) + b = img_rgb[:, :, 2].astype(float) + + # Enhanced red isolation: R - max(G, B) to handle reflections better + red_isolated = r - np.maximum(g, b) + red_isolated = np.clip(red_isolated, 0, 255) + + # Normalize to 0-255 range + if red_isolated.max() > 0: + red_isolated = (red_isolated / red_isolated.max() * 255).astype(np.uint8) + else: + red_isolated = red_isolated.astype(np.uint8) + + # Apply morphological operations to clean up noise + kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3)) + red_isolated = cv2.morphologyEx(red_isolated, cv2.MORPH_CLOSE, kernel) + + # Adaptive thresholding to handle varying lighting + threshold = ( + np.percentile(red_isolated[red_isolated > 0], self.red_threshold_percentile) + if np.any(red_isolated > 0) + else 50 + ) + binary = (red_isolated > threshold).astype(np.uint8) * 255 + + # Find connected components + num_labels, labels, stats, centroids = cv2.connectedComponentsWithStats(binary, connectivity=8) + + # Get image dimensions + img_height = img_rgb.shape[0] + lower_half_y = 
img_height / 2 + + # Filter components - restrict to lower half of image + centers = [] + filtered_centers = [] + + for i in range(1, num_labels): # Skip background (label 0) + area = stats[i, cv2.CC_STAT_AREA] + if self.min_area < area < self.max_area: + x, y, w, h = ( + stats[i, cv2.CC_STAT_LEFT], + stats[i, cv2.CC_STAT_TOP], + stats[i, cv2.CC_STAT_WIDTH], + stats[i, cv2.CC_STAT_HEIGHT] + ) + aspect_ratio = max(w, h) / min(w, h) if min(w, h) > 0 else float('inf') + + if aspect_ratio < self.max_aspect_ratio: + center = (float(centroids[i][0]), float(centroids[i][1])) + # Check if in lower half + if center[1] >= lower_half_y: + centers.append(center) + else: + filtered_centers.append(center) + + # Remove outliers using IQR method on X coordinates + valid_centers = centers.copy() + outlier_centers = [] + + if len(centers) > 3: # Need at least 4 points for meaningful outlier detection + x_coords = np.array([x for x, y in centers]) + q1 = np.percentile(x_coords, 25) + q3 = np.percentile(x_coords, 75) + iqr = q3 - q1 + lower_bound = q1 - 1.5 * iqr + upper_bound = q3 + 1.5 * iqr + + valid_centers = [] + outlier_centers = [] + for center in centers: + if lower_bound <= center[0] <= upper_bound: + valid_centers.append(center) + else: + outlier_centers.append(center) + + # Combine all filtered centers + all_filtered = filtered_centers + outlier_centers + + return valid_centers, all_filtered + + # ==================== Stabilization ==================== + + def _update_smoothed_value(self, prev: Optional[float], raw: Optional[float]) -> Optional[float]: + """Update a stabilized value from a noisy per-frame measurement. + + - If there is no raw measurement this frame, hold the previous value. + - Jump threshold: if raw is too far from prev, jump directly to it. + - Deadband: ignore tiny changes. + - Slew-rate limit: clamp maximum change per frame. + - EMA: ease toward the (clamped) target. 
+ """ + if raw is None: + return prev + if prev is None: + return float(raw) + + delta = float(raw) - float(prev) + + # If the raw value is too far away, jump directly to it + if abs(delta) > self.jump_threshold_px: + return float(raw) + + if abs(delta) <= self.deadband_px: + return prev + + if self.max_step_px_per_frame > 0: + delta = float(np.clip(delta, -self.max_step_px_per_frame, self.max_step_px_per_frame)) + + alpha = float(np.clip(self.smoothing_alpha, 0.0, 1.0)) + return float(prev + alpha * delta) + + def _clustered_on_one_side(self, centers: List[Tuple[float, float]], img_w: int) -> bool: + """Return True if most detected marks are clustered on the left or right side.""" + if not centers or img_w <= 0: + self._cluster_frames = 0 + return False + + xs = np.array([c[0] for c in centers], dtype=np.float32) + margin = float(np.clip(self.side_cluster_margin, 0.0, 0.45)) + left_edge = img_w * margin + right_edge = img_w * (1.0 - margin) + + left_frac = float(np.mean(xs <= left_edge)) + right_frac = float(np.mean(xs >= right_edge)) + frac = max(left_frac, right_frac) + + # Simple hysteresis: require a couple consecutive frames before switching to horizontal. 
+ if frac >= self.side_cluster_fraction: + self._cluster_frames += 1 + else: + # decay rather than hard reset, to avoid flicker + self._cluster_frames = max(0, self._cluster_frames - 1) + + return self._cluster_frames >= 2 + + # ==================== Drawing ==================== + + def _get_overlay(self, surface_size: Tuple[int, int]) -> pygame.Surface: + """Return an RGBA overlay the size of the target surface (recreate on resize).""" + if self._overlay is None or self._overlay_size != surface_size: + self._overlay_size = surface_size + self._overlay = pygame.Surface(surface_size, flags=pygame.SRCALPHA) + else: + self._overlay.fill((0, 0, 0, 0)) + return self._overlay + + def _draw_info_text(self, overlay: pygame.Surface, fr: Tuple[int, int, int, int]) -> None: + """Draw information text at the top of the overlay.""" + if not self.valid_centers and not self.filtered_centers: + return + + fx, fy, fw, fh = fr + + # Prepare text lines + lines = [] + lines.append(f"Valid marks: {len(self.valid_centers)}") + if self.filtered_centers: + lines.append(f"Filtered: {len(self.filtered_centers)}") + + if self.image_width is not None and self.image_height is not None: + lines.append(f"Line: {self._line_orientation}") + + if self._line_orientation == 'horizontal': + if self.mean_y is not None and self.image_height is not None and self.distance_from_center is not None: + lines.append(f"Center Y (stable): {self.mean_y:.1f} px") + if self.mean_y_raw is not None: + lines.append(f"Center Y (raw): {self.mean_y_raw:.1f} px") + + center_y = self.image_height / 2 + if center_y > 0: + percent = (self.distance_from_center / center_y) * 100 + direction = "down" if self.mean_y > center_y else "up" + lines.append(f"Distance from center: {self.distance_from_center:.1f} px ({percent:.1f}% {direction})") + else: + if self.mean_x is not None and self.image_width is not None and self.distance_from_center is not None: + lines.append(f"Center X (stable): {self.mean_x:.1f} px") + if self.mean_x_raw 
is not None: + lines.append(f"Center X (raw): {self.mean_x_raw:.1f} px") + + center_x = self.image_width / 2 + if center_x > 0: + percent = (self.distance_from_center / center_x) * 100 + direction = "right" if self.mean_x > center_x else "left" + lines.append(f"Distance from center: {self.distance_from_center:.1f} px ({percent:.1f}% {direction})") + + # Render text with background + font = pygame.font.Font(None, 24) + y_offset = fy + 10 + + for line in lines: + text_surface = font.render(line, True, self.text_color) + text_rect = text_surface.get_rect() + text_rect.topleft = (fx + 10, y_offset) + + # Draw semi-transparent background + bg_rect = text_rect.inflate(10, 4) + bg_surface = pygame.Surface((bg_rect.width, bg_rect.height), pygame.SRCALPHA) + bg_surface.fill(self.text_bg_color) + overlay.blit(bg_surface, bg_rect.topleft) + + # Draw text + overlay.blit(text_surface, text_rect) + y_offset += text_rect.height + 2 + + def draw(self, surface: pygame.Surface) -> None: + """Draw the detection overlay if visible and camera is initialized.""" + if not self.visible: + return + + if not self.camera_view.camera.initialized: + return + + fr = self.camera_view.get_frame_rect() + if not fr: + return + + fx, fy, fw, fh = fr + + # Get the current camera frame + arr = self.camera_view.camera.get_last_frame(prefer="latest", wait_for_still=False) + if arr is None: + return + + # Ensure RGB format + if arr.dtype != np.uint8: + arr = np.clip(arr, 0, 255).astype(np.uint8) + if arr.ndim == 2: + arr = np.stack([arr]*3, axis=-1) + + img_h, img_w = arr.shape[:2] + self.image_width = img_w + self.image_height = img_h + + # Detect red marks + self.valid_centers, self.filtered_centers = self._detect_red_marks(arr) + + # Raw mean measurements + if self.valid_centers: + self.mean_x_raw = float(np.mean([x for x, y in self.valid_centers])) + self.mean_y_raw = float(np.mean([y for x, y in self.valid_centers])) + else: + self.mean_x_raw = None + self.mean_y_raw = None + + # Update stabilized 
values + self._smoothed_mean_x = self._update_smoothed_value(self._smoothed_mean_x, self.mean_x_raw) + self._smoothed_mean_y = self._update_smoothed_value(self._smoothed_mean_y, self.mean_y_raw) + self.mean_x = self._smoothed_mean_x + self.mean_y = self._smoothed_mean_y + + # Decide whether we should draw a horizontal or vertical line + clustered = self._clustered_on_one_side(self.valid_centers, img_w) + self._line_orientation = 'horizontal' if clustered else 'vertical' + + # Distance from center depends on which line we're drawing + if self._line_orientation == 'horizontal': + if self.mean_y is not None: + center_y = img_h / 2.0 + self.distance_from_center = float(abs(self.mean_y - center_y)) + else: + self.distance_from_center = None + else: + if self.mean_x is not None: + center_x = img_w / 2.0 + self.distance_from_center = float(abs(self.mean_x - center_x)) + else: + self.distance_from_center = None + + # Build/resize overlay and clear it + overlay = self._get_overlay(surface.get_size()) + overlay.fill((0, 0, 0, 0)) + + # Calculate scaling factor (image coordinates to display coordinates) + scale_x = fw / img_w + scale_y = fh / img_h + + # Draw filtered out centers in red + for x, y in self.filtered_centers: + display_x = fx + x * scale_x + display_y = fy + y * scale_y + pygame.draw.circle(overlay, self.filtered_mark_color, (int(display_x), int(display_y)), 5) + + # Draw valid center dots in green + for x, y in self.valid_centers: + display_x = fx + x * scale_x + display_y = fy + y * scale_y + pygame.draw.circle(overlay, self.valid_mark_color, (int(display_x), int(display_y)), 5) + + # Draw the stabilized line + if self._line_orientation == 'horizontal': + if self.mean_y is not None: + display_mean_y = fy + self.mean_y * scale_y + pygame.draw.line( + overlay, + self.center_line_color, + (fx, int(display_mean_y)), + (fx + fw, int(display_mean_y)), + 2 + ) + + # Also draw the image center line for reference (dimmer) + center_y = img_h / 2 + display_center_y = fy 
+ center_y * scale_y + pygame.draw.line( + overlay, + (*self.center_line_color[:3], 100), + (fx, int(display_center_y)), + (fx + fw, int(display_center_y)), + 1 + ) + else: + if self.mean_x is not None: + display_mean_x = fx + self.mean_x * scale_x + pygame.draw.line( + overlay, + self.center_line_color, + (int(display_mean_x), fy), + (int(display_mean_x), fy + fh), + 2 + ) + + # Also draw the image center line for reference (dimmer) + center_x = img_w / 2 + display_center_x = fx + center_x * scale_x + pygame.draw.line( + overlay, + (*self.center_line_color[:3], 100), + (int(display_center_x), fy), + (int(display_center_x), fy + fh), + 1 + ) + + # Draw info text + self._draw_info_text(overlay, fr) + + # Composite overlay onto the screen surface + surface.blit(overlay, (0, 0)) \ No newline at end of file diff --git a/UI/ui_layout.py b/UI/ui_layout.py index 56776da..3ed874f 100644 --- a/UI/ui_layout.py +++ b/UI/ui_layout.py @@ -18,6 +18,7 @@ from UI.flex_frame import FlexFrame from UI.overlays.interactive_camera_overlay import InteractiveCameraOverlay +from UI.overlays.red_detection_mark_overlay import RedMarkDetectionOverlay from UI.input.text_field import TextField from UI.input.button import Button, ButtonShape, ButtonColors @@ -86,6 +87,10 @@ def create_control_panel( ) machine_vision_overlay = FocusOverlay(camera_view, movementSystem.machine_vision) interactive_overlay = InteractiveCameraOverlay(camera_view, movementSystem) + detection_overlay = RedMarkDetectionOverlay( + camera_view=camera_view, + visible=False + ) # --- Control Box --- control_box = Section( @@ -121,7 +126,7 @@ def create_control_panel( x=0, y=0, width=1.0, height=258, width_is_percent=True ) - _build_camera_control(camera_control, movementSystem, camera, interactive_overlay, camera_settings_modal) + _build_camera_control(camera_control, movementSystem, camera, detection_overlay, camera_settings_modal) # --- Sample Box --- sample_box = Section( @@ -304,7 +309,7 @@ def on_state_changed(state: 
bool, btn: ToggleButton): return go_to_sample_button, decrement_button, increment_button, sample_label#, pos1_display, pos2_display -def _build_camera_control(camera_control, movementSystem: AutomatedPrinter, camera, interactive_overlay, camera_settings_modal): +def _build_camera_control(camera_control, movementSystem: AutomatedPrinter, camera, detection_overlay, camera_settings_modal): # Header Settings Button settings = Button(lambda: camera_settings_modal.open(), x=0, y=0, @@ -377,11 +382,11 @@ def open_capture_folder(): title="Sample Position Settings", overlay=False, width=465, - height=640 + height=780 ) build_sample_settings_modal(sample_settings_modal, movementSystem) - Button(lambda: movementSystem.start_autofocus(), 10, 175, 127, 40, "Sample Cal.", parent=camera_control, text_style=make_button_text_style()) + Button(lambda: detection_overlay.toggle(), 10, 175, 127, 40, "Sample Cal.", parent=camera_control, text_style=make_button_text_style()) Button(lambda: sample_settings_modal.open(), 142, 175, 167, 40, "Sample Settings", parent=camera_control, text_style=make_button_text_style()) diff --git a/printer/printerConfig.py b/printer/printerConfig.py index 5bd1479..74b995d 100644 --- a/printer/printerConfig.py +++ b/printer/printerConfig.py @@ -16,6 +16,10 @@ class PrinterSettings(): sample_positions: dict[int, dict[str, float]] = field(default_factory=dict) calibration_pattern_position: dict[str, float] = field(default_factory=dict) # X, Y, Z in mm + # Sample calibration positions (for verifying X positions) + calibration_y: float = 220.0 # Y position for calibration checks (mm) + calibration_z: float = 26.0 # Z position for calibration checks (mm) + # Camera calibration data camera_calibration: dict[str, any] = field(default_factory=dict) # Stores M_est, M_inv, reference position, etc. 
From 2a78f8519498859b0317554ffbae35d1ba0dc894 Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Fri, 9 Jan 2026 01:52:46 -0900 Subject: [PATCH 06/46] Added Default Settings --- config/printers/Ender3/default_settings.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/config/printers/Ender3/default_settings.yaml b/config/printers/Ender3/default_settings.yaml index e162f28..6ef3b87 100644 --- a/config/printers/Ender3/default_settings.yaml +++ b/config/printers/Ender3/default_settings.yaml @@ -26,6 +26,8 @@ sample_positions: 18: { x: 213.36, y: 200.00, z: 29.00 } 19: { x: 224.88, y: 200.00, z: 29.00 } 20: { x: 224.88, y: 200.00, z: 29.00 } +calibration_y: 220.0 +calibration_z: 26.0 calibration_pattern_position: x: 226.08 y: 186.9 From d6f1bb55b5424fd263ceb9f2b3eb54047b5fae83 Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Fri, 9 Jan 2026 03:00:01 -0900 Subject: [PATCH 07/46] modified layout and added toggles --- UI/modals/camera_settings_modal.py | 57 +++++- UI/modals/sample_settings_modal.py | 55 +++--- UI/overlays/interactive_camera_overlay.py | 163 +++++++++++++----- UI/ui_layout.py | 2 +- .../automation/camera_calibration_mixin.py | 66 ++++++- 5 files changed, 266 insertions(+), 77 deletions(-) diff --git a/UI/modals/camera_settings_modal.py b/UI/modals/camera_settings_modal.py index 89d712e..daa63bd 100644 --- a/UI/modals/camera_settings_modal.py +++ b/UI/modals/camera_settings_modal.py @@ -458,6 +458,50 @@ def get_wbgain(): ) +def add_overlay_controls_section(modal, overlay, *, y: int, x: int = 8) -> None: + """Add toggles for crosshair and DPI display visibility.""" + if overlay is None: + return + + Text("Camera Overlay Controls", parent=modal, x=x, y=y + 8, style=make_settings_text_style()) + + btn_w, btn_h = 135, 28 + spacing = 12 + base_y = y + 28 + + # Crosshair toggle + def toggle_crosshair(): + overlay.toggle_crosshair() + crosshair_btn.set_text( + "Crosshair: ON" if overlay.crosshair_visible else "Crosshair: OFF" + ) + + crosshair_btn = Button( + 
toggle_crosshair, + x=x, y=base_y, width=btn_w, height=btn_h, + text=f"Crosshair: {'ON' if overlay.crosshair_visible else 'OFF'}", + parent=modal, + colors=BASE_BUTTON_COLORS, + text_style=RADIO_TEXT_STYLE, + ) + + # DPI display toggle + def toggle_dpi(): + overlay.toggle_dpi_display() + dpi_btn.set_text( + "DPI Display: ON" if overlay.dpi_display_visible else "DPI Display: OFF" + ) + + dpi_btn = Button( + toggle_dpi, + x=x + (btn_w + spacing), y=base_y, width=btn_w, height=btn_h, + text=f"DPI Display: {'ON' if overlay.dpi_display_visible else 'OFF'}", + parent=modal, + colors=BASE_BUTTON_COLORS, + text_style=RADIO_TEXT_STYLE, + ) + + def add_save_load_reset_section(modal, camera, *, y: int, x: int = 8) -> None: btn_w, btn_h = 88, 28 spacing = 12 @@ -516,10 +560,15 @@ def on_reset(): # Orchestrator # --------------------------------------------------------------------------- -def build_camera_settings_modal(modal, camera) -> None: +def build_camera_settings_modal(modal, camera, overlay=None) -> None: """Populate the provided Modal with camera setting controls. This applies values live via camera.update_settings(persist=False). + Args: + modal: The Modal instance to populate with controls + camera: Camera instance with settings + overlay: Optional InteractiveCameraOverlay instance for overlay controls + Layout is organized into vertically stacked sections. Each section is built by a dedicated function to keep this module modular and easy to extend or rearrange. 
@@ -560,7 +609,11 @@ def build_camera_settings_modal(modal, camera) -> None: add_level_range_high_setting(scroll_area, camera, settings, y=layout.next_y()) add_white_balance_gain_setting(scroll_area, camera, settings, y=layout.next_y()) + # Overlay controls (crosshair and DPI display) + if overlay is not None: + add_overlay_controls_section(scroll_area, overlay, y=layout.next_y()) + add_save_load_reset_section( modal, camera, y=modal.height-80 - ) + ) \ No newline at end of file diff --git a/UI/modals/sample_settings_modal.py b/UI/modals/sample_settings_modal.py index 5ef3164..af0a954 100644 --- a/UI/modals/sample_settings_modal.py +++ b/UI/modals/sample_settings_modal.py @@ -76,30 +76,30 @@ def set_camera_height_from_position(): y_offset += 90 - # ========== Calibration Y Position Section ========== + # ========== Y Start Offset Section ========== Text( - text="Calibration Y Position (mm):", + text="Y Start Offset (mm):", parent=content, x=10, y=y_offset, style=make_display_text_style(16) ) - calibration_y_field = TextField( + y_start_field = TextField( parent=content, x=10, y=y_offset + 25, width=200, height=30, - placeholder="220.00", + placeholder="0.00", border_color=pygame.Color("#b3b4b6"), text_color=pygame.Color("#5a5a5a") ) - def set_calibration_y_from_position(): - """Set calibration Y field to current Y position""" + def set_y_start_from_position(): + """Set Y start field to current Y position""" y_mm = controller.position.y / 100.0 - calibration_y_field.set_text(f"{y_mm:.2f}") + y_start_field.set_text(f"{y_mm:.2f}") Button( - set_calibration_y_from_position, + set_y_start_from_position, x=220, y=y_offset + 25, width=175, height=30, text="Set from Current", @@ -109,30 +109,30 @@ def set_calibration_y_from_position(): y_offset += 70 - # ========== Calibration Z Position Section ========== + # ========== Calibration Y Position Section ========== Text( - text="Calibration Z Position (mm):", + text="Calibration Y Position (mm):", parent=content, x=10, 
y=y_offset, style=make_display_text_style(16) ) - calibration_z_field = TextField( + calibration_y_field = TextField( parent=content, x=10, y=y_offset + 25, width=200, height=30, - placeholder="26.00", + placeholder="220.00", border_color=pygame.Color("#b3b4b6"), text_color=pygame.Color("#5a5a5a") ) - def set_calibration_z_from_position(): - """Set calibration Z field to current Z position""" - z_mm = controller.position.z / 100.0 - calibration_z_field.set_text(f"{z_mm:.2f}") + def set_calibration_y_from_position(): + """Set calibration Y field to current Y position""" + y_mm = controller.position.y / 100.0 + calibration_y_field.set_text(f"{y_mm:.2f}") Button( - set_calibration_z_from_position, + set_calibration_y_from_position, x=220, y=y_offset + 25, width=175, height=30, text="Set from Current", @@ -142,30 +142,30 @@ def set_calibration_z_from_position(): y_offset += 70 - # ========== Y Start Offset Section ========== + # ========== Calibration Z Position Section ========== Text( - text="Y Start Offset (mm):", + text="Calibration Z Position (mm):", parent=content, x=10, y=y_offset, style=make_display_text_style(16) ) - y_start_field = TextField( + calibration_z_field = TextField( parent=content, x=10, y=y_offset + 25, width=200, height=30, - placeholder="0.00", + placeholder="26.00", border_color=pygame.Color("#b3b4b6"), text_color=pygame.Color("#5a5a5a") ) - def set_y_start_from_position(): - """Set Y start field to current Y position""" - y_mm = controller.position.y / 100.0 - y_start_field.set_text(f"{y_mm:.2f}") + def set_calibration_z_from_position(): + """Set calibration Z field to current Z position""" + z_mm = controller.position.z / 100.0 + calibration_z_field.set_text(f"{z_mm:.2f}") Button( - set_y_start_from_position, + set_calibration_z_from_position, x=220, y=y_offset + 25, width=175, height=30, text="Set from Current", @@ -219,6 +219,8 @@ def go_to_sample(): cal_y_mm = float(calibration_y_field.text or "220.0") cal_z_mm = 
float(calibration_z_field.text or "26.0") + print(f"Moving to Sample {sample_num} calibration position: X={x_mm:.2f}, Y={cal_y_mm:.2f}, Z={cal_z_mm:.2f}") + # Convert to ticks (0.01 mm units) x_ticks = int(x_mm * 100) y_ticks = int(cal_y_mm * 100) @@ -255,6 +257,7 @@ def go_to_sample(): def set_x_from_current(): x_mm = controller.position.x / 100.0 x_field.set_text(f"{x_mm:.2f}") + print(f"Sample {sample_num}: Set X offset to {x_mm:.2f} mm from current position") Button( set_x_from_current, diff --git a/UI/overlays/interactive_camera_overlay.py b/UI/overlays/interactive_camera_overlay.py index 78f4127..c2b91d1 100644 --- a/UI/overlays/interactive_camera_overlay.py +++ b/UI/overlays/interactive_camera_overlay.py @@ -11,6 +11,7 @@ from UI.frame import Frame from UI.camera_view import CameraView +from UI.text import Text, TextStyle from UI.styles import CROSSHAIR_COLOR class InteractiveCameraOverlay(Frame): @@ -45,6 +46,10 @@ def __init__( self.controller = controller self.visible = visible + # Separate visibility controls + self.crosshair_visible = True + self.dpi_display_visible = True + # Enable click handling self.mouse_passthrough = False @@ -58,6 +63,24 @@ def __init__( self._overlay = None self._overlay_size = None + # DPI display text (top-right corner with 10px margin) + # Use 100% width minus margin, left aligned + self.dpi_text = Text( + text="", + parent=self, + x=0.99, y=10, # 98% across (leaves ~2% margin) + x_is_percent=True, + y_is_percent=False, + x_align="right", # Right-align the text at that position + y_align="top", + style=TextStyle( + color=pygame.Color(255, 255, 255), + font_size=16 + ) + ) + self.dpi_text.mouse_passthrough = True + self._update_dpi_text() + # ==================== Visibility Control ==================== def toggle_overlay(self) -> None: @@ -68,6 +91,38 @@ def set_visible(self, value: bool) -> None: """Set visibility of the crosshair overlay.""" self.visible = bool(value) + def toggle_crosshair(self) -> None: + """Toggle 
visibility of the crosshair only.""" + self.crosshair_visible = not self.crosshair_visible + + def set_crosshair_visible(self, value: bool) -> None: + """Set visibility of the crosshair.""" + self.crosshair_visible = bool(value) + + def show_crosshair(self) -> None: + """Show the crosshair.""" + self.crosshair_visible = True + + def hide_crosshair(self) -> None: + """Hide the crosshair.""" + self.crosshair_visible = False + + def toggle_dpi_display(self) -> None: + """Toggle visibility of the DPI display text.""" + self.dpi_display_visible = not self.dpi_display_visible + + def set_dpi_display_visible(self, value: bool) -> None: + """Set visibility of the DPI display text.""" + self.dpi_display_visible = bool(value) + + def show_dpi_display(self) -> None: + """Show the DPI display text.""" + self.dpi_display_visible = True + + def hide_dpi_display(self) -> None: + """Hide the DPI display text.""" + self.dpi_display_visible = False + def set_crosshair_color(self, color: pygame.Color) -> None: """Update the crosshair color.""" self.crosshair_color = color @@ -88,6 +143,20 @@ def set_crosshair_properties( # ==================== Calibration Helpers ==================== + def _update_dpi_text(self) -> None: + """Update the DPI display text based on current calibration status.""" + # Check calibration directly without calling is_calibrated() to avoid recursion + if not hasattr(self.controller, 'M_inv') or self.controller.M_inv is None: + self.dpi_text.set_text("") + return + + # Get DPI directly from controller + dpi = getattr(self.controller, '_cal_dpi', None) + if dpi is not None: + self.dpi_text.set_text(f"Estimated DPI: {dpi:.1f}") + else: + self.dpi_text.set_text("") + def run_calibration(self) -> None: """ Trigger camera calibration through the controller. 
@@ -101,6 +170,8 @@ def run_calibration(self) -> None: return self.controller.start_camera_calibration() + # Update DPI display after calibration + self._update_dpi_text() def is_calibrated(self) -> bool: """Check if camera calibration is available.""" @@ -265,47 +336,59 @@ def draw(self, surface: pygame.Surface) -> None: fx, fy, fw, fh = fr - # Build/resize overlay and clear it - overlay = self._get_overlay(surface.get_size()) - overlay.fill((0, 0, 0, 0)) + # Update DPI display + if self.dpi_display_visible: + self._update_dpi_text() + else: + self.dpi_text.set_text("") - # Calculate center point of the camera frame - center_x = fx + fw // 2 - center_y = fy + fh // 2 + # Only draw crosshair if crosshair_visible is True + if self.crosshair_visible: + # Build/resize overlay and clear it + overlay = self._get_overlay(surface.get_size()) + overlay.fill((0, 0, 0, 0)) - # Draw crosshair lines with gap in the middle - - # Horizontal line (left and right segments) - pygame.draw.line( - overlay, - self.crosshair_color, - (center_x - self.crosshair_gap - self.crosshair_length, center_y), - (center_x - self.crosshair_gap, center_y), - self.crosshair_thickness - ) - pygame.draw.line( - overlay, - self.crosshair_color, - (center_x + self.crosshair_gap, center_y), - (center_x + self.crosshair_gap + self.crosshair_length, center_y), - self.crosshair_thickness - ) + # Calculate center point of the camera frame + center_x = fx + fw // 2 + center_y = fy + fh // 2 - # Vertical line (top and bottom segments) - pygame.draw.line( - overlay, - self.crosshair_color, - (center_x, center_y - self.crosshair_gap - self.crosshair_length), - (center_x, center_y - self.crosshair_gap), - self.crosshair_thickness - ) - pygame.draw.line( - overlay, - self.crosshair_color, - (center_x, center_y + self.crosshair_gap), - (center_x, center_y + self.crosshair_gap + self.crosshair_length), - self.crosshair_thickness - ) + # Draw crosshair lines with gap in the middle + + # Horizontal line (left and 
right segments) + pygame.draw.line( + overlay, + self.crosshair_color, + (center_x - self.crosshair_gap - self.crosshair_length, center_y), + (center_x - self.crosshair_gap, center_y), + self.crosshair_thickness + ) + pygame.draw.line( + overlay, + self.crosshair_color, + (center_x + self.crosshair_gap, center_y), + (center_x + self.crosshair_gap + self.crosshair_length, center_y), + self.crosshair_thickness + ) + + # Vertical line (top and bottom segments) + pygame.draw.line( + overlay, + self.crosshair_color, + (center_x, center_y - self.crosshair_gap - self.crosshair_length), + (center_x, center_y - self.crosshair_gap), + self.crosshair_thickness + ) + pygame.draw.line( + overlay, + self.crosshair_color, + (center_x, center_y + self.crosshair_gap), + (center_x, center_y + self.crosshair_gap + self.crosshair_length), + self.crosshair_thickness + ) + + # Composite overlay onto the screen surface + surface.blit(overlay, (0, 0)) - # Composite overlay onto the screen surface - surface.blit(overlay, (0, 0)) \ No newline at end of file + # Draw children (including DPI text if visible) + for child in reversed(self.children): + child.draw(surface) \ No newline at end of file diff --git a/UI/ui_layout.py b/UI/ui_layout.py index 3ed874f..b9d594e 100644 --- a/UI/ui_layout.py +++ b/UI/ui_layout.py @@ -116,7 +116,7 @@ def create_control_panel( # --- Camera Settings Modal --- camera_settings_modal = Modal(parent=root_frame, title="Camera Settings", overlay=False, width=308, height=660) - build_camera_settings_modal(camera_settings_modal, camera) + build_camera_settings_modal(camera_settings_modal, camera, interactive_overlay) # --- Camera Settings --- camera_control = Section( diff --git a/printer/automation/camera_calibration_mixin.py b/printer/automation/camera_calibration_mixin.py index 3d1c0ad..f23dca6 100644 --- a/printer/automation/camera_calibration_mixin.py +++ b/printer/automation/camera_calibration_mixin.py @@ -40,6 +40,7 @@ def 
_init_camera_calibration_handlers(self): self._cal_ref_pos = None # Position where calibration was performed self._cal_image_width = None # Image width used during calibration self._cal_image_height = None # Image height used during calibration + self._cal_dpi = None # Computed DPI (dots per inch) from calibration # Calibration parameters (can be overridden) self._cal_move_x_ticks = 100 # 1.00mm in 0.01mm units @@ -67,6 +68,7 @@ def _save_camera_calibration(self) -> None: 'image_height': self._cal_image_height, 'move_x_ticks': self._cal_move_x_ticks, 'move_y_ticks': self._cal_move_y_ticks, + 'dpi': float(self._cal_dpi) if self._cal_dpi is not None else None, } # Save to printer config @@ -117,7 +119,14 @@ def _load_camera_calibration(self) -> bool: self._cal_move_x_ticks = cal_data.get('move_x_ticks', 100) self._cal_move_y_ticks = cal_data.get('move_y_ticks', 100) - self.status("Camera calibration loaded from config", True) + # Load or calculate DPI + self._cal_dpi = cal_data.get('dpi') + if self._cal_dpi is None: + # Calculate DPI if not saved + self._calculate_dpi() + + dpi_str = f" (DPI: {self._cal_dpi:.1f})" if self._cal_dpi is not None else "" + self.status(f"Camera calibration loaded from config{dpi_str}", True) return True except Exception as e: @@ -127,6 +136,7 @@ def _load_camera_calibration(self) -> bool: self._cal_ref_pos = None self._cal_image_width = None self._cal_image_height = None + self._cal_dpi = None return False def clear_camera_calibration(self) -> None: @@ -136,6 +146,7 @@ def clear_camera_calibration(self) -> None: self._cal_ref_pos = None self._cal_image_width = None self._cal_image_height = None + self._cal_dpi = None # Clear from config self.config.camera_calibration = {} @@ -179,6 +190,41 @@ def _phase_corr_shift( (dx, dy), response = cv2.phaseCorrelate(img_a_f32, img_b_f32, win) return float(dx), float(dy), float(response) + def _calculate_dpi(self) -> None: + """ + Calculate DPI (dots per inch) from the calibration matrix. 
+ DPI represents the average resolution of the camera image. + + The calculation uses the calibration matrix M_est to determine how many + pixels correspond to physical movement. We compute the average scaling + factor from both X and Y axes and convert to DPI (pixels per inch). + """ + if self.M_est is None: + self._cal_dpi = None + return + + try: + # M_est maps world deltas (in 0.01mm ticks) to pixel deltas + # M_est[0,0] and M_est[1,1] are the diagonal elements (x->px, y->py) + # Extract pixels per tick for both axes + px_per_tick_x = abs(self.M_est[0, 0]) # pixels per 0.01mm in X + px_per_tick_y = abs(self.M_est[1, 1]) # pixels per 0.01mm in Y + + # Average the two axes + px_per_tick_avg = (px_per_tick_x + px_per_tick_y) / 2.0 + + # Convert to pixels per mm (1 tick = 0.01mm) + px_per_mm = px_per_tick_avg * 100.0 + + # Convert to DPI (1 inch = 25.4 mm) + dpi = px_per_mm * 25.4 + + self._cal_dpi = dpi + + except Exception as e: + self.status(f"Failed to calculate DPI: {e}", False) + self._cal_dpi = None + def _capture_and_process_edges(self) -> Optional[np.ndarray]: """Capture a still image and return its edge map.""" try: @@ -235,7 +281,7 @@ def _handle_camera_calibrate(self, cmd: command) -> None: # Step 1: Capture base image at current position self.status("Capturing base image...", True) self._exec_gcode("M400", wait=True) - time.sleep(0.3) # Settle time + time.sleep(0.6) # Settle time cal_base_pos = self.get_position() edges_base = self._capture_and_process_edges() @@ -254,7 +300,7 @@ def _handle_camera_calibrate(self, cmd: command) -> None: self._exec_gcode(f"G91", wait=True) # Relative mode self._exec_gcode(f"G0 X{dx_mm:.2f}", wait=True) self._exec_gcode(f"G90", wait=True) # Back to absolute - time.sleep(0.3) + time.sleep(0.6) edges_x = self._capture_and_process_edges() if edges_x is None: @@ -279,14 +325,14 @@ def _handle_camera_calibrate(self, cmd: command) -> None: f"G0 X{cal_base_pos.x/100:.2f} Y{cal_base_pos.y/100:.2f}", wait=True ) - time.sleep(0.3) 
+ time.sleep(0.6) dy_mm = self._cal_move_y_ticks / 100.0 self.status(f"Moving +Y by {dy_mm:.2f}mm...", True) self._exec_gcode(f"G91", wait=True) self._exec_gcode(f"G0 Y{dy_mm:.2f}", wait=True) self._exec_gcode(f"G90", wait=True) - time.sleep(0.3) + time.sleep(0.6) edges_y = self._capture_and_process_edges() if edges_y is None: @@ -331,9 +377,13 @@ def _handle_camera_calibrate(self, cmd: command) -> None: # Store calibration reference position (center of image at cal_base_pos) self._cal_ref_pos = cal_base_pos + # Calculate DPI from the calibration matrix + self._calculate_dpi() + + dpi_str = f", DPI: {self._cal_dpi:.1f}" if self._cal_dpi is not None else "" self.status( f"Calibration complete. Matrix M_est:\n{self.M_est}\n" - f"Inverse M_inv:\n{self.M_inv}", + f"Inverse M_inv:\n{self.M_inv}{dpi_str}", True ) @@ -574,6 +624,7 @@ def get_calibration_status(self) -> dict: 'reference_position': self._cal_ref_pos, 'matrix_M': self.M_est.tolist() if self.M_est is not None else None, 'matrix_M_inv': self.M_inv.tolist() if self.M_inv is not None else None, + 'dpi': self._cal_dpi, } def is_calibrated(self) -> bool: @@ -585,5 +636,4 @@ def is_calibrated(self) -> bool: """ return (self.M_est is not None and self.M_inv is not None and - self._cal_ref_pos is not None) - \ No newline at end of file + self._cal_ref_pos is not None) \ No newline at end of file From fd312e73910f0914bbbdba57b58b3477e0b97a53 Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Mon, 19 Jan 2026 23:30:29 -0900 Subject: [PATCH 08/46] image stacking/stitching test --- misc/generic_image_stitch/.gitignore | 1 + misc/generic_image_stitch/focusstack.py | 80 ++ misc/generic_image_stitch/inc_stitch.py | 993 ++++++++++++++++++++++++ misc/generic_image_stitch/siftstitch.py | 161 ++++ misc/siftstitch.py | 168 ---- 5 files changed, 1235 insertions(+), 168 deletions(-) create mode 100644 misc/generic_image_stitch/.gitignore create mode 100644 misc/generic_image_stitch/focusstack.py create mode 100644 
misc/generic_image_stitch/inc_stitch.py create mode 100644 misc/generic_image_stitch/siftstitch.py delete mode 100644 misc/siftstitch.py diff --git a/misc/generic_image_stitch/.gitignore b/misc/generic_image_stitch/.gitignore new file mode 100644 index 0000000..ea5c1ad --- /dev/null +++ b/misc/generic_image_stitch/.gitignore @@ -0,0 +1 @@ +/input/* diff --git a/misc/generic_image_stitch/focusstack.py b/misc/generic_image_stitch/focusstack.py new file mode 100644 index 0000000..b054121 --- /dev/null +++ b/misc/generic_image_stitch/focusstack.py @@ -0,0 +1,80 @@ +#!/usr/bin/env python3 +""" +Batch focus stacking script. +Processes all subfolders in a given directory, running focus-stack on the JPEG images in each. +""" + +import os +import sys +import subprocess +from pathlib import Path + + +def main(): + if len(sys.argv) != 2: + print("Usage: python focus_stack_batch.py ") + sys.exit(1) + + input_folder = Path(sys.argv[1]) + + if not input_folder.exists(): + print(f"Error: Folder '{input_folder}' does not exist") + sys.exit(1) + + if not input_folder.is_dir(): + print(f"Error: '{input_folder}' is not a directory") + sys.exit(1) + + # Get the focus-stack executable path relative to this script + script_dir = Path(__file__).parent + focus_stack_exe = script_dir / "../../focus-stack/focus-stack.exe" + focus_stack_exe = focus_stack_exe.resolve() + + if not focus_stack_exe.exists(): + print(f"Error: focus-stack executable not found at '{focus_stack_exe}'") + sys.exit(1) + + # Process each subfolder + subfolders = [d for d in input_folder.iterdir() if d.is_dir()] + + if not subfolders: + print(f"No subfolders found in '{input_folder}'") + sys.exit(0) + + print(f"Found {len(subfolders)} subfolder(s) to process") + + for subfolder in sorted(subfolders): + # Check if there are any JPEG files in this subfolder + jpeg_files = sorted(subfolder.glob("*.jpeg")) + + if not jpeg_files: + print(f"Skipping '{subfolder.name}': no JPEG files found") + continue + + # Construct output 
path (use absolute path) + output_file = (input_folder / f"{subfolder.name}.jpeg").resolve() + + # Build the command with expanded file list (use absolute paths) + cmd = [str(focus_stack_exe)] + cmd.extend([str(f.resolve()) for f in jpeg_files]) + cmd.append(f"--output={str(output_file)}") + + print(f"\nProcessing '{subfolder.name}' ({len(jpeg_files)} images)...") + print(f"Output: {output_file}") + + try: + result = subprocess.run(cmd, check=True, capture_output=True, text=True) + print(f"Success: {subfolder.name}") + if result.stdout: + print(result.stdout) + except subprocess.CalledProcessError as e: + print(f"Error processing '{subfolder.name}':") + print(f"Exit code: {e.returncode}") + if e.stderr: + print(f"Error output: {e.stderr}") + + print("\nBatch processing complete!") + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/misc/generic_image_stitch/inc_stitch.py b/misc/generic_image_stitch/inc_stitch.py new file mode 100644 index 0000000..52c9f62 --- /dev/null +++ b/misc/generic_image_stitch/inc_stitch.py @@ -0,0 +1,993 @@ +#!/usr/bin/env python3 +""" +Progressive Image Stitching Viewer +Shows images being stitched together in real-time using pygame. +Improved version with template matching fallback for vertical alignment. 
+""" + +import cv2 +import numpy as np +import os +import argparse +import pygame +import time +from pathlib import Path + + +class ProgressiveStitcher: + def __init__(self, display_width=1200, display_height=800, row_counts=None): + """Initialize the progressive stitcher with pygame display.""" + pygame.init() + self.display_width = display_width + self.display_height = display_height + self.screen = pygame.display.set_mode((display_width, display_height)) + pygame.display.set_caption("Progressive Image Stitching") + + self.clock = pygame.time.Clock() + self.font = pygame.font.Font(None, 36) + self.small_font = pygame.font.Font(None, 24) + + # Spatial layout information + self.row_counts = row_counts or [] # Number of images per row + + # World space for images + self.images = [] # List of dicts with 'image', 'world_pos', 'index' + self.world_offset = [0, 0] # Camera offset for panning + self.zoom_scale = 1.0 + + # Track current position in snake pattern + self.current_row = 0 + self.current_col = 0 + self.direction = -1 # -1 for left, 1 for right + + # Estimated image size (will be updated with first image) + self.avg_image_width = 800 + self.avg_image_height = 600 + self.overlap_ratio = 0.4 # Estimated overlap between images + + def load_images_from_folder(self, folder_path): + """Load all JPEG images from the specified folder in order.""" + image_files = [] + valid_extensions = {'.jpg', '.jpeg', '.JPG', '.JPEG'} + + # Get all image files + all_files = [] + for file in os.listdir(folder_path): + if any(file.endswith(ext) for ext in valid_extensions): + all_files.append(file) + + # Sort by numeric prefix if possible + def get_numeric_key(filename): + # Extract leading numbers from filename + import re + match = re.match(r'(\d+)', filename) + if match: + return int(match.group(1)) + return filename + + all_files.sort(key=get_numeric_key) + + image_files = [os.path.join(folder_path, f) for f in all_files] + + if not image_files: + raise ValueError(f"No JPEG images 
found in {folder_path}") + + print(f"Found {len(image_files)} images:") + for img_file in image_files: + print(f" - {os.path.basename(img_file)}") + + return image_files + + def find_feature_rich_region(self, image_gray, direction): + """ + Use edge detection to find the most feature-rich vertical strip in the image. + + Args: + image_gray: Grayscale image + direction: -1 for left, 1 for right (which side to prioritize) + + Returns: + tuple: (start_col, width) for the best region, or None if detection fails + """ + h, w = image_gray.shape + + # Detect edges using Canny + edges = cv2.Canny(image_gray, 50, 150) + + # Divide image into vertical strips and count edges in each + num_strips = 10 + strip_width = w // num_strips + edge_counts = [] + + for i in range(num_strips): + start_x = i * strip_width + end_x = min((i + 1) * strip_width, w) + strip = edges[:, start_x:end_x] + edge_count = np.sum(strip > 0) + edge_counts.append((i, edge_count, start_x, end_x)) + + # Sort by edge count (most features first) + edge_counts.sort(key=lambda x: x[1], reverse=True) + + # Prioritize strips on the side we're interested in + if direction == -1: + # Going left: prefer left side (lower indices) + # Weight: lower index = higher priority + weighted_scores = [(i, count - (idx * count * 0.1), start_x, end_x) + for idx, count, start_x, end_x in edge_counts] + else: + # Going right: prefer right side (higher indices) + # Weight: higher index = higher priority + weighted_scores = [(i, count + (idx * count * 0.1), start_x, end_x) + for idx, count, start_x, end_x in edge_counts] + + # Re-sort by weighted score + weighted_scores.sort(key=lambda x: x[1], reverse=True) + + # Take top 3-4 strips and use them (they might be adjacent) + best_strips = weighted_scores[:4] + best_indices = [x[0] for x in best_strips] + best_indices.sort() + + # Find contiguous region + if len(best_indices) >= 2: + start_idx = best_indices[0] + end_idx = best_indices[-1] + # Expand to include strips in between + 
start_col = start_idx * strip_width + end_col = min((end_idx + 1) * strip_width, w) + width = end_col - start_col + + # Ensure width is reasonable (20-50% of image) + min_width = int(w * 0.2) + max_width = int(w * 0.5) + if width < min_width: + width = min_width + if width > max_width: + width = max_width + # Adjust start_col if needed + if direction == -1: + start_col = 0 + else: + start_col = w - width + + print(f" Edge detection: Best region at x={start_col}-{start_col + width} ({width}px wide)") + return start_col, width + + return None + + def find_vertical_offset_template_matching(self, new_image, prev_row_image): + """ + Use template matching to find vertical offset between images. + Also detects horizontal offset based on where the match occurred. + + Args: + new_image: The new image (top) + prev_row_image: The image from previous row (bottom) + + Returns: + tuple: (y_offset, x_offset, confidence) or (None, None, 0) if matching fails + """ + print(f" Attempting template matching for vertical alignment...") + + new_h, new_w = new_image.shape[:2] + prev_h, prev_w = prev_row_image.shape[:2] + + # Convert to grayscale for template matching + new_gray = cv2.cvtColor(new_image, cv2.COLOR_BGR2GRAY) + prev_gray = cv2.cvtColor(prev_row_image, cv2.COLOR_BGR2GRAY) + + # Calculate estimated overlap based on overlap ratio + estimated_overlap_height = int(self.avg_image_height * self.overlap_ratio) + + # Use a more conservative template height (60% of estimated overlap) + # This ensures the template is significantly smaller than the search region + overlap_height = int(estimated_overlap_height * 0.6) + overlap_height = min(overlap_height, int(new_h * 0.3)) # Cap at 30% of image height + template_full = new_gray[-overlap_height:, :] + + # Search region should be generous: estimated overlap + 100% margin + search_margin = estimated_overlap_height # Full overlap height as margin + search_height = estimated_overlap_height + search_margin + search_height = min(search_height, 
prev_h) # Don't exceed image bounds + + # Use edge detection to find feature-rich region for the template + feature_region = self.find_feature_rich_region(template_full, self.direction) + + if feature_region is not None: + feature_start_col, feature_width = feature_region + + # Crop template to feature-rich region + template = template_full[:, feature_start_col:feature_start_col + feature_width] + + print(f" Using edge-detected template region: {feature_width}px wide at x={feature_start_col}") + + # Store where we cropped the template from (for offset calculation later) + template_crop_start = feature_start_col + template_crop_width = feature_width + else: + # Fallback: use fixed percentage on the side based on direction + print(f" Edge detection failed, using fallback template crop") + template_crop_ratio = 0.3 + template_crop_width = int(new_w * template_crop_ratio) + + if self.direction == -1: + # Going left: use left side + template = template_full[:, :template_crop_width] + template_crop_start = 0 + else: + # Going right: use right side + template = template_full[:, -template_crop_width:] + template_crop_start = new_w - template_crop_width + + # Search region: use the same side as template, but wider + # This ensures the template region will be found in the search region + search_crop_ratio = 0.5 # Use 50% of the width for search + search_crop_width = int(prev_w * search_crop_ratio) + + if self.direction == -1: + # Going left: search in left portion + search_region_full = prev_gray[:search_height, :] + search_region = search_region_full[:, :search_crop_width] + search_crop_start = 0 + print(f" Direction: LEFT, searching in left {search_crop_ratio:.0%} ({search_crop_width}px)") + else: + # Going right: search in right portion + search_region_full = prev_gray[:search_height, :] + search_region = search_region_full[:, -search_crop_width:] + search_crop_start = prev_w - search_crop_width + print(f" Direction: RIGHT, searching in right {search_crop_ratio:.0%} 
({search_crop_width}px)") + + print(f" Template: {overlap_height}x{template_crop_width}px, Search: {search_height}x{search_crop_width}px") + + print(f" Using estimated overlap: {estimated_overlap_height}px, template: {overlap_height}px height") + + # Resize if images are too large for efficient matching + # Scale based on the TEMPLATE size to keep it manageable + max_dim = 500 + scale = 1.0 + template_h_before_scale, template_w_before_scale = template.shape + template_max_dim = max(template_h_before_scale, template_w_before_scale) + if template_max_dim > max_dim: + scale = max_dim / template_max_dim + template = cv2.resize(template, None, fx=scale, fy=scale, interpolation=cv2.INTER_AREA) + search_region = cv2.resize(search_region, None, fx=scale, fy=scale, interpolation=cv2.INTER_AREA) + + template_h, template_w = template.shape + search_h, search_w = search_region.shape + + print(f" After scaling: Template {template_h}x{template_w}, Search {search_h}x{search_w}") + + # Save debug images to see what's being matched + debug_dir = "template_matching_debug" + import os + os.makedirs(debug_dir, exist_ok=True) + + # Save template and search region + cv2.imwrite(f"{debug_dir}/template_{self.current_row}.png", template) + cv2.imwrite(f"{debug_dir}/search_{self.current_row}.png", search_region) + print(f" Debug: Saved template and search images to {debug_dir}/") + + # OpenCV requires search region to be strictly larger than template in BOTH dimensions + if template_h >= search_h or template_w >= search_w: + print(f" Template matching failed: template too large") + return None, None, 0 + + # Additional check: ensure there's meaningful room for searching (template < 70% of search) + if template_h > search_h * 0.7 or template_w > search_w * 0.7: + print(f" Template matching failed: insufficient search space (template needs to be <70% of search)") + return None, None, 0 + + # Perform template matching using normalized cross-correlation + result = 
cv2.matchTemplate(search_region, template, cv2.TM_CCOEFF_NORMED) + min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(result) + + print(f" Template matching confidence: {max_val:.3f}") + + # If confidence is too low and we used edge detection, try fallback with side-based crop + if max_val < 0.5 and feature_region is not None: + print(f" Edge-detected template confidence too low, trying side-based fallback...") + + # Try side-based template instead + fallback_crop_ratio = 0.4 + fallback_crop_width = int(new_w * fallback_crop_ratio) + + if self.direction == -1: + template_fallback = template_full[:, :fallback_crop_width] + template_crop_start = 0 + else: + template_fallback = template_full[:, -fallback_crop_width:] + template_crop_start = new_w - fallback_crop_width + + # Resize fallback template + template_h_fb, template_w_fb = template_fallback.shape + template_max_dim_fb = max(template_h_fb, template_w_fb) + scale_fb = 1.0 + if template_max_dim_fb > max_dim: + scale_fb = max_dim / template_max_dim_fb + template_fallback = cv2.resize(template_fallback, None, fx=scale_fb, fy=scale_fb, interpolation=cv2.INTER_AREA) + search_region_fb = cv2.resize(search_region, None, fx=scale_fb, fy=scale_fb, interpolation=cv2.INTER_AREA) + else: + search_region_fb = search_region.copy() + + template_h_fb, template_w_fb = template_fallback.shape + search_h_fb, search_w_fb = search_region_fb.shape + + # Check if valid + if template_h_fb < search_h_fb and template_w_fb < search_w_fb and \ + template_h_fb < search_h_fb * 0.7 and template_w_fb < search_w_fb * 0.7: + + result_fb = cv2.matchTemplate(search_region_fb, template_fallback, cv2.TM_CCOEFF_NORMED) + min_val_fb, max_val_fb, min_loc_fb, max_loc_fb = cv2.minMaxLoc(result_fb) + + print(f" Fallback confidence: {max_val_fb:.3f}") + + # Use fallback if it's better + if max_val_fb > max_val: + print(f" Using fallback template (better confidence)") + template = template_fallback + search_region = search_region_fb + max_val = max_val_fb + 
max_loc = max_loc_fb + scale = scale_fb + template_h, template_w = template_h_fb, template_w_fb + search_h, search_w = search_h_fb, search_w_fb + + # Save fallback debug images + cv2.imwrite(f"{debug_dir}/template_{self.current_row}_fallback.png", template) + + # Require a minimum confidence threshold + if max_val < 0.5: + print(f" Template matching confidence too low") + return None, None, 0 + + # The match location tells us where the TOP-LEFT of template matched in the search region + # Template is the bottom overlap_height pixels of new_image + # Search region is the top search_height pixels of prev_row_image + match_y = max_loc[1] + match_x = max_loc[0] + + # Scale back to original resolution + match_y = int(match_y / scale) + match_x = int(match_x / scale) + + # Calculate the Y offset: + # The template (bottom of new image) matched at position match_y in the search region + # Y offset = match_y + overlap_height - new_h (should be negative) + y_offset = match_y + overlap_height - new_h + + # Calculate the X offset: + # match_x tells us where the template matched within the search region (in search_region coordinates) + # We need to convert this to world coordinates accounting for our crops + # Template was cropped starting at template_crop_start + # Search was cropped starting at search_crop_start + # If match_x = 0, it means template aligned perfectly with search start + # The actual X offset in world coordinates: + x_offset = search_crop_start + match_x - template_crop_start + + # Create visualization showing where the match was found + debug_dir = "template_matching_debug" + + # Also save edge detection visualization + edges_new = cv2.Canny(new_gray, 50, 150) + edges_prev = cv2.Canny(prev_gray, 50, 150) + cv2.imwrite(f"{debug_dir}/edges_new_{self.current_row}.png", edges_new) + cv2.imwrite(f"{debug_dir}/edges_prev_{self.current_row}.png", edges_prev) + + vis_search = cv2.cvtColor(search_region, cv2.COLOR_GRAY2BGR) + # Draw rectangle where template matched + 
cv2.rectangle(vis_search, + (match_x, match_y), + (match_x + template_w, match_y + template_h), + (0, 255, 0), 2) + # Add text showing match position + cv2.putText(vis_search, f"Match: ({match_x}, {match_y})", + (match_x, match_y - 10), + cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 1) + cv2.imwrite(f"{debug_dir}/match_visualization_{self.current_row}.png", vis_search) + + print(f" Template match found at x={match_x}, y={match_y}") + print(f" Template height={overlap_height}, new image height={new_h}") + print(f" Calculated offsets: X={x_offset:.1f}, Y={y_offset:.1f} pixels") + + return y_offset, x_offset, max_val + + def find_horizontal_offset_template_matching(self, new_image, prev_image, direction): + """ + Use template matching to find horizontal offset between images. + + Args: + new_image: The new image + prev_image: The previous image + direction: -1 for left, 1 for right + + Returns: + tuple: (x_offset, confidence) or (None, 0) if matching fails + """ + print(f" Attempting template matching for horizontal alignment...") + + new_h, new_w = new_image.shape[:2] + prev_h, prev_w = prev_image.shape[:2] + + # Convert to grayscale + new_gray = cv2.cvtColor(new_image, cv2.COLOR_BGR2GRAY) + prev_gray = cv2.cvtColor(prev_image, cv2.COLOR_BGR2GRAY) + + # Calculate estimated overlap based on overlap ratio + estimated_overlap_width = int(self.avg_image_width * self.overlap_ratio) + + # Take overlap region based on direction + # Use estimated overlap width, but cap it at 40% of image width for safety + overlap_width = min(estimated_overlap_width, int(new_w * 0.4)) + + # Focus search region around expected overlap area + search_margin = int(estimated_overlap_width * 0.5) + search_width = estimated_overlap_width + search_margin + search_width = min(search_width, prev_w) # Don't exceed image bounds + + if direction == -1: + # Moving left: take right edge of new image as template + template = new_gray[:, -overlap_width:] + # Search in left portion of previous image (focused 
on overlap region) + search_region = prev_gray[:, :search_width] + else: + # Moving right: take left edge of new image as template + template = new_gray[:, :overlap_width] + # Search in right portion of previous image (focused on overlap region) + search_region = prev_gray[:, -search_width:] + + print(f" Using estimated overlap: {estimated_overlap_width}px, template: {overlap_width}px, search: {search_width}px") + + # Resize if images are too large + max_height = 600 + scale = 1.0 + if new_h > max_height or prev_h > max_height: + scale = max_height / max(new_h, prev_h) + template = cv2.resize(template, None, fx=scale, fy=scale, interpolation=cv2.INTER_AREA) + search_region = cv2.resize(search_region, None, fx=scale, fy=scale, interpolation=cv2.INTER_AREA) + + template_h, template_w = template.shape + search_h, search_w = search_region.shape + + # OpenCV requires search region to be strictly larger than template in BOTH dimensions + if template_w >= search_w or template_h >= search_h: + print(f" Template matching failed: template too large (template: {template_h}x{template_w}, search: {search_h}x{search_w})") + return None, 0 + + # Additional check: ensure there's meaningful room for searching + if template_w > search_w * 0.9 or template_h > search_h * 0.95: + print(f" Template matching failed: insufficient search space (template: {template_h}x{template_w}, search: {search_h}x{search_w})") + return None, 0 + + # Perform template matching + result = cv2.matchTemplate(search_region, template, cv2.TM_CCOEFF_NORMED) + min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(result) + + print(f" Template matching confidence: {max_val:.3f}") + + if max_val < 0.5: + print(f" Template matching confidence too low") + return None, 0 + + match_x = max_loc[0] + match_x = int(match_x / scale) + + # Calculate x offset + if direction == -1: + # Moving left: new image goes to the left of previous + x_offset = -(prev_w - match_x) + else: + # Moving right: new image goes to the right of 
# (reconstructed from collapsed patch text; comment below continues from the prior line)
            # Moving right: new image goes to the right of previous
            x_offset = (prev_w - search_width) + match_x

        print(f" Template match found at x={match_x}, offset={x_offset:.1f} pixels")

        return x_offset, max_val

    def get_nearby_images(self, world_pos, radius=2.5):
        """Get images within radius of the given world position.

        Args:
            world_pos: (x, y) position in world (pixel) coordinates.
            radius: search radius expressed in "image units" (multiples of the
                average image width/height), not raw pixels.

        Returns:
            List of image-record dicts whose centers fall within the radius.
        """
        nearby = []
        px, py = world_pos

        for img_data in self.images:
            ix, iy = img_data['world_pos']
            # Calculate distance in "image units"
            dist_x = abs(px - ix) / self.avg_image_width
            dist_y = abs(py - iy) / self.avg_image_height
            dist = (dist_x**2 + dist_y**2)**0.5

            if dist <= radius:
                nearby.append(img_data)

        return nearby

    def world_to_screen(self, world_pos):
        """Convert world coordinates to screen coordinates.

        Applies the pan offset, scales by the current zoom, then recenters on
        the middle of the display surface.
        """
        wx, wy = world_pos
        sx = (wx + self.world_offset[0]) * self.zoom_scale + self.display_width // 2
        sy = (wy + self.world_offset[1]) * self.zoom_scale + self.display_height // 2
        return [sx, sy]

    def render_worldspace(self):
        """Render all images in their worldspace positions."""
        self.screen.fill((30, 30, 30))

        # Sort images by index to render in order (earlier images first)
        sorted_images = sorted(self.images, key=lambda x: x['index'])

        for img_data in sorted_images:
            image = img_data['image']
            world_pos = img_data['world_pos']

            # Convert to screen space
            screen_pos = self.world_to_screen(world_pos)

            # Scale image
            h, w = image.shape[:2]
            scaled_w = int(w * self.zoom_scale)
            scaled_h = int(h * self.zoom_scale)

            # Skip if too small or off screen
            if scaled_w < 2 or scaled_h < 2:
                continue
            if screen_pos[0] + scaled_w < 0 or screen_pos[0] > self.display_width:
                continue
            if screen_pos[1] + scaled_h < 0 or screen_pos[1] > self.display_height:
                continue

            # Resize and convert to pygame
            scaled_img = cv2.resize(image, (scaled_w, scaled_h), interpolation=cv2.INTER_AREA)
            # Convert BGR to RGB
            rgb_image = cv2.cvtColor(scaled_img, cv2.COLOR_BGR2RGB)
            # Transpose to get correct orientation for pygame (swap axes)
            rgb_image = np.transpose(rgb_image, (1, 0, 2))
            surface = pygame.surfarray.make_surface(rgb_image)

            # Draw with slight transparency for overlaps
            surface.set_alpha(220)
            self.screen.blit(surface, screen_pos)

            # Draw border
            pygame.draw.rect(self.screen, (100, 100, 100),
                             (*screen_pos, scaled_w, scaled_h), 1)

    def auto_frame_images(self):
        """Automatically adjust zoom and offset to fit all images."""
        if not self.images:
            return

        # Find bounding box of all images
        min_x = min(img['world_pos'][0] for img in self.images)
        max_x = max(img['world_pos'][0] + img['image'].shape[1] for img in self.images)
        min_y = min(img['world_pos'][1] for img in self.images)
        max_y = max(img['world_pos'][1] + img['image'].shape[0] for img in self.images)

        # Calculate zoom to fit
        width = max_x - min_x
        height = max_y - min_y

        # 100/150 px margins leave room for the status overlay; never zoom past 1:1
        zoom_x = (self.display_width - 100) / width if width > 0 else 1.0
        zoom_y = (self.display_height - 150) / height if height > 0 else 1.0
        self.zoom_scale = min(zoom_x, zoom_y, 1.0)

        # Center on images
        center_x = (min_x + max_x) / 2
        center_y = (min_y + max_y) / 2
        self.world_offset = [-center_x, -center_y]

    def draw_status_overlay(self, message, image_num, total_images, loading_file=None):
        """Draw status text overlay on top of rendered scene.

        Args:
            message: headline string drawn centered near the top.
            image_num: number of images placed so far (progress numerator).
            total_images: total image count (progress denominator).
            loading_file: optional filename shown as a "Loading:" indicator.
        """
        # Draw semi-transparent background for text
        overlay = pygame.Surface((self.display_width, 130), pygame.SRCALPHA)
        overlay.fill((30, 30, 30, 200))
        self.screen.blit(overlay, (0, 0))

        # Draw main message
        text = self.font.render(message, True, (255, 255, 255))
        text_rect = text.get_rect(center=(self.display_width // 2, 30))
        self.screen.blit(text, text_rect)

        # Draw progress
        progress_text = f"Image {image_num} of {total_images}"
        progress = self.small_font.render(progress_text, True, (200, 200, 200))
        progress_rect = progress.get_rect(center=(self.display_width // 2, 70))
        self.screen.blit(progress, progress_rect)

        # Draw loading indicator if provided
        if loading_file:
            loading_text = f"Loading: {loading_file}"
            loading = self.small_font.render(loading_text, True, (150, 200, 255))
            loading_rect = loading.get_rect(center=(self.display_width // 2, 100))
            self.screen.blit(loading, loading_rect)

    def add_image(self, new_image, index):
        """Add a new image to the worldspace using stitching to determine precise position.

        Placement strategy: the first image anchors the origin; subsequent images
        are placed by (1) OpenCV SCANS stitching, falling back to (2) template
        matching, falling back to (3) an estimated offset from the configured
        overlap ratio. Images snake across rows (direction flips on each new row).

        Args:
            new_image: BGR image array (as loaded by cv2.imread).
            index: 1-based capture index, stored for render ordering.

        Returns:
            True (the image is always placed, even on matching failure).
        """
        h, w = new_image.shape[:2]

        if not self.images:
            # First image at origin
            self.avg_image_width = w
            self.avg_image_height = h
            self.current_col = self.row_counts[0] - 1 if self.row_counts else 0
            self.current_row = 0
            self.direction = -1
            print(f" First image size: {w}x{h}")

            self.images.append({
                'image': new_image,
                'world_pos': [0, 0],
                'index': index,
                'row': 0,
                'col': self.current_col
            })
            return True

        # Check if we're moving to a new row
        need_new_row = False
        if self.direction == -1:
            # Moving left
            if self.current_col == 0:
                # About to finish this row
                need_new_row = True
        else:
            # Moving right
            if self.current_row < len(self.row_counts) and self.current_col == self.row_counts[self.current_row] - 1:
                # About to finish this row
                need_new_row = True

        if need_new_row:
            # Moving to new row - need to stitch with image from previous row
            print(f" Moving to new row {self.current_row + 1}")

            # Get the last image from the current row (this is where we transition from)
            transition_image = self.images[-1]

            self.current_row += 1
            self.direction = -self.direction  # Flip direction

            if self.direction == -1:
                # Now moving left, start at rightmost column of new row
                self.current_col = self.row_counts[self.current_row] - 1 if self.current_row < len(self.row_counts) else 0
            else:
                # Now moving right, start at leftmost column of new row
                self.current_col = 0

            # Find the appropriate image from the previous row to stitch with
            prev_row_images = [img for img in self.images if img.get('row', 0) == self.current_row - 1]

            if prev_row_images:
                transition_x = transition_image['world_pos'][0]

                # Find candidate images from previous row to try matching against
                # Sort by X distance from transition point
                candidates = sorted(prev_row_images, key=lambda img: abs(img['world_pos'][0] - transition_x))

                # Try up to 3 candidates from previous row
                max_candidates = min(3, len(candidates))

                print(f" Will try matching against {max_candidates} candidates from previous row")

                best_match = None
                best_confidence = 0

                for i, candidate_img in enumerate(candidates[:max_candidates]):
                    print(f" Candidate {i+1}/{max_candidates}: image {candidate_img['index']} (X: {candidate_img['world_pos'][0]:.1f})")

                    # First try SIFT-based stitching
                    stitcher = cv2.Stitcher.create(cv2.Stitcher_SCANS)
                    status, stitched = stitcher.stitch([new_image, candidate_img['image']])

                    if status == cv2.Stitcher_OK:
                        # Calculate Y offset from stitched result
                        stitched_h, stitched_w = stitched.shape[:2]
                        prev_h, prev_w = candidate_img['image'].shape[:2]

                        # Y offset is negative (moving up)
                        y_offset = -(stitched_h - prev_h)
                        next_y = candidate_img['world_pos'][1] + y_offset

                        # X position: use the candidate image's X position
                        next_x = candidate_img['world_pos'][0]

                        print(f" SIFT successful! Y offset: {y_offset:.1f} pixels")
                        world_pos = [next_x, next_y]
                        break  # Found a good match, stop searching
                    else:
                        # SIFT failed, try template matching
                        print(f" SIFT failed, trying template matching...")
                        y_offset, x_offset, confidence = self.find_vertical_offset_template_matching(
                            new_image, candidate_img['image']
                        )

                        if confidence > best_confidence:
                            best_match = {
                                'candidate': candidate_img,
                                'y_offset': y_offset,
                                'x_offset': x_offset,
                                'confidence': confidence
                            }
                            best_confidence = confidence
                            print(f" Template confidence: {confidence:.3f} (new best)")
                        else:
                            print(f" Template confidence: {confidence:.3f}")

                        # If we found a very good match, stop searching
                        if confidence > 0.8:
                            break

                # Check if we found a good match through SIFT (world_pos was set) or template matching
                # NOTE(review): `'world_pos' not in locals()` is a fragile way to detect
                # "SIFT never succeeded"; an explicit `world_pos = None` sentinel before
                # the loop would be safer. Left as-is.
                if 'world_pos' not in locals():
                    # SIFT didn't work for any candidate, use best template match
                    if best_match and best_confidence > 0.5:
                        next_y = best_match['candidate']['world_pos'][1] + best_match['y_offset']
                        next_x = best_match['candidate']['world_pos'][0] + best_match['x_offset']
                        print(f" Using best template match: image {best_match['candidate']['index']}, confidence: {best_confidence:.3f}")
                        print(f" Offsets: X={best_match['x_offset']:.1f}, Y={best_match['y_offset']:.1f} pixels")
                        world_pos = [next_x, next_y]
                    else:
                        # All methods failed, use estimated offset
                        closest_img = candidates[0]
                        y_offset = -(self.avg_image_height * (1 - self.overlap_ratio))
                        x_offset = 0
                        print(f" All matching attempts failed (best confidence: {best_confidence:.3f}), using estimated offset")
                        print(f" Estimated offsets: X={x_offset:.1f}, Y={y_offset:.1f} pixels")
                        next_y = closest_img['world_pos'][1] + y_offset
                        next_x = closest_img['world_pos'][0]
                        world_pos = [next_x, next_y]
            else:
                # No previous row images, use estimated offset from last image
                prev_pos = self.images[-1]['world_pos']
                y_offset = -(self.avg_image_height * (1 - self.overlap_ratio))
                x_offset = 0  # No horizontal offset for new row
                print(f" No previous row images found, using estimated offset")
                print(f" Estimated offsets: X={x_offset:.1f}, Y={y_offset:.1f} pixels")
                world_pos = [prev_pos[0], prev_pos[1] + y_offset]

        else:
            # Continue in same row - stitch horizontally with previous image
            prev_image_data = self.images[-1]
            prev_image = prev_image_data['image']
            prev_pos = prev_image_data['world_pos']

            # Use stitcher to find homography/transformation
            stitcher = cv2.Stitcher.create(cv2.Stitcher_SCANS)

            # Stitch the two images
            if self.direction == -1:
                # Moving left: prev_image on right, new_image on left
                status, stitched = stitcher.stitch([new_image, prev_image])
            else:
                # Moving right: prev_image on left, new_image on right
                status, stitched = stitcher.stitch([prev_image, new_image])

            if status == cv2.Stitcher_OK:
                # Calculate offset based on stitched result
                stitched_h, stitched_w = stitched.shape[:2]
                prev_h, prev_w = prev_image.shape[:2]
                new_h, new_w = new_image.shape[:2]

                if self.direction == -1:
                    # Moving left: new image adds width to the left
                    x_offset = -(stitched_w - prev_w)
                    next_x = prev_pos[0] + x_offset
                    next_y = prev_pos[1]
                    print(f" Horizontal stitch (SIFT) successful! X offset: {x_offset:.1f} pixels (moving left)")
                else:
                    # Moving right: new image adds width to the right
                    x_offset = stitched_w - prev_w
                    next_x = prev_pos[0] + x_offset
                    next_y = prev_pos[1]
                    print(f" Horizontal stitch (SIFT) successful! X offset: {x_offset:.1f} pixels (moving right)")

                world_pos = [next_x, next_y]
            else:
                # SIFT failed, try template matching
                print(f" Horizontal stitch (SIFT) failed, trying template matching...")
                x_offset, confidence = self.find_horizontal_offset_template_matching(
                    new_image, prev_image, self.direction
                )

                if x_offset is not None and confidence > 0.5:
                    next_x = prev_pos[0] + x_offset
                    next_y = prev_pos[1]
                    print(f" Template matching successful! X offset: {x_offset:.1f} pixels (confidence: {confidence:.3f})")
                    world_pos = [next_x, next_y]
                else:
                    # Both methods failed, use estimated position
                    x_offset = self.avg_image_width * (1 - self.overlap_ratio) * self.direction
                    y_offset = 0  # No vertical offset in same row
                    print(f" Template matching also failed, using estimated position")
                    print(f" Estimated offsets: X={x_offset:.1f}, Y={y_offset:.1f} pixels")
                    next_x = prev_pos[0] + x_offset
                    next_y = prev_pos[1]
                    world_pos = [next_x, next_y]

        # Update column counter
        if self.direction == -1:
            self.current_col -= 1
        else:
            self.current_col += 1

        print(f" Placing at world position: ({world_pos[0]:.1f}, {world_pos[1]:.1f}), row: {self.current_row}, col: {self.current_col}")

        # Add image to worldspace
        self.images.append({
            'image': new_image,
            'world_pos': world_pos,
            'index': index,
            'row': self.current_row,
            'col': self.current_col
        })

        return True

    def run(self, image_files, delay=1.0):
        """Run the progressive stitching visualization.

        Args:
            image_files: ordered list of image file paths to load and place.
            delay: simulated capture delay (seconds) between images.
        """
        total_images = len(image_files)
        running = True

        try:
            for idx, img_file in enumerate(image_files, 1):
                # Check for quit events
                for event in pygame.event.get():
                    if event.type == pygame.QUIT:
                        running = False
                        break
                    elif event.type == pygame.KEYDOWN:
                        if event.key == pygame.K_ESCAPE:
                            running = False
                            break

                if not running:
                    break

                # Show loading message while keeping previous view
                filename = os.path.basename(img_file)
                # NOTE(review): "(unknown)" looks like an extraction artifact —
                # presumably this was f"...: {filename}". TODO confirm against the
                # original patch before changing the string.
                print(f"\nLoading image {idx}/{total_images}: (unknown)")

                if self.images:
                    # Render current worldspace with loading indicator
                    self.render_worldspace()
                    self.draw_status_overlay(
                        f"Panorama Progress: {len(self.images)}/{total_images} images",
                        len(self.images),
                        total_images,
                        loading_file=filename
                    )
                    pygame.display.flip()

                # Simulate capture delay
                start_time = time.time()
                while time.time() - start_time < delay:
                    # Check for quit events during delay
                    for event in pygame.event.get():
                        if event.type == pygame.QUIT:
                            running = False
                            break
                        elif event.type == pygame.KEYDOWN:
                            if event.key == pygame.K_ESCAPE:
                                running = False
                                break

                    if not running:
                        break

                    time.sleep(0.01)

                if not running:
                    break

                # Load image
                img = cv2.imread(img_file)

                if img is None:
                    print(f"Warning: Could not load {img_file}, skipping...")
                    continue

                # Add image to worldspace
                print(f"Adding image {idx} to worldspace...")
                self.add_image(img, idx)

                # Auto-frame to show all images
                self.auto_frame_images()

                # Render updated worldspace
                self.render_worldspace()
                self.draw_status_overlay(
                    f"Panorama Progress: {len(self.images)}/{total_images} images",
                    len(self.images),
                    total_images
                )
                pygame.display.flip()

                # Small pause to show the result
                time.sleep(0.3)

            # Final display
            if running and self.images:
                print("\nAll images loaded!")
                self.render_worldspace()
                self.draw_status_overlay(
                    "All Images Loaded! (Press ESC or close window to exit)",
                    total_images,
                    total_images
                )
                pygame.display.flip()

                # Wait for user to close
                waiting = True
                while waiting:
                    for event in pygame.event.get():
                        if event.type == pygame.QUIT:
                            waiting = False
                        elif event.type == pygame.KEYDOWN:
                            if event.key == pygame.K_ESCAPE:
                                waiting = False
                    self.clock.tick(30)

        finally:
            pygame.quit()


def main():
    """CLI entry point: parse arguments, build a ProgressiveStitcher, and run it."""
    parser = argparse.ArgumentParser(
        description='Progressive image stitching viewer with real-time display',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  python inc_stitch_improved.py /path/to/images --rows 9 9 9 10 8 8
  python inc_stitch_improved.py /path/to/images --delay 2.0 --rows 9 9 9 10 8 8
  python inc_stitch_improved.py /path/to/images --width 1600 --height 900 --rows 9 9 9 10 8 8
"""
    )

    parser.add_argument('input_folder', type=str,
                        help='Folder containing JPEG images to stitch')
    parser.add_argument('--delay', '-d', type=float, default=1.0,
                        help='Delay between loading images in seconds (default: 1.0)')
    parser.add_argument('--width', '-w', type=int, default=1200,
                        help='Display width in pixels (default: 1200)')
    parser.add_argument('--height', '-ht', type=int, default=800,
                        help='Display height in pixels (default: 800)')
    parser.add_argument('--rows', '-r', type=int, nargs='+', default=None,
                        help='Number of images per row in snake pattern (e.g., 9 9 9 10 8 8)')

    args = parser.parse_args()

    # Validate input folder
    if not os.path.isdir(args.input_folder):
        print(f"Error: {args.input_folder} is not a valid directory")
        return 1

    try:
        # Create stitcher
        stitcher = ProgressiveStitcher(args.width, args.height, row_counts=args.rows)

        # Load image file paths
        image_files = stitcher.load_images_from_folder(args.input_folder)

        if len(image_files) < 1:
            print("Error: Need at least 1 image")
            return 1

        # Validate row counts if provided
        if args.rows:
            total_expected = sum(args.rows)
            if total_expected != len(image_files):
                print(f"Warning: Row counts sum to {total_expected} but found {len(image_files)} images")
                print("Proceeding anyway...")

        # Run progressive stitching
        stitcher.run(image_files, delay=args.delay)

        return 0

    except Exception as e:
        print(f"Error: {e}")
        import traceback
        traceback.print_exc()
        return 1


if __name__ == "__main__":
    exit(main())

# --- patch metadata (verbatim from the collapsed diff; introduces a new file) ---
# \ No newline at end of file
# diff --git a/misc/generic_image_stitch/siftstitch.py b/misc/generic_image_stitch/siftstitch.py
# new file mode 100644
# index 0000000..ea5d787
# --- /dev/null
# +++ b/misc/generic_image_stitch/siftstitch.py

#!/usr/bin/env python3
"""
Image Stitching Script
Stitches together overlapping JPEG images from a folder into a single panorama.
"""

import cv2
import numpy as np
import os
import argparse
from pathlib import Path


def load_images_from_folder(folder_path):
    """Load all JPEG images from the specified folder.

    Raises:
        ValueError: if no JPEG files are found in the folder.
    """
    image_files = []
    valid_extensions = {'.jpg', '.jpeg', '.JPG', '.JPEG'}

    # Get all image files and sort them
    for file in sorted(os.listdir(folder_path)):
        if any(file.endswith(ext) for ext in valid_extensions):
            image_files.append(os.path.join(folder_path, file))

    if not image_files:
        raise ValueError(f"No JPEG images found in {folder_path}")

    print(f"Found {len(image_files)} images:")
    for img_file in image_files:
        print(f" - {os.path.basename(img_file)}")

    # Load images
    images = []
    for img_file in image_files:
        img = cv2.imread(img_file)
        if img is None:
            print(f"Warning: Could not load {img_file}, skipping...")
            continue
        images.append(img)

    return images


def stitch_images(images):
    """
    Stitch images together using OpenCV's Stitcher.

    Args:
        images: List of images to stitch

    Returns:
        Stitched image or None if stitching failed
    """
    print(f"\nStitching {len(images)} images...")

    # Create stitcher object (SCANS mode: affine, suited to flat scanned content)
    stitcher = cv2.Stitcher.create(cv2.Stitcher_SCANS)

    # Perform stitching
    status, stitched = stitcher.stitch(images)

    # Check stitching status
    if status == cv2.Stitcher_OK:
        print("Stitching successful!")
        return stitched
    else:
        error_messages = {
            cv2.Stitcher_ERR_NEED_MORE_IMGS: "Need more images",
            cv2.Stitcher_ERR_HOMOGRAPHY_EST_FAIL: "Homography estimation failed",
            cv2.Stitcher_ERR_CAMERA_PARAMS_ADJUST_FAIL: "Camera parameters adjustment failed"
        }
        error_msg = error_messages.get(status, f"Unknown error (code: {status})")
        print(f"Stitching failed: {error_msg}")
        return None


def crop_black_borders(image):
    """Remove black borders from stitched image.

    Thresholds near-black pixels and crops to the bounding box of the largest
    non-black contour; returns the image unchanged if no contour is found.
    """
    # Convert to grayscale
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)

    # Threshold to find non-black regions
    _, thresh = cv2.threshold(gray, 1, 255, cv2.THRESH_BINARY)

    # Find contours
    contours, _ = cv2.findContours(thresh, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)

    if contours:
        # Get bounding box of largest contour
        largest_contour = max(contours, key=cv2.contourArea)
        x, y, w, h = cv2.boundingRect(largest_contour)

        # Crop image
        cropped = image[y:y+h, x:x+w]
        return cropped

    return image


def main():
    """CLI entry point: load, stitch, optionally crop, and save the panorama."""
    parser = argparse.ArgumentParser(
        description='Stitch overlapping JPEG images into a panorama',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  python image_stitcher.py /path/to/images
  python image_stitcher.py /path/to/images --output my_panorama.jpg
  python image_stitcher.py /path/to/images --no-crop
"""
    )

    parser.add_argument('input_folder', type=str,
                        help='Folder containing JPEG images to stitch')
    parser.add_argument('--output', '-o', type=str, default='stitched_panorama.jpg',
                        help='Output filename (default: stitched_panorama.jpg)')
    parser.add_argument('--no-crop', action='store_true',
                        help='Skip automatic cropping of black borders')

    args = parser.parse_args()

    # Validate input folder
    if not os.path.isdir(args.input_folder):
        print(f"Error: {args.input_folder} is not a valid directory")
        return 1

    try:
        # Load images
        images = load_images_from_folder(args.input_folder)

        if len(images) < 2:
            print("Error: Need at least 2 images to stitch")
            return 1

        # Stitch images
        result = stitch_images(images)

        if result is None:
            print("\nStitching failed. Tips:")
            print(" - Ensure images have sufficient overlap (30-50%)")
            print(" - Images should be taken from the same position")
            print(" - Ensure images are in the correct order")
            return 1

        # Crop black borders unless disabled
        if not args.no_crop:
            print("Cropping black borders...")
            result = crop_black_borders(result)

        # Save result
        cv2.imwrite(args.output, result)
        print(f"\nPanorama saved to: {args.output}")
        print(f"Output size: {result.shape[1]}x{result.shape[0]} pixels")

        return 0

    except Exception as e:
        print(f"Error: {e}")
        return 1


if __name__ == "__main__":
    exit(main())

# --- patch metadata (verbatim): deletion hunk for the old misc/siftstitch.py ---
# \ No newline at end of file
# diff --git a/misc/siftstitch.py b/misc/siftstitch.py
# deleted file mode 100644
# index 4c3f217..0000000
# --- a/misc/siftstitch.py
# +++ /dev/null
# @@ -1,168 +0,0 @@
-#!/usr/bin/env python3
-"""
-stitch_sift.py
-
-Usage:
-    python stitch_sift.py /path/to/images_folder -o output.jpg
-
-Dependencies:
-    pip install opencv-contrib-python numpy
-"""
-
-import os
-import argparse
-import cv2
-import numpy as np
-
-
-def load_images_from_folder(folder):
-    exts = (".jpg", ".jpeg", ".png", ".tif", ".tiff", ".bmp")
-    files = [os.path.join(folder, f) for f in sorted(os.listdir(folder))
-             if f.lower().endswith(exts)]
-    imgs = [cv2.imread(f) for f in files]
-    files = [f for f, im in zip(files, imgs) if im is not None]
-    imgs = [im for im in imgs if im is not None]
-    return files, imgs
-
-
-def detect_and_compute_sift(img, sift):
-    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
-    kps, des = sift.detectAndCompute(gray, None)
-    return kps, des
-
-
-def match_descriptors(des1, des2):
-    # BFMatcher with default params; use kNN and ratio test
-    bf = cv2.BFMatcher(cv2.NORM_L2)
-    knn = bf.knnMatch(des1, des2, k=2)
-    good = []
-    for m_n in knn:
-        if len(m_n) != 2:
-            continue
-        m, n = m_n
-        if m.distance < 0.75 * n.distance:
-            good.append(m)
-    return good
-
-
-def find_homography_from_matches(kp1, kp2, matches, min_matches=8):
-    if len(matches) < min_matches:
return None - pts1 = np.float32([kp1[m.queryIdx].pt for m in matches]) - pts2 = np.float32([kp2[m.trainIdx].pt for m in matches]) - H, mask = cv2.findHomography(pts2, pts1, cv2.RANSAC, 5.0) # maps pts2 -> pts1 - return H, mask - - -def compose_global_homographies(images): - sift = cv2.SIFT_create() - kps = [] - dess = [] - for im in images: - kp, des = detect_and_compute_sift(im, sift) - kps.append(kp) - dess.append(des) - - # global_h[0] = identity (map image 0 into base coord) - global_h = [np.eye(3)] - for i in range(1, len(images)): - des_prev = dess[i - 1] - des_cur = dess[i] - kp_prev = kps[i - 1] - kp_cur = kps[i] - if des_prev is None or des_cur is None or len(kp_prev) < 4 or len(kp_cur) < 4: - print(f"WARNING: not enough features between images {i-1} and {i}; using identity.") - H_pair = np.eye(3) - else: - matches = match_descriptors(des_prev, des_cur) - pair = find_homography_from_matches(kp_prev, kp_cur, matches) - if pair is None or pair[0] is None: - print(f"WARNING: homography failed between images {i-1} and {i}; using identity.") - H_pair = np.eye(3) - else: - H_pair = pair[0] - # compose: H maps points in image_i to image_{i-1} - # global for i = global_{i-1} @ H_pair - H_global = global_h[i - 1] @ H_pair - # normalize - H_global = H_global / H_global[2, 2] - global_h.append(H_global) - return global_h - - -def warp_and_blend(images, homographies): - # compute canvas extents by transforming corners - corners = [] - for im, H in zip(images, homographies): - h, w = im.shape[:2] - pts = np.array([[0, 0], [w, 0], [w, h], [0, h]], dtype=np.float32) - pts_h = cv2.perspectiveTransform(pts.reshape(-1, 1, 2), H).reshape(-1, 2) - corners.append(pts_h) - all_pts = np.vstack(corners) - x_min, y_min = np.floor(all_pts.min(axis=0)).astype(int) - x_max, y_max = np.ceil(all_pts.max(axis=0)).astype(int) - - # translation to shift everything into positive coordinates - tx = -x_min if x_min < 0 else 0 - ty = -y_min if y_min < 0 else 0 - canvas_w = x_max - x_min - 
canvas_h = y_max - y_min - print(f"Canvas size: {canvas_w} x {canvas_h}") - - accumulator = np.zeros((canvas_h, canvas_w, 3), dtype=np.float32) - weight = np.zeros((canvas_h, canvas_w), dtype=np.float32) - - for idx, (im, H) in enumerate(zip(images, homographies)): - Ht = H.copy() - # add translation - T = np.array([[1, 0, tx], - [0, 1, ty], - [0, 0, 1]], dtype=np.float64) - Ht = T @ Ht - warped = cv2.warpPerspective(im, Ht, (canvas_w, canvas_h)) - mask = cv2.warpPerspective(np.ones((im.shape[0], im.shape[1]), dtype=np.uint8), Ht, (canvas_w, canvas_h)) - mask_f = mask.astype(np.float32) - - # accumulate weighted sum (simple averaging where images overlap) - accumulator += warped.astype(np.float32) * mask_f[:, :, None] - weight += mask_f - - # avoid divide by zero - nonzero = weight > 0 - result = np.zeros_like(accumulator, dtype=np.uint8) - result[nonzero] = (accumulator[nonzero] / weight[nonzero, None]).astype(np.uint8) - - # crop to content bbox - ys, xs = np.where(weight > 0) - if len(xs) == 0 or len(ys) == 0: - return result - x0, x1 = xs.min(), xs.max() - y0, y1 = ys.min(), ys.max() - cropped = result[y0:y1 + 1, x0:x1 + 1] - return cropped - - -def main(): - parser = argparse.ArgumentParser(description="Stitch images in a folder using SIFT.") - parser.add_argument("folder", help="Folder containing images (overlapping)") - parser.add_argument("-o", "--output", default="panorama.jpg", help="Output filename") - args = parser.parse_args() - - files, images = load_images_from_folder(args.folder) - if len(images) == 0: - print("No images found in folder.") - return - if len(images) == 1: - cv2.imwrite(args.output, images[0]) - print("Single image - saved as output.") - return - - print(f"Loaded {len(images)} images.") - homographies = compose_global_homographies(images) - pano = warp_and_blend(images, homographies) - cv2.imwrite(args.output, pano) - print(f"Panorama saved to {args.output}") - - -if __name__ == "__main__": - main() From 
38cd921bf283c89bf34f53b3ff1e85779eface4e Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Mon, 19 Jan 2026 23:49:59 -0900 Subject: [PATCH 09/46] removed old stuff --- UI/camera_view.py | 142 ------------- UI/flex_frame.py | 119 ----------- UI/frame.py | 421 ------------------------------------- UI/input/button.py | 186 ----------------- UI/input/button_icon.py | 106 ---------- UI/input/radio.py | 185 ----------------- UI/input/scroll_frame.py | 306 --------------------------- UI/input/slider.py | 244 ---------------------- UI/input/text_field.py | 425 -------------------------------------- UI/input/toggle_button.py | 78 ------- UI/list_frame.py | 186 ----------------- UI/modal.py | 249 ---------------------- UI/section_frame.py | 255 ----------------------- UI/styles.py | 59 ------ UI/text.py | 285 ------------------------- UI/tooltip.py | 180 ---------------- 16 files changed, 3426 deletions(-) delete mode 100644 UI/camera_view.py delete mode 100644 UI/flex_frame.py delete mode 100644 UI/frame.py delete mode 100644 UI/input/button.py delete mode 100644 UI/input/button_icon.py delete mode 100644 UI/input/radio.py delete mode 100644 UI/input/scroll_frame.py delete mode 100644 UI/input/slider.py delete mode 100644 UI/input/text_field.py delete mode 100644 UI/input/toggle_button.py delete mode 100644 UI/list_frame.py delete mode 100644 UI/modal.py delete mode 100644 UI/section_frame.py delete mode 100644 UI/styles.py delete mode 100644 UI/text.py delete mode 100644 UI/tooltip.py diff --git a/UI/camera_view.py b/UI/camera_view.py deleted file mode 100644 index 91863b1..0000000 --- a/UI/camera_view.py +++ /dev/null @@ -1,142 +0,0 @@ -from typing import Optional - -import pygame -import numpy as np - -from UI.frame import Frame -from UI.text import Text, TextStyle - - -class CameraView(Frame): - """ - A Frame that renders a camera feed inside itself, respecting pixel margins. 
- - Maintains aspect ratio with letterboxing - - Reacts to parent resize automatically - - Can be put behind everything via z_index - """ - def __init__( - self, - camera, - parent: Optional[Frame] = None, - *, - x: float = 0, - y: float = 0, - width: float = 1.0, - height: float = 1.0, - x_is_percent: bool = True, - y_is_percent: bool = True, - width_is_percent: bool = True, - height_is_percent: bool = True, - z_index: int = -100, # keep it behind panels/modals - x_align: str = "left", - y_align: str = "top", - background_color: Optional[pygame.Color] = None, - mouse_passthrough: bool = True, - left_margin_px: int = 0, - right_margin_px: int = 0, - top_margin_px: int = 0, - bottom_margin_px: int = 0, - ): - self.camera = camera - self.mouse_passthrough = mouse_passthrough - self.background_color = background_color - - # margins (in pixels) to reserve for other UI - self.left_margin_px = left_margin_px - self.right_margin_px = right_margin_px - self.top_margin_px = top_margin_px - self.bottom_margin_px = bottom_margin_px - - # track last applied size to avoid redundant resizes - self._last_draw_w = None - self._last_draw_h = None - self._last_frame_rect = None # (dx, dy, fw, fh) - - super().__init__( - parent=parent, - x=x, y=y, width=width, height=height, - x_is_percent=x_is_percent, y_is_percent=y_is_percent, - width_is_percent=width_is_percent, height_is_percent=height_is_percent, - z_index=z_index, x_align=x_align, y_align=y_align, - background_color=None, # we fill manually to keep margins clean - ) - - self.no_camera_text = Text( - text="No Camera Detected", - parent=self, - x=0.5, y=0.5, - x_is_percent=True, y_is_percent=True, - x_align="center", y_align="center", - style=TextStyle(font_size=32), - ) - - # Show/hide based on current init state - if self.camera.initialized: - self.no_camera_text.add_hidden_reason("SYSTEM") - - def _compute_inner_rect(self): - # Base geometry from normal frame rules - abs_x, abs_y, abs_w, abs_h = 
super().get_absolute_geometry() - # Apply pixel margins to shrink usable area - ix = abs_x + self.left_margin_px - iy = abs_y + self.top_margin_px - iw = max(0, abs_w - self.left_margin_px - self.right_margin_px) - ih = max(0, abs_h - self.top_margin_px - self.bottom_margin_px) - return ix, iy, iw, ih - - def get_frame_rect(self): - """ - Returns (x, y, w, h) of the currently drawn camera frame within the surface, - accounting for letterboxing. May be None if nothing drawn yet. - """ - return self._last_frame_rect - - def get_absolute_geometry(self): - # Expose the *inner* rect as the geometry of this view - return self._compute_inner_rect() - - def draw(self, surface: pygame.Surface) -> None: - if self.is_effectively_hidden: - return - - ix, iy, iw, ih = self._compute_inner_rect() - if self.background_color: - pygame.draw.rect(surface, self.background_color, (ix, iy, iw, ih)) - - # --- fetch NumPy frame (prefer still, fallback to last live stream) --- - arr = self.camera.get_last_frame(prefer="latest", wait_for_still=False) - - if arr is None or iw <= 0 or ih <= 0: - self._last_frame_rect = (ix, iy, 0, 0) - # (Optionally draw a subtle "no signal" background here) - for child in reversed(self.children): - child.draw(surface) - return - - # Ensure contiguous uint8 RGB - if arr.dtype != np.uint8: - arr = np.clip(arr, 0, 255).astype(np.uint8) - if arr.ndim == 2: - arr = np.stack([arr]*3, axis=-1) - h, w, c = arr.shape - assert c in (3, 4) - - # --- fit to (iw, ih) with letterboxing --- - scale = min(iw / w, ih / h) - tw, th = max(1, int(round(w * scale))), max(1, int(round(h * scale))) - - # Convert NumPy → Surface then scale - # Use frombuffer on a contiguous copy to avoid strides issues - buf = arr[:, :, :3].copy(order="C").tobytes() # RGB only for pygame - frame_surf = pygame.image.frombuffer(buf, (w, h), "RGB") - if (tw, th) != (w, h): - frame_surf = pygame.transform.smoothscale(frame_surf, (tw, th)) - - dx = ix + (iw - tw) // 2 - dy = iy + (ih - th) // 2 - 
self._last_frame_rect = (dx, dy, tw, th) - surface.blit(frame_surf, (dx, dy)) - - # Draw overlays/children - for child in reversed(self.children): - child.draw(surface) diff --git a/UI/flex_frame.py b/UI/flex_frame.py deleted file mode 100644 index f7f1bc5..0000000 --- a/UI/flex_frame.py +++ /dev/null @@ -1,119 +0,0 @@ -import pygame -from typing import Tuple -from UI.frame import Frame - -class FlexFrame(Frame): - """ - Simple flex-like layout container. - - Direction: column only (for now). - - Packs visible children from top to bottom. - - Uses each child's *current* height (so collapsed Sections shrink naturally). - - Optionally fills child widths to container width. - """ - - def __init__( - self, - *, - parent=None, - x=0, y=0, width=100, height=100, - x_is_percent=False, y_is_percent=False, - width_is_percent=False, height_is_percent=False, - z_index=0, - background_color: pygame.Color | None = None, - - # Flex options - padding: Tuple[int, int, int, int] = (0, 0, 0, 0), # (left, top, right, bottom) - gap: int = 8, - fill_child_width: bool = True, - align_horizontal: str = "left", # "left" | "center" | "right" - auto_height_to_content: bool = False, # If True, grow/shrink this frame's height to fit children - **kwargs - ): - super().__init__( - parent=parent, x=x, y=y, width=width, height=height, - x_is_percent=x_is_percent, y_is_percent=y_is_percent, - width_is_percent=width_is_percent, height_is_percent=height_is_percent, - z_index=z_index, background_color=background_color, - padding=padding, - **kwargs - ) - self.gap = gap - self.fill_child_width = fill_child_width - self.align_horizontal = align_horizontal - self.auto_height_to_content = auto_height_to_content - - self._layout_dirty = True - - # --- Public: you can call this if you change padding/gap/etc dynamically - def request_layout(self) -> None: - self._layout_dirty = True - - # --- Core layout: compute child positions/sizes in absolute container space - def _layout(self) -> None: - - y_cursor = 0 
# content-local pixels (0 is top of content box) - visible_children = [ch for ch in self.children if not ch.is_effectively_hidden] - - for i, child in enumerate(visible_children): - # --- Fill width if requested: since base Frame uses the content box, - # percent widths are now relative to content width. So 1.0 == fill. - if self.fill_child_width: - child.width_is_percent = True - child.width = 1.0 - # else: leave child's width props as-is - - # Horizontal alignment inside content box - if self.align_horizontal == "left": - child.x_align = "left" - elif self.align_horizontal == "center": - child.x_align = "center" - elif self.align_horizontal == "right": - child.x_align = "right" - else: - child.x_align = "left" - - # Position vertically (content-local); base Frame will offset by content origin - child.y_is_percent = False - child.y = y_cursor - - # For left/center/right we want x as an offset from alignment anchor - child.x_is_percent = False - child.x = 0 # use alignment only - - # After setting width/position hints, read child's computed height - # (this uses current width/height props relative to content box) - _, _, ch_w, ch_h = child.get_absolute_geometry() - - # Advance cursor - y_cursor += ch_h - if i != len(visible_children) - 1: - y_cursor += self.gap - - # Auto-size this FlexFrame’s OUTER height to fit children + padding - if self.auto_height_to_content and not self.height_is_percent: - pt, pr, pb, pl = self.padding # base Frame’s padding (top,right,bottom,left) - self.height = max(0, y_cursor + pt + pb) - - self._layout_dirty = False - - # Ensure layout is up-to-date in all the usual passes - def draw(self, surface: pygame.Surface) -> None: - self._layout() - super().draw(surface) - - def process_mouse_move(self, px, py): - self._layout() - super().process_mouse_move(px, py) - - def process_mouse_press(self, px, py, button): - self._layout() - super().process_mouse_press(px, py, button) - - def process_mouse_release(self, px, py, button): - 
self._layout() - super().process_mouse_release(px, py, button) - - def process_mouse_wheel(self, px: int, py: int, *, dx: int, dy: int) -> bool: - self._layout() - return super().process_mouse_wheel(px, py, dx=dx, dy=dy) diff --git a/UI/frame.py b/UI/frame.py deleted file mode 100644 index 527340f..0000000 --- a/UI/frame.py +++ /dev/null @@ -1,421 +0,0 @@ -import pygame -from typing import Callable, Optional, Tuple, Type, TypeVar, Iterator, Optional, List, Any - -T = TypeVar("T") - -def default_frame_background() -> Optional[pygame.Color]: - return None - -class Frame(): - def __init__( - self, parent=None, x=0, y=0, width=100, height=100, - x_is_percent=False, y_is_percent=False, - width_is_percent=False, height_is_percent=False, - z_index=0, x_align: str = 'left', y_align: str = 'top', - background_color: Optional[pygame.Color] = None, - fill_remaining_height: bool = False, - padding: Tuple[int, int, int, int] = (0, 0, 0, 0), - ): - self.parent = parent - self.children = [] - - self.x = x - self.y = y - self.width = width - self.height = height - - self.background_color = background_color - - self.x_is_percent = x_is_percent - self.y_is_percent = y_is_percent - self.width_is_percent = width_is_percent - self.height_is_percent = height_is_percent - self.fill_remaining_height = fill_remaining_height - self.padding = padding - - self.z_index = z_index - self.x_align = x_align - self.y_align = y_align - - self.is_hovered = False - self.is_pressed = False - self.mouse_passthrough = False - self.hidden_reasons: set[str] = set() - - # Automatically add parent if its passed as an argument - if parent is not None: - parent.add_child(self) - - @property - def debug_outline_color(self) -> pygame.Color: - return pygame.Color(255, 0, 0) # Default: red - - @property - def position(self) -> Tuple[int, int]: - return (self.x, self.y) - - def update_position(self, x_offset: int, y_offset: int) -> None: - """Move this frame and all children by the given pixel offset, regardless of 
percent/absolute mode.""" - self._apply_offset(x_offset, y_offset) - - for child in self.children: - _, _, parent_width, parent_height = self.get_content_geometry() - percent_dx = x_offset / parent_width if parent_width else 0 - percent_dy = y_offset / parent_height if parent_height else 0 - child._apply_offset(x_offset, y_offset, percent_dx, percent_dy) - - def _apply_offset(self, dx: float, dy: float, dx_percent: float = 0.0, dy_percent: float = 0.0) -> None: - """Shift this frame by the given amounts, using percent or absolute logic as needed.""" - if self.x_is_percent: - self.x += dx_percent - else: - self.x += dx - - if self.y_is_percent: - self.y += dy_percent - else: - self.y += dy - - def update_size(self, width_offset: int, height_offset: int) -> None: - """Resize this frame and all children by pixel delta, adjusting percent-based children proportionally.""" - self._apply_size_change(width_offset, height_offset) - - for child in self.children: - _, _, parent_width, parent_height = self.get_content_geometry() - percent_dw = width_offset / parent_width if parent_width else 0 - percent_dh = height_offset / parent_height if parent_height else 0 - child._apply_size_change(width_offset, height_offset, percent_dw, percent_dh) - - def _apply_size_change(self, dw: float, dh: float, dw_percent: float = 0.0, dh_percent: float = 0.0) -> None: - """Apply size delta to this frame, supporting both pixel and percent-based width/height.""" - if self.width_is_percent: - self.width += dw_percent - # else: don't modify absolute width - - # If we're filling the remaining height, ignore direct height resizes - if not self.fill_remaining_height: - if self.height_is_percent: - self.height += dh_percent - # else: don't modify absolute height - - def iter_descendants(self) -> Iterator["Frame"]: - """Depth-first traversal over all descendants (not including self).""" - stack: List["Frame"] = list(getattr(self, "children", [])) - while stack: - node = stack.pop() - yield node - 
stack.extend(getattr(node, "children", [])) - - def find_child_of_type(self, cls: Type[T], *, include_self: bool=False) -> Optional[T]: - """Return the first descendant (optionally self) that is an instance of `cls`.""" - if include_self and isinstance(self, cls): # type: ignore[arg-type] - return self # type: ignore[return-value] - for node in self.iter_descendants(): - if isinstance(node, cls): - return node # type: ignore[return-value] - return None - - def find_children_of_type(self, cls: Type[T], *, include_self: bool=False) -> List[T]: - """Return all descendants (optionally self) that are instances of `cls`.""" - out: List[T] = [] - if include_self and isinstance(self, cls): # type: ignore[arg-type] - out.append(self) # type: ignore[arg-type] - for node in self.iter_descendants(): - if isinstance(node, cls): - out.append(node) # type: ignore[arg-type] - return out - - def find_first(self, predicate: Callable[[Any], bool], *, include_self: bool=False) -> Optional[Any]: - """Generic: return first node for which predicate(node) is True.""" - if include_self and predicate(self): - return self - for node in self.iter_descendants(): - if predicate(node): - return node - return None - - @property - def absolute_position(self) -> Tuple[int, int]: - abs_width = self.width * parent_width if self.width_is_percent else self.width - abs_height = self.height * parent_height if self.height_is_percent else self.height - return (abs_width, abs_height) - - @property - def size(self) -> Tuple[int, int]: - return (self.width, self.height) - - def get_absolute_geometry(self): - """Returns absolute screen coordinates""" - if self.parent: - parent_x, parent_y, parent_width, parent_height = self.parent.get_content_geometry() - else: - parent_x, parent_y = 0, 0 - parent_width, parent_height = pygame.display.get_surface().get_size() - - # Raw (pre-alignment) values - raw_x = self.x * parent_width if self.x_is_percent else self.x - raw_y = self.y * parent_height if self.y_is_percent 
else self.y - - abs_width = self.width * parent_width if self.width_is_percent else self.width - - if self.fill_remaining_height: - # Pin to top and stretch to parent's bottom - abs_y = parent_y + raw_y - abs_height = max(0, (parent_y + parent_height) - abs_y) - else: - # Normal vertical alignment path (uses declared height) - abs_height = self.height * parent_height if self.height_is_percent else self.height - - if self.y_align == 'top': - abs_y = parent_y + raw_y - elif self.y_align == 'center': - if self.y_is_percent: - abs_y = parent_y + raw_y - (abs_height // 2) - else: - abs_y = parent_y + (parent_height // 2) + raw_y - (abs_height // 2) - elif self.y_align == 'bottom': - abs_y = parent_y + parent_height - raw_y - abs_height - else: - raise ValueError(f"Invalid y_align: {self.y_align}") - - # Horizontal alignment (unchanged) - if self.x_align == 'left': - abs_x = parent_x + raw_x - elif self.x_align == 'center': - if self.x_is_percent: - abs_x = parent_x + raw_x - (abs_width // 2) - else: - abs_x = parent_x + (parent_width // 2) + raw_x - (abs_width // 2) - elif self.x_align == 'right': - abs_x = parent_x + parent_width - raw_x - abs_width - else: - raise ValueError(f"Invalid x_align: {self.x_align}") - - return abs_x, abs_y, abs_width, abs_height - - def get_content_geometry(self) -> Tuple[int, int, int, int]: - """Inner (padded) rectangle children should layout inside.""" - abs_x, abs_y, abs_w, abs_h = self.get_absolute_geometry() - pad_top, pad_right, pad_bottom, pad_left = self.padding - inner_x = abs_x + pad_left - inner_y = abs_y + pad_top - inner_w = max(0, abs_w - pad_left - pad_right) - inner_h = max(0, abs_h - pad_top - pad_bottom) - return inner_x, inner_y, inner_w, inner_h - - def add_child(self, child): - child.parent = self - self.children.append(child) - self.children.sort(key=lambda c: c.z_index, reverse=True) # front-to-back order - - def for_each_descendant(self, fn): - stack = list(self.children) - while stack: - node = stack.pop() - 
fn(node) - stack.extend(node.children) - - def contains_point(self, px, py): - if self.is_effectively_hidden: - return False - abs_x, abs_y, abs_width, abs_height = self.get_absolute_geometry() - return abs_x <= px <= abs_x + abs_width and abs_y <= py <= abs_y + abs_height - - def handle_click(self, px, py): - if self.is_effectively_hidden: - return False - # First check children, then self - for child in (self.children): - if child.mouse_passthrough: - continue - if child.contains_point(px, py): - child.handle_click(px, py) - return # Only send to first child that contains it (or remove this if you want overlapping elements to handle too) - - self.on_click() - - def handle_hover(self, px, py): - if self.is_effectively_hidden: - return - - for child in (self.children): - if child.mouse_passthrough: - continue - if child.contains_point(px, py): - child.handle_hover(px, py) - return - if self.contains_point(px, py): - self.on_hover() - - def _clear_hover_recursive(self): - if self.is_hovered: - self.is_hovered = False - self.on_hover_leave() - for ch in self.children: - ch._clear_hover_recursive() - - def process_mouse_move(self, px, py): - """Hover handling with z occlusion""" - if self.is_effectively_hidden: - return - - # First propagate to children front-to-back - top_hit = None - for child in (self.children): - if child.mouse_passthrough: - continue - if child.contains_point(px, py): - top_hit = child - break - - for child in self.children: - if child is top_hit: - child.process_mouse_move(px, py) - else: - child._clear_hover_recursive() - - # Now check self hover state - inside = self.contains_point(px, py) - if inside and not self.is_hovered: - self.is_hovered = True - self.on_hover_enter() - elif not inside and self.is_hovered: - self.is_hovered = False - self.on_hover_leave() - - def process_mouse_press(self, px, py, button): - if self.is_effectively_hidden: - return - - for child in self.children: - if child.mouse_passthrough: - continue - if 
child.contains_point(px, py): - child.process_mouse_press(px, py, button) - return - - if self.contains_point(px, py): - self.is_pressed = True - self.on_mouse_press(button) - - def process_mouse_release(self, px, py, button): - if self.is_effectively_hidden: - return - - for child in self.children: - if child.mouse_passthrough: - continue - if child.contains_point(px, py): - child.process_mouse_release(px, py, button) - return - - if self.is_pressed: - self.is_pressed = False - self.on_mouse_release(button) - if self.contains_point(px, py): - self.on_click(button) - - def process_mouse_wheel(self, px: int, py: int, *, dx: int, dy: int) -> bool: - # Route to topmost eligible child under the cursor first - for child in reversed(self.children): # assume later children are drawn on top - if child.contains_point(px, py): - if child.process_mouse_wheel(px, py, dx=dx, dy=dy): - return True - - # If no child handled it, let THIS frame handle it (if it wants) - return bool(self.on_wheel(dx, dy, px, py)) - - - def broadcast_mouse_wheel(self, px: int, py: int, *, dx: int = 0, dy: int = 0) -> None: - """Give every widget a chance to react to wheel (e.g., global zoom, tooltips).""" - if self.is_effectively_hidden: - return - for child in self.children: - child.broadcast_mouse_wheel(px, py, dx=dx, dy=dy) - self.on_wheel(dx, dy, px, py) - - # ---- visibility core ---- - def add_hidden_reason(self, reason: str): - if reason not in self.hidden_reasons: - self.hidden_reasons.add(reason) - - def remove_hidden_reason(self, reason: str): - self.hidden_reasons.discard(reason) - - @property - def is_effectively_hidden(self) -> bool: - return bool(self.hidden_reasons) - - def hide(self, recursive: bool = False): - self.add_hidden_reason("USER") - if recursive: - for ch in self.children: - ch.hide(True) - - def show(self, recursive: bool = False): - self.remove_hidden_reason("USER") - if recursive: - for ch in self.children: - ch.show(True) - - def draw(self, surface: pygame.Surface) -> 
None: - if self.is_effectively_hidden: - return - - abs_x, abs_y, abs_w, abs_h = self.get_absolute_geometry() - - if self.background_color: - pygame.draw.rect(surface, self.background_color, (abs_x, abs_y, abs_w, abs_h)) - - for child in reversed(self.children): - child.draw(surface) - - # --- Override these --- - def on_click(self, button=None): - pass - - def on_mouse_press(self, button): - pass - - def on_mouse_release(self, button): - pass - - def on_wheel(self, dx: int, dy: int, px: int, py: int) -> None: - """Override in widgets that want wheel input. (dx/dy match pygame.MOUSEWHEEL)""" - return False - - def on_hover(self): - pass - - def on_hover_enter(self): - pass - - def on_hover_leave(self): - pass - - def broadcast_mouse_press(self, px, py, button): - """Give every widget a chance to react to a global mouse press (e.g., focus/unfocus).""" - if self.is_effectively_hidden: - return - - for child in self.children: - child.broadcast_mouse_press(px, py, button) - self.on_global_mouse_press(px, py, button) - - def on_global_mouse_press(self, px, py, button): - """Override in widgets that need to react even if the click was outside them.""" - pass - - def broadcast_key_event(self, event): - """Bubble key events to all widgets; inactive widgets can ignore them.""" - if self.is_effectively_hidden: - return - - for child in self.children: - child.broadcast_key_event(event) - - self.on_key_event(event) - - def on_key_event(self, event): - """Override in widgets that want keyboard input.""" - pass diff --git a/UI/input/button.py b/UI/input/button.py deleted file mode 100644 index 371d3af..0000000 --- a/UI/input/button.py +++ /dev/null @@ -1,186 +0,0 @@ -import pygame -from typing import Callable, Optional, Tuple -from dataclasses import dataclass, field -from enum import Enum -from UI.text import Text, TextStyle -from UI.frame import Frame - -class ButtonShape(Enum): - RECTANGLE = "rectangle" - DIAMOND = "diamond" - -def default_background() -> pygame.Color: - 
return pygame.Color("#dbdbdb") - -def default_foreground() -> pygame.Color: # used for BORDER only - return pygame.Color("#b3b4b6") - -def default_hover_background() -> pygame.Color: - return pygame.Color("#b3b4b6") - -def default_disabled_background() -> pygame.Color: - return pygame.Color(128, 128, 128) - -def default_disabled_foreground() -> pygame.Color: - return pygame.Color(192, 192, 192) - -@dataclass -class ButtonColors: - # Backgrounds - background: pygame.Color = field(default_factory=default_background) - hover_background: pygame.Color = field(default_factory=default_hover_background) - disabled_background: pygame.Color = field(default_factory=default_disabled_background) - - # Borders ("foreground") - foreground: pygame.Color = field(default_factory=default_foreground) - hover_foreground: Optional[pygame.Color] = None - disabled_foreground: pygame.Color = field(default_factory=default_disabled_foreground) - - -class Button(Frame): - def __init__( - self, - function_to_call: Callable, - x: int, - y: int, - width: int, - height: int, - text: str = "", - colors: Optional[ButtonColors] = None, - text_style: Optional[TextStyle] = None, - args: Optional[Tuple] = None, - args_provider: Optional[Callable[[], Tuple]] = None, - shape: ButtonShape = ButtonShape.RECTANGLE, - **frame_kwargs - ): - super().__init__(x=x, y=y, width=width, height=height, **frame_kwargs) - - self.function_to_call = function_to_call - self.args = args or () - self.args_provider = args_provider - self.shape = shape - - self.is_hover = False - self.is_enabled = True - self.colors = colors or ButtonColors() - self.text_style = text_style or TextStyle(font_size=min(height - 4, 32)) - - # If hover border isn't given, keep border consistent with normal foreground. 
- if self.colors.hover_foreground is None: - self.colors.hover_foreground = self.colors.foreground - - # Create text child if provided - if text: - self.text = Text( - text, - x=0.5, y=0.5, - x_is_percent=True, - y_is_percent=True, - x_align="center", - y_align="center", - style=self.text_style - ) - # Inherit initial state - self.text.set_is_enabled(self.is_enabled) - self.text.set_is_hover(self.is_hover) - self.add_child(self.text) - else: - self.text = None - - @property - def debug_outline_color(self) -> pygame.Color: - return pygame.Color(0, 255, 0) - - def _point_in_diamond(self, x: int, y: int) -> bool: - abs_x, abs_y, abs_w, abs_h = self.get_absolute_geometry() - rel_x = x - abs_x - rel_y = y - abs_y - cx = abs_w / 2 - cy = abs_h / 2 - return (abs(rel_x - cx) / cx + abs(rel_y - cy) / cy) <= 1.0 - - def _get_diamond_points(self) -> list: - abs_x, abs_y, abs_w, abs_h = self.get_absolute_geometry() - cx = abs_x + abs_w / 2 - cy = abs_y + abs_h / 2 - return [ - (cx, abs_y), # top - (abs_x + abs_w, cy), # right - (cx, abs_y + abs_h), # bottom - (abs_x, cy) # left - ] - - def contains_point(self, x: int, y: int) -> bool: - if self.shape == ButtonShape.DIAMOND: - return self._point_in_diamond(x, y) - return super().contains_point(x, y) - - def on_click(self, button=None): - if not self.is_enabled: - return - args = self.args_provider() if self.args_provider else self.args - self.function_to_call(*args) - - def on_hover_enter(self): - self.is_hover = True - if self.text: - self.text.set_is_hover(True) - - def on_hover_leave(self): - self.is_hover = False - if self.text: - self.text.set_is_hover(False) - - def set_enabled(self, enabled: bool) -> None: - if self.is_enabled != enabled: - self.is_enabled = enabled - if self.text: - self.text.set_is_enabled(enabled) - - def set_text(self, text: str) -> None: - if self.text: - self.text.set_text(text) - elif text: - self.text = Text( - text, - x=0.5, y=0.5, - x_is_percent=True, - y_is_percent=True, - x_align="center", - 
y_align="center", - style=self.text_style, - ) - self.text.set_is_enabled(self.is_enabled) - self.text.set_is_hover(self.is_hover) - self.add_child(self.text) - - def set_shape(self, shape: ButtonShape) -> None: - self.shape = shape - - def _resolve_colors(self): - """Compute bg and border colors based on state (text handled by Text).""" - if not self.is_enabled: - return self.colors.disabled_background, self.colors.disabled_foreground - if self.is_hover: - return self.colors.hover_background, (self.colors.hover_foreground or self.colors.foreground) - return self.colors.background, self.colors.foreground - - def draw(self, surface: pygame.Surface) -> None: - if self.is_effectively_hidden: - return - - abs_x, abs_y, abs_w, abs_h = self.get_absolute_geometry() - bg_color, border_color = self._resolve_colors() - - # Draw geometry - if self.shape == ButtonShape.DIAMOND: - points = self._get_diamond_points() - pygame.draw.polygon(surface, bg_color, points) - pygame.draw.polygon(surface, border_color, points, 2) - else: - pygame.draw.rect(surface, bg_color, (abs_x, abs_y, abs_w, abs_h)) - pygame.draw.rect(surface, border_color, (abs_x, abs_y, abs_w, abs_h), 2) - - # Draw children - for child in self.children: - child.draw(surface) diff --git a/UI/input/button_icon.py b/UI/input/button_icon.py deleted file mode 100644 index e6c8900..0000000 --- a/UI/input/button_icon.py +++ /dev/null @@ -1,106 +0,0 @@ -import pygame -from UI.frame import Frame - - -def _load_surface(img_or_path: pygame.Surface | str) -> pygame.Surface: - """Load and convert a surface from either a path or an existing Surface.""" - if isinstance(img_or_path, pygame.Surface): - return img_or_path.convert_alpha() - return pygame.image.load(img_or_path).convert_alpha() - - -def _recolor_by_alpha_mask(src: pygame.Surface, - fill_rgba: tuple[int, int, int, int]) -> pygame.Surface: - """ - Recolor a monochrome/black symbol using ONLY its alpha as a mask. 
- All nontransparent pixels become `fill_rgba` with their original per-pixel alpha. - """ - out = pygame.Surface(src.get_size(), pygame.SRCALPHA) - out.fill(fill_rgba) - - try: - # Fast path with NumPy/surfarray - import pygame.surfarray as sarr # numpy-backed - alpha_src = sarr.pixels_alpha(src).copy() # copy to avoid locking issues - sarr.pixels_alpha(out)[:, :] = alpha_src - except Exception: - # Fallback: per-pixel (a bit slower, but fine for icons) - w, h = src.get_size() - try: - for y in range(h): - for x in range(w): - a = src.get_at((x, y)).a - if a: # only where alpha > 0 - r, g, b, _ = fill_rgba - out.set_at((x, y), pygame.Color(r, g, b, a)) - finally: - out.unlock() - src.unlock() - return out - - - -class ButtonIcon(Frame): - """ - A child widget that displays an image on top of a Button, recoloring a - specific color depending on hover state. - """ - - def __init__( - self, - parent_button, # your Button instance - image: pygame.Surface | str, # path or loaded surface - *, - normal_replace: tuple[int, int, int, int], - hover_replace: tuple[int, int, int, int], - size: tuple[int, int] | None = None, # (width, height) in px - inset_px: int = 0, # optional inset padding - z_index: int = 10 # draw order - ): - # Fills parent’s area by default - super().__init__(parent=parent_button, - x=0, y=0, width=1.0, height=1.0, - x_is_percent=True, y_is_percent=True, - width_is_percent=True, height_is_percent=True, - z_index=z_index, - padding=(inset_px, inset_px, inset_px, inset_px)) - - self.mouse_passthrough = True - self._size = size - base = _load_surface(image) - self._img_normal = _recolor_by_alpha_mask(base, normal_replace) - self._img_hover = _recolor_by_alpha_mask(base, hover_replace) - self._img_disabled = self._make_disabled(self._img_normal) - - def _make_disabled(self, surf: pygame.Surface) -> pygame.Surface: - """Dim the image for disabled state.""" - out = surf.copy() - out.fill((255, 255, 255, 153), special_flags=pygame.BLEND_RGBA_MULT) - return out 
- - def draw(self, surface: pygame.Surface) -> None: - if self.is_effectively_hidden: - return - - parent = self.parent - if not parent.is_enabled: - img = self._img_disabled - elif hasattr(parent, "is_hover") and parent.is_hover: - img = self._img_hover - else: - img = self._img_normal - - inner_x, inner_y, inner_w, inner_h = self.get_content_geometry() - - if self._size: - tw, th = self._size - blit_img = pygame.transform.smoothscale(img, (tw, th)) - else: - # default: fit inside content area - blit_img = pygame.transform.smoothscale( - img, (inner_w, inner_h) - ) - - bx = inner_x + (inner_w - blit_img.get_width()) // 2 - by = inner_y + (inner_h - blit_img.get_height()) // 2 - surface.blit(blit_img, (bx, by)) diff --git a/UI/input/radio.py b/UI/input/radio.py deleted file mode 100644 index 8b2c761..0000000 --- a/UI/input/radio.py +++ /dev/null @@ -1,185 +0,0 @@ -import pygame -from typing import Callable, Optional, Any, List -from dataclasses import dataclass, field - -from UI.input.button import Button, ButtonColors -from UI.text import TextStyle - -@dataclass -class SelectedColors: - """Optional override palette when a RadioButton is selected.""" - background: Optional[pygame.Color] = None - hover_background: Optional[pygame.Color] = None - foreground: Optional[pygame.Color] = None # border color when selected - hover_foreground: Optional[pygame.Color] = None - -class RadioGroup: - """ - Manages an exclusive group of RadioButtons. - - Only one RadioButton can be selected at a time. - - Optionally allow deselection by clicking the already-selected button. 
- """ - def __init__( - self, - allow_deselect: bool = False, - on_change: Optional[Callable[[Optional["RadioButton"]], None]] = None - ): - self._buttons: List["RadioButton"] = [] - self._selected: Optional["RadioButton"] = None - self.allow_deselect = allow_deselect - self.on_change = on_change - - def add(self, btn: "RadioButton") -> None: - if btn not in self._buttons: - self._buttons.append(btn) - btn._group = self - if btn.is_selected: - self.select(btn, fire=False) - - def remove(self, btn: "RadioButton") -> None: - if btn in self._buttons: - was_selected = (btn is self._selected) - self._buttons.remove(btn) - btn._group = None - if was_selected: - self._selected = None - if self.on_change: - self.on_change(None) - - def select(self, btn: Optional["RadioButton"], fire: bool = True) -> None: - if btn is None: - if self._selected: - self._selected._set_selected(False) - self._selected = None - if fire and self.on_change: - self.on_change(None) - return - - if btn not in self._buttons: - self.add(btn) - - if self._selected is btn: - if self.allow_deselect: - # Deselect current - btn._set_selected(False) - self._selected = None - if fire and self.on_change: - self.on_change(None) - # else: clicking again does nothing - return - - # Deselect previous - if self._selected: - self._selected._set_selected(False) - - # Select new - btn._set_selected(True) - self._selected = btn - if fire and self.on_change: - self.on_change(btn) - - def get_selected(self) -> Optional["RadioButton"]: - return self._selected - - def get_value(self) -> Optional[Any]: - return self._selected.value if self._selected else None - - def set_value(self, value: Any, fire: bool = True) -> None: - for b in self._buttons: - if b.value == value: - self.select(b, fire=fire) - return - # If value not found, deselect all - self.select(None, fire=fire) - - -class RadioButton(Button): - """ - A Button that participates in a RadioGroup. - - Clicking selects this button in its group (exclusive). 
- - Appearance changes when selected (via SelectedColors). - - You can still pass a per-button callback (function_to_call). - """ - def __init__( - self, - function_to_call, - x, y, width, height, - text: str = "", - *, - value: Any = None, - group: Optional[RadioGroup] = None, - selected: bool = False, - colors: Optional[ButtonColors] = None, - selected_colors: Optional[SelectedColors] = None, - text_style: Optional[TextStyle] = None, - **frame_kwargs - ): - super().__init__( - function_to_call=function_to_call, - x=x, y=y, width=width, height=height, - text=text, colors=colors, text_style=text_style, **frame_kwargs - ) - self.value = value if value is not None else text - self._group: Optional[RadioGroup] = None - self._is_selected: bool = False - self._selected_colors = selected_colors or SelectedColors() - - if group: - group.add(self) - if selected: - # defer to group to enforce exclusivity - (group or self._group).select(self, fire=False) - - # --- Public API --- - @property - def is_selected(self) -> bool: - return self._is_selected - - def set_selected(self, selected: bool, fire: bool = True) -> None: - """ - Ask the group to set selection (preferred). - If no group, sets locally. - """ - if self._group: - self._group.select(self if selected else None, fire=fire) - else: - self._set_selected(selected) - - # --- Internal state change (no group notifications) --- - def _set_selected(self, selected: bool) -> None: - if self._is_selected == selected: - return - self._is_selected = selected - # If you want to adjust child Text style on selection, you can do it here. - - # --- Button overrides --- - def on_click(self, button=None): - if not self.is_enabled: - return - # Selection logic first (so callbacks see the new state) - if self._group: - self._group.select(self) - else: - self._set_selected(True) - - # Optional per-button callback - super().on_click(button) - - def _resolve_colors(self): - """ - Extend Button color resolution to account for selected state. 
- We only override backgrounds/borders—your Text color stays independent. - """ - base_bg, base_border = super()._resolve_colors() - if not self._is_selected: - return base_bg, base_border - - # Selected palette with graceful fallback to base colors - bg = self._selected_colors.background or base_bg - fg = self._selected_colors.foreground or base_border - - if self.is_hover: - bg = self._selected_colors.hover_background or bg - fg = self._selected_colors.hover_foreground or fg - - return bg, fg diff --git a/UI/input/scroll_frame.py b/UI/input/scroll_frame.py deleted file mode 100644 index 11f5ccb..0000000 --- a/UI/input/scroll_frame.py +++ /dev/null @@ -1,306 +0,0 @@ -# UI/input/scroll_frame.py -import pygame -from UI.frame import Frame - -class ScrollbarV(Frame): - """Vertical scrollbar child for a ScrollFrame (right-side strip).""" - def __init__( - self, - *, - parent: "ScrollFrame", - width: int = 12, - track_color: pygame.Color = pygame.Color("#d7d7d7"), - thumb_color: pygame.Color = pygame.Color("#9a9a9a"), - thumb_min_px: int = 24, - z_index: int = 1000, - ): - super().__init__( - parent=parent, - x=0, y=0, - width=width, height=1.0, - width_is_percent=False, height_is_percent=True, - x_align="right", y_align="top", - z_index=z_index, - background_color=None, - ) - self._dragging = False - self._drag_offset = 0 - self.track_color = track_color - self.thumb_color = thumb_color - self.thumb_min_px = thumb_min_px - - # --- helpers that query parent metrics --- - @property - def _sf(self) -> "ScrollFrame": - return self.parent # type: ignore[return-value] - - def _viewport_h(self) -> int: - return self._sf._viewport_height() - - def _content_h(self) -> int: - return self._sf._content_height() - - def _track_rect(self) -> pygame.Rect: - x, y, w, h = self.get_absolute_geometry() - return pygame.Rect(x, y, w, h) - - def _thumb_h(self) -> int: - vh = self._viewport_h() - ch = self._content_h() - if ch <= vh: - return vh - return max(int(vh * (vh / ch)), 
self.thumb_min_px) - - def _thumb_rect(self) -> pygame.Rect: - track = self._track_rect() - vh = self._viewport_h() - ch = self._content_h() - - if ch <= vh: - # No overflow: fill track (thumb == track) - return pygame.Rect(track.x, track.y, track.w, track.h) - - thumb_h = self._thumb_h() - track_h = track.h - thumb_h - ratio = self._sf.scroll_y / (ch - vh) if ch > vh else 0.0 - thumb_y = track.y + int(ratio * track_h) - return pygame.Rect(track.x, thumb_y, track.w, thumb_h) - - def _set_scroll_from_thumb_y(self, thumb_y: int): - track = self._track_rect() - vh = self._viewport_h() - ch = self._content_h() - if ch <= vh: - self._sf._set_scroll(0) - return - thumb_h = self._thumb_h() - track_h = track.h - thumb_h - ratio = (thumb_y - track.y) / track_h if track_h > 0 else 0.0 - self._sf._set_scroll(ratio * (ch - vh)) - - # --- input handling --- - def process_mouse_press(self, px, py, button): - if self.is_effectively_hidden: - return - if button == "left": - thumb = self._thumb_rect() - track = self._track_rect() - if thumb.collidepoint(px, py): - self._dragging = True - self._drag_offset = py - thumb.y - return - if track.collidepoint(px, py): - # Jump to click position and begin drag - new_y = py - self._thumb_h() // 2 - new_y = max(track.y, min(new_y, track.bottom - self._thumb_h())) - self._set_scroll_from_thumb_y(new_y) - self._dragging = True - self._drag_offset = py - self._thumb_rect().y - return - # Not on the scrollbar; no need to route further because ScrollFrame will. 
- - def process_mouse_move(self, px, py): - if self._dragging: - track = self._track_rect() - th = self._thumb_h() - new_y = max(track.y, min(py - self._drag_offset, track.bottom - th)) - self._set_scroll_from_thumb_y(new_y) - - def process_mouse_release(self, px, py, button): - self._dragging = False - - # --- drawing --- - def draw(self, surface: pygame.Surface) -> None: - if self.is_effectively_hidden: - return - - # track - track = self._track_rect() - pygame.draw.rect(surface, self.track_color, track) - - # thumb - thumb = self._thumb_rect() - pygame.draw.rect(surface, self.thumb_color, thumb) - pygame.draw.rect(surface, pygame.Color(0, 0, 0), thumb, 1) - - -class ScrollFrame(Frame): - """Vertical-only scrollable container with a dedicated right-side ScrollbarV child.""" - def __init__( - self, - *, - parent, - x=0, y=0, width=100, height=100, - scroll_speed=30, - scrollbar_width=12, - background_color=pygame.Color("#f5f5f5"), - track_color=pygame.Color("#d7d7d7"), - thumb_color=pygame.Color("#9a9a9a"), - thumb_min_px=24, - bottom_padding = 10, - z_index=0, - **kwargs - ): - # Prevent overridden add_child from running before content exists - self._initializing = True - super().__init__( - parent=parent, x=x, y=y, width=width, height=height, - background_color=background_color, z_index=z_index, - **kwargs - ) - - self.scroll_y = 0 - self.scroll_speed = scroll_speed - self.scrollbar_width = scrollbar_width - self.bottom_padding = bottom_padding - - # Content container (everything the user adds goes here) - self.content = Frame( - parent=None, - x=0, y=0, - width=1.0, height=1.0, - width_is_percent=True, height_is_percent=True, - z_index=z_index # below scrollbar - ) - # Attach directly then sort - self.content.parent = self - self.children.append(self.content) - - # Scrollbar child (high z so it wins hit-testing) - self.scrollbar = ScrollbarV( - parent=self, - width=scrollbar_width, - track_color=track_color, - thumb_color=thumb_color, - 
thumb_min_px=thumb_min_px, - z_index=z_index + 999 # ensure on top - ) - - self._initializing = False - - # Route user children into the content frame - def add_child(self, child): - if self._initializing: - return super().add_child(child) - return self.content.add_child(child) - - # --- geometry + layout --- - def _viewport_rect(self) -> pygame.Rect: - x, y, w, h = self.get_absolute_geometry() - # If scrollbar is hidden, content gets full width - width = w if getattr(self.scrollbar, "is_hidden", False) else max(0, w - self.scrollbar_width) - return pygame.Rect(x, y, width, h) - - def _viewport_height(self) -> int: - return self.get_absolute_geometry()[3] - - def _layout(self): - """Two-pass layout so resizes clamp scroll and scrollbar hides/shows instantly.""" - abs_x, abs_y, abs_w, abs_h = self.get_absolute_geometry() - - # 1) Assume no scrollbar first - self.scrollbar.is_hidden = True - self._apply_content_geometry(abs_w, abs_h) - - # 2) Check if we actually need scrolling - need_scroll = self._content_height() > abs_h - if need_scroll: - # With scrollbar shown, viewport gets narrower - self.scrollbar.is_hidden = False - self._apply_content_geometry(max(0, abs_w - self.scrollbar_width), abs_h) - - # 3) Clamp scroll after any size change - self._clamp_scroll() - - def _content_height(self) -> int: - """Total vertical extent of content children relative to content's top.""" - if not self.content.children: - return self._viewport_height() - - _, content_abs_top, _, _ = self.content.get_absolute_geometry() - max_bottom_rel = 0 - for ch in self.content.children: - _, ch_abs_y, _, ch_h = ch.get_absolute_geometry() - bottom_rel = (ch_abs_y + ch_h) - content_abs_top - if bottom_rel > max_bottom_rel: - max_bottom_rel = bottom_rel - - return max(max_bottom_rel + self.bottom_padding, self._viewport_height()) - - - def _max_scroll(self) -> int: - return max(0, self._content_height() - self._viewport_height()) - - def _clamp_scroll(self) -> None: - """Keep scroll_y valid 
after size changes (e.g., window expand).""" - max_scroll = self._max_scroll() - new_y = min(max(self.scroll_y, 0), max_scroll) - if new_y != self.scroll_y: - self.scroll_y = new_y - self.content.y = -self.scroll_y - - def _apply_content_geometry(self, viewport_w: int, viewport_h: int) -> None: - self.content.width_is_percent = False - self.content.height_is_percent = False - self.content.x = 0 - self.content.y = -self.scroll_y - self.content.width = viewport_w - self.content.height = max(viewport_h, self._content_height()) - - # --- scroll core --- - def _set_scroll(self, value: int | float): - max_scroll = max(0, self._content_height() - self._viewport_height()) - self.scroll_y = max(0, min(int(value), max_scroll)) - self.content.y = -self.scroll_y - - # --- input routing: run layout first so geometry is up-to-date --- - def process_mouse_press(self, px, py, button): - self._layout() - # Children (including scrollbar) handle their own input via z-index ordering - super().process_mouse_press(px, py, button) - - def process_mouse_move(self, px, py): - self._layout() - # If the scrollbar is dragging, capture the move - if self.scrollbar._dragging: - self.scrollbar.process_mouse_move(px, py) - return - super().process_mouse_move(px, py) - - def process_mouse_release(self, px, py, button): - self._layout() - # If the scrollbar started a drag, ensure it gets the release - if self.scrollbar._dragging: - self.scrollbar.process_mouse_release(px, py, button) - return - super().process_mouse_release(px, py, button) - - def on_wheel(self, dx: int, dy: int, px: int, py: int) -> None: - # Keep layout fresh for correct hit-tests - self._layout() - - # Only react if the mouse is over the visible viewport or the scrollbar track - if not (self._viewport_rect().collidepoint(px, py) or self.scrollbar._track_rect().collidepoint(px, py)): - return - - # Pygame: positive dy == wheel up - self._set_scroll(self.scroll_y - dy * self.scroll_speed) - - # --- drawing --- - def draw(self, 
surface: pygame.Surface) -> None: - if self.is_effectively_hidden: - return - - self._layout() - abs_x, abs_y, abs_w, abs_h = self.get_absolute_geometry() - - if self.background_color: - pygame.draw.rect(surface, self.background_color, (abs_x, abs_y, abs_w, abs_h)) - - old_clip = surface.get_clip() - surface.set_clip(self._viewport_rect()) - self.content.draw(surface) - surface.set_clip(old_clip) - - if not self.scrollbar.is_hidden: - self.scrollbar.draw(surface) diff --git a/UI/input/slider.py b/UI/input/slider.py deleted file mode 100644 index e6df16e..0000000 --- a/UI/input/slider.py +++ /dev/null @@ -1,244 +0,0 @@ -# slider.py -import pygame -from typing import Callable, Optional -from UI.frame import Frame -from UI.text import TextStyle -from UI.input.button import Button, ButtonColors - -class Slider(Frame): - def __init__( - self, - min_value: float, - max_value: float, - x: int, - y: int, - width: int, - height: int, - initial_value: Optional[float] = None, - on_change: Optional[Callable[[float], None]] = None, - tick_count: int = 2, # 0 or >=2; 1 coerced to 2 - track_color: Optional[pygame.Color] = None, - tick_color: Optional[pygame.Color] = None, - knob_fill: Optional[pygame.Color] = None, - knob_border_color: Optional[pygame.Color] = None, - knob_hover_fill: Optional[pygame.Color] = None, - knob_hover_border_color: Optional[pygame.Color] = None, - with_buttons: bool = False, - step: float = 1.0, - **frame_kwargs - ): - super().__init__(x=x, y=y, width=width, height=height, **frame_kwargs) - - # --- values / callback --- - self.min_value = min_value - self.max_value = max_value - self.value = min_value if initial_value is None else max(min_value, min(max_value, initial_value)) - self.on_change = on_change - - # --- ticks --- - if tick_count < 0: - tick_count = 0 - if tick_count == 1: - tick_count = 2 - self.tick_count = tick_count - - # --- visuals --- - self.track_height = 4 - self.knob_width = 10 - self.knob_border = 2 - - self.track_color = 
track_color or pygame.Color("#b3b4b6") - self.tick_color = tick_color or self.track_color - self.knob_fill = knob_fill or pygame.Color("#b3b4b6") - self.knob_border_color = pygame.Color(knob_border_color) if knob_border_color else pygame.Color("#5a5a5a") - - # hover styles - self.knob_hover_fill = knob_hover_fill or pygame.Color("#5a5a5a") - self.knob_hover_border_color = pygame.Color(knob_hover_border_color) if knob_hover_border_color else pygame.Color("#5a5a5a") - - # --- interaction --- - self.dragging = False - self.knob_hover = False - - # --- optional +/- buttons (fixed layout, no resize adjustments) --- - self.with_buttons = with_buttons - self.step = step - self.btn_w = 0 - self.btn_margin = 4 - self.left_button: Optional[Button] = None - self.right_button: Optional[Button] = None - - if self.with_buttons: - self.btn_w = min(height, 16) - text_style = TextStyle( - hover_color=pygame.Color("#5a5a5a"), - font_size=24 - ) - - # White background & border, keep text black - btn_colors = ButtonColors( - background=pygame.Color("#f5f5f5"), - hover_background=pygame.Color("#f5f5f5"), - disabled_background=pygame.Color("#f5f5f5"), - - foreground=pygame.Color("#f5f5f5"), - hover_foreground=pygame.Color("#f5f5f5"), - disabled_foreground=pygame.Color("#f5f5f5") - ) - - self.left_button = Button( - self._decrement, 0, 0, self.btn_w, height, text_style=text_style, - text="-", colors=btn_colors, parent=self - ) - self.right_button = Button( - self._increment, width - self.btn_w, 0, self.btn_w, height, text_style=text_style, - text="+", colors=btn_colors, parent=self - ) - - # ===== Public helpers (so external buttons can also drive the slider) ===== - def increment(self, amount: Optional[float] = None): - self._bump(+ (amount if amount is not None else self.step)) - - def decrement(self, amount: Optional[float] = None): - self._bump(- (amount if amount is not None else self.step)) - - def set_value(self, v: float, *, notify: bool = False) -> None: - """ - Programmatically 
set the slider's value, clamped to [min_value, max_value]. - If notify=True, fire on_change if the value actually changed. - """ - v = max(self.min_value, min(self.max_value, float(v))) - old = self.value - self.value = v - if notify and self.on_change and self.value != old: - self.on_change(self.value) - - # ===== Internal helpers ===== - def _bump(self, delta: float): - old = self.value - self.value = max(self.min_value, min(self.max_value, self.value + delta)) - if self.on_change and self.value != old: - self.on_change(self.value) - - def _increment(self): - self._bump(self.step) - - def _decrement(self): - self._bump(-self.step) - - def _track_rect(self): - """Return (track_x, track_y, track_w, abs_h) for the inner track. - If buttons are enabled, we reserve left/right gutters.""" - abs_x, abs_y, abs_w, abs_h = self.get_absolute_geometry() - if self.with_buttons: - track_x = abs_x + self.btn_w + self.btn_margin - track_w = max(1, abs_w - 2 * (self.btn_w + self.btn_margin)) - else: - track_x = abs_x - track_w = max(1, abs_w) - track_y = abs_y + abs_h // 2 - self.track_height // 2 - return track_x, track_y, track_w, abs_h - - # --- value <-> position mapped to inner track --- - def _value_to_pos(self) -> int: - track_x, _, track_w, _ = self._track_rect() - if self.max_value == self.min_value: - return track_x - ratio = (self.value - self.min_value) / (self.max_value - self.min_value) - return track_x + int(ratio * track_w) - - def _pos_to_value(self, px: int) -> float: - track_x, _, track_w, _ = self._track_rect() - if track_w <= 0 or self.max_value == self.min_value: - return self.min_value - ratio = (px - track_x) / track_w - ratio = max(0.0, min(1.0, ratio)) - return self.min_value + ratio * (self.max_value - self.min_value) - - # ===== Events ===== - def on_mouse_press(self, button): - if button == "left": - self.dragging = True - - def on_mouse_release(self, button): - if button == "left": - self.dragging = False - - def process_mouse_move(self, px, py): - # 
Keep your existing hover bookkeeping - super().process_mouse_move(px, py) - - # Purely visual hover for the knob - if self.is_hovered: - track_x, _, _, abs_h = self._track_rect() - knob_center = self._value_to_pos() - knob_x = knob_center - self.knob_width // 2 - abs_x, abs_y, _, _ = self.get_absolute_geometry() - self.knob_hover = (knob_x <= px <= knob_x + self.knob_width) and (abs_y <= py <= abs_y + abs_h) - else: - self.knob_hover = False - - def on_hover_leave(self): - # Only clear highlight; do not cancel a live drag - self.knob_hover = False - if not pygame.mouse.get_pressed()[0]: - self.dragging = False - - # ===== Drawing ===== - def draw(self, surface: pygame.Surface) -> None: - if self.is_effectively_hidden: - return - - # Keep dragging even when mouse leaves the slider, stop on global button up - if self.dragging: - left_down = pygame.mouse.get_pressed()[0] - if not left_down: - self.dragging = False - else: - mx, _ = pygame.mouse.get_pos() - old = self.value - self.value = self._pos_to_value(mx) - if self.on_change and self.value != old: - self.on_change(self.value) - - # Draw track - track_x, track_y, track_w, abs_h = self._track_rect() - pygame.draw.rect(surface, self.track_color, (track_x, track_y, track_w, self.track_height)) - - # Ticks - if self.tick_count >= 2: - base_tick_h = max(6, min(12, abs_h // 2)) - end_tick_h = int(base_tick_h * 1.5) - for i in range(self.tick_count): - t = i / (self.tick_count - 1) - tx = track_x + int(t * track_w) - tick_h = end_tick_h if i in (0, self.tick_count - 1) else base_tick_h - top = track_y - tick_h // 2 - bottom = top + tick_h - pygame.draw.line( - surface, - self.tick_color, - (tx, top), - (tx, bottom + self.track_height), - int(self.track_height * 0.75) - ) - - # Knob (rectangle) with hover style - knob_x_center = self._value_to_pos() - knob_x = knob_x_center - self.knob_width // 2 - abs_x, abs_y, _, _ = self.get_absolute_geometry() - knob_y = abs_y - - if self.knob_hover: - fill_color = 
self.knob_hover_fill - border_color = self.knob_hover_border_color - else: - fill_color = self.knob_fill - border_color = self.knob_border_color - - # Draw children (the +/- buttons) - for child in self.children: - child.draw(surface) - - pygame.draw.rect(surface, fill_color, (knob_x, knob_y, self.knob_width, abs_h)) - pygame.draw.rect(surface, border_color, (knob_x, knob_y, self.knob_width, abs_h), self.knob_border) diff --git a/UI/input/text_field.py b/UI/input/text_field.py deleted file mode 100644 index fb80028..0000000 --- a/UI/input/text_field.py +++ /dev/null @@ -1,425 +0,0 @@ -import unicodedata -import re - -import pygame -from UI.frame import Frame -from UI.text import Text, TextStyle - -class TextField(Frame): - def __init__(self, parent=None, x=0, y=0, width=200, height=30, - placeholder="", style=None, - background_color=pygame.Color("white"), - text_color=pygame.Color("black"), - border_color=pygame.Color("black"), - padding=5, - on_text_change=None, allowed_pattern: str = None, on_commit=None, - **kwargs): - super().__init__(parent=parent, x=x, y=y, width=width, height=height, - background_color=background_color, **kwargs) - - self.active = False - self.placeholder = placeholder - self.text = "" - self.style = style or TextStyle(color=text_color, font_size=18) - self.border_color = border_color - - # Callbacks - self.on_text_change = on_text_change - self.allowed_pattern = re.compile(allowed_pattern) if allowed_pattern else None - self.on_commit = on_commit - - # Rendered text element inside the field - self._text = Text(self.placeholder, parent=self, x=padding, y=height // 2, - x_align="left", y_align="center", style=self.style) - - # Caret state - self._padding = padding - self._caret_index = 0 - self._caret_visible = True - self._blink_interval_ms = 500 - self._last_blink_ms = pygame.time.get_ticks() - self._scroll_px = 0 - - # --- key repeat state --- - self._repeat_key = None - self._repeat_delay_ms = 350 # first repeat delay (ms) - 
self._repeat_interval_ms = 40 # subsequent repeats (ms) - self._next_repeat_ms = 0 - - # --- focus management: respond to global clicks anywhere --- - def on_global_mouse_press(self, px, py, button): - if self.is_effectively_hidden: - return - - was_active = self.active - self.active = self.contains_point(px, py) - - if self.active and not was_active: - self._caret_index = len(self.text) - self._reset_blink() - elif not self.active and was_active: - if self.on_commit: - self.on_commit(self.text) - self._caret_visible = False - self._repeat_key = None - - - def _decode_clip_bytes(self, raw: bytes) -> str: - """Best-effort decode for clipboard bytes from pygame.scrap/SDL.""" - # BOMs first - if raw.startswith(b'\xff\xfe') or raw.startswith(b'\xfe\xff'): - try: - return raw.decode('utf-16') - except Exception: - pass - # Heuristic: lots of NULs => UTF-16 without BOM (Windows CF_UNICODETEXT) - if raw and raw[1:2] == b'\x00' or raw.count(b'\x00') >= max(1, len(raw)//4): - for enc in ('utf-16-le', 'utf-16-be'): - try: - return raw.decode(enc) - except Exception: - continue - # Try utf-8, then latin1 as last resort - for enc in ('utf-8', 'latin1'): - try: - return raw.decode(enc) - except Exception: - continue - # Fallback replace to avoid exceptions - return raw.decode('utf-8', errors='replace') - - def _sanitize_paste_text(self, s: str) -> str: - """Normalize, strip invisibles, and make it safe for a single-line field.""" - # Normalize - s = unicodedata.normalize('NFC', s) - - # Replace non-breaking spaces and other common whitespace oddities with space - s = s.replace('\u00A0', ' ') # nbsp - s = s.replace('\u2007', ' ') # figure space - s = s.replace('\u202F', ' ') # narrow nbsp - - # Drop zero-width and format chars (keep newline handling for now) - # Cf (format), Cc (control) — but allow \n and \t to survive for next step - s = ''.join(ch for ch in s if not ( - (unicodedata.category(ch) in ('Cf', 'Cc')) and ch not in ('\n', '\t') - )) - - # Optional: map curly quotes 
if your font lacks them - # Comment these out if your font supports U+2018/2019 properly - s = s.replace('‘', "'").replace('’', "'").replace('“', '"').replace('”', '"') - - # Normalize line endings and collapse to single line for this widget - s = s.replace('\r\n', '\n').replace('\r', '\n').replace('\t', ' ') - s = " ".join(s.splitlines()) # turns any newlines into single spaces - - # Trim trailing exotic whitespace that might still linger - s = re.sub(r'\s+$', ' ', s) # keep one space if user had one at end - return s - - # --- Paste functionality --- - def _insert_text(self, s: str): - if not s: - return - s = self._sanitize_paste_text(s) - if not s: - return - - # Validate pasted text - candidate = self.text[:self._caret_index] + s + self.text[self._caret_index:] - if self.allowed_pattern and not self.allowed_pattern.fullmatch(candidate): - return # reject paste if it violates the pattern - - self.text = self.text[:self._caret_index] + s + self.text[self._caret_index:] - self._caret_index += len(s) - self._refresh() - self._reset_blink() - self._ensure_caret_visible() - if getattr(self, "_repeat_key", None) == pygame.K_BACKSPACE: - self._repeat_key = None - - def _get_clipboard_text(self) -> str: - """Clipboard : try pygame.scrap, then pygame.clipboard; robust decoding.""" - # 1) pygame.scrap - try: - if hasattr(pygame, "scrap"): - if not pygame.scrap.get_init(): - pygame.scrap.init() - raw = ( - pygame.scrap.get("text/plain;charset=utf-8") - or pygame.scrap.get("text/plain") - or pygame.scrap.get(getattr(pygame, "SCRAP_TEXT", "text/plain")) - ) - if raw: - if isinstance(raw, bytes): - return self._decode_clip_bytes(raw) - return str(raw) - except Exception: - pass - - # 2) SDL clipboard - try: - if hasattr(pygame, "clipboard"): - txt = pygame.clipboard.get_text() - if isinstance(txt, bytes): - return self._decode_clip_bytes(txt) - return txt or "" - except Exception: - pass - - return "" - - # --- typing: react to KEYDOWN + KEYUP --- - def on_key_event(self, 
event): - if not self.active or self.is_effectively_hidden: - return - - # handle KEYUP to stop repeat - if event.type == pygame.KEYUP: - if event.key == self._repeat_key: - self._repeat_key = None - return - - if event.type != pygame.KEYDOWN: - return - - mods = getattr(event, "mod", 0) - is_ctrl_or_cmd = bool(mods & (pygame.KMOD_CTRL | pygame.KMOD_META)) - is_shift = bool(mods & pygame.KMOD_SHIFT) - - # Paste: Ctrl/Cmd+V or Shift+Insert - if (event.key == pygame.K_v and is_ctrl_or_cmd) or (event.key == pygame.K_INSERT and is_shift): - pasted = self._get_clipboard_text() - if pasted: - self._insert_text(pasted) - return - - if event.key == pygame.K_RETURN: - self.active = False - self._caret_visible = False - self._repeat_key = None - return - - if event.key == pygame.K_BACKSPACE: - self._do_backspace() - # start repeat timing - now = pygame.time.get_ticks() - self._repeat_key = pygame.K_BACKSPACE - self._next_repeat_ms = now + self._repeat_delay_ms - return - - if event.key == pygame.K_DELETE: - if self._caret_index < len(self.text): - self.text = self.text[:self._caret_index] + self.text[self._caret_index + 1:] - self._refresh(); self._reset_blink() - return - - if event.key == pygame.K_LEFT: - self._move_left() - now = pygame.time.get_ticks() - self._repeat_key = pygame.K_LEFT - self._next_repeat_ms = now + self._repeat_delay_ms - self._ensure_caret_visible() - return - - if event.key == pygame.K_RIGHT: - self._move_right() - now = pygame.time.get_ticks() - self._repeat_key = pygame.K_RIGHT - self._next_repeat_ms = now + self._repeat_delay_ms - self._ensure_caret_visible() - return - - if event.key == pygame.K_HOME: - self._caret_index = 0 - self._reset_blink() - self._ensure_caret_visible() - return - - if event.key == pygame.K_END: - self._caret_index = len(self.text) - self._reset_blink() - self._ensure_caret_visible() - return - - if event.unicode and event.unicode.isprintable(): - candidate = self.text[:self._caret_index] + event.unicode + 
self.text[self._caret_index:] - if self.allowed_pattern and not self.allowed_pattern.fullmatch(candidate): - return # reject this character - self.text = candidate - self._caret_index += 1 - self._refresh() - self._reset_blink() - self._ensure_caret_visible() - if self._repeat_key == pygame.K_BACKSPACE: - self._repeat_key = None - - def set_text(self, new_text: str, *, emit: bool = True): - self.text = str(new_text) - self._caret_index = len(self.text) - if emit: - self._refresh() - else: - cb = self.on_text_change - self.on_text_change = None - self._refresh() - self.on_text_change = cb - - # --- Carat Movement --- - - - # --- helper for a single backspace action --- - def _do_backspace(self): - if self._caret_index > 0: - self.text = self.text[:self._caret_index - 1] + self.text[self._caret_index:] - self._caret_index -= 1 - self._refresh() - self._reset_blink() - self._ensure_caret_visible() - - # --- per-frame repeat tick --- - def _update_key_repeat(self): - if not self.active or self._repeat_key is None: - return - now = pygame.time.get_ticks() - while now >= self._next_repeat_ms: - if self._repeat_key == pygame.K_BACKSPACE: - self._do_backspace() - elif self._repeat_key == pygame.K_LEFT: - self._move_left() - elif self._repeat_key == pygame.K_RIGHT: - self._move_right() - self._next_repeat_ms += self._repeat_interval_ms - - def _text_inner_width(self) -> int: - # Available width for text inside padding and border - return max(0, self.width - 2 * self._padding - 4) # -4 for the 2px border on each side - - def _text_width(self) -> int: - return self._measure_text_prefix_width(self.text) - - def _prefix_width(self, upto: int) -> int: - return self._measure_text_prefix_width(self.text[:upto]) - - def _ensure_caret_visible(self): - """Adjust self._scroll_px so the caret x (prefix width) stays inside the inner view.""" - inner_w = self._text_inner_width() - if inner_w <= 0: - self._scroll_px = 0 - return - - total_w = self._text_width() - caret_px = 
self._prefix_width(self._caret_index) - - # left edge - if caret_px < self._scroll_px: - self._scroll_px = caret_px - # right edge - elif caret_px > self._scroll_px + inner_w: - self._scroll_px = caret_px - inner_w - - # clamp scroll to content - max_scroll = max(0, total_w - inner_w) - self._scroll_px = max(0, min(self._scroll_px, max_scroll)) - - def _move_left(self): - if self._caret_index > 0: - self._caret_index -= 1 - self._reset_blink() - self._ensure_caret_visible() - - def _move_right(self): - if self._caret_index < len(self.text): - self._caret_index += 1 - self._reset_blink() - self._ensure_caret_visible() - - def _refresh(self): - if self.text: - self._text.set_text(self.text) - else: - self._text.set_text(self.placeholder) - self._caret_index = max(0, min(self._caret_index, len(self.text))) - # If content shrank, ensure scroll isn't past the end - inner_w = self._text_inner_width() - max_scroll = max(0, self._text_width() - inner_w) - self._scroll_px = max(0, min(self._scroll_px, max_scroll)) - - if self.on_text_change: - self.on_text_change(self.text) - - def _reset_blink(self): - self._last_blink_ms = pygame.time.get_ticks() - self._caret_visible = True - - def _update_blink(self): - now = pygame.time.get_ticks() - if now - self._last_blink_ms >= self._blink_interval_ms: - self._caret_visible = not self._caret_visible - self._last_blink_ms = now - - def _measure_text_prefix_width(self, prefix: str) -> int: - """ - Measure pixel width of a substring using the Text's FreeType font. - Falls back gracefully if font unavailable. 
- """ - font = getattr(self._text, "_font", None) - if not font: - # crude fallback: render via Text then read width - temp = Text(prefix, parent=self, x=0, y=0, style=self.style) - w, _ = temp.size - # don't add temp to tree permanently - self.children.remove(temp) - return w - # With pygame.freetype, render() returns (surface, rect) - surf, _ = font.render(prefix, fgcolor=self.style.color, size=self.style.font_size) - return surf.get_width() - - def draw(self, surface): - if self.is_effectively_hidden: - return - - self._update_key_repeat() - - # Background + border - abs_x, abs_y, abs_w, abs_h = self.get_absolute_geometry() - pygame.draw.rect(surface, self.background_color, (abs_x, abs_y, abs_w, abs_h)) - border = pygame.Color("dodgerblue") if self.active else self.border_color - pygame.draw.rect(surface, border, (abs_x, abs_y, abs_w, abs_h), 2) - - # Clip text to inner rect - clip_rect = pygame.Rect(abs_x + 2, abs_y + 2, max(0, abs_w - 4), max(0, abs_h - 4)) - prev_clip = surface.get_clip() - if prev_clip: # prev_clip can be a Rect or None - clip_rect = clip_rect.clip(prev_clip) - - surface.set_clip(clip_rect) - - # Calculate horizontal scroll if text exceeds field width - text_width = self._measure_text_prefix_width(self.text) - inner_width = abs_w - 2 * self._padding - 4 # inside padding and 2px border - if inner_width < 0: - inner_width = 0 - - # When text is shorter than view, no scroll; otherwise use self._scroll_px - total_w = self._text_width() - max_scroll = max(0, total_w - inner_width) - scroll = max(0, min(self._scroll_px, max_scroll)) - - # Draw the text with horizontal offset - text_abs_x = abs_x + self._padding + 2 - scroll # +2 for left border - self._text.x = text_abs_x - abs_x - self._text.draw(surface) - - # Caret - if self.active: - self._update_blink() - if self._caret_visible: - prefix_w = self._prefix_width(self._caret_index) - text_x, text_y, text_w, text_h = self._text.get_absolute_geometry() - caret_x = (abs_x + self._padding + 2) + 
(prefix_w - scroll) - pygame.draw.line(surface, pygame.Color("black"), - (caret_x, text_y), - (caret_x, text_y + text_h), 1) - - surface.set_clip(prev_clip) - diff --git a/UI/input/toggle_button.py b/UI/input/toggle_button.py deleted file mode 100644 index 8f5dc45..0000000 --- a/UI/input/toggle_button.py +++ /dev/null @@ -1,78 +0,0 @@ -import pygame -from dataclasses import dataclass -from typing import Callable, Optional - -from UI.input.button import Button, ButtonColors -from UI.text import TextStyle - -@dataclass -class ToggledColors: - background: Optional[pygame.Color] = None - hover_background: Optional[pygame.Color] = None - foreground: Optional[pygame.Color] = None - hover_foreground: Optional[pygame.Color] = None - -class ToggleButton(Button): - """ - Two-state button (ON/OFF). - - `on_click` is optional. - - `on_change` receives (state: bool, button: ToggleButton). - - Color palette can be overridden when ON via ToggledColors. - """ - def __init__( - self, - function_to_call: Optional[Callable] = None, # optional click callback - x: int = 0, y: int = 0, width: int = 100, height: int = 30, - text: str = "", - *, - toggled: bool = False, - on_change: Optional[Callable[[bool, "ToggleButton"], None]] = None, - colors: Optional[ButtonColors] = None, - toggled_colors: Optional[ToggledColors] = None, - text_style: Optional[TextStyle] = None, - **frame_kwargs - ): - super().__init__( - function_to_call=function_to_call, - x=x, y=y, width=width, height=height, - text=text, colors=colors, text_style=text_style, - **frame_kwargs - ) - self._is_on = bool(toggled) - self._on_change = on_change - self._toggled_colors = toggled_colors or ToggledColors() - - @property - def is_on(self) -> bool: - return self._is_on - - def set_toggled(self, on: bool, fire: bool = True) -> None: - if self._is_on == on: - return - self._is_on = on - if fire and self._on_change: - self._on_change(self._is_on, self) - - def toggle(self, fire: bool = True) -> None: - self.set_toggled(not 
self._is_on, fire=fire) - - def on_click(self, button=None): - if not self.is_enabled: - return - # Flip state first - self.toggle(fire=True) - # Only call per-click handler if provided - if self.function_to_call: - super().on_click(button) - - def _resolve_colors(self): - base_bg, base_border = super()._resolve_colors() - if not self._is_on: - return base_bg, base_border - - bg = self._toggled_colors.background or base_bg - fg = self._toggled_colors.foreground or base_border - if self.is_hover: - bg = self._toggled_colors.hover_background or bg - fg = self._toggled_colors.hover_foreground or fg - return bg, fg diff --git a/UI/list_frame.py b/UI/list_frame.py deleted file mode 100644 index eabd93f..0000000 --- a/UI/list_frame.py +++ /dev/null @@ -1,186 +0,0 @@ -# list_frame.py -from typing import Callable, Iterable, Iterator, List, Optional, Sequence -import pygame -from UI.frame import Frame - -RowBuilder = Callable[[int, Frame], None] -ElementFactory = Callable[[Frame, int], Frame] - - -class RowContainer(Frame): - """A thin container for one row inside a ListFrame.""" - def __init__(self, parent: Frame, index: int, row_height: int, **kwargs): - # Position rows at (0, index*row_height) within the ListFrame - super().__init__(parent=parent, x=0, y=index * row_height, - width=1.0, height=row_height, - width_is_percent=True, height_is_percent=False, - **kwargs) - self.index = index - self.row_height = row_height - - def set_index_and_y(self, index: int) -> None: - self.index = index - self.y = index * self.row_height - - -class ListFrame(Frame): - """ - A vertical list container that repeats a row blueprint N times. 
- - Build options: - A) row_builder(index, row_parent): create row contents for each index - B) element_factories: sequence of callables (parent, index) -> Frame - - Public API: - - set_count(n), set_row_height(h), rebuild() - - get_row(i) -> RowContainer - - __len__, __iter__, __getitem__ - - update_row(i, rebuild: bool = False, fn: Optional[RowBuilder] = None) - - Notes: - - All child element positions are relative to their RowContainer, - which itself is positioned inside the ListFrame at y = index * row_height. - - Z-ordering and input handling inherit from Frame. - """ - def __init__( - self, - parent: Optional[Frame] = None, - *, - x: float = 0, - y: float = 0, - width: float = 100, - height: float = 100, - x_is_percent: bool = False, - y_is_percent: bool = False, - width_is_percent: bool = False, - height_is_percent: bool = False, - z_index: int = 0, - x_align: str = "left", - y_align: str = "top", - background_color: Optional[pygame.Color] = None, - row_height: int = 24, - count: int = 0, - row_builder: Optional[RowBuilder] = None, - element_factories: Optional[Sequence[ElementFactory]] = None, - ): - super().__init__( - parent=parent, x=x, y=y, width=width, height=height, - x_is_percent=x_is_percent, y_is_percent=y_is_percent, - width_is_percent=width_is_percent, height_is_percent=height_is_percent, - z_index=z_index, x_align=x_align, y_align=y_align, - background_color=background_color - ) - self._row_height = int(row_height) - self._count = int(count) - self._row_builder: Optional[RowBuilder] = row_builder - self._factories: Optional[List[ElementFactory]] = list(element_factories) if element_factories else None - - self._rows: List[RowContainer] = [] - if self._count > 0: - self._materialize_rows() - - # ------------ public API ------------ - - def set_count(self, n: int) -> None: - n = max(0, int(n)) - if n == self._count: - return - self._count = n - self._resize_rows() - - def set_row_height(self, h: int) -> None: - h = max(1, int(h)) - if h == 
self._row_height: - return - self._row_height = h - # Reposition rows and update their heights - for i, row in enumerate(self._rows): - row.row_height = h - row.height = h - row.set_index_and_y(i) - - def get_row(self, index: int) -> RowContainer: - return self._rows[index] - - def rebuild(self) -> None: - """Fully rebuild all rows (e.g., after changing builder/factories).""" - self._clear_rows() - self._materialize_rows() - - def update_row(self, index: int, rebuild: bool = False, fn: Optional[RowBuilder] = None) -> None: - """Optionally clear and rebuild a single row with a temporary or new builder.""" - row = self._rows[index] - if rebuild: - # Clear children of this row - for ch in list(row.children): - row.children.remove(ch) - builder = fn or self._row_builder - if builder is not None: - builder(index, row) - elif self._factories: - for f in self._factories: - f(row, index) - - # --- Pythonic container behavior --- - def __len__(self) -> int: - return self._count - - def __iter__(self) -> Iterator[RowContainer]: - return iter(self._rows) - - def __getitem__(self, i: int) -> RowContainer: - return self.get_row(i) - - # ------------ internals ------------ - - def _clear_rows(self) -> None: - # Detach row containers from our children list - for row in self._rows: - if row in self.children: - self.children.remove(row) - self._rows.clear() - - def _materialize_rows(self) -> None: - for i in range(self._count): - row = RowContainer(parent=self, index=i, row_height=self._row_height) - self._rows.append(row) - # Build row contents - if self._row_builder is not None: - self._row_builder(i, row) - elif self._factories: - for f in self._factories: - f(row, i) - - # Keep our overall children z-sorted (your Frame.add_child sorts on insert, - # but we added directly via RowContainer(parent=self), so re-sort here) - self.children.sort(key=lambda c: c.z_index, reverse=True) - - def _resize_rows(self) -> None: - """Grow/shrink rows to match current count; re-use existing when 
possible.""" - cur = len(self._rows) - if self._count == cur: - return - - if self._count < cur: - # Remove extras from the end - to_remove = self._rows[self._count:] - for row in to_remove: - if row in self.children: - self.children.remove(row) - self._rows = self._rows[:self._count] - else: - # Add new rows - for i in range(cur, self._count): - row = RowContainer(parent=self, index=i, row_height=self._row_height) - self._rows.append(row) - if self._row_builder is not None: - self._row_builder(i, row) - elif self._factories: - for f in self._factories: - f(row, i) - - # Reposition all rows to keep indices consistent - for i, row in enumerate(self._rows): - row.set_index_and_y(i) - - self.children.sort(key=lambda c: c.z_index, reverse=True) diff --git a/UI/modal.py b/UI/modal.py deleted file mode 100644 index 53f3358..0000000 --- a/UI/modal.py +++ /dev/null @@ -1,249 +0,0 @@ -import pygame -from UI.frame import Frame -from UI.section_frame import Section -from UI.text import TextStyle - -from UI.input.button import Button, ButtonColors - -class _Scrim(Frame): - """Full-screen overlay that blocks interaction with underlying UI.""" - def __init__(self, *, parent, z_index=10_000, alpha=160): - # Cover the whole root; percent sizing + centered at (0,0) top-left. 
- super().__init__( - parent=parent, x=0, y=0, width=1.0, height=1.0, - x_is_percent=True, y_is_percent=True, - width_is_percent=True, height_is_percent=True, - z_index=z_index - ) - self._alpha = alpha - self._capture_drag = None # set by Modal while dragging - self._capture_release = None # set by Modal while dragging - self.hide() # start hidden - - def draw(self, surface: pygame.Surface) -> None: - if self.is_effectively_hidden: - return - abs_x, abs_y, abs_w, abs_h = self.get_absolute_geometry() - # Semi-transparent scrim - scrim = pygame.Surface((abs_w, abs_h), pygame.SRCALPHA) - scrim.fill((0, 0, 0, self._alpha)) - surface.blit(scrim, (abs_x, abs_y)) - - # Draw children (e.g., the modal panel) - for ch in reversed(self.children): - ch.draw(surface) - - # Clicking the scrim closes the modal (if no child handled the click) - def on_click(self, button=None): - # Delegate: the Modal sets this from its constructor - if hasattr(self, "_request_close"): - self._request_close() - - def process_mouse_move(self, px, py): - # If a modal is dragging, forward move regardless of where the mouse is. - if callable(self._capture_drag): - self._capture_drag(px, py) - super().process_mouse_move(px, py) - - def process_mouse_release(self, px, py, button): - # If a modal is dragging, ensure the release reaches it. 
- if callable(self._capture_release): - self._capture_release(px, py, button) - super().process_mouse_release(px, py, button) - -class _DragCapture(Frame): - """Full-screen invisible mouse-capture layer used during modal drag in floating mode.""" - def __init__(self, *, parent, on_move, on_release, z_index): - super().__init__( - parent=parent, x=0, y=0, width=1.0, height=1.0, - x_is_percent=True, y_is_percent=True, - width_is_percent=True, height_is_percent=True, - z_index=z_index - ) - self._on_move = on_move - self._on_release = on_release - # No background; purely for event capture - - def process_mouse_move(self, px, py): - if callable(self._on_move): - self._on_move(px, py) - super().process_mouse_move(px, py) - - def process_mouse_release(self, px, py, button): - if callable(self._on_release): - self._on_release(px, py, button) - super().process_mouse_release(px, py, button) - -class Modal(Section): - """ - Modal with two modes: - - overlay=True : uses a full-screen scrim (blocks clicks behind) - - overlay=False : no scrim; modal floats and does NOT block other buttons - """ - def __init__(self, *, parent, title: str, width: int = 480, height: int = 320, header_height: int = 32, - on_close=None, z_index: int = 10_001, - header_bg: pygame.Color = pygame.Color("#dbdbdb"), - background_color: pygame.Color = pygame.Color("#ffffff"), - title_style: TextStyle | None = None, - overlay: bool = True, - scrim_alpha: int = 160, - **kwargs - ): - super().__init__( - parent=(parent if not overlay else _Scrim(parent=parent, z_index=z_index - 1, alpha=scrim_alpha)), - title=title, - width=width, height=height, - header_height=header_height, - header_bg=header_bg, - background_color=background_color, - title_style=title_style, - z_index=z_index, - x=0.5, y=0.5, - x_is_percent=True, y_is_percent=True, - **kwargs - ) - - self.x_align = "center" - self.y_align = "center" - - self._on_close = on_close - self._overlay = overlay - self._dragging = False - self._drag_offset = (0, 
0) - self._drag_capture_layer = None # only used when overlay == False - - if overlay: - self._scrim = self.parent - self._scrim.hide(True) - self._scrim._request_close = self.close - else: - self._scrim = None - self.hide(True) - - close_btn_style = TextStyle( - color=pygame.Color("#4a4a4a"), - font_size=min(20, header_height - 8), - ) - self._close_btn = Button( - self.close, - x=8, y=(header_height - 24) // 2, - width=24, height=24, - text="X", - text_style=close_btn_style, - x_align="right", - parent=self.header, - colors=ButtonColors( - background=header_bg, - foreground=header_bg, - hover_background=pygame.Color("#b3b4b6"), - disabled_background=header_bg, - disabled_foreground=header_bg - ), - ) - - # Public API - def open(self): - if self._scrim is not None: - self._scrim.show(True) # shows panel via scrim - else: - self.show(True) # floating mode: just show panel - - def close(self): - if self._scrim is not None: - self._scrim.hide(True) - else: - self.hide(True) - if callable(self._on_close): - self._on_close() - - # ESC to close (works in both modes) - def on_key_event(self, event): - try: - if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE: - self.close() - except Exception: - pass - - # --- Dragging support --- - - def _begin_drag(self, px, py): - # Anchor the pointer offset from the modal's top-left corner - abs_x, abs_y, _, _ = self.get_absolute_geometry() - self._dragging = True - self._drag_offset = (px - abs_x, py - abs_y) - - # While dragging, switch to absolute pixel positioning - self.x_is_percent = False - self.y_is_percent = False - self.x_align = "left" - self.y_align = "top" - - if self._scrim is not None: - # Overlay mode: use scrim as move/release forwarder - self._scrim._capture_drag = self._on_drag_move - self._scrim._capture_release = self._on_drag_release - else: - # Floating mode: create a temporary full-screen capture layer - self._drag_capture_layer = _DragCapture( - parent=self.parent, - on_move=self._on_drag_move, 
- on_release=self._on_drag_release, - z_index=self.z_index + 10_000 - ) - - def _end_drag(self): - self._dragging = False - if self._scrim is not None: - self._scrim._capture_drag = None - self._scrim._capture_release = None - if self._drag_capture_layer is not None: - # Remove the capture layer - try: - self.parent.children.remove(self._drag_capture_layer) - except ValueError: - pass - self._drag_capture_layer = None - - def _on_drag_move(self, px, py): - if not self._dragging: - return - parent_x, parent_y, parent_w, parent_h = self.parent.get_absolute_geometry() - - new_x = px - self._drag_offset[0] - parent_x - new_y = py - self._drag_offset[1] - parent_y - - # Optional: keep modal inside parent bounds (comment out if you don’t want clamping) - w, h = self.size - new_x = max(0, min(new_x, max(0, parent_w - w))) - new_y = max(0, min(new_y, max(0, parent_h - h))) - - self.x = new_x - self.y = new_y - - def _on_drag_release(self, px, py, button): - if button == "left": - self._end_drag() - - # Hook into your existing event methods - - def process_mouse_press(self, px, py, button): - super().process_mouse_press(px, py, button) - - if button != "left": - return - - # Start dragging if press was on the header (but not on the close button) - if self.header.contains_point(px, py) and not self._close_btn.contains_point(px, py): - self._begin_drag(px, py) - - def process_mouse_move(self, px, py): - # If dragging, update first so the UI feels snappy - if self._dragging: - self._on_drag_move(px, py) - super().process_mouse_move(px, py) - - def process_mouse_release(self, px, py, button): - # Ensure we end dragging even if the release happens off the modal - if self._dragging: - self._on_drag_release(px, py, button) - super().process_mouse_release(px, py, button) \ No newline at end of file diff --git a/UI/section_frame.py b/UI/section_frame.py deleted file mode 100644 index a2864f2..0000000 --- a/UI/section_frame.py +++ /dev/null @@ -1,255 +0,0 @@ -import pygame - -from 
UI.frame import Frame -from UI.text import Text, TextStyle - -from UI.input.button import Button, ButtonColors - -class Section(Frame): - def __init__( - self, *, - parent, - title: str, - x=0, y=0, width=100, height=100, - x_is_percent=False, y_is_percent=False, - width_is_percent=False, height_is_percent=False, - z_index=0, - background_color=pygame.Color("#ffffff"), - header_height=32, - header_bg=pygame.Color("#dbdbdb"), - title_style: TextStyle | None = None, - title_align: str = "left", - collapsible: bool = False, - **kwargs - ): - self._initializing = True - self.collapsible = collapsible - self.collapsed = False - - self._header_buttons: list[Frame] = [] - self.header_action_gap: int = 0 - - body_padding = kwargs.pop("padding", (0, 0, 0, 0)) - - super().__init__( - parent=parent, x=x, y=y, width=width, height=height, - x_is_percent=x_is_percent, y_is_percent=y_is_percent, - width_is_percent=width_is_percent, height_is_percent=height_is_percent, - z_index=z_index, background_color=background_color, - padding=(0, 0, 0, 0), - **kwargs - ) - - # Save original (expanded) height config so we can restore it - self._saved_height = height - self._saved_height_is_percent = height_is_percent - - # Header bar - self.header = Frame( - parent=self, - x=0, y=0, - width=1.0, height=header_height, - width_is_percent=True, - background_color=header_bg, - z_index=z_index + 1 - ) - - # Title text - if title_style is None: - title_style = TextStyle( - color=pygame.Color("#7a7a7a"), - font_size=24, - font_name="assets/fonts/SofiaSans-Regular.ttf", - ) - self.title = Text( - text=title, - parent=self.header, - x=(0.5 if title_align == "center" else 8), - y=self.header.height // 2, - x_is_percent=(title_align == "center"), - y_is_percent=False, - x_align=title_align, - y_align="center", - style=title_style, - ) - - # Collapse toggle button - if self.collapsible: - self.toggle_btn = Button( - self.toggle_collapse, - x=0, y=0, - width=(header_height / 3) * 2, height=header_height, 
- text="-", - parent=self.header, - x_align="right", y_align="top", - colors=ButtonColors( - background=header_bg, - foreground=header_bg, - hover_background=pygame.Color("#b3b4b6"), - disabled_background=header_bg, - disabled_foreground=header_bg - ), - text_style=TextStyle( - color=pygame.Color("#7a7a7a"), - font_size=min(20, header_height - 8), - ) - ) - else: - self.toggle_btn = None - - # Body (content area) - self.body = Frame( - parent=self, - x=0, y=self.header.height, - width=1.0, - width_is_percent=True, - height=max(0, height - header_height) if not height_is_percent else 0, - height_is_percent=not height_is_percent, - z_index=z_index, - padding=body_padding - ) - - self._initializing = False - self._layout_header_actions() - - def _layout_header_actions(self): - """ - Packs the collapse toggle (if any) at the far right, then the custom - header buttons to its left with a fixed gap. - """ - offset = 0 - - # Keep collapse toggle pinned at the far right - if self.toggle_btn is not None: - self.toggle_btn.x_align = "right" - self.toggle_btn.y_align = "top" - self.toggle_btn.x = 0 # zero offset from the right edge - self.toggle_btn.y = 0 # it already fills header height - offset = self.toggle_btn.width + self.header_action_gap - - # Pack custom buttons right-to-left in insertion order - for btn in self._header_buttons: - # x is an offset from the right edge because x_align="right" - btn.x = offset - # re-center vertically in case header height changed - btn.y = max(0, (self.header.height - btn.height) // 2) - offset += btn.width + self.header_action_gap - - # --- public helper (optional) --- - def set_collapsed(self, value: bool): - if value == self.collapsed: - return - - self.collapsed = value - if self.toggle_btn: - self.toggle_btn.set_text("+" if self.collapsed else "-") - - if self.collapsed: - # Save current size/fill config - self._saved_height = self.height - self._saved_height_is_percent = self.height_is_percent - self._saved_fill_remaining_height = 
getattr(self, "_saved_fill_remaining_height", self.fill_remaining_height) - - # Clamp Section to header-only and stop filling parent - self.fill_remaining_height = False - self.height_is_percent = False - self.height = self.header.height - - # Hide only the body subtree - self._for_each_in_body(lambda f: f.add_hidden_reason("COLLAPSED")) - - else: - # Restore original size/fill config - self.height_is_percent = self._saved_height_is_percent - self.height = self._saved_height - self.fill_remaining_height = getattr(self, "_saved_fill_remaining_height", self.fill_remaining_height) - - # Unhide the body subtree - self._for_each_in_body(lambda f: f.remove_hidden_reason("COLLAPSED")) - - self._layout_header_actions() - - def add_header_button(self, button: Frame): - """ - Add a control to the header, aligned to the right. Items are packed - right-to-left with self.header_action_gap spacing. - """ - # Ensure the button lives under the header - button.parent = self.header - # Horizontal alignment from the right, we'll position via x offset - button.x_align = "right" - # Vertically center in the header - button.y_align = "top" - button.y = max(0, (self.header.height - button.height) // 2) - - # Make sure it's tracked and rendered above header background - if button not in self._header_buttons: - self._header_buttons.append(button) - - self._layout_header_actions() - - def add_to_header(self, child): - # Keep existing API working; route through right-side packer if it's a button-like thing - # Otherwise, just drop it in at (0,0) and let caller manage it. 
- try: - return self.add_header_button(child) - except Exception: - self.header.add_child(child) - - - def toggle_collapse(self): - self.set_collapsed(not self.collapsed) - - def add_child(self, child): - if getattr(self, "_initializing", False) or not hasattr(self, "body"): - return super().add_child(child) - return self.body.add_child(child) - - def _for_each_in_body(self, fn): - stack = [self.body] - while stack: - node = stack.pop() - fn(node) - stack.extend(node.children) - - def add_to_header(self, child): - try: - return self.add_header_button(child) - except Exception: - self.header.add_child(child) - - def get_content_geometry(self): - # Use the section's outer rect (no header offset). Padding lives on body now. - abs_x, abs_y, abs_w, abs_h = Frame.get_absolute_geometry(self) - # Section keeps zero padding; body owns padding. If you ever want chrome padding, set it here. - pad_top, pad_right, pad_bottom, pad_left = self.padding - inner_x = abs_x + pad_left - inner_y = abs_y + pad_top - inner_w = max(0, abs_w - pad_left - pad_right) - inner_h = max(0, abs_h - pad_top - pad_bottom) - return inner_x, inner_y, inner_w, inner_h - - def _layout(self): - _, _, sec_w, sec_h = self.get_absolute_geometry() - self.body.y = self.header.height - self.body.height_is_percent = False - # With the section height now clamped to header.height when collapsed, - # this naturally becomes 0. Otherwise, it's the remaining space. 
- self.body.height = max(0, sec_h - self.header.height) - self._layout_header_actions() - - def draw(self, surface: pygame.Surface) -> None: - self._layout() - super().draw(surface) - - def process_mouse_move(self, px, py): - self._layout() - super().process_mouse_move(px, py) - - def process_mouse_press(self, px, py, button): - self._layout() - super().process_mouse_press(px, py, button) - - def process_mouse_release(self, px, py, button): - self._layout() - super().process_mouse_release(px, py, button) diff --git a/UI/styles.py b/UI/styles.py deleted file mode 100644 index b816942..0000000 --- a/UI/styles.py +++ /dev/null @@ -1,59 +0,0 @@ -from __future__ import annotations -import pygame -from UI.text import TextStyle -from UI.input.button import ButtonColors -from UI.input.radio import SelectedColors - - -# ---- Text Styles ----------------------------------------------------------- - -def make_button_text_style() -> TextStyle: - return TextStyle(color=pygame.Color("#5a5a5a"), font_size=20) - -def make_display_text_style(font_size = 18) -> TextStyle: - return TextStyle( - color=pygame.Color(32, 32, 32), - font_size=font_size, - font_name="assets/fonts/SofiaSans-Regular.ttf", -) - -def make_settings_text_style() -> TextStyle: - return TextStyle( - color=pygame.Color(32, 32, 32), - font_size=20, - font_name="assets/fonts/SofiaSans-Regular.ttf", -) - - -# ---- Radio / Button shared styling ---------------------------------------- - - -# Base (unselected) colors for radio buttons -BASE_BUTTON_COLORS = ButtonColors( - hover_foreground=pygame.Color("#5a5a5a") -) - -# Colors when a radio is selected -SELECTED_RADIO_COLORS = SelectedColors( - background=pygame.Color("#b3b4b6"), - hover_background=pygame.Color("#b3b4b6"), - foreground=pygame.Color("#b3b4b6"), - hover_foreground=pygame.Color("#5a5a5a"), -) - -# Text style used by radios -RADIO_TEXT_STYLE = TextStyle( - font_size=16, - color=pygame.Color("#5a5a5a"), - hover_color=pygame.Color("#5a5a5a"), - 
disabled_color=pygame.Color("#5a5a5a"), -) - - -# ---- Crosshair Styling ----------------------------------------------------- - -# Crosshair visual properties -CROSSHAIR_COLOR = pygame.Color(64, 64, 64, 200) # Dark grey with alpha -CROSSHAIR_LENGTH = 20 -CROSSHAIR_THICKNESS = 2 -CROSSHAIR_GAP = 5 \ No newline at end of file diff --git a/UI/text.py b/UI/text.py deleted file mode 100644 index fd2e038..0000000 --- a/UI/text.py +++ /dev/null @@ -1,285 +0,0 @@ -import pygame -import pygame.freetype as freetype -from typing import Tuple, Optional -from dataclasses import dataclass, field -from UI.frame import Frame - -def default_color() -> pygame.Color: - return pygame.Color("#b3b4b6") - -@dataclass -class TextStyle: - """Style configuration for text rendering""" - color: pygame.Color = field(default_factory=default_color) - font_size: int = 32 - font_name: Optional[str] = None - bold: bool = False - italic: bool = False - - hover_color: Optional[pygame.Color] = None - disabled_color: Optional[pygame.Color] = None - -class Text(Frame): - def __init__(self, - text: str, - x: int, y: int, - style: Optional[TextStyle] = None, - x_align: str = "left", y_align: str = "top", - max_width: Optional[int] = None, - truncate_mode: str = "none", - show_tooltip_on_hover: bool = True, - **frame_kwargs): - - super().__init__(x=x, y=y, width=0, height=0, **frame_kwargs) - - self.mouse_passthrough = True - self.text = text - self.style = style or TextStyle() - self.x_align = x_align - self.y_align = y_align - self.max_width = max_width - self.truncate_mode = truncate_mode - self.show_tooltip_on_hover = show_tooltip_on_hover - - self._is_hover = False - self._is_enabled = True - - self._font = self._create_font() - self._surface = None - self._render_text = text - self._update_surface() - - @property - def debug_outline_color(self) -> pygame.Color: - return pygame.Color(0, 0, 255) - - def _create_font(self) -> freetype.Font: - """Create a FreeType font object based on style""" - try: - 
font = freetype.Font(self.style.font_name, self.style.font_size) - except Exception: - font = freetype.SysFont(None, self.style.font_size) - - font.strong = self.style.bold - font.oblique = self.style.italic - return font - - def _current_color(self) -> pygame.Color: - # Resolve stateful color with sensible fallbacks - if not self._is_enabled and self.style.disabled_color is not None: - return self.style.disabled_color - if self._is_hover and self.style.hover_color is not None: - return self.style.hover_color - return self.style.color - - def _update_surface(self) -> None: - """Render the text to a surface using FreeType""" - if not self._font: - return - self._render_text = self.text - if self.max_width and self.truncate_mode != "none": - self._render_text = self._ellipsize(self.text, self.max_width, self.truncate_mode) - self._surface, _ = self._font.render( - self._render_text, - fgcolor=self._current_color(), - size=self.style.font_size, - ) - - @property - def size(self) -> Tuple[int, int]: - return self._surface.get_size() if self._surface else (0, 0) - - def contains_point(self, px, py): - # allow hover detection - abs_x, abs_y, w, h = self.get_absolute_geometry() - return abs_x <= px <= abs_x + w and abs_y <= py <= abs_y + h - - def set_text(self, text: str) -> None: - """Update the displayed text""" - if self.text != text: - self.text = text - self._update_surface() - - def set_style(self, style: TextStyle) -> None: - """Update the text style""" - self.style = style - self._font = self._create_font() - self._update_surface() - - def set_color(self, color) -> None: - """Set the base text color and re-render the surface.""" - if not isinstance(color, pygame.Color): - color = pygame.Color(color) - if self.style.color != color: - self.style.color = color - self._update_surface() - - def get_color(self) -> pygame.Color: - """Return the current resolved text color.""" - return self._current_color() - - def set_is_hover(self, is_hover: bool) -> None: - if 
self._is_hover != is_hover: - self._is_hover = is_hover - self._update_surface() - - def set_is_enabled(self, is_enabled: bool) -> None: - if self._is_enabled != is_enabled: - self._is_enabled = is_enabled - self._update_surface() - - def get_absolute_geometry(self): - parent_x, parent_y, parent_w, parent_h = ( - self.parent.get_absolute_geometry() if self.parent else (0, 0, *pygame.display.get_surface().get_size()) - ) - - draw_x = self.x * parent_w if self.x_is_percent else self.x - draw_y = self.y * parent_h if self.y_is_percent else self.y - draw_x += parent_x - draw_y += parent_y - - text_w, text_h = self._surface.get_size() if self._surface else (0, 0) - - # Apply alignment like in draw() - if self.x_align == "center": - draw_x -= text_w // 2 - elif self.x_align == "right": - draw_x -= text_w - - if self.y_align == "center": - draw_y -= text_h // 2 - elif self.y_align == "bottom": - draw_y -= text_h - - return draw_x, draw_y, text_w, text_h - - # --- Truncation helpers --- - def _measure_width(self, s: str) -> int: - return self._font.get_rect(s, size=self.style.font_size).width - - def _ellipsize(self, s: str, max_w: int, mode: str) -> str: - if max_w is None: - return s - if self._measure_width(s) <= max_w: - return s - - ell = "…" - ell_w = self._measure_width(ell) - if ell_w > max_w: - return "" - - def fit_end(prefix: str) -> str: - lo, hi = 0, len(prefix) - best = "" - while lo <= hi: - mid = (lo + hi) // 2 - cand = prefix[:mid] + ell - if self._measure_width(cand) <= max_w: - best = cand - lo = mid + 1 - else: - hi = mid - 1 - return best - - def fit_start(suffix: str) -> str: - lo, hi = 0, len(suffix) - best = "" - while lo <= hi: - mid = (lo + hi) // 2 - cand = ell + suffix[-mid:] if mid > 0 else ell - if self._measure_width(cand) <= max_w: - best = cand - lo = mid + 1 - else: - hi = mid - 1 - return best - - if mode == "end": - return fit_end(s) - if mode == "start": - return fit_start(s) - if mode == "middle": - left, right = 0, 0 - best = ell - 
while left + right < len(s): - cand = s[:left+1] + ell + (s[-right:] if right else "") - if self._measure_width(cand) <= max_w: - left += 1 - best = cand - else: - break - cand = s[:left] + ell + s[-(right+1):] - if self._measure_width(cand) <= max_w: - right += 1 - best = cand - else: - break - return best - - return s - - # --- Rendering --- - def draw(self, surface: pygame.Surface) -> None: - if not self._surface or self.is_effectively_hidden: - return - - parent_x, parent_y, parent_w, parent_h = ( - self.parent.get_absolute_geometry() if self.parent else (0, 0, *surface.get_size()) - ) - - draw_x = self.x * parent_w if self.x_is_percent else self.x - draw_y = self.y * parent_h if self.y_is_percent else self.y - draw_x += parent_x - draw_y += parent_y - - text_w, text_h = self._surface.get_size() - - if self.x_align == "center": - draw_x -= text_w // 2 - elif self.x_align == "right": - draw_x -= text_w - - if self.y_align == "center": - draw_y -= text_h // 2 - elif self.y_align == "bottom": - draw_y -= text_h - - surface.blit(self._surface, (draw_x, draw_y)) - - # Tooltip rendering - if ( - self.show_tooltip_on_hover - and self._render_text != self.text - ): - mx, my = pygame.mouse.get_pos() - if self.contains_point(mx, my): - self._draw_tooltip(surface, self.text, mx, my) - - def _draw_tooltip(self, surface, text, x, y): - tooltip_font = self._create_font() - tip_surface, _ = tooltip_font.render(text, fgcolor=pygame.Color("black")) - padding = 6 - cursor_offset = 12 - margin = 6 - - tw, th = tip_surface.get_size() - rect = pygame.Rect(x + cursor_offset, y + cursor_offset, tw + padding * 2, th + padding * 2) - sw, sh = surface.get_size() - - if rect.right > sw - margin: - rect.x = x - cursor_offset - rect.w - if rect.right > sw - margin: - rect.x = sw - rect.w - margin - if rect.x < margin: - rect.x = margin - - if rect.bottom > sh - margin: - rect.y = y - cursor_offset - rect.h - if rect.bottom > sh - margin: - rect.y = sh - rect.h - margin - if rect.y < 
margin: - rect.y = margin - - pygame.draw.rect(surface, pygame.Color(255, 255, 224), rect) - pygame.draw.rect(surface, pygame.Color("black"), rect, 1) - surface.blit(tip_surface, (rect.x + padding, rect.y + padding)) diff --git a/UI/tooltip.py b/UI/tooltip.py deleted file mode 100644 index 10eabd9..0000000 --- a/UI/tooltip.py +++ /dev/null @@ -1,180 +0,0 @@ -import pygame -from UI.frame import Frame -from UI.text import Text -from UI.styles import make_display_text_style - - -class Tooltip(Frame): - """ - Self-attaching, multi-line tooltip using Forge's global make_display_text_style(). - - Example: - tip = Tooltip.attach(some_frame, "Hello\nWorld") - tip.set_text("Updated text") - tip.detach() - """ - - def __init__( - self, - parent, - text: str, - *, - font_size: int = 20, - padding: int = 6, - bg_color=(255, 255, 224), - border_color=(0, 0, 0), - follow_cursor: bool = True, - margin: int = 6, - cursor_offset: int = 12, - z_index: int = 10_000, - line_spacing: int = 2, - style_overrides: dict | None = None, - ): - super().__init__(parent=parent, x=0, y=0, width=0, height=0, z_index=z_index) - self.mouse_passthrough = True - - self._padding = padding - self._bg_color = pygame.Color(*bg_color) - self._border_color = pygame.Color(*border_color) - self._follow_cursor = follow_cursor - self._margin = margin - self._cursor_offset = cursor_offset - self._line_spacing = line_spacing - self._font_size = font_size - - # base style from global Forge styling system - self._text_style = make_display_text_style(font_size=font_size) - if style_overrides: - for k, v in style_overrides.items(): - if hasattr(self._text_style, k): - setattr(self._text_style, k, v) - - self._target = None - self._orig_enter = None - self._orig_leave = None - self._orig_hover = None - - self._line_widgets: list[Text] = [] - self.set_text(text) - self.hide() - - # ---------------- attach / detach ---------------- - @classmethod - def attach(cls, target: Frame, text: str, **kwargs) -> "Tooltip": - 
"""Attach to any Frame without editing its class.""" - root = target - while root.parent is not None: - root = root.parent - tip = cls(parent=root, text=text, **kwargs) - tip._bind(target) - return tip - - def detach(self) -> None: - """Unbind and remove from scene.""" - if self._target: - self._target.on_hover_enter = self._orig_enter - self._target.on_hover_leave = self._orig_leave - self._target.on_hover = self._orig_hover - self._target = None - if self.parent and self in self.parent.children: - self.parent.children.remove(self) - - # ---------------- event wrapping ---------------- - def _bind(self, target: Frame) -> None: - self._target = target - self._orig_enter = target.on_hover_enter - self._orig_leave = target.on_hover_leave - self._orig_hover = target.on_hover - - def wrapped_enter(): - self._orig_enter() - self._show_at_mouse() - - def wrapped_leave(): - self._orig_leave() - self.hide() - - def wrapped_hover(): - self._orig_hover() - if self._follow_cursor and not self.is_effectively_hidden: - self._reposition_to_mouse() - - target.on_hover_enter = wrapped_enter - target.on_hover_leave = wrapped_leave - target.on_hover = wrapped_hover - - # ---------------- text layout ---------------- - def set_text(self, text: str): - """Supports multiple lines using '\\n'.""" - for w in self._line_widgets: - if w in self.children: - self.children.remove(w) - self._line_widgets.clear() - - lines = text.split("\n") if text else [""] - y_cursor = self._padding - max_w = 0 - - for line in lines: - tw = Text( - text=line, - x=self._padding, - y=y_cursor, - style=self._text_style, - ) - tw.mouse_passthrough = True - self.add_child(tw) - self._line_widgets.append(tw) - - lw, lh = tw.size - y_cursor += lh + self._line_spacing - max_w = max(max_w, lw) - - if self._line_widgets: - y_cursor -= self._line_spacing - - self.width = max_w + self._padding * 2 - self.height = y_cursor + self._padding - - # ---------------- positioning ---------------- - def _bring_to_front(self): - 
if not self.parent: - return - max_z = max((getattr(ch, "z_index", 0) for ch in self.parent.children), default=0) - if self.z_index <= max_z: - self.z_index = max_z + 1 - self.parent.children.sort(key=lambda c: c.z_index, reverse=True) - - def _show_at_mouse(self): - self._reposition_to_mouse() - self._bring_to_front() - self.show() - - def _reposition_to_mouse(self): - mx, my = pygame.mouse.get_pos() - sw, sh = pygame.display.get_surface().get_size() - w, h = self.width, self.height - - x = mx + self._cursor_offset - y = my + self._cursor_offset - - if x + w > sw - self._margin: - x = mx - w - self._cursor_offset - if y + h > sh - self._margin: - y = my - h - self._cursor_offset - - x = max(self._margin, min(x, sw - w - self._margin)) - y = max(self._margin, min(y, sh - h - self._margin)) - - self.x, self.y = x, y - self._bring_to_front() - - # ---------------- draw ---------------- - def draw(self, surface: pygame.Surface) -> None: - if self.is_effectively_hidden: - return - abs_x, abs_y, w, h = self.get_absolute_geometry() - pygame.draw.rect(surface, self._bg_color, (abs_x, abs_y, w, h)) - pygame.draw.rect(surface, self._border_color, (abs_x, abs_y, w, h), 1) - for child in reversed(self.children): - child.draw(surface) From 7e148d139d7339dffeeff750284e89d86fc27c0c Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Tue, 20 Jan 2026 00:04:26 -0900 Subject: [PATCH 10/46] removed remaining leftovers --- UI/modals/automation_settings_modal.py | 346 ------------ UI/modals/camera_settings_modal.py | 619 ---------------------- UI/modals/sample_settings_modal.py | 419 --------------- UI/overlays/interactive_camera_overlay.py | 394 -------------- UI/overlays/red_detection_mark_overlay.py | 493 ----------------- UI/ui_layout.py | 434 --------------- 6 files changed, 2705 deletions(-) delete mode 100644 UI/modals/automation_settings_modal.py delete mode 100644 UI/modals/camera_settings_modal.py delete mode 100644 UI/modals/sample_settings_modal.py delete mode 100644 
UI/overlays/interactive_camera_overlay.py delete mode 100644 UI/overlays/red_detection_mark_overlay.py delete mode 100644 UI/ui_layout.py diff --git a/UI/modals/automation_settings_modal.py b/UI/modals/automation_settings_modal.py deleted file mode 100644 index 4f35116..0000000 --- a/UI/modals/automation_settings_modal.py +++ /dev/null @@ -1,346 +0,0 @@ -import pygame - -from UI.modal import Modal -from printer.automated_controller import AutomatedPrinter -from printer.automation_config import AutomationSettingsManager - -from UI.focus_overlay import FocusOverlay - -from UI.input.text_field import TextField -from UI.input.button import Button, ButtonShape, ButtonColors -from UI.input.scroll_frame import ScrollFrame -from UI.input.slider import Slider -from UI.input.radio import RadioButton, RadioGroup - -from UI.tooltip import Tooltip -from UI.text import Text, TextStyle - -from UI.styles import ( - make_button_text_style, - make_display_text_style, - make_settings_text_style, - BASE_BUTTON_COLORS, - SELECTED_RADIO_COLORS, - RADIO_TEXT_STYLE, -) - - -class _Layout: - """Simple vertical slot layout using a fixed section offset.""" - def __init__(self, offset: int = 60): - self.offset = offset - self._i = 0 - - def next_y(self) -> int: - y = self.offset * self._i - self._i += 1 - return y - - -def _add_pct_slider( - *, - parent, - x: int, - y: int, - title: str, - initial_pct_0to1: float, - on_change_pct_0to1 -): - """ - Render a labeled 0..100% slider with 0.1% resolution. - The underlying callback receives a float in [0.0, 1.0]. - Returns a setter: set_pct_0to1(float) -> None for external syncing. 
- """ - Text(title, parent=parent, x=x, y=y + 6, style=make_button_text_style()) - - ui_value = max(0.0, min(100.0, float(initial_pct_0to1) * 100.0)) - - slider = Slider( - parent=parent, x=x + 125, y=y, width=230, height=32, - min_value=0.0, max_value=100.0, initial_value=ui_value, - step=0.1, tick_count=0, with_buttons=True, - ) - - value_text = Text(f"{ui_value:.1f}%", parent=parent, x=x + 360, y=y + 8, style=make_button_text_style()) - - def _on_slider(val: float): - val = max(0.0, min(100.0, float(val))) - value_text.set_text(f"{val:.1f}%") - try: - on_change_pct_0to1(val / 100.0) - except Exception as e: - print(f"[Automation Settings] Failed to apply '{title}' = {val}% → {e}") - - slider.on_change = _on_slider - - def set_pct_0to1(p: float): - p = max(0.0, min(1.0, float(p))) - slider.set_value(p * 100.0, notify=False) - value_text.set_text(f"{p * 100.0:.1f}%") - - return set_pct_0to1 - - -def add_save_load_reset_section(modal, automated_controller: AutomatedPrinter, sync_modal_from_automation, y: int, x: int = 8) -> None: - import tkinter as tk - from tkinter import filedialog - btn_w, btn_h = 88, 28 - spacing = 12 - y += 8 - - # Save - def on_save(): - try: - automated_controller.save_automation_settings() # persists to active file (with backups) - except Exception as e: - print(f"[Automation Save] Failed: {e}") - print("Saved Settings") - - Button( - on_save, - x=x, y=y, width=btn_w, height=btn_h, - text="Save", parent=modal, - colors=BASE_BUTTON_COLORS, text_style=RADIO_TEXT_STYLE, - ) - - # Load (from arbitrary YAML) - def on_load(): - root = tk.Tk(); root.withdraw() - cfg_dir = automated_controller.get_automation_config_dir() - filepath = filedialog.askopenfilename( - initialdir=str(cfg_dir), - title="Select Automation Config File", - filetypes=[("YAML files", "*.yaml"), ("All files", "*.*")], - ) - root.destroy() - if not filepath: - return - try: - # Parity with camera settings loader - loaded = AutomationSettingsManager.load_from_file(filepath) - 
automated_controller.set_automation_settings(loaded, persist=False) # apply immediately - sync_modal_from_automation(modal, automated_controller) # refresh widgets - except Exception as e: - print(f"[Automation Load] Failed to load/apply '{filepath}': {e}") - print("Loaded Settings") - - Button( - on_load, - x=x + (btn_w + spacing), y=y, width=btn_w, height=btn_h, - text="Load", parent=modal, - colors=BASE_BUTTON_COLORS, text_style=RADIO_TEXT_STYLE, - ) - - # Reset → restore defaults into active (with backup), apply, and persist - def on_reset(): - try: - automated_controller.restore_default_automation_settings(persist=True) - sync_modal_from_automation(modal, automated_controller) - except Exception as e: - print(f"[Automation Reset] Failed to restore defaults: {e}") - print("Reset Settings") - - Button( - on_reset, - x=x + 2*(btn_w + spacing), y=y, width=btn_w, height=btn_h, - text="Reset", parent=modal, - colors=BASE_BUTTON_COLORS, text_style=RADIO_TEXT_STYLE, - ) - - -def build_automation_settings_modal(modal: Modal, automated_controller: AutomatedPrinter): - - scroll_area = ScrollFrame(parent=modal, x=0, y=0, width=modal.width, height=365) - layout = _Layout(offset=40) - s = automated_controller.automation_settings - - # Image name format - image_name_format_height = 8 + layout.next_y() - Text("Image Name Format: ", parent=scroll_area, x=8, y=image_name_format_height + 5, style=make_button_text_style()) - format_field = TextField( - parent=scroll_area, - x=220, y=image_name_format_height, width=250, - allowed_pattern=r'^[^\\/:*?"<>|\x00-\x1F]+$', - border_color=pygame.Color("#b3b4b6"), text_color=pygame.Color("#5a5a5a") - ) - format_field.set_text(s.image_name_template) - - def _on_template_change(text: str): - # empty -> fallback to existing template so we don't persist a blank - tpl = text.strip() or automated_controller.automation_settings.image_name_template - automated_controller.update_automation_settings(persist=False, image_name_template=tpl) - - 
format_field.on_change = _on_template_change - Tooltip.attach(format_field, - "Format Options\n\ -{x}, {y}, {z} - Position coordinates (supports zero-padding and custom decimal delimiter)\n\ -{i} - Image index\n\ -{f} - Focus score\n\ -{d:%Y%m%d} - Date/time (customizable with standard strftime format codes)\n\ -\n\ -Example:\n\ -{d:%Y%m%d}_X={x}_Y={y}_Image_{i} -> 20251021_X=010.40_Y=000.04_Image_1" - ) - - - row_y = layout.next_y() - Text("Zero-Pad Coordinates", parent=scroll_area, x=8, y=row_y + 5, style=make_button_text_style()) - - def on_zero_pad_change(selected_val): - # Accept "true"/"false" (string) or button.value - value = True if (selected_val == "true" or getattr(selected_val, "value", None) == "true") else False - automated_controller.update_automation_settings(persist=False, zero_pad=value) - - zero_group = RadioGroup(allow_deselect=False, on_change=on_zero_pad_change) - RadioButton(lambda: None, x=220, y=row_y, width=56, height=32, text="True", - value="true", group=zero_group, parent=scroll_area, - colors=BASE_BUTTON_COLORS, selected_colors=SELECTED_RADIO_COLORS, text_style=RADIO_TEXT_STYLE) - RadioButton(lambda: None, x=280, y=row_y, width=64, height=32, text="False", - value="false", group=zero_group, parent=scroll_area, - colors=BASE_BUTTON_COLORS, selected_colors=SELECTED_RADIO_COLORS, text_style=RADIO_TEXT_STYLE) - - # Initialize from current settings (default True if missing) - zero_group.set_value("true" if automated_controller.automation_settings.zero_pad else "false") - - # Decimal delimiter (mutually exclusive) - row_y = layout.next_y() - Text("Decimal Delimiter", parent=scroll_area, x=8, y=row_y + 5, style=make_button_text_style()) - - def on_delim_change(selected_btn): - val = None if selected_btn is None else selected_btn.value - # Only accept the four allowed delimiters - if val in {"_", "-", "=", "."}: - automated_controller.update_automation_settings(persist=False, delimiter=val) - - delim_group = RadioGroup(allow_deselect=False, 
on_change=on_delim_change) - - # Buttons: "_", "-", "=", "." - RadioButton(lambda: None, x=220, y=row_y, width=36, height=32, text="_", - value="_", group=delim_group, parent=scroll_area, - colors=BASE_BUTTON_COLORS, selected_colors=SELECTED_RADIO_COLORS, text_style=RADIO_TEXT_STYLE) - RadioButton(lambda: None, x=260, y=row_y, width=36, height=32, text="-", - value="-", group=delim_group, parent=scroll_area, - colors=BASE_BUTTON_COLORS, selected_colors=SELECTED_RADIO_COLORS, text_style=RADIO_TEXT_STYLE) - RadioButton(lambda: None, x=300, y=row_y, width=36, height=32, text="=", - value="=", group=delim_group, parent=scroll_area, - colors=BASE_BUTTON_COLORS, selected_colors=SELECTED_RADIO_COLORS, text_style=RADIO_TEXT_STYLE) - RadioButton(lambda: None, x=340, y=row_y, width=36, height=32, text=".", - value=".", group=delim_group, parent=scroll_area, - colors=BASE_BUTTON_COLORS, selected_colors=SELECTED_RADIO_COLORS, text_style=RADIO_TEXT_STYLE) - - # Initialize from current settings (default ".") - delim_group.set_value(automated_controller.automation_settings.delimiter) - - - # Focus Scale - row_y = layout.next_y() - Text("Focus Scale", parent=scroll_area, x=8, y=row_y + 5, style=make_button_text_style()) - - focus_scale_slider = Slider( - parent=scroll_area, x=153, y=row_y, width=230, height=32, - min_value=0.0, max_value=1.0, initial_value=getattr(automated_controller.machine_vision, "scale_factor", 1.0), - step=0.001, tick_count=0, with_buttons=True, - ) - - focus_scale_value = Text( - f"{getattr(automated_controller.machine_vision, 'scale_factor', 1.0):.3f}", - parent=scroll_area, x=390, y=row_y + 8, style=make_button_text_style() - ) - - def on_focus_scale_change(val: float): - try: - val = max(0.0, min(1.0, float(val))) - automated_controller.update_automation_settings(persist=False, scale_factor=val) - automated_controller.machine_vision.scale_factor = val - focus_scale_value.set_text(f"{val:.3f}") - except Exception as e: - print(f"[Automation Settings] 
Failed to set focus scale: {e}") - - focus_scale_slider.on_change = on_focus_scale_change - - - # Machine Vision Exclusion Zones - Text("Machine Vision Exclusion Zones", parent=scroll_area, x=8, y=8 + layout.next_y(), style=make_button_text_style()) - - slider_setters = {} - - row_y = layout.next_y() - slider_setters["top"] = _add_pct_slider( - parent=scroll_area, x=28, y=row_y, - title="Top Side:", - initial_pct_0to1=getattr(s, "inset_top_pct", 0.0), - on_change_pct_0to1=lambda v: automated_controller.update_automation_settings( - persist=False, inset_top_pct=float(v)) - ) - - row_y = layout.next_y() - slider_setters["left"] = _add_pct_slider( - parent=scroll_area, x=28, y=row_y, - title="Left Side:", - initial_pct_0to1=getattr(s, "inset_left_pct", 0.0), - on_change_pct_0to1=lambda v: automated_controller.update_automation_settings( - persist=False, inset_left_pct=float(v)) - ) - - row_y = layout.next_y() - slider_setters["bottom"] = _add_pct_slider( - parent=scroll_area, x=28, y=row_y, - title="Bottom Side:", - initial_pct_0to1=getattr(s, "inset_bottom_pct", 0.0), - on_change_pct_0to1=lambda v: automated_controller.update_automation_settings( - persist=False, inset_bottom_pct=float(v)) - ) - - row_y = layout.next_y() - slider_setters["right"] = _add_pct_slider( - parent=scroll_area, x=28, y=row_y, - title="Right Side:", - initial_pct_0to1=getattr(s, "inset_right_pct", 0.0), - on_change_pct_0to1=lambda v: automated_controller.update_automation_settings( - persist=False, inset_right_pct=float(v)) - ) - - def sync_modal_from_automation(modal_obj, controller): - st = controller.automation_settings - - # template - try: - format_field.set_text(st.image_name_template or "") - except Exception: - pass - - # zero-pad - try: - zero_group.set_value("true" if st.zero_pad else "false") - except Exception: - pass - - # delimiter - try: - delim_group.set_value(st.delimiter if st.delimiter in {"_", "-", "=", "."} else ".") - except Exception: - pass - - # focus scale - try: - 
fs = float(st.scale_factor) - focus_scale_slider.set_value(fs, notify=False) - focus_scale_value.set_text(f"{fs:.3f}") - except Exception: - pass - - # sliders - try: - slider_setters["top"](float(getattr(st, "inset_top_pct", 0.0))) - slider_setters["left"](float(getattr(st, "inset_left_pct", 0.0))) - slider_setters["bottom"](float(getattr(st, "inset_bottom_pct", 0.0))) - slider_setters["right"](float(getattr(st, "inset_right_pct", 0.0))) - except Exception: - pass - - add_save_load_reset_section( - modal, - automated_controller, - sync_modal_from_automation, - y=modal.height - 80 - ) \ No newline at end of file diff --git a/UI/modals/camera_settings_modal.py b/UI/modals/camera_settings_modal.py deleted file mode 100644 index daa63bd..0000000 --- a/UI/modals/camera_settings_modal.py +++ /dev/null @@ -1,619 +0,0 @@ -from __future__ import annotations -import pygame -import tkinter as tk -from tkinter import filedialog -from pathlib import Path - -from UI.text import Text -from UI.input.text_field import TextField -from UI.input.slider import Slider -from UI.input.radio import RadioButton, RadioGroup -from UI.input.button import Button, ButtonColors -from UI.input.scroll_frame import ScrollFrame - -from UI.styles import ( - make_settings_text_style, - BASE_BUTTON_COLORS, - SELECTED_RADIO_COLORS, - RADIO_TEXT_STYLE, -) - -from camera.camera_settings import CameraSettingsManager # adjust import if needed - -# --------------------------------------------------------------------------- -# Constants / patterns -# --------------------------------------------------------------------------- -NUMERIC_PATTERN = r"^-?\d*\.?\d*$" # existing for slider text fields -DIGITS_SIGNED = r"^-?\d{0,5}$" # allow up to 5 digits while typing; clamp on commit - -# ---- Sync helpers ---- -def _ensure_sync_registry(modal): - if not hasattr(modal, "_settings_syncers"): - modal._settings_syncers = [] - -def _register_syncer(modal, fn): - _ensure_sync_registry(modal) - 
modal._settings_syncers.append(fn) - -def sync_modal_from_camera(modal, camera): - # Run all registered syncers to update the UI from camera.settings - for fn in getattr(modal, "_settings_syncers", []): - try: - fn() - except Exception as e: - print(f"[Modal Sync] syncer failed: {e}") - -# --------------------------------------------------------------------------- -# Layout helper -# --------------------------------------------------------------------------- -class _Layout: - """Simple vertical slot layout using a fixed section offset.""" - def __init__(self, offset: int = 60): - self.offset = offset - self._i = 0 - - def next_y(self) -> int: - y = self.offset * self._i - self._i += 1 - return y - - -# --------------------------------------------------------------------------- -# Low-level builders (reusable widgets) -# --------------------------------------------------------------------------- - -def _fmt_value(v, value_type: type, decimals: int | None) -> str: - if value_type is int: - try: - return str(int(float(v))) - except Exception: - return "0" - if decimals is not None: - try: - return f"{float(v):.{decimals}f}" - except Exception: - return f"{0.0:.{decimals}f}" - # Default float-ish formatting but hide trailing .0 - try: - fv = float(v) - return str(int(fv)) if fv.is_integer() else str(fv) - except Exception: - return "0" - - -def create_numeric_setting( - *, - modal, - camera, - settings, - title: str, - y: int, - attr: str, # e.g. 
"exposure" - min_value: float, - max_value: float, - value_type: type = int, - tick_count: int = 8, - decimals: int | None = None, - x: int = 8, -) -> None: - """Build a labeled slider + numeric text field bound to a single settings attr.""" - Text(title, parent=modal, x=x, y=y + 8, style=make_settings_text_style()) - - cur = getattr(settings, attr) - slider = Slider( - parent=modal, x=x, y=y + 28, width=200, height=32, - min_value=min_value, max_value=max_value, initial_value=cur, - tick_count=tick_count, with_buttons=True, - ) - - text_field = TextField( - parent=modal, x=x + 208, y=y + 28, width=65, height=32, - placeholder=str(cur), allowed_pattern=NUMERIC_PATTERN, - border_color=pygame.Color("#b3b4b6"), text_color=pygame.Color("#5a5a5a"), - ) - - last_applied = [None] - - def apply_value(v): - try: - fv = float(v) - except (TypeError, ValueError): - fv = float(getattr(settings, attr)) - clamped = max(min_value, min(max_value, fv)) - if last_applied[0] is not None and clamped == last_applied[0]: - return - camera.update_settings( - persist=False, - **{attr: int(clamped) if value_type is int else float(clamped)}, - ) - text_field.set_text(_fmt_value(clamped, value_type, decimals), emit=False) - last_applied[0] = clamped - - def on_slider(val: float): - apply_value(val) - - slider.on_change = on_slider - - def on_text_change(txt: str): - # Update the slider visual while typing, but do not apply until commit - if txt in ("", "-", ".", "-."): - return - try: - v = float(txt) - except ValueError: - return - v = max(min_value, min(max_value, v)) - slider.value = v - - text_field.on_text_change = on_text_change - - def on_commit(txt: str): - if txt in ("", "-", ".", "-."): - text_field.set_text(_fmt_value(slider.value, value_type, decimals), emit=False) - return - apply_value(txt) - if last_applied[0] is not None: - slider.value = last_applied[0] - - text_field.on_commit = on_commit - text_field.set_text(_fmt_value(cur, value_type, decimals), emit=False) - - def 
_sync_from_camera(): - cur_val = getattr(camera.settings, attr) - slider.value = float(cur_val) - text_field.set_text(_fmt_value(cur_val, value_type, decimals), emit=False) - - _register_syncer(modal, _sync_from_camera) - - - -def create_rgb_triplet_setting( - *, - modal, - camera, - title: str, - y: int, - get_vals, # () -> tuple[int, int, int] - set_field_name: str, # name of settings field to update via update_settings(...) - per_channel_bounds: list[tuple[int, int]] | None = None, - x: int = 8, -) -> None: - """Build three numeric fields (R/G/B) for an RGB-like tuple setting.""" - Text(title, parent=modal, x=x, y=y + 8, style=make_settings_text_style()) - - Text("R", parent=modal, x=x, y=y + 34, style=make_settings_text_style()) - Text("G", parent=modal, x=x + 86, y=y + 34, style=make_settings_text_style()) - Text("B", parent=modal, x=x + 172, y=y + 34, style=make_settings_text_style()) - - current = list(get_vals()) - bounds = per_channel_bounds or [(0, 255), (0, 255), (0, 255)] - - def make_commit(idx: int, tf: TextField): - lo, hi = bounds[idx] - def _commit(txt: str): - try: - v = int(txt) - except (TypeError, ValueError): - v = lo - v = max(lo, min(hi, v)) - current[idx] = v - camera.update_settings(persist=False, **{set_field_name: tuple(current)}) - tf.set_text(str(v), emit=False) - return _commit - - r_field = TextField( - parent=modal, x=x + 16, y=y + 28, width=64, height=32, - placeholder=str(current[0]), allowed_pattern=DIGITS_SIGNED, - border_color=pygame.Color("#b3b4b6"), text_color=pygame.Color("#5a5a5a"), - ) - r_field.on_commit = make_commit(0, r_field) - r_field.set_text(str(current[0]), emit=False) - - g_field = TextField( - parent=modal, x=x + 102, y=y + 28, width=64, height=32, - placeholder=str(current[1]), allowed_pattern=DIGITS_SIGNED, - border_color=pygame.Color("#b3b4b6"), text_color=pygame.Color("#5a5a5a"), - ) - g_field.on_commit = make_commit(1, g_field) - g_field.set_text(str(current[1]), emit=False) - - b_field = TextField( - 
parent=modal, x=x + 188, y=y + 28, width=64, height=32, - placeholder=str(current[2]), allowed_pattern=DIGITS_SIGNED, - border_color=pygame.Color("#b3b4b6"), text_color=pygame.Color("#5a5a5a"), - ) - b_field.on_commit = make_commit(2, b_field) - b_field.set_text(str(current[2]), emit=False) - - def _sync_from_camera(): - vals = list(get_vals()) # this already reads from settings - r_field.set_text(str(vals[0]), emit=False) - g_field.set_text(str(vals[1]), emit=False) - b_field.set_text(str(vals[2]), emit=False) - - _register_syncer(modal, _sync_from_camera) - -# --------------------------------------------------------------------------- -# Individual setting sections -# --------------------------------------------------------------------------- - -def add_file_format_section(modal, camera, settings, *, y: int, x: int = 8) -> None: - Text("File Format", parent=modal, x=x, y=y + 8, style=make_settings_text_style()) - - def on_image_format_change(selected_btn): - camera.update_settings(persist=False, fformat=(None if selected_btn is None else selected_btn.value)) - - base_y = y + 28 - image_format = RadioGroup(allow_deselect=False, on_change=on_image_format_change) - RadioButton(lambda: None, x=x, y=base_y, width=48, height=32, text="png", - value="png", group=image_format, parent=modal, - colors=BASE_BUTTON_COLORS, selected_colors=SELECTED_RADIO_COLORS, text_style=RADIO_TEXT_STYLE) - RadioButton(lambda: None, x=x + 56, y=base_y, width=56, height=32, text="jpeg", - value="jpeg", group=image_format, parent=modal, - colors=BASE_BUTTON_COLORS, selected_colors=SELECTED_RADIO_COLORS, text_style=RADIO_TEXT_STYLE) - RadioButton(lambda: None, x=x + 120, y=base_y, width=56, height=32, text="jpg", - value="jpg", group=image_format, parent=modal, - colors=BASE_BUTTON_COLORS, selected_colors=SELECTED_RADIO_COLORS, text_style=RADIO_TEXT_STYLE) - RadioButton(lambda: None, x=x + 184, y=base_y, width=56, height=32, text="tiff", - value="tiff", group=image_format, parent=modal, - 
colors=BASE_BUTTON_COLORS, selected_colors=SELECTED_RADIO_COLORS, text_style=RADIO_TEXT_STYLE) - - # Initialize from current settings - image_format.set_value(settings.fformat) - - _register_syncer(modal, lambda: image_format.set_value(camera.settings.fformat)) - - -def add_camera_temperature_setting(modal, camera, settings, *, y: int, x: int = 8) -> None: - create_numeric_setting( - modal=modal, camera=camera, settings=settings, - title="Camera Temperature", y=y, - min_value=settings.temp_min, max_value=settings.temp_max, - attr="temp", value_type=int, x=x, - ) - - -def add_auto_exposure_section(modal, camera, settings, *, y: int, x: int = 8) -> None: - Text("Use Auto Exposure", parent=modal, x=x, y=y + 8, style=make_settings_text_style()) - - def on_auto_expo_change(selected_val): - value = True if (selected_val == "true" or getattr(selected_val, "value", None) == "true") else False - camera.update_settings(persist=False, auto_expo=value) - - base_y = y + 28 - group = RadioGroup(allow_deselect=False, on_change=on_auto_expo_change) - RadioButton(lambda: None, x=x, y=base_y, width=48, height=32, text="True", - value="true", group=group, parent=modal, - colors=BASE_BUTTON_COLORS, selected_colors=SELECTED_RADIO_COLORS, text_style=RADIO_TEXT_STYLE) - RadioButton(lambda: None, x=x + 56, y=base_y, width=56, height=32, text="False", - value="false", group=group, parent=modal, - colors=BASE_BUTTON_COLORS, selected_colors=SELECTED_RADIO_COLORS, text_style=RADIO_TEXT_STYLE) - - group.set_value("true" if settings.auto_expo else "false") - - _register_syncer( - modal, - lambda g=group: g.set_value("true" if camera.settings.auto_expo else "false") - ) - - - -# Scalar slider wrappers (one function per setting) - -def add_exposure_setting(modal, camera, settings, *, y: int, x: int = 8) -> None: - create_numeric_setting(modal=modal, camera=camera, settings=settings, - title="Exposure", y=y, attr="exposure", - min_value=settings.exposure_min, max_value=settings.exposure_max, - 
value_type=int, x=x) - - - -def add_tint_setting(modal, camera, settings, *, y: int, x: int = 8) -> None: - create_numeric_setting(modal=modal, camera=camera, settings=settings, - title="Tint", y=y, attr="tint", - min_value=settings.tint_min, max_value=settings.tint_max, - value_type=int, x=x) - - -def add_contrast_setting(modal, camera, settings, *, y: int, x: int = 8) -> None: - create_numeric_setting(modal=modal, camera=camera, settings=settings, - title="Contrast", y=y, attr="contrast", - min_value=settings.contrast_min, max_value=settings.contrast_max, - value_type=int, x=x) - - -def add_hue_setting(modal, camera, settings, *, y: int, x: int = 8) -> None: - create_numeric_setting(modal=modal, camera=camera, settings=settings, - title="Hue", y=y, attr="hue", - min_value=settings.hue_min, max_value=settings.hue_max, - value_type=int, x=x) - - -def add_saturation_setting(modal, camera, settings, *, y: int, x: int = 8) -> None: - create_numeric_setting(modal=modal, camera=camera, settings=settings, - title="Saturation", y=y, attr="saturation", - min_value=settings.saturation_min, max_value=settings.saturation_max, - value_type=int, x=x) - - -def add_brightness_setting(modal, camera, settings, *, y: int, x: int = 8) -> None: - create_numeric_setting(modal=modal, camera=camera, settings=settings, - title="Brightness", y=y, attr="brightness", - min_value=settings.brightness_min, max_value=settings.brightness_max, - value_type=int, x=x) - - -def add_gamma_setting(modal, camera, settings, *, y: int, x: int = 8) -> None: - create_numeric_setting(modal=modal, camera=camera, settings=settings, - title="Gamma", y=y, attr="gamma", - min_value=settings.gamma_min, max_value=settings.gamma_max, - value_type=int, x=x) - - -def add_sharpening_setting(modal, camera, settings, *, y: int, x: int = 8) -> None: - create_numeric_setting(modal=modal, camera=camera, settings=settings, - title="Sharpening", y=y, attr="sharpening", - min_value=settings.sharpening_min, 
max_value=settings.sharpening_max, - value_type=int, x=x) - - -def add_linear_tone_mapping_section(modal, camera, settings, *, y: int, x: int = 8) -> None: - Text("Use Linear Tone Mapping", parent=modal, x=x, y=y + 8, style=make_settings_text_style()) - - def on_linear_change(selected_val): - value = 1 if (selected_val == "true" or getattr(selected_val, "value", None) == "true") else 0 - camera.update_settings(persist=False, linear=value) - - base_y = y + 28 - group = RadioGroup(allow_deselect=False, on_change=on_linear_change) - RadioButton(lambda: None, x=x, y=base_y, width=48, height=32, text="True", - value="true", group=group, parent=modal, - colors=BASE_BUTTON_COLORS, selected_colors=SELECTED_RADIO_COLORS, text_style=RADIO_TEXT_STYLE) - RadioButton(lambda: None, x=x + 56, y=base_y, width=56, height=32, text="False", - value="false", group=group, parent=modal, - colors=BASE_BUTTON_COLORS, selected_colors=SELECTED_RADIO_COLORS, text_style=RADIO_TEXT_STYLE) - - group.set_value("true" if settings.linear == 1 else "false") - - _register_syncer( - modal, - lambda g=group: g.set_value("true" if camera.settings.linear == 1 else "false") - ) - - - -def add_curved_tone_mapping_section(modal, camera, settings, *, y: int, x: int = 8) -> None: - Text("Curved Tone Mapping", parent=modal, x=x, y=y + 8, style=make_settings_text_style()) - - def on_curved_change(selected_btn): - camera.update_settings(persist=False, curve=(None if selected_btn is None else selected_btn.value)) - - base_y = y + 28 - group = RadioGroup(allow_deselect=False, on_change=on_curved_change) - RadioButton(lambda: None, x=x, y=base_y, width=104, height=32, text="Logarithmic", - value="Logarithmic", group=group, parent=modal, - colors=BASE_BUTTON_COLORS, selected_colors=SELECTED_RADIO_COLORS, text_style=RADIO_TEXT_STYLE) - RadioButton(lambda: None, x=x + 112, y=base_y, width=104, height=32, text="Polynomial", - value="Polynomial", group=group, parent=modal, - colors=BASE_BUTTON_COLORS, 
selected_colors=SELECTED_RADIO_COLORS, text_style=RADIO_TEXT_STYLE) - RadioButton(lambda: None, x=x + 224, y=base_y, width=48, height=32, text="Off", - value="Off", group=group, parent=modal, - colors=BASE_BUTTON_COLORS, selected_colors=SELECTED_RADIO_COLORS, text_style=RADIO_TEXT_STYLE) - - group.set_value(settings.curve) - - _register_syncer( - modal, - lambda g=group: g.set_value(camera.settings.curve) - ) - - - - - -def add_level_range_low_setting(modal, camera, settings, *, y: int, x: int = 8) -> None: - lr_min = settings.levelrange_min - lr_max = settings.levelrange_max - lr_bounds = [(lr_min, lr_max), (lr_min, lr_max), (lr_min, lr_max)] - - def get_level_low_rgb(): - lr = settings.levelrange_low - return (lr[0], lr[1], lr[2]) - - create_rgb_triplet_setting( - modal=modal, camera=camera, title="Level Range Low", y=y, - get_vals=get_level_low_rgb, set_field_name="levelrange_low", - per_channel_bounds=lr_bounds, x=x, - ) - - -def add_level_range_high_setting(modal, camera, settings, *, y: int, x: int = 8) -> None: - lr_min = settings.levelrange_min - lr_max = settings.levelrange_max - lr_bounds = [(lr_min, lr_max), (lr_min, lr_max), (lr_min, lr_max)] - - def get_level_high_rgb(): - lr = settings.levelrange_high - return (lr[0], lr[1], lr[2]) - - create_rgb_triplet_setting( - modal=modal, camera=camera, title="Level Range High", y=y, - get_vals=get_level_high_rgb, set_field_name="levelrange_high", - per_channel_bounds=lr_bounds, x=x, - ) - - -def add_white_balance_gain_setting(modal, camera, settings, *, y: int, x: int = 8) -> None: - wb_min = settings.wbgain_min - wb_max = settings.wbgain_max - wb_bounds = [(wb_min, wb_max), (wb_min, wb_max), (wb_min, wb_max)] - - def get_wbgain(): - return settings.wbgain - - create_rgb_triplet_setting( - modal=modal, camera=camera, title="White Balance Gain", y=y, - get_vals=get_wbgain, set_field_name="wbgain", - per_channel_bounds=wb_bounds, x=x, - ) - - -def add_overlay_controls_section(modal, overlay, *, y: int, x: int = 
8) -> None: - """Add toggles for crosshair and DPI display visibility.""" - if overlay is None: - return - - Text("Camera Overlay Controls", parent=modal, x=x, y=y + 8, style=make_settings_text_style()) - - btn_w, btn_h = 135, 28 - spacing = 12 - base_y = y + 28 - - # Crosshair toggle - def toggle_crosshair(): - overlay.toggle_crosshair() - crosshair_btn.set_text( - "Crosshair: ON" if overlay.crosshair_visible else "Crosshair: OFF" - ) - - crosshair_btn = Button( - toggle_crosshair, - x=x, y=base_y, width=btn_w, height=btn_h, - text=f"Crosshair: {'ON' if overlay.crosshair_visible else 'OFF'}", - parent=modal, - colors=BASE_BUTTON_COLORS, - text_style=RADIO_TEXT_STYLE, - ) - - # DPI display toggle - def toggle_dpi(): - overlay.toggle_dpi_display() - dpi_btn.set_text( - "DPI Display: ON" if overlay.dpi_display_visible else "DPI Display: OFF" - ) - - dpi_btn = Button( - toggle_dpi, - x=x + (btn_w + spacing), y=base_y, width=btn_w, height=btn_h, - text=f"DPI Display: {'ON' if overlay.dpi_display_visible else 'OFF'}", - parent=modal, - colors=BASE_BUTTON_COLORS, - text_style=RADIO_TEXT_STYLE, - ) - - -def add_save_load_reset_section(modal, camera, *, y: int, x: int = 8) -> None: - btn_w, btn_h = 88, 28 - spacing = 12 - y += 8 - - # Save - Button( - lambda: camera.save_settings(), - x=x, y=y, width=btn_w, height=btn_h, - text="Save", parent=modal, - colors=BASE_BUTTON_COLORS, text_style=RADIO_TEXT_STYLE, - ) - - # Load - def on_load(): - root = tk.Tk(); root.withdraw() - cfg_dir = camera.get_config_dir() - filepath = filedialog.askopenfilename( - initialdir=str(cfg_dir), - title="Select Camera Config File", - filetypes=[("YAML files", "*.yaml"), ("All files", "*.*")], - ) - root.destroy() - if not filepath: - return - try: - loaded = CameraSettingsManager.load_from_file(filepath) - camera.set_settings(loaded, persist=False) # applies immediately - sync_modal_from_camera(modal, camera) # <-- refresh widgets in-place - except Exception as e: - print(f"[Load Settings] 
Failed to load/apply '{filepath}': {e}") - - Button( - on_load, - x=x + (btn_w + spacing), y=y, width=btn_w, height=btn_h, - text="Load", parent=modal, - colors=BASE_BUTTON_COLORS, text_style=RADIO_TEXT_STYLE, - ) - - # Reset - def on_reset(): - try: - camera.restore_default_settings(persist=True) # applies + saves active - sync_modal_from_camera(modal, camera) # <-- refresh widgets in-place - except Exception as e: - print(f"[Reset Settings] Failed to restore defaults: {e}") - - Button( - on_reset, - x=x + 2*(btn_w + spacing), y=y, width=btn_w, height=btn_h, - text="Reset", parent=modal, - colors=BASE_BUTTON_COLORS, text_style=RADIO_TEXT_STYLE, - ) - -# --------------------------------------------------------------------------- -# Orchestrator -# --------------------------------------------------------------------------- - -def build_camera_settings_modal(modal, camera, overlay=None) -> None: - """Populate the provided Modal with camera setting controls. - This applies values live via camera.update_settings(persist=False). - - Args: - modal: The Modal instance to populate with controls - camera: Camera instance with settings - overlay: Optional InteractiveCameraOverlay instance for overlay controls - - Layout is organized into vertically stacked sections. Each section is - built by a dedicated function to keep this module modular and easy to - extend or rearrange. 
- """ - _ensure_sync_registry(modal) - modal._settings_syncers.clear() - settings = camera.settings - - scroll_area = ScrollFrame(parent=modal, x=0, y= 0, width=modal.width, height=580) - layout = _Layout(offset=60) - - - # File format - add_file_format_section(scroll_area, camera, settings, y=layout.next_y()) - - # Camera temperature - add_camera_temperature_setting(scroll_area, camera, settings, y=layout.next_y()) - - # Auto exposure toggle - add_auto_exposure_section(scroll_area, camera, settings, y=layout.next_y()) - - # Core scalar sliders - add_exposure_setting(scroll_area, camera, settings, y=layout.next_y()) - add_tint_setting(scroll_area, camera, settings, y=layout.next_y()) - add_contrast_setting(scroll_area, camera, settings, y=layout.next_y()) - add_hue_setting(scroll_area, camera, settings, y=layout.next_y()) - add_saturation_setting(scroll_area, camera, settings, y=layout.next_y()) - add_brightness_setting(scroll_area, camera, settings, y=layout.next_y()) - add_gamma_setting(scroll_area, camera, settings, y=layout.next_y()) - add_sharpening_setting(scroll_area, camera, settings, y=layout.next_y()) - - # Tone mapping toggles - add_linear_tone_mapping_section(scroll_area, camera, settings, y=layout.next_y()) - add_curved_tone_mapping_section(scroll_area, camera, settings, y=layout.next_y()) - - # Level ranges + WB - add_level_range_low_setting(scroll_area, camera, settings, y=layout.next_y()) - add_level_range_high_setting(scroll_area, camera, settings, y=layout.next_y()) - add_white_balance_gain_setting(scroll_area, camera, settings, y=layout.next_y()) - - # Overlay controls (crosshair and DPI display) - if overlay is not None: - add_overlay_controls_section(scroll_area, overlay, y=layout.next_y()) - - add_save_load_reset_section( - modal, camera, - y=modal.height-80 - ) \ No newline at end of file diff --git a/UI/modals/sample_settings_modal.py b/UI/modals/sample_settings_modal.py deleted file mode 100644 index af0a954..0000000 --- 
a/UI/modals/sample_settings_modal.py +++ /dev/null @@ -1,419 +0,0 @@ -""" -Sample Settings Modal - Manual configuration of sample positions for the printer. -""" - -import pygame -from typing import List, Tuple, Callable - -from UI.frame import Frame -from UI.modal import Modal -from UI.text import Text, TextStyle -from UI.input.button import Button, ButtonColors -from UI.input.text_field import TextField -from UI.input.scroll_frame import ScrollFrame -from UI.list_frame import ListFrame -from UI.styles import make_button_text_style, make_display_text_style - -from printer.automated_controller import AutomatedPrinter - - -def build_sample_settings_modal(modal: Modal, controller: AutomatedPrinter) -> None: - """ - Build the sample settings modal UI for configuring sample positions. - - Args: - modal: The Modal frame to populate - controller: The printer controller instance - """ - - # Container for all settings - content = modal.body - - y_offset = 10 - - # ========== Camera Height Section ========== - Text( - text="Camera Height (mm):", - parent=content, - x=10, y=y_offset, - style=make_display_text_style(16) - ) - - camera_height_field = TextField( - parent=content, - x=10, y=y_offset + 25, - width=200, height=30, - placeholder="0.00", - border_color=pygame.Color("#b3b4b6"), - text_color=pygame.Color("#5a5a5a") - ) - - def set_camera_height_from_position(): - """Set camera height field to current Z position""" - z_mm = controller.position.z / 100.0 - camera_height_field.set_text(f"{z_mm:.2f}") - - Button( - set_camera_height_from_position, - x=220, y=y_offset + 25, - width=175, height=30, - text="Set from Current", - parent=content, - text_style=make_button_text_style() - ) - - # Note about camera height - Text( - text="Must be above and out of focus of all samples", - parent=content, - x=10, y=y_offset + 60, - style=TextStyle( - font_size=12, - color=pygame.Color("#7a7a7a"), - font_name="assets/fonts/SofiaSans-Regular.ttf" - ) - ) - - y_offset += 90 - - # 
========== Y Start Offset Section ========== - Text( - text="Y Start Offset (mm):", - parent=content, - x=10, y=y_offset, - style=make_display_text_style(16) - ) - - y_start_field = TextField( - parent=content, - x=10, y=y_offset + 25, - width=200, height=30, - placeholder="0.00", - border_color=pygame.Color("#b3b4b6"), - text_color=pygame.Color("#5a5a5a") - ) - - def set_y_start_from_position(): - """Set Y start field to current Y position""" - y_mm = controller.position.y / 100.0 - y_start_field.set_text(f"{y_mm:.2f}") - - Button( - set_y_start_from_position, - x=220, y=y_offset + 25, - width=175, height=30, - text="Set from Current", - parent=content, - text_style=make_button_text_style() - ) - - y_offset += 70 - - # ========== Calibration Y Position Section ========== - Text( - text="Calibration Y Position (mm):", - parent=content, - x=10, y=y_offset, - style=make_display_text_style(16) - ) - - calibration_y_field = TextField( - parent=content, - x=10, y=y_offset + 25, - width=200, height=30, - placeholder="220.00", - border_color=pygame.Color("#b3b4b6"), - text_color=pygame.Color("#5a5a5a") - ) - - def set_calibration_y_from_position(): - """Set calibration Y field to current Y position""" - y_mm = controller.position.y / 100.0 - calibration_y_field.set_text(f"{y_mm:.2f}") - - Button( - set_calibration_y_from_position, - x=220, y=y_offset + 25, - width=175, height=30, - text="Set from Current", - parent=content, - text_style=make_button_text_style() - ) - - y_offset += 70 - - # ========== Calibration Z Position Section ========== - Text( - text="Calibration Z Position (mm):", - parent=content, - x=10, y=y_offset, - style=make_display_text_style(16) - ) - - calibration_z_field = TextField( - parent=content, - x=10, y=y_offset + 25, - width=200, height=30, - placeholder="26.00", - border_color=pygame.Color("#b3b4b6"), - text_color=pygame.Color("#5a5a5a") - ) - - def set_calibration_z_from_position(): - """Set calibration Z field to current Z position""" - z_mm = 
controller.position.z / 100.0 - calibration_z_field.set_text(f"{z_mm:.2f}") - - Button( - set_calibration_z_from_position, - x=220, y=y_offset + 25, - width=175, height=30, - text="Set from Current", - parent=content, - text_style=make_button_text_style() - ) - - y_offset += 70 - - # ========== Sample X Offsets Section ========== - Text( - text="Sample X Offsets:", - parent=content, - x=10, y=y_offset, - style=make_display_text_style(16) - ) - - y_offset += 30 - - # Scroll frame for sample list - scroll_height = 340 - scroll_area = ScrollFrame( - parent=content, - x=10, y=y_offset, - width=445, height=scroll_height, - background_color=pygame.Color("#f5f5f5"), - scrollbar_width=12 - ) - - # Store references to sample fields (index, field) tuples - sample_fields: List[Tuple[int, TextField]] = [] - - def build_sample_row(i: int, parent: Frame) -> None: - """Build a row for each sample's X offset""" - sample_num = i + 1 # Display number (1-based) - sample_index = i + 1 # Config key (also 1-based) - - # Sample label - Text( - text=f"Sample {sample_num}:", - parent=parent, - x=5, y=15, - style=make_display_text_style(14) - ) - - # Go To button (C) - def go_to_sample(): - """Move to this sample's X position using calibration Y and Z""" - try: - x_mm = float(x_field.text or "0.0") - cal_y_mm = float(calibration_y_field.text or "220.0") - cal_z_mm = float(calibration_z_field.text or "26.0") - - print(f"Moving to Sample {sample_num} calibration position: X={x_mm:.2f}, Y={cal_y_mm:.2f}, Z={cal_z_mm:.2f}") - - # Convert to ticks (0.01 mm units) - x_ticks = int(x_mm * 100) - y_ticks = int(cal_y_mm * 100) - z_ticks = int(cal_z_mm * 100) - - from printer.models import Position - target_pos = Position(x=x_ticks, y=y_ticks, z=z_ticks) - controller.move_to_position(target_pos) - - except ValueError as e: - print(f"Error parsing position values: {e}") - - Button( - go_to_sample, - x=100, y=5, - width=30, height=30, - text="C", - parent=parent, - text_style=TextStyle(font_size=14, 
color=pygame.Color("#5a5a5a")) - ) - - # X offset field - x_field = TextField( - parent=parent, - x=135, y=5, - width=115, height=30, - placeholder="0.00", - border_color=pygame.Color("#b3b4b6"), - text_color=pygame.Color("#5a5a5a") - ) - sample_fields.append((sample_index, x_field)) # Store with config index - - # Set from current button (only sets X) - def set_x_from_current(): - x_mm = controller.position.x / 100.0 - x_field.set_text(f"{x_mm:.2f}") - print(f"Sample {sample_num}: Set X offset to {x_mm:.2f} mm from current position") - - Button( - set_x_from_current, - x=260, y=5, - width=160, height=30, - text="Set X from Current", - parent=parent, - text_style=TextStyle(font_size=14, color=pygame.Color("#5a5a5a")) - ) - - # Create list of sample rows - num_samples = controller.get_num_slots() - sample_list = ListFrame( - parent=scroll_area, - x=0, y=0, - width=1.0, height=num_samples * 35, - width_is_percent=True, - row_height=35, - count=num_samples, - row_builder=build_sample_row - ) - - y_offset += scroll_height + 10 - - # ========== Load current values ========== - def load_values_from_config(): - """Load current values from printer config""" - try: - # Load calibration positions from proper fields - calibration_y = getattr(controller.config, 'calibration_y', 220.0) - calibration_z = getattr(controller.config, 'calibration_z', 26.0) - - calibration_y_field.set_text(f"{calibration_y:.2f}") - calibration_z_field.set_text(f"{calibration_z:.2f}") - - # Check if sample_positions exists and is a dict - if not hasattr(controller.config, 'sample_positions'): - print("No sample_positions found in config") - return - - if not isinstance(controller.config.sample_positions, dict): - print(f"sample_positions is not a dict: {type(controller.config.sample_positions)}") - return - - if len(controller.config.sample_positions) == 0: - print("sample_positions dict is empty") - return - - # Load camera height (from first sample's Z, assuming all share this) - # sample_positions 
is a dict with 1-based integer keys (1, 2, 3, ...) - if 1 in controller.config.sample_positions: - first_sample = controller.config.sample_positions[1] - if isinstance(first_sample, dict): - camera_z = first_sample.get("z", 0.0) - camera_height_field.set_text(f"{camera_z:.2f}") - - # Load Y start (from first sample's Y) - y_start = first_sample.get("y", 0.0) - y_start_field.set_text(f"{y_start:.2f}") - else: - print(f"First sample is not a dict: {type(first_sample)}") - return - - # Load each sample's X offset - for sample_index, field in sample_fields: - if sample_index in controller.config.sample_positions: - sample_pos = controller.config.sample_positions[sample_index] - if isinstance(sample_pos, dict): - x_offset = sample_pos.get("x", 0.0) - field.set_text(f"{x_offset:.2f}") - else: - print(f"Sample {sample_index} is not a dict: {type(sample_pos)}") - except Exception as e: - import traceback - print(f"Error loading sample settings: {e}") - print(traceback.format_exc()) - - # ========== Bottom Buttons ========== - button_y = y_offset + 5 - - def save_settings(): - """Save settings to printer config""" - try: - # Parse camera height, Y start, and calibration positions - camera_z = float(camera_height_field.text or "0.0") - y_start = float(y_start_field.text or "0.0") - calibration_y = float(calibration_y_field.text or "220.0") - calibration_z = float(calibration_z_field.text or "26.0") - - # Save calibration positions to proper fields - controller.config.calibration_y = calibration_y - controller.config.calibration_z = calibration_z - - # Create a fresh sample_positions dict to prevent accumulation of extra entries - new_sample_positions = {} - - # Update each sample position from the UI fields - for sample_index, field in sample_fields: - x_offset = float(field.text or "0.0") - - # Create the sample position entry - new_sample_positions[sample_index] = { - "x": x_offset, - "y": y_start, - "z": camera_z - } - - # Replace the entire sample_positions dict with our 
clean version - controller.config.sample_positions = new_sample_positions - - # Save the config - from printer.printerConfig import PrinterSettingsManager - PrinterSettingsManager.save(controller.CONFIG_SUBDIR, controller.config) - - print(f"Sample settings saved successfully ({len(new_sample_positions)} sample positions)") - modal.close() - - except ValueError as e: - print(f"Error parsing values: {e}") - except Exception as e: - import traceback - print(f"Error saving sample settings: {e}") - print(traceback.format_exc()) - - def reset_settings(): - """Reload settings from printer config""" - try: - # Reload the config from disk - from printer.printerConfig import PrinterSettingsManager - controller.config = PrinterSettingsManager.load(controller.CONFIG_SUBDIR) - - # Update UI fields - load_values_from_config() - - print("Sample settings reloaded from config") - - except Exception as e: - print(f"Error reloading sample settings: {e}") - - Button( - save_settings, - x=10, y=button_y, - width=150, height=40, - text="Save", - parent=content, - text_style=make_button_text_style() - ) - - Button( - reset_settings, - x=170, y=button_y, - width=150, height=40, - text="Reset", - parent=content, - text_style=make_button_text_style() - ) - - # Load initial values when modal is built - load_values_from_config() \ No newline at end of file diff --git a/UI/overlays/interactive_camera_overlay.py b/UI/overlays/interactive_camera_overlay.py deleted file mode 100644 index c2b91d1..0000000 --- a/UI/overlays/interactive_camera_overlay.py +++ /dev/null @@ -1,394 +0,0 @@ -""" -Interactive camera overlay UI component. - -This overlay provides a crosshair for visual reference and handles user -interactions (click-to-move, wheel-to-zoom) by delegating to the controller's -calibration and movement system. 
-""" - -import pygame -from typing import Optional - -from UI.frame import Frame -from UI.camera_view import CameraView -from UI.text import Text, TextStyle -from UI.styles import CROSSHAIR_COLOR - -class InteractiveCameraOverlay(Frame): - """ - UI overlay that renders a crosshair in the center of the camera view. - Supports click-to-move and mousewheel Z-axis control. - Delegates calibration and movement logic to the controller. - """ - def __init__( - self, - camera_view: CameraView, - controller, # AutomatedPrinter instance with CameraCalibrationMixin - visible: bool = True, - - # Crosshair visual properties - crosshair_color: Optional[pygame.Color] = None, - crosshair_length: int = 20, - crosshair_thickness: int = 2, - crosshair_gap: int = 5, - ): - super().__init__( - parent=camera_view, - x=0, y=0, - width=1, height=1, - x_is_percent=True, y_is_percent=True, - width_is_percent=True, height_is_percent=True, - z_index=camera_view.z_index + 1, - background_color=None - ) - - self.camera_view = camera_view - self.controller = controller - self.visible = visible - - # Separate visibility controls - self.crosshair_visible = True - self.dpi_display_visible = True - - # Enable click handling - self.mouse_passthrough = False - - # Crosshair properties - self.crosshair_color = crosshair_color or CROSSHAIR_COLOR - self.crosshair_length = crosshair_length - self.crosshair_thickness = crosshair_thickness - self.crosshair_gap = crosshair_gap - - # Cache overlay surface - self._overlay = None - self._overlay_size = None - - # DPI display text (top-right corner with 10px margin) - # Use 100% width minus margin, left aligned - self.dpi_text = Text( - text="", - parent=self, - x=0.99, y=10, # 98% across (leaves ~2% margin) - x_is_percent=True, - y_is_percent=False, - x_align="right", # Right-align the text at that position - y_align="top", - style=TextStyle( - color=pygame.Color(255, 255, 255), - font_size=16 - ) - ) - self.dpi_text.mouse_passthrough = True - 
self._update_dpi_text() - - # ==================== Visibility Control ==================== - - def toggle_overlay(self) -> None: - """Toggle visibility of the crosshair overlay.""" - self.visible = not self.visible - - def set_visible(self, value: bool) -> None: - """Set visibility of the crosshair overlay.""" - self.visible = bool(value) - - def toggle_crosshair(self) -> None: - """Toggle visibility of the crosshair only.""" - self.crosshair_visible = not self.crosshair_visible - - def set_crosshair_visible(self, value: bool) -> None: - """Set visibility of the crosshair.""" - self.crosshair_visible = bool(value) - - def show_crosshair(self) -> None: - """Show the crosshair.""" - self.crosshair_visible = True - - def hide_crosshair(self) -> None: - """Hide the crosshair.""" - self.crosshair_visible = False - - def toggle_dpi_display(self) -> None: - """Toggle visibility of the DPI display text.""" - self.dpi_display_visible = not self.dpi_display_visible - - def set_dpi_display_visible(self, value: bool) -> None: - """Set visibility of the DPI display text.""" - self.dpi_display_visible = bool(value) - - def show_dpi_display(self) -> None: - """Show the DPI display text.""" - self.dpi_display_visible = True - - def hide_dpi_display(self) -> None: - """Hide the DPI display text.""" - self.dpi_display_visible = False - - def set_crosshair_color(self, color: pygame.Color) -> None: - """Update the crosshair color.""" - self.crosshair_color = color - - def set_crosshair_properties( - self, - length: Optional[int] = None, - thickness: Optional[int] = None, - gap: Optional[int] = None - ) -> None: - """Update crosshair geometry properties.""" - if length is not None: - self.crosshair_length = length - if thickness is not None: - self.crosshair_thickness = thickness - if gap is not None: - self.crosshair_gap = gap - - # ==================== Calibration Helpers ==================== - - def _update_dpi_text(self) -> None: - """Update the DPI display text based on current 
calibration status.""" - # Check calibration directly without calling is_calibrated() to avoid recursion - if not hasattr(self.controller, 'M_inv') or self.controller.M_inv is None: - self.dpi_text.set_text("") - return - - # Get DPI directly from controller - dpi = getattr(self.controller, '_cal_dpi', None) - if dpi is not None: - self.dpi_text.set_text(f"Estimated DPI: {dpi:.1f}") - else: - self.dpi_text.set_text("") - - def run_calibration(self) -> None: - """ - Trigger camera calibration through the controller. - This will move the printer and determine the pixel-to-world mapping. - """ - if not hasattr(self.controller, 'start_camera_calibration'): - self.controller.status( - "Controller does not support camera calibration", - True - ) - return - - self.controller.start_camera_calibration() - # Update DPI display after calibration - self._update_dpi_text() - - def is_calibrated(self) -> bool: - """Check if camera calibration is available.""" - if not hasattr(self.controller, 'M_inv'): - return False - return self.controller.M_inv is not None - - # ==================== Mouse Event Handlers ==================== - - def on_click(self, button=None) -> None: - """Handle click events to move printer to clicked position.""" - if not self.camera_view.camera.initialized: - return - - if not self.is_calibrated(): - self.controller.status( - "Cannot move: run calibration first (call run_calibration())", - True - ) - return - - # Get mouse position - mouse_x, mouse_y = pygame.mouse.get_pos() - - # Get the camera frame rectangle - fr = self.camera_view.get_frame_rect() - if not fr: - return - - fx, fy, fw, fh = fr - - # Check if click is within camera frame - if not (fx <= mouse_x <= fx + fw and fy <= mouse_y <= fy + fh): - return - - # Convert display coordinates to image coordinates - # The camera_view may scale/letterbox the image, so we need to account for that - rel_x = mouse_x - fx - rel_y = mouse_y - fy - - # Get calibration image dimensions from controller - 
cal_status = self.controller.get_calibration_status() - img_w = cal_status.get('image_width') - img_h = cal_status.get('image_height') - - if img_w is None or img_h is None: - self.controller.status("Calibration image dimensions not available", True) - return - - # Calculate scaling factor (camera_view letterboxes to fit) - # The displayed image maintains aspect ratio within the frame - img_aspect = img_w / img_h if img_h > 0 else 1.0 - frame_aspect = fw / fh if fh > 0 else 1.0 - - if img_aspect > frame_aspect: - # Image is wider - letterbox top/bottom - display_w = fw - display_h = fw / img_aspect - offset_x = 0 - offset_y = (fh - display_h) / 2 - else: - # Image is taller - letterbox left/right - display_w = fh * img_aspect - display_h = fh - offset_x = (fw - display_w) / 2 - offset_y = 0 - - # Adjust for letterboxing - adj_x = rel_x - offset_x - adj_y = rel_y - offset_y - - # Check if click is in the actual image area - if not (0 <= adj_x <= display_w and 0 <= adj_y <= display_h): - self.controller.status("Click outside image area", False) - return - - # Scale to image coordinates - pixel_x = (adj_x / display_w) * img_w - pixel_y = (adj_y / display_h) * img_h - - # Use controller's vision movement (relative to current position) - self.controller.move_to_vision_point(pixel_x, pixel_y, relative=True) - - def on_wheel(self, dx: int, dy: int, px: int, py: int) -> bool: - """ - Handle mousewheel events to adjust Z-axis position when hovering over camera view. 
- - Args: - dx: Horizontal wheel movement (unused) - dy: Vertical wheel movement (positive = wheel up = Z up) - px: Mouse X position - py: Mouse Y position - - Returns: - True if the event was handled, False otherwise - """ - if not self.camera_view.camera.initialized: - return False - - # Check if mouse is over the camera frame - fr = self.camera_view.get_frame_rect() - if not fr: - return False - - fx, fy, fw, fh = fr - if not (fx <= px <= fx + fw and fy <= py <= fy + fh): - return False - - # Get current position - current_pos = self.controller.get_position() - - # Use minimum step size: 4 ticks = 0.04mm (printer's minimum step) - MIN_STEP_TICKS = 4 - - # Calculate Z change (positive dy = wheel up = move Z up) - dz_ticks = dy * MIN_STEP_TICKS - - # Calculate new Z position - new_z_ticks = current_pos.z + int(round(dz_ticks)) - new_z_mm = new_z_ticks / 100.0 - - # Bounds check - max_z = self.controller.get_max_z() - if not (0 <= new_z_mm <= max_z): - self.controller.status( - f"Z position out of bounds: {new_z_mm:.2f}mm (max: {max_z}mm)", - False - ) - return True - - # Send move command - self.controller.enqueue_printer( - f"G0 Z{new_z_mm:.2f}", - message=f"Z: {new_z_mm:.2f}mm", - log=False - ) - - return True - - # ==================== Drawing ==================== - - def _get_overlay(self, surface_size: tuple[int, int]) -> pygame.Surface: - """Return an RGBA overlay the size of the target surface (recreate on resize).""" - if self._overlay is None or self._overlay_size != surface_size: - self._overlay_size = surface_size - self._overlay = pygame.Surface(surface_size, flags=pygame.SRCALPHA) - else: - self._overlay.fill((0, 0, 0, 0)) - return self._overlay - - def draw(self, surface: pygame.Surface) -> None: - """Draw the crosshair overlay if visible and camera is initialized.""" - if not self.visible: - return - - if not self.camera_view.camera.initialized: - return - - fr = self.camera_view.get_frame_rect() - if not fr: - return - - fx, fy, fw, fh = fr - - # 
Update DPI display - if self.dpi_display_visible: - self._update_dpi_text() - else: - self.dpi_text.set_text("") - - # Only draw crosshair if crosshair_visible is True - if self.crosshair_visible: - # Build/resize overlay and clear it - overlay = self._get_overlay(surface.get_size()) - overlay.fill((0, 0, 0, 0)) - - # Calculate center point of the camera frame - center_x = fx + fw // 2 - center_y = fy + fh // 2 - - # Draw crosshair lines with gap in the middle - - # Horizontal line (left and right segments) - pygame.draw.line( - overlay, - self.crosshair_color, - (center_x - self.crosshair_gap - self.crosshair_length, center_y), - (center_x - self.crosshair_gap, center_y), - self.crosshair_thickness - ) - pygame.draw.line( - overlay, - self.crosshair_color, - (center_x + self.crosshair_gap, center_y), - (center_x + self.crosshair_gap + self.crosshair_length, center_y), - self.crosshair_thickness - ) - - # Vertical line (top and bottom segments) - pygame.draw.line( - overlay, - self.crosshair_color, - (center_x, center_y - self.crosshair_gap - self.crosshair_length), - (center_x, center_y - self.crosshair_gap), - self.crosshair_thickness - ) - pygame.draw.line( - overlay, - self.crosshair_color, - (center_x, center_y + self.crosshair_gap), - (center_x, center_y + self.crosshair_gap + self.crosshair_length), - self.crosshair_thickness - ) - - # Composite overlay onto the screen surface - surface.blit(overlay, (0, 0)) - - # Draw children (including DPI text if visible) - for child in reversed(self.children): - child.draw(surface) \ No newline at end of file diff --git a/UI/overlays/red_detection_mark_overlay.py b/UI/overlays/red_detection_mark_overlay.py deleted file mode 100644 index e6cd076..0000000 --- a/UI/overlays/red_detection_mark_overlay.py +++ /dev/null @@ -1,493 +0,0 @@ -""" -Red mark detection overlay for camera view. 
- -This overlay detects red registration marks in real-time from the camera feed -and displays the detection results with center line and distance from center. -""" - -import pygame -import numpy as np -import cv2 -from typing import Optional, List, Tuple - -from UI.frame import Frame -from UI.camera_view import CameraView -from UI.text import Text, TextStyle - - -class RedMarkDetectionOverlay(Frame): - """ - Real-time red mark detection overlay that shows detected registration marks - and displays distance from image center. - """ - def __init__( - self, - camera_view: CameraView, - visible: bool = False, - - # Detection parameters - min_area: int = 50, - max_area: int = 10000, - max_aspect_ratio: float = 3.0, - red_threshold_percentile: int = 75, - - # Stabilization / smoothing parameters - smoothing_alpha: float = 0.25, - deadband_px: float = 2.0, - max_step_px_per_frame: float = 30.0, - - # Orientation switching: if marks are clustered on one side, draw a horizontal line - side_cluster_fraction: float = 0.85, - side_cluster_margin: float = 0.18, - - # Visual parameters - valid_mark_color: pygame.Color = pygame.Color(0, 255, 0), # Green - filtered_mark_color: pygame.Color = pygame.Color(255, 0, 0), # Red - center_line_color: pygame.Color = pygame.Color(255, 255, 0), # Yellow - text_color: pygame.Color = pygame.Color(255, 255, 255), # White - text_bg_color: pygame.Color = pygame.Color(0, 0, 0, 180), # Semi-transparent black - ): - super().__init__( - parent=camera_view, - x=0, y=0, - width=1, height=1, - x_is_percent=True, y_is_percent=True, - width_is_percent=True, height_is_percent=True, - z_index=camera_view.z_index + 2, - background_color=None - ) - - self.camera_view = camera_view - self.visible = visible - self.mouse_passthrough = True - - # Detection parameters - self.min_area = min_area - self.max_area = max_area - self.max_aspect_ratio = max_aspect_ratio - self.red_threshold_percentile = red_threshold_percentile - - # Stabilization parameters - 
self.smoothing_alpha = float(smoothing_alpha) - self.deadband_px = float(deadband_px) - self.max_step_px_per_frame = float(max_step_px_per_frame) - - # Orientation switching parameters - self.side_cluster_fraction = float(side_cluster_fraction) - self.side_cluster_margin = float(side_cluster_margin) - - # Visual parameters - self.valid_mark_color = valid_mark_color - self.filtered_mark_color = filtered_mark_color - self.center_line_color = center_line_color - self.text_color = text_color - self.text_bg_color = text_bg_color - - # Detection results (updated each frame) - self.valid_centers: List[Tuple[float, float]] = [] - self.filtered_centers: List[Tuple[float, float]] = [] - # Raw per-frame measurements - self.mean_x_raw: Optional[float] = None - self.mean_y_raw: Optional[float] = None - - # Stabilized (displayed) measurements - self.mean_x: Optional[float] = None - self.mean_y: Optional[float] = None - self.distance_from_center: Optional[float] = None - self.image_width: Optional[int] = None - self.image_height: Optional[int] = None - - # Jump threshold: if raw value is this many pixels away, jump instantly - self.jump_threshold_px: float = 50.0 - - # Cached overlay surface - self._overlay = None - self._overlay_size = None - - # Filter state - self._smoothed_mean_x: Optional[float] = None - self._smoothed_mean_y: Optional[float] = None - - # Line orientation: 'vertical' (default) or 'horizontal' - self._line_orientation: str = 'vertical' - - # Hysteresis counter to prevent rapid flipping - self._cluster_frames: int = 0 - - # ==================== Public API ==================== - - def toggle(self) -> None: - """Toggle visibility of the detection overlay.""" - self.visible = not self.visible - - def set_visible(self, value: bool) -> None: - """Set visibility of the detection overlay.""" - self.visible = bool(value) - - def is_visible(self) -> bool: - """Check if overlay is currently visible.""" - return self.visible - - def set_jump_threshold(self, threshold_px: 
float) -> None: - """ - Set the jump threshold in pixels. - When the raw measurement differs from the smoothed value by more than this - threshold, the smoothed value will jump directly to the raw value instead - of gradually moving towards it. - - Args: - threshold_px: Jump threshold in pixels (default: 50.0) - """ - self.jump_threshold_px = float(threshold_px) - - # ==================== Detection ==================== - - def _detect_red_marks(self, img_rgb: np.ndarray) -> Tuple[List[Tuple[float, float]], List[Tuple[float, float]]]: - """ - Detect red registration marks in an RGB image. - - Args: - img_rgb: RGB image as numpy array (H, W, 3) - - Returns: - Tuple of (valid_centers, filtered_centers) - """ - # Extract channels - r = img_rgb[:, :, 0].astype(float) - g = img_rgb[:, :, 1].astype(float) - b = img_rgb[:, :, 2].astype(float) - - # Enhanced red isolation: R - max(G, B) to handle reflections better - red_isolated = r - np.maximum(g, b) - red_isolated = np.clip(red_isolated, 0, 255) - - # Normalize to 0-255 range - if red_isolated.max() > 0: - red_isolated = (red_isolated / red_isolated.max() * 255).astype(np.uint8) - else: - red_isolated = red_isolated.astype(np.uint8) - - # Apply morphological operations to clean up noise - kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3)) - red_isolated = cv2.morphologyEx(red_isolated, cv2.MORPH_CLOSE, kernel) - - # Adaptive thresholding to handle varying lighting - threshold = ( - np.percentile(red_isolated[red_isolated > 0], self.red_threshold_percentile) - if np.any(red_isolated > 0) - else 50 - ) - binary = (red_isolated > threshold).astype(np.uint8) * 255 - - # Find connected components - num_labels, labels, stats, centroids = cv2.connectedComponentsWithStats(binary, connectivity=8) - - # Get image dimensions - img_height = img_rgb.shape[0] - lower_half_y = img_height / 2 - - # Filter components - restrict to lower half of image - centers = [] - filtered_centers = [] - - for i in range(1, num_labels): # 
Skip background (label 0) - area = stats[i, cv2.CC_STAT_AREA] - if self.min_area < area < self.max_area: - x, y, w, h = ( - stats[i, cv2.CC_STAT_LEFT], - stats[i, cv2.CC_STAT_TOP], - stats[i, cv2.CC_STAT_WIDTH], - stats[i, cv2.CC_STAT_HEIGHT] - ) - aspect_ratio = max(w, h) / min(w, h) if min(w, h) > 0 else float('inf') - - if aspect_ratio < self.max_aspect_ratio: - center = (float(centroids[i][0]), float(centroids[i][1])) - # Check if in lower half - if center[1] >= lower_half_y: - centers.append(center) - else: - filtered_centers.append(center) - - # Remove outliers using IQR method on X coordinates - valid_centers = centers.copy() - outlier_centers = [] - - if len(centers) > 3: # Need at least 4 points for meaningful outlier detection - x_coords = np.array([x for x, y in centers]) - q1 = np.percentile(x_coords, 25) - q3 = np.percentile(x_coords, 75) - iqr = q3 - q1 - lower_bound = q1 - 1.5 * iqr - upper_bound = q3 + 1.5 * iqr - - valid_centers = [] - outlier_centers = [] - for center in centers: - if lower_bound <= center[0] <= upper_bound: - valid_centers.append(center) - else: - outlier_centers.append(center) - - # Combine all filtered centers - all_filtered = filtered_centers + outlier_centers - - return valid_centers, all_filtered - - # ==================== Stabilization ==================== - - def _update_smoothed_value(self, prev: Optional[float], raw: Optional[float]) -> Optional[float]: - """Update a stabilized value from a noisy per-frame measurement. - - - If there is no raw measurement this frame, hold the previous value. - - Jump threshold: if raw is too far from prev, jump directly to it. - - Deadband: ignore tiny changes. - - Slew-rate limit: clamp maximum change per frame. - - EMA: ease toward the (clamped) target. 
- """ - if raw is None: - return prev - if prev is None: - return float(raw) - - delta = float(raw) - float(prev) - - # If the raw value is too far away, jump directly to it - if abs(delta) > self.jump_threshold_px: - return float(raw) - - if abs(delta) <= self.deadband_px: - return prev - - if self.max_step_px_per_frame > 0: - delta = float(np.clip(delta, -self.max_step_px_per_frame, self.max_step_px_per_frame)) - - alpha = float(np.clip(self.smoothing_alpha, 0.0, 1.0)) - return float(prev + alpha * delta) - - def _clustered_on_one_side(self, centers: List[Tuple[float, float]], img_w: int) -> bool: - """Return True if most detected marks are clustered on the left or right side.""" - if not centers or img_w <= 0: - self._cluster_frames = 0 - return False - - xs = np.array([c[0] for c in centers], dtype=np.float32) - margin = float(np.clip(self.side_cluster_margin, 0.0, 0.45)) - left_edge = img_w * margin - right_edge = img_w * (1.0 - margin) - - left_frac = float(np.mean(xs <= left_edge)) - right_frac = float(np.mean(xs >= right_edge)) - frac = max(left_frac, right_frac) - - # Simple hysteresis: require a couple consecutive frames before switching to horizontal. 
- if frac >= self.side_cluster_fraction: - self._cluster_frames += 1 - else: - # decay rather than hard reset, to avoid flicker - self._cluster_frames = max(0, self._cluster_frames - 1) - - return self._cluster_frames >= 2 - - # ==================== Drawing ==================== - - def _get_overlay(self, surface_size: Tuple[int, int]) -> pygame.Surface: - """Return an RGBA overlay the size of the target surface (recreate on resize).""" - if self._overlay is None or self._overlay_size != surface_size: - self._overlay_size = surface_size - self._overlay = pygame.Surface(surface_size, flags=pygame.SRCALPHA) - else: - self._overlay.fill((0, 0, 0, 0)) - return self._overlay - - def _draw_info_text(self, overlay: pygame.Surface, fr: Tuple[int, int, int, int]) -> None: - """Draw information text at the top of the overlay.""" - if not self.valid_centers and not self.filtered_centers: - return - - fx, fy, fw, fh = fr - - # Prepare text lines - lines = [] - lines.append(f"Valid marks: {len(self.valid_centers)}") - if self.filtered_centers: - lines.append(f"Filtered: {len(self.filtered_centers)}") - - if self.image_width is not None and self.image_height is not None: - lines.append(f"Line: {self._line_orientation}") - - if self._line_orientation == 'horizontal': - if self.mean_y is not None and self.image_height is not None and self.distance_from_center is not None: - lines.append(f"Center Y (stable): {self.mean_y:.1f} px") - if self.mean_y_raw is not None: - lines.append(f"Center Y (raw): {self.mean_y_raw:.1f} px") - - center_y = self.image_height / 2 - if center_y > 0: - percent = (self.distance_from_center / center_y) * 100 - direction = "down" if self.mean_y > center_y else "up" - lines.append(f"Distance from center: {self.distance_from_center:.1f} px ({percent:.1f}% {direction})") - else: - if self.mean_x is not None and self.image_width is not None and self.distance_from_center is not None: - lines.append(f"Center X (stable): {self.mean_x:.1f} px") - if self.mean_x_raw 
is not None: - lines.append(f"Center X (raw): {self.mean_x_raw:.1f} px") - - center_x = self.image_width / 2 - if center_x > 0: - percent = (self.distance_from_center / center_x) * 100 - direction = "right" if self.mean_x > center_x else "left" - lines.append(f"Distance from center: {self.distance_from_center:.1f} px ({percent:.1f}% {direction})") - - # Render text with background - font = pygame.font.Font(None, 24) - y_offset = fy + 10 - - for line in lines: - text_surface = font.render(line, True, self.text_color) - text_rect = text_surface.get_rect() - text_rect.topleft = (fx + 10, y_offset) - - # Draw semi-transparent background - bg_rect = text_rect.inflate(10, 4) - bg_surface = pygame.Surface((bg_rect.width, bg_rect.height), pygame.SRCALPHA) - bg_surface.fill(self.text_bg_color) - overlay.blit(bg_surface, bg_rect.topleft) - - # Draw text - overlay.blit(text_surface, text_rect) - y_offset += text_rect.height + 2 - - def draw(self, surface: pygame.Surface) -> None: - """Draw the detection overlay if visible and camera is initialized.""" - if not self.visible: - return - - if not self.camera_view.camera.initialized: - return - - fr = self.camera_view.get_frame_rect() - if not fr: - return - - fx, fy, fw, fh = fr - - # Get the current camera frame - arr = self.camera_view.camera.get_last_frame(prefer="latest", wait_for_still=False) - if arr is None: - return - - # Ensure RGB format - if arr.dtype != np.uint8: - arr = np.clip(arr, 0, 255).astype(np.uint8) - if arr.ndim == 2: - arr = np.stack([arr]*3, axis=-1) - - img_h, img_w = arr.shape[:2] - self.image_width = img_w - self.image_height = img_h - - # Detect red marks - self.valid_centers, self.filtered_centers = self._detect_red_marks(arr) - - # Raw mean measurements - if self.valid_centers: - self.mean_x_raw = float(np.mean([x for x, y in self.valid_centers])) - self.mean_y_raw = float(np.mean([y for x, y in self.valid_centers])) - else: - self.mean_x_raw = None - self.mean_y_raw = None - - # Update stabilized 
values - self._smoothed_mean_x = self._update_smoothed_value(self._smoothed_mean_x, self.mean_x_raw) - self._smoothed_mean_y = self._update_smoothed_value(self._smoothed_mean_y, self.mean_y_raw) - self.mean_x = self._smoothed_mean_x - self.mean_y = self._smoothed_mean_y - - # Decide whether we should draw a horizontal or vertical line - clustered = self._clustered_on_one_side(self.valid_centers, img_w) - self._line_orientation = 'horizontal' if clustered else 'vertical' - - # Distance from center depends on which line we're drawing - if self._line_orientation == 'horizontal': - if self.mean_y is not None: - center_y = img_h / 2.0 - self.distance_from_center = float(abs(self.mean_y - center_y)) - else: - self.distance_from_center = None - else: - if self.mean_x is not None: - center_x = img_w / 2.0 - self.distance_from_center = float(abs(self.mean_x - center_x)) - else: - self.distance_from_center = None - - # Build/resize overlay and clear it - overlay = self._get_overlay(surface.get_size()) - overlay.fill((0, 0, 0, 0)) - - # Calculate scaling factor (image coordinates to display coordinates) - scale_x = fw / img_w - scale_y = fh / img_h - - # Draw filtered out centers in red - for x, y in self.filtered_centers: - display_x = fx + x * scale_x - display_y = fy + y * scale_y - pygame.draw.circle(overlay, self.filtered_mark_color, (int(display_x), int(display_y)), 5) - - # Draw valid center dots in green - for x, y in self.valid_centers: - display_x = fx + x * scale_x - display_y = fy + y * scale_y - pygame.draw.circle(overlay, self.valid_mark_color, (int(display_x), int(display_y)), 5) - - # Draw the stabilized line - if self._line_orientation == 'horizontal': - if self.mean_y is not None: - display_mean_y = fy + self.mean_y * scale_y - pygame.draw.line( - overlay, - self.center_line_color, - (fx, int(display_mean_y)), - (fx + fw, int(display_mean_y)), - 2 - ) - - # Also draw the image center line for reference (dimmer) - center_y = img_h / 2 - display_center_y = fy 
+ center_y * scale_y - pygame.draw.line( - overlay, - (*self.center_line_color[:3], 100), - (fx, int(display_center_y)), - (fx + fw, int(display_center_y)), - 1 - ) - else: - if self.mean_x is not None: - display_mean_x = fx + self.mean_x * scale_x - pygame.draw.line( - overlay, - self.center_line_color, - (int(display_mean_x), fy), - (int(display_mean_x), fy + fh), - 2 - ) - - # Also draw the image center line for reference (dimmer) - center_x = img_w / 2 - display_center_x = fx + center_x * scale_x - pygame.draw.line( - overlay, - (*self.center_line_color[:3], 100), - (int(display_center_x), fy), - (int(display_center_x), fy + fh), - 1 - ) - - # Draw info text - self._draw_info_text(overlay, fr) - - # Composite overlay onto the screen surface - surface.blit(overlay, (0, 0)) \ No newline at end of file diff --git a/UI/ui_layout.py b/UI/ui_layout.py deleted file mode 100644 index b9d594e..0000000 --- a/UI/ui_layout.py +++ /dev/null @@ -1,434 +0,0 @@ -from dataclasses import dataclass -from typing import List, Tuple -import os -import sys -import subprocess - -import pygame - -from printer.automated_controller import AutomatedPrinter - -from UI.text import Text, TextStyle -from UI.frame import Frame -from UI.section_frame import Section -from UI.modal import Modal -from UI.camera_view import CameraView -from UI.focus_overlay import FocusOverlay -from UI.list_frame import ListFrame -from UI.flex_frame import FlexFrame - -from UI.overlays.interactive_camera_overlay import InteractiveCameraOverlay -from UI.overlays.red_detection_mark_overlay import RedMarkDetectionOverlay - -from UI.input.text_field import TextField -from UI.input.button import Button, ButtonShape, ButtonColors -from UI.input.button_icon import ButtonIcon -from UI.input.toggle_button import ToggleButton, ToggledColors -from UI.input.scroll_frame import ScrollFrame -from UI.styles import ( - make_button_text_style, - make_display_text_style, - make_settings_text_style, -) -from 
UI.modals.camera_settings_modal import build_camera_settings_modal -from UI.modals.automation_settings_modal import build_automation_settings_modal -from UI.modals.sample_settings_modal import build_sample_settings_modal - -RIGHT_PANEL_WIDTH = 400 - -@dataclass -class ControlPanel: - frame: Frame - sample_label: Text - inc_button: Button - dec_button: Button - go_button: Button - speed_display: Text - position_display: Text - -def make_button(fn, x, y, w, h, text, shape=ButtonShape.RECTANGLE, z_index = 0, args_provider=None): - btn = Button( - function_to_call=fn, - x=x, y=y, - width=w, height=h, - text=text, - text_style=make_button_text_style(), - args_provider=args_provider, - shape=shape, - z_index=z_index - ) - return btn - -def create_control_panel( - root_frame: Frame, - movementSystem: AutomatedPrinter, - camera, - current_sample_index: int -) -> Tuple[Frame, Text, Button, Button, Button, Text, Text]: - """ - Builds the right-side control panel and returns: - control_frame, sample_label, increment_button, decrement_button, go_to_sample_button, - speed_display, position_display - """ - - control_frame = _build_right_control_panel(root_frame) - - # --- Camera View - camera_view = CameraView( - camera=camera, - parent=root_frame, - x=0, y=0, - width=1.0, height=1.0, - x_is_percent=True, y_is_percent=True, - width_is_percent=True, height_is_percent=True, - z_index=0, - background_color=pygame.Color("black"), - right_margin_px=RIGHT_PANEL_WIDTH # reserve space for the control panel - ) - machine_vision_overlay = FocusOverlay(camera_view, movementSystem.machine_vision) - interactive_overlay = InteractiveCameraOverlay(camera_view, movementSystem) - detection_overlay = RedMarkDetectionOverlay( - camera_view=camera_view, - visible=False - ) - - # --- Control Box --- - control_box = Section( - parent=control_frame, - title="Control", - collapsible=True, - x=0, y=0, width=1.0, height=250, - width_is_percent=True - ) - speed_display, position_display = 
_build_movement_controls(control_box, movementSystem) - - # --- Automation Box --- - automation_box = Section( - parent=control_frame, - title="Automation", - collapsible=True, - x=0, y=0, width=1.0, height=140, - width_is_percent=True - ) - automation_settings_modal = Modal(parent=root_frame, title="Automation Settings", overlay=False, width=500, height=445) - build_automation_settings_modal(automation_settings_modal, movementSystem) - _build_automation_control(automation_box, movementSystem, machine_vision_overlay, automation_settings_modal) - - # --- Camera Settings Modal --- - camera_settings_modal = Modal(parent=root_frame, title="Camera Settings", overlay=False, width=308, height=660) - build_camera_settings_modal(camera_settings_modal, camera, interactive_overlay) - - # --- Camera Settings --- - camera_control = Section( - parent=control_frame, - title="Camera Control", - collapsible=True, - x=0, y=0, width=1.0, height=258, - width_is_percent=True - ) - _build_camera_control(camera_control, movementSystem, camera, detection_overlay, camera_settings_modal) - - # --- Sample Box --- - sample_box = Section( - parent=control_frame, - title="Sample Management", - collapsible=True, - x=0, y=0, width=1.0, fill_remaining_height=True, - width_is_percent=True, padding=(0,0,10,0) - ) - go_to_sample_button, decrement_button, increment_button, sample_label = _build_sample_box( - sample_box, movementSystem, camera, current_sample_index - ) - - return ( - sample_label, - increment_button, - decrement_button, - go_to_sample_button, - speed_display, - position_display - ) - -def _build_right_control_panel(root_frame) -> Frame: - # --- Control Panel Container (plain Frame) --- - control_frame = Frame( - parent=root_frame, - x=0, y=0, - width=RIGHT_PANEL_WIDTH, - height=1.0, # fill vertical space of root - height_is_percent=True, - x_align='right', - y_align='top', - background_color=pygame.Color("#b3b4b6") - ) - - # --- Title Bar (not part of flex) --- - title_bar = Frame( - 
parent=control_frame, - x=0, y=0, - width=1.0, - height=50, - width_is_percent=True, - background_color=pygame.Color("#909398") - ) - - title_text = Text( - parent=title_bar, - text="FORGE", - x=10, y=10, - x_align="left", - y_align="top", - style=TextStyle( - color=pygame.Color("white"), - font_size=40, - bold=True, - font_name="assets/fonts/SofiaSans-Light.ttf" - ) - ) - - # --- Content Column (this is the flex container) --- - content_column = FlexFrame( - parent=control_frame, - x=0, - y=50, # start 50px down - width=RIGHT_PANEL_WIDTH, - height=0, # ignored when fill_remaining_height=True - height_is_percent=False, - padding=(10, 10, 10, 10), - gap=10, - fill_child_width=True, - align_horizontal="left", - - # key bits: - fill_remaining_height=True, # <-- stretch to parent's bottom - auto_height_to_content=False # <-- avoid fighting with fill-to-bottom - ) - - # Return both so caller can attach sections to content_column - return content_column - -def _build_movement_controls(control_box, movementSystem)-> Frame: - - # Movement buttons - control_box.add_child(make_button(movementSystem.move_x_right, 10, 55, 80, 80, "<", ButtonShape.DIAMOND)) - control_box.add_child(make_button(movementSystem.move_x_left, 100, 55, 80, 80, ">", ButtonShape.DIAMOND)) - control_box.add_child(make_button(movementSystem.move_y_backward, 55, 10, 80, 80, "^", ButtonShape.DIAMOND)) - control_box.add_child(make_button(movementSystem.move_y_forward, 55, 100, 80, 80, "v", ButtonShape.DIAMOND)) - - control_box.add_child(make_button(movementSystem.move_z_up, 200, 53, 40, 40, "+")) - control_box.add_child(make_button(movementSystem.move_z_down, 200, 103, 40, 40, "-")) - - # Speed Buttons - control_box.add_child(make_button(movementSystem.increase_speed, 250, 53, 40, 40, "S+")) - control_box.add_child(make_button(movementSystem.decrease_speed, 250, 103, 40, 40, "S-")) - control_box.add_child(make_button(movementSystem.increase_speed_fast, 300, 53, 40, 40, "F+")) - 
control_box.add_child(make_button(movementSystem.decrease_speed_fast, 300, 103, 40, 40, "F-")) - - # Homing Button - control_box.add_child(make_button(movementSystem.home, 70, 70, 50, 50, "H", ButtonShape.DIAMOND, z_index=1)) - - # --- Live readouts --- - speed_display = Text( - text=f"Speed: {movementSystem.speed / 100:.2f}", - parent=control_box, - x=200, y=155, - x_align="left", - y_align="top", - style=make_display_text_style() - ) - - position_display = Text( - text=f"X: {movementSystem.position.x/100:.2f} Y: {movementSystem.position.y/100:.2f} Z: {movementSystem.position.z/100:.2f}", - parent=control_box, - x=343, y=175, - x_align="right", - y_align="top", - style=make_display_text_style() - ) - - return speed_display, position_display - - -def _build_sample_box(sample_box, movementSystem, camera, current_sample_index): - # --- Sample navigation (callbacks assigned later in main.py) --- - button_height = 40 - - # 1st Row - go_to_sample_button = Button(None, parent=sample_box, - x=10, y=10, width=150, height=button_height, text="Go to Sample", text_style=make_button_text_style()) - - decrement_button = Button(None, parent=sample_box, - x=170, y=10, width=40, height=button_height, text="-", text_style=make_button_text_style()) - - sample_label = Text(f"Sample {current_sample_index}", parent=sample_box, - x=220, y=20, x_align="left", y_align="top", style=make_button_text_style()) - - increment_button = Button(None, parent=sample_box, - x=330, y=10, width=40, height=button_height, text="+", text_style=make_button_text_style()) - - # 2nd Row - def build_row(i: int, parent: Frame) -> None: - on_overrides = ToggledColors( - background=pygame.Color("#7ed957"), - hover_background=pygame.Color("#6bc24b"), - foreground=pygame.Color("#2f6f2a"), - hover_foreground=pygame.Color("#2f6f2a"), - ) - - def on_state_changed(state: bool, btn: ToggleButton): - # Fires only when the ON/OFF value changes. 
- btn.set_text("X" if state else "") - - ToggleButton( - parent=parent, - x=0, y=0, width=30, height=30, - text="", # label is independent of state; change it in on_change if you want - toggled=False, - on_change=on_state_changed, - toggled_colors=on_overrides, - text_style=make_button_text_style() - ) - - Text( - text=f"Sample {i+1}:", - parent=parent, - x=40, y=5, - style=make_button_text_style() - ) - - TextField(parent=parent, x=150, y=0, width=180, height=30, placeholder=f"Sample {i+1} Name", border_color=pygame.Color("#b3b4b6"), text_color=pygame.Color("#5a5a5a")) - - scroll_area = ScrollFrame(parent=sample_box, x=10, y= 60, width=RIGHT_PANEL_WIDTH - 40, height=295, fill_remaining_height=True) - - lst = ListFrame(parent=scroll_area, x=10, y=10, width=1.0, height=700, - width_is_percent=True, - row_height=35, count=movementSystem.get_num_slots(), row_builder=build_row) - - movementSystem.sample_list = lst - - return go_to_sample_button, decrement_button, increment_button, sample_label#, pos1_display, pos2_display - - -def _build_camera_control(camera_control, movementSystem: AutomatedPrinter, camera, detection_overlay, camera_settings_modal): - - # Header Settings Button - settings = Button(lambda: camera_settings_modal.open(), x=0, y=0, - width=camera_control.header.height, - height=camera_control.header.height, - parent=camera_control.header, - colors=ButtonColors( - background=pygame.Color("#dbdbdb"), - foreground=pygame.Color("#dbdbdb"), - hover_background=pygame.Color("#b3b4b6"), - disabled_background=pygame.Color("#dbdbdb"), - disabled_foreground=pygame.Color("#dbdbdb") - ) - ) - camera_control.add_header_button(settings) - ButtonIcon( - parent_button=settings, - image="assets/gear.png", - normal_replace=(122, 122, 122, 255), - hover_replace=(122, 122, 122, 255), - size=(camera_control.header.height - 8, camera_control.header.height - 8), # explicit size in pixels - inset_px=0 - ) - - # Body of Camera Control - camera_control.add_child(make_button( - 
camera.capture_and_save, - 10, 10, 117, 40, "Take Photo" - )) - - path_label = Text(f"Save Path: {camera.capture_path}", parent=camera_control, - x=10, y=60, x_align="left", y_align="top", style=make_display_text_style(), truncate_mode="middle", max_width=RIGHT_PANEL_WIDTH - 20 - 20) - - def on_set_path(): - path_label.set_text(f"Save Path: {camera.select_capture_path()}") - - Button(on_set_path, 132, 10, 117, 40, "Set Path", parent=camera_control, text_style=make_button_text_style()) - - - Button(lambda: movementSystem.start_autofocus(), 10, 85, 117, 40, "Autofocus", parent=camera_control, text_style=make_button_text_style()) - Button(lambda: movementSystem.start_fine_autofocus(), 132, 85, 167, 40, "Fine Autofocus", parent=camera_control, text_style=make_button_text_style()) - - def open_capture_folder(): - """Open the capture folder in the system's default file explorer.""" - # Convert relative paths to absolute - folder = os.path.abspath(camera.capture_path) - - if not os.path.isdir(folder): - print(f"Path does not exist or is not a folder: {folder}") - return - - if sys.platform.startswith("win"): - os.startfile(folder) # type: ignore[attr-defined] - elif sys.platform.startswith("darwin"): # macOS - subprocess.run(["open", folder]) - else: # Linux and other Unix - subprocess.run(["xdg-open", folder]) - - print("Opened Image Output Folder") - - Button(open_capture_folder,x=254, y=10, width=117, height=40, text="Open Path", parent=camera_control, text_style=make_button_text_style()) - #3rd Row - Button(lambda: movementSystem.go_to_calibration_pattern(), 10, 130, 117, 40, "Go to Slide", parent=camera_control, text_style=make_button_text_style()) - Button(lambda: movementSystem.start_camera_calibration(), 132, 130, 207, 40, "Calibrate Movement", parent=camera_control, text_style=make_button_text_style()) - - #4th row - # Create sample settings modal - sample_settings_modal = Modal( - parent=camera_control.parent.parent.parent, # Attach to root (camera_control -> 
FlexFrame -> Frame -> root) - title="Sample Position Settings", - overlay=False, - width=465, - height=780 - ) - build_sample_settings_modal(sample_settings_modal, movementSystem) - - Button(lambda: detection_overlay.toggle(), 10, 175, 127, 40, "Sample Cal.", parent=camera_control, text_style=make_button_text_style()) - Button(lambda: sample_settings_modal.open(), 142, 175, 167, 40, "Sample Settings", parent=camera_control, text_style=make_button_text_style()) - - -def _build_automation_control(automation_box, movementSystem, machine_vision_overlay, automation_settings_modal): - - settings = Button(lambda: automation_settings_modal.open(), x=0, y=0, - width=automation_box.header.height, - height=automation_box.header.height, - parent=automation_box.header, - colors=ButtonColors( - background=pygame.Color("#dbdbdb"), - foreground=pygame.Color("#dbdbdb"), - hover_background=pygame.Color("#b3b4b6"), - disabled_background=pygame.Color("#dbdbdb"), - disabled_foreground=pygame.Color("#dbdbdb") - ) - ) - automation_box.add_header_button(settings) - ButtonIcon( - parent_button=settings, - image="assets/gear.png", - normal_replace=(122, 122, 122, 255), - hover_replace=(122, 122, 122, 255), - size=(automation_box.header.height - 8, automation_box.header.height - 8), # explicit size in pixels - inset_px=0 - ) - - - Button(movementSystem.start_automation, 10, 10, 115, 40, "Start", parent=automation_box, text_style=make_button_text_style()) - Button(movementSystem.stop, 133, 10, 115, 40, "Stop" , parent=automation_box, text_style=make_button_text_style()) - Button(movementSystem.toggle_pause, 255, 10, 115, 40, "Pause", parent=automation_box, text_style=make_button_text_style()) - - def toggle_overlay(): - print("Toggling Overlay") - machine_vision_overlay.toggle_overlay() - - Button(toggle_overlay,x=10, y=60, width=117, height=40,text="Toggle MV", parent=automation_box, text_style=make_button_text_style()) - - def toggle_overlay(): - print("Setting Hot Pixel Map") - 
machine_vision_overlay.clear_hot_pixel_map() - count = machine_vision_overlay.build_hot_pixel_map(include_soft=True) - print(f"Marked {count} hot tiles invalid") - - Button(toggle_overlay,x=132, y=60, width=212, height=40, text="MV Hot Pixel Filter", parent=automation_box, text_style=make_button_text_style()) \ No newline at end of file From 63337010a042939b48306f694cc631452744089e Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Tue, 20 Jan 2026 00:04:38 -0900 Subject: [PATCH 11/46] missed one --- UI/focus_overlay.py | 228 -------------------------------------------- 1 file changed, 228 deletions(-) delete mode 100644 UI/focus_overlay.py diff --git a/UI/focus_overlay.py b/UI/focus_overlay.py deleted file mode 100644 index 4067f3c..0000000 --- a/UI/focus_overlay.py +++ /dev/null @@ -1,228 +0,0 @@ -import pygame -from typing import Tuple, Set - -from UI.frame import Frame -from UI.camera_view import CameraView - -from image_processing.machine_vision import MachineVision - - -class FocusOverlay(Frame): - """ - UI overlay that renders the focused tiles and (optionally) the invalid/hot tiles - produced by MachineVision. 
- """ - def __init__( - self, - camera_view: "CameraView", - mv: MachineVision, # Injected machine vision instance (optional) - visible: bool = False, - - # Visual styles (overlay-only concerns) - alpha_hard: int = 100, - border_alpha_hard: int = 200, - alpha_soft: int = 50, - border_alpha_soft: int = 120, - invalid_alpha_fill: int = 90, - invalid_alpha_border: int = 180, - invalid_border_w: int = 2, - soft_border_w: int = 1, - hard_border_w: int = 2, - draw_invalid: bool = True, - ): - super().__init__(parent=camera_view, x=0, y=0, width=1, height=1, - x_is_percent=True, y_is_percent=True, - width_is_percent=True, height_is_percent=True, - z_index=camera_view.z_index + 1, - background_color=None) - - self.camera_view = camera_view - self.mv = mv - - # Visuals - self.visible = visible - self.draw_invalid = draw_invalid - - self.soft_fill = pygame.Color(255, 180, 0, alpha_soft) - self.soft_border = pygame.Color(255, 180, 0, border_alpha_soft) - self.soft_border_w = soft_border_w - - self.hard_fill = pygame.Color(0, 200, 255, alpha_hard) - self.hard_border = pygame.Color(0, 200, 255, border_alpha_hard) - self.hard_border_w = hard_border_w - - self.invalid_fill = pygame.Color(255, 0, 0, invalid_alpha_fill) - self.invalid_border = pygame.Color(255, 0, 0, invalid_alpha_border) - self.invalid_border_w = invalid_border_w - self.mouse_passthrough = True - - # Edge margin overlays (translucent red) - self.draw_edge_margins = True - self.edge_fill = pygame.Color(255, 0, 0, 80) # translucent red fill - self.edge_border = pygame.Color(255, 0, 0, 160) # slightly less translucent border - self.edge_border_w = 1 - - # cache overlay to avoid realloc each frame - self._overlay = None - self._overlay_size = None - - # ---------- convenience pass-throughs for callers ---------- - def toggle_overlay(self) -> None: - self.visible = not self.visible - - def set_enabled(self, value: bool) -> None: - self.enabled = bool(value) - - def clear_hot_pixel_map(self) -> None: - 
self.mv.clear_hot_pixel_map() - - def build_hot_pixel_map( - self, - duration_sec: float = 1.0, - *, - dilate: int = 0, - min_hits: int = 1, - max_fps: int = 30, - include_soft: bool = True, - ): - return self.mv.build_hot_pixel_map( - duration_sec=duration_sec, - dilate=dilate, - min_hits=min_hits, - max_fps=max_fps, - include_soft=include_soft, - ) - - def is_tile_invalid(self, col: int, row: int) -> bool: - return self.mv.is_tile_invalid(col, row) - - - def _get_overlay(self, surface_size: tuple[int, int]) -> pygame.Surface: - """Return an RGBA overlay the size of the target surface (recreate on resize).""" - if self._overlay is None or self._overlay_size != surface_size: - self._overlay_size = surface_size - self._overlay = pygame.Surface(surface_size, flags=pygame.SRCALPHA) - else: - # Clear with fully transparent color - self._overlay.fill((0, 0, 0, 0)) - return self._overlay - - # ------------------------------- draw ------------------------------- - def draw(self, surface: pygame.Surface) -> None: - if not self.visible: - return - - # Build/resize overlay and clear it fully transparent - overlay = self._get_overlay(surface.get_size()) - overlay.fill((0, 0, 0, 0)) - - fr = self.camera_view.get_frame_rect() - if not fr: - return - - fx, fy, fw, fh = fr - - # Get raw frame shape so we can map RAW → GUI coordinates - raw = self.mv.capture_current_frame(color="rgb", source="latest") - if raw is None: - return - h, w = raw.shape[:2] - - # Scale factors from RAW pixel space to the drawn frame rectangle - sx = float(fw) / float(w) if w else 1.0 - sy = float(fh) / float(h) if h else 1.0 - - # Compute focused tiles once in RAW - res = self.mv.compute_focused_tiles(include_soft=True, filter_invalid=True) - soft_tiles = res["soft"] - hard_tiles = res["hard"] - - # All blits/draws go to overlay (supports alpha) - def _blit_rect(fill_color, border_color, border_w, rect): - if rect.width <= 0 or rect.height <= 0: - return - pygame.draw.rect(overlay, fill_color, rect) - 
pygame.draw.rect(overlay, border_color, rect, border_w) - - def rect_from_raw(tile, fx, fy, sx, sy): - # Snap both edges (shared boundaries) and derive size from them - left = fx + int(round(tile.x * sx)) - top = fy + int(round(tile.y * sy)) - right = fx + int(round((tile.x + tile.w) * sx)) - bottom = fy + int(round((tile.y + tile.h) * sy)) - w = max(0, right - left) - h = max(0, bottom - top) - return pygame.Rect(left, top, w, h) - - # --- draw edge margin overlays (percent insets from each edge) --- - if self.draw_edge_margins and self.mv is not None: - l_pct, r_pct, t_pct, b_pct = self.mv.get_edge_margins() - - # Clamp - l_pct = max(0.0, min(1.0, float(l_pct))) - r_pct = max(0.0, min(1.0, float(r_pct))) - t_pct = max(0.0, min(1.0, float(t_pct))) - b_pct = max(0.0, min(1.0, float(b_pct))) - - # Sizes in screen pixels - left_w = int(round(fw * l_pct)) - right_w = int(round(fw * r_pct)) - top_h = int(round(fh * t_pct)) - bottom_h = int(round(fh * b_pct)) - - # Interior (safe) rect - inner_x = fx + left_w - inner_y = fy + top_h - inner_w = max(0, fw - left_w - right_w) - inner_h = max(0, fh - top_h - bottom_h) - - # TOP (owns corners) - if top_h > 0: - pygame.draw.rect(overlay, self.edge_fill, (fx, fy, fw, top_h), 0) - - # BOTTOM (owns corners) - if bottom_h > 0: - pygame.draw.rect(overlay, self.edge_fill, (fx, fy + fh - bottom_h, fw, bottom_h), 0) - - # LEFT (trimmed to avoid overlap) - usable_h = max(0, fh - top_h - bottom_h) - if left_w > 0 and usable_h > 0: - pygame.draw.rect(overlay, self.edge_fill, (fx, fy + top_h, left_w, usable_h), 0) - - # RIGHT (trimmed) - if right_w > 0 and usable_h > 0: - pygame.draw.rect(overlay, self.edge_fill, (fx + fw - right_w, fy + top_h, right_w, usable_h), 0) - - # Single border around the central (non-red) region - if inner_w > 0 and inner_h > 0 and self.edge_border_w > 0: - pygame.draw.rect( - overlay, - self.edge_border, - pygame.Rect(inner_x, inner_y, inner_w, inner_h), - width=self.edge_border_w - ) - - # Soft tiles - for t 
in soft_tiles: - r = rect_from_raw(t, fx, fy, sx, sy) - _blit_rect(self.soft_fill, self.soft_border, self.soft_border_w, r) - - for t in hard_tiles: - r = rect_from_raw(t, fx, fy, sx, sy) - _blit_rect(self.hard_fill, self.hard_border, self.hard_border_w, r) - - # Invalid (hot) tiles - if self.draw_invalid and self.mv.invalid_tiles: - interior_raw = self.mv.get_interior_rect_pixels(w, h) # RAW coords - for (col, row) in self.mv.invalid_tiles: - t = self.mv.tile_rect_from_index(col, row) # RAW rect - # only draw if fully inside the interior - if (t.left >= interior_raw.left and - t.top >= interior_raw.top and - t.right <= interior_raw.right and - t.bottom <= interior_raw.bottom): - r = rect_from_raw(t, fx, fy, sx, sy) - _blit_rect(self.invalid_fill, self.invalid_border, self.invalid_border_w, r) - - # Composite overlay (with alpha) onto the actual screen surface - surface.blit(overlay, (0, 0)) \ No newline at end of file From 46e6319b04d38a92ea55deef446151bdd51740ab Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Fri, 23 Jan 2026 02:56:48 -0900 Subject: [PATCH 12/46] updated some hardware --- hardware/X Axis Motor Cover.3mf | Bin 0 -> 122458 bytes hardware/bedspacer.3mf | Bin 0 -> 66088 bytes hardware/handle.3mf | Bin 0 -> 101173 bytes 3 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 hardware/X Axis Motor Cover.3mf create mode 100644 hardware/bedspacer.3mf create mode 100644 hardware/handle.3mf diff --git a/hardware/X Axis Motor Cover.3mf b/hardware/X Axis Motor Cover.3mf new file mode 100644 index 0000000000000000000000000000000000000000..770a5f1396868cb5133f5c48b807dcbdbf8f8410 GIT binary patch literal 122458 zcmZ6x2Ut^0&?qb-BGN z4<>X7Awb|i==**Dy_e@XuxEF6W_HfZ>=g91h_47-x^(Fp@LamoVW28>{@>4yOSdkG zstSqPdwAJ9xC{RMO?2tTr7tH}{=z09{{Fet&}p;HFX_W|WoB#w+6VSD@nQKAR$8GfnY)Yxj@hd3VI|iDKCK za-PEZD&Nb9BRnMUY&Fg1?D(V_c7~scz@HK%6(hpV=b9A>NBfY7v(@DpHrV;`amh9e zzvvZlwl-sPc1Wv^Kb~QWI6lROm^D=kVM?mc4!Z+kr@Oy!NW!WRGW;B$Hmn$4CmeB} 
z=H=ctta!S`cJ6i7O|5u-cJlsL*g2*8$yOHZptNse`#i)a8+LZEoOgD(DP)=?Gq%M- z`1K?C?9aX=@_aSR28`db=sM5Ywn>+1k5)YYOckltA*-ku5t>qvrxdx@X^}IaK!1sc^IzWMj_%DofdJHMfUuZwV6 zPhj9aA~Y+W0RU(C61MZh$?C3?Uu=q%*)59r_~G-TKM^NF3@5PjGo)g;&QJABq6c2L zWiu)}*3R|n2}gS8`}@tXoyt@_f0AFHz_m^r=5V`6rVO$+&nv{20nwArN4#dOnKhij;rO8Jf_Wjf$p<)gY#*K+OM&Wd6MzxsV|rZ_PA2?1Gct9T(%%#{zWz^uMH7aK~0VDTOE|so*($GC1*IHEf3dGB37mPLR;q9NwC)F<6?0{gZobJG@n1 zFnnthKg${f$)1*LsGeTeN`-(td9lbQyBDv*w=nRoRDa&`0J*@hZNA7?nB|Fpjh)UI zaTvgeN3DFBY(T?WH`qo|@EWUo4mU3JT_fG$v-T`MI<$%z^GXpG<~5gcbQqxmgHCc?1_I>_lQ5K7niFV-P{3Osx8Fb?KPd zttUguozxT0TjwN2>6_N_qa(9EYx+@GyuRK1XNt1Ik8l-RH>(rwizXbqOJEPZkvBS; zTbr5f^$fOWe2_$_P?aDFsOCpQ=UH&;jOLvjW>ONSy2s(2R>qSl7&!JYzpEGlK`lQo zhCbQuC*YyRG{LV-Wk@fCZnMqof7==S)T2t)#BJjnySCiLl>Y>o zmlm@wvZ?2M|gaS5`jMNYGS5H4#%^U_6vg*TV zu`8?AF9sIw$KiL|l+SZ+yPGe%L-JE0&HX ztx1XwG3>8=Wi6Ynw^UfY*b3vM{KDrJC;W&7F+-PHIT(IB4_;3MwWDJ8#ngWd5(Lyt zF6}MCuK1d@&RT|by|@q}CY9{DuGC3h30N?YHA7(Riug^UYnJ_BwxRHt7wpe1INkMy zQYAAROCQPFcm_Uo&I%2EI3bAfYrq`^Mc3bf;`6) zUChq(d)cNh*gGE)Xw@6?pBUw}#Dyhw4fPZUm4%u-BDAYBr3FuHR>Kh6ajl$K5BSxg z>S+f=Ee^Vtm5bND{ zzAPEZEDt`HAtYCvU>Pj9mrvh1Fcq=d7f9X$73+!HeRn|Io5-%|+m&-f*s!`-CpStd zCmzYEw{9EcA%q7&MWOmad>rGNdIvsr5855o{c~ku@k(k8WTh%twDvCu^E6? 
z?us>AkMO*3!8gXWE^GRHWjqQfToTT;RZfg_EPnwS3}@6E$-jz({J9n?*V%o7Y|i~G zlN~z1@{$Q#lZui4)LScVFTlSSCT&5~!bNE}l)oaLZl>TMDXwR)7NCaVz<>N~LEwCOW(QKIQnx zG`Z1P_UdQpBCZEt``_Fj5gHwd367wD^eB8x=1cJFj&8I50})9d*1m^^iUItkLfeT) zr`G1W#qRz6nVdTn_c!mO(+Evfui)0p2)iL0d&-hP`4famyzNgm{oo7tgKX#@H;1qG>| z)rduHKkY=N!?vRSis17XtXTz3A*L7;mZ3;dME?_t7H@@I73xx4iMYuf+wyE`rv*mW z@2GVxD54u58ZXm_*QBNx!cne$m2UQg4z^!)TvM0i%(~L*I^t!z|GS=Ot!dt$@7+75 zCKAt@3D+c4Z}5h}L)RzW!gKl`1RFCM(IA|WO3ZzG%G%}GAGR8c3>_gKUDOyClR}x^ zI~9U>oS}gU$%{kYy&1X8FAOu7Q1|lkKerfXBq5m{u8Q4iCaB+uq^zc^vUBAdO0O9Q z*+o+*@6VQ*@#%g*;Gi@?hizZkQBuklTJbp9ijq=;TN~SnVf~wJ2ypRo>*!g7M`c;d zx8ezrMZ@fz@BpaP0H>%4l7_L5y$)CS8f+=9zbv=eNe|D4 z$OhC?m|;4y3X;YyyRJlcoOndbJVsMqoo4Fe@!<92cO`?4^3_jP5tdsuC`qvU-@&k{ zR2=j-YIjAvjgfLrqdX`zq@iyDHR|=4rHUy`^WbWJucKa`ij5o%bdy78aE@(QK15kx zQCpMyGl_y&$TRmlA*CIj4@3hG&hn!xbWmb89MlJo3;D<^Q+>e{2w`-s!k`A`ru16a z+m;d3t9fIXo|p^c13f{G+TP}H*N-`CWJKSd^Lh*F z2=t7W$>nb<@Y+siO5)3QRRV=vb%HL(U7t%Tu-Q!yksD0xOo1cr9mKp}nJST10qNC# ziD4bJb7kTBM#gkBk{6;apF97G8ks6p%!=UxRJ=@}z}lAScolooG)5B5<(M&K9|^aF znDq!%r@cHjyc#y({gpZUXBPx7uAlew6oS3EZ|77-WoJ!;@Iq(}J%Qj6_1TbCOThXY zg&`E^r?OXRSlqYoihN% z0t^XFOUIq>g^+ceGh=V+!wiI8;UbExFQWl|^1(&cKDjazT0=f(w!?Z=(+NV-t#)>u zD-{hLM>KB09eZ}tn@ynrfc@YZ44KDPS~y^AnDlOGpKL`{-)ynhSw-UBYOr^ z6<+HW!UcJ~$bp))vQL?45$2)5rxPDm;0 zB|eB@o4bQrJ+-R$74(9!;pU9jacvk^XOp3fpW_!X@y1vl~93JUF*_8Lf3`zmSh zefBW<<3ZbonNnP{U|_4 z#Hmk^O>+OLo=_J2P*EH6GV%6(l);mP)F6`^d+MYqGCpT~XqBPmvQGVOqjjVDC?{mC zjRD)n)I9+q1nt;OF02WHG{uz;QpE+tpynd3>(x+dF~0^XSfK)NZ##q(C^Tpn8J&t_ z8Owvhd&`#0%8wp~kMW+xA3qy}6VB+c1=$*Ap#=&qbP;OdKY6?K^BUrTy+PZfO&$7d zW@^7Deb-;vK=mG@6#_^Y{kMO~<%VutzwEzGlT$Es@)S}Eugb5L42YHqAbBNousWm< zuezfjle=K&Iu4Oj0kubNxDj;?NHrQ#vKu$k)chp0LS0Y_{Qc`@$PAvZ%UceuuwKTwH*S-;p8bGXfu%ZoN+ zU!F*&C?&p@U=q*3D;5jNFL;dS*Q~SeSF?b8bO~!3aX+N7S(DVo0Tom9vVDFxy&$RC=VxZ9*M=}Qh_7U$83)KtW821l zFfpJQvkJQFXSWkw@av|X%9C<|20F)+XgL?M;<5GcB_-bmd z)3<7+9b3bK`Jg--{iv3?tnOKpu$6D7_Yn`wwVx9LCN6hw<32~w5hIu<3JzbY?Cq42 zCJQTXyriBz6uMi8z1*xe*R(LBXdK#)oM8%P?KrZm7jLnc?GklHItn6@no>@vFvsN4 
zo|rmt?hG+%1RfAU;T6)&kc6?~uJ{Jxe)1xzTyG8>sYyQl-n;fkAcewi>yq6}CfW1tVtiQrDG(U24m2w=*^z#cdaSY6phZW3YIy0T z{V(~^#D~^D>|Di6Ejz2Q-zcqluncbk?(IuA(lJmrFnT`kwK zGv7tCIXN7UY)Y4<4>o3N+>XAbIF)ikV{p+9GPwEf%f`X_d)ap$u5;f0^I4t$Ao?Tu$ z&JZWX+V!N7q0Md}oDP{0dSQZdunc0fXkD~y=q=y8(3b6p7G$;WvK06s+!}e34UP&i z-S=%GL3D%j>fQ2L?Kc~A5`Dmpwjt-6yPN{S8O#W;KIyKsP<5Ei1k1%3ihSp*4`W9y zn+Om6{P&i(Ygg??<*&G?q6}J{?Nasm76JRoq;MK47;t-q1;MkzVxV`xwQ#yJI||7O z^I8chq$a@vtok1yd2q}KVNdD%Zt9>VdZ6^uoQqnmGxSp&w(7Gw) zzVhx~plGyi?5IhkVa?|DjlIT@hDq3$&nrGo^PvkYgH@){ z$DN&w=;!0l?rxjhT~EKCS-=`FVD4b>i?{WJ5v^>_FFjM{21lflQKiHWwRev5J3>hX zo-5D5T!OU;g$4P&dO>>COi7iaG7Ft~o01kzYpne53Ex#uIM#&4DS{f}>1FS{vKEOF zMMSAJnNtoK){k9=({&P!{WLjMI<07UDT7s&R86|qlT~>%7y>=C+lg!DYmhH#vhhbF z$s`n`Z9^*4Sf1L!@&du&_e~@zu8K|u6Q}9EYVxAEkj>w~v5&AKAGS)7+Xllgl1|PH z6OQL^gl?OJL5|aX`S(_P^_R8Qj1i)~ih^zsknWZw74>~AAa0c{0;YT!%vRkB1ar(( z4d*wx-O0h%TfvJ#j$7Fwj-C_k1%|UgB}1$Nk@V(Ys5gjfHl+!mv~jEw|B=y3I%A8F zjJ_EQa$FmV(D41cYiqG{khiG}2>iM;Iop~W>}$0z_)_R&8oJuQ1PhZ94b%k~&o7WN zGp+w#AsI9}^6EHfi(w{momIeToR|%E5nG1V^hu51aV`Uqi~6DvaNq7*YI=S?o)<@6 zAm~-$6L4%$K+qlMcLd;Wov~Gw-qW1Y!YZT4^6CFM z^>@uI^gn=gz3~4=7xU2+{r{o+hqt>6>=Hcyg~_QnZeL*fOUQa%X!zZ$<3tKZD6;RJ zGW=nsBk$fdd}H$Q6nWA=nC_&`WOn9Kh%{S~<@;0sJP`p0Oq(C5%tpI^QUGrY!^NMl z`Bnk)N|F{F2zQ0Seb*m_%i)3|uli4IFXJv$13vKT_rJZsI4r@e_%9w{tW9%4IQck1 z)V|0n>-Tm#KuQnR4CG-$@1Ab~r{)BDAW@%|+;;$$RWmQdmHt;yb;9T-; zLB5t3=azWFHcVKG(;ce3;E1u#dpt?pZvfyLpvJ-g)%!86wedh7NMP4M4+2M0{^QSm z(D_2kfTZH$5dV%Q-yB>BurW$24kRHlMzVii!6|;5QJ-=M@NiPAg&)ixDd!jXKtSZE zo3r2cxZfcwX5Y*BQBHwztaXg&soG2EckDn8K6kUYKwc$4J24tDqT<-}*{RPFYL|8m zsuJxUEpzzpm5hB~Qt9)_htwGcIEvpC{u=|rL!r?d=}bDR+R)U;g8u*m?p^=_J0)-Z zH({$h@$dpgz^jWD-KW%y0M%Ff>?#r}fUaGTW({!fdpD?0JtlHN!ocyGynR5*hP*xC zDpDyJ>+D>o5F@K`trv^{)wX|swDocG0JD_DtRa01aTf$aYWn_THG_0Z-u@}VC5c=% zzx{9!@;TQzIwWZOD=k37O(Cez&UF=tKuOJ*=qVqN^2<*( z{)LwC-%$m)kY@uR{kP%(<}ctD!Y*dJ%P22irfM%BQMv2s=D~%G=-L0lU>fs(&41}-Kb&yZaw%prdNL&(Jwy}x)W zzw*>cYPj^@10e`VrM=c3r9epo8YhhvMitX{#!zI(oK6d);sH;mCf^;L`!Rb_t|a&0 
z6Ci6lsKOc?y`K$W%LX7=zC4^fGaI4<_QnHK1%+{s>L)3aCRsEfBl{2>6+$!{@HN@X z`)rH7U(6_tfH5DMacG}ud^fWw66DFZ&>5wZ^A~v-v?upK^)Tz^QIpN4XJfYGdbZ-h z8GzN7C1nOQSU=A;-uNNJanKjo@4$t-v-FMhke?Ur#_BV5K}0m0dYiI&!i`?kC->>_ zi;G9$5n6vY{eQU+8}VXE4k_vVpW^~XCf|J1p<_4j-v0_f$od9VEAqXZwXa(E0N}(I z!(uI>6{;AAE~xqf89V#bq9esnrPRO)XwdF|&&{r`a9^OsiDqkMsWYl#@E`T;I9=EE z{!+($KzMm4>At5D-YrhgGtNbg4GjJ|4=J`GB#%5zy!WE|h-qlvKPjxIO8p^JI*xAB znst7HQ(YYGX?9Gy%W1%XVI{YrMXwheNaSbdk2cdUQt#S!HydjlDVEtk0w|~a8x)=G zwZXj4R)HxCkuI>sQV@KHCh=fKSQJs~`LY!WJyDsMhn_?oC$=dnB-45J4s--oG))ad z4=-?2mrI1nI$kK_;@=(9k=rbQ%EmtdN-l!~^t}}@6IRSwjoV+# zsFMf+v|V|KR1c_^Qc{7O7vwG`OmU=u$pCcc*bsl=0yYsQ13Td|mi&H9RfH2i>sKvc!>dB7$sn8Nu?iw+>o`w{sP<23(K8&eqnY0aZz-! z$iH{7#cnR*K=mo&PsX|wQY7FB1niC$OB<5+&m`y31U%$i#HZXPxt!4G{h+hn*#c{k z`n7+SLxB=TUe>$iYWPnRFI2u8`?^gLC^T&;UfdLGfDWu3J_v;NuvX*KO;|205Op!< zBaCDAKTig1Cv3}ZXmLwrE+YFRXz+gw$~2S>2ax;E+%Cj9q~`)HR9LeF0svNw-NNYy zBNoir#f^cV70rfcu9)eG8Ek#tBV zeU}eZ><-oitQ~mpciduYPMWVe{;!{4J7AWs5~ZtRm=VzjHVy-D6`dBI+x`DT7^uHC z1`dE+YvE&sP>8h2g)w@Q4-LlCycV&1i69Z|9`_u)_dHq6{heXO8g6M z8KC}N0^E21WC5RUU$q!ARr?=9l#0kZPz7{h8+1DL-)RAejG>_OT0q7Ze1#84nXO`DOkE`eM)mIh&9M0JA2Ib6b5#x~RGKFCL zpycT{{nNQdGFPXet_UjhQK&C1Vo4Ij1C2qBeqR(k^+aF6?#O2Ds4h11Qf4gT$A81*0zdC;H5sddx(a#&LO+zwV z!7>uS_;6()%K*5D*`8CR-X{s5Op)oE*362}W9|`}nSm(%p5=*?v-CfW0sSI;JQyN3eH1zk(Z)UBea%-Jnv!x+nJuCda2`u86L<> zI&Qgvb>Tbyv5yFQ=mydf0nem2gaRoDH}rFjD5&OCPof zw+3e&z1SLsWJE{*hXdg>bhX+?;S4}MD}+jU?~6fx%}tl>?c4H&E_x_2YPYA7e?^=+Hw#*Egq&eICd&l1#f z^0@esn*+y{=HF4xJrNLekmvVOlkcrVt)D;kQ=f5oW)yVE6Qd447kHdWTAYZxb>Vh& z`iolAPKcYh9-)I>PrqT%bpi3dStniF6s14#rADHz?gp?>uq*s+NM|AMI2ZPH1ApL2 zB9W!j9|X_caj1b%!4o4*`2wy9vy%#!`%m)5^ozPYAu)sv$IlASLC=n9hV1$+wRFw6 z*TlS}Nbs{#&k7i)Upqb2dV9Rv^6Oo)Z!F<*yObtnC{iJtL~ zm|BGR<_&OmlUw&289%YB;prmCI1Mu#ZDYnFcKw5=Y5oY~`+LxJW^-gTe;#XYVQ;9nGW&^&o>86~({wBpG_0?Y z@HXkLJ@}d3eF0I#C`T02)?|cLP_&Hs%Ah50=8gCUmZuPUMg;j=R|4fQBicE+76~B- zANd7rT>!J+s#sYmO9TKOy_nN>xl;;wOyH&8-vV9TGpr8>{wv}raRqe%E3Q`C{X<&}bP5D5=zFRzw9PeoI11wH4q(W-8}|6{uw@Mmb#ekM)6+ 
zYM9M3UA52LfNcrl>ptkTZCj9^5EtGs>v0B&nP z^{iiJ3If;b?YoxTbcp$OEd)1?u@m*JhOCB^^YHo+ER36aI z`5P9;_XAWWzYt6BwASCzt+s*a9=S zzFGY#r>F8|VkUm*zRc)_n0IHTPCpv~jZ`{dSf=j(uRB!zVsIem>F_|#B>du|0DYtJ z#tV>v*Lj-6IS8mL2FT&k8*g8O1!PvhHTw`A)|GlcMoV#p;Bt&!=#!&0d1{ZC=mLLU!VbU*5OEu{#Rh~qP;Z^ z0}cVCsolKXV>B!pu7$7cB!ua1mjExKb`(})FLAm(WrlF z4h(%W|J6ev#@+*Y7czjLSSy#(H304m0B(UHQQlu>Lji1CXF;B7fOL%}_pVRUt={+z zCqZ|76IaPGX#uSB4bajBzL7qPu1dw#Y!OHW>8cokb${(jUQ0Nl>>HLD5pi^Eqj-MM zABY5>;n~i9A?XydT&7 z{-w&B%Dit^vngNi>JQiN`mp3)nN3ZK(XL+l|8%;FJT8ke6d@f(S>d&DQ2O7htF$;Ex)((vJD z{&o0c-byJPaouX)p-#j8O96h#qEzZ(+9{T~;rg!>t@rXvP%64|ul^)DQxBLsNs@;D zdZw054)TV?y$SZ;Pc7K9Ixl;yUum7dBFnN(--lkUOj#Nch~Q!Fx&7dpG@XSJqba6p zdYz0od&;`M0AJ`W9q%cNiXqem4MRk}psrw?XOipNsp~5v60UEO##g>tNJ1-LCuSxj$a4_Wh0s|njFinzT3+3t(WtyG z+Wk_+H`w3WtE(2CFrJV1+{88>%AiGUh(hhFS4ZU88kNf_rYK*ll3#!`%L3s0205=v z5~&z#FbrhGhkdu5_21?y1!hed=f@``-_=i|&u=$1v3bR7X7}~^4G-YVWi0~_7W8W^ znzVY$RoKaYyJe)SI#*ffrSY?gGi*AE_Y8d@+;+V&GexSd7u2ZH>lU@fQ$7LAm>0US+dA7&pD3{-uc4iT0U|@dJ zVY~{y=KQguU4^7tqIT2x%{)mpmAJaQRS*3X$8RWI2m=PKZvm*%Ut#hg1ic(F=n+?u ztycRvt83-o?m@|)zqxKb_vIJIQX@T0@=ivlDJe=TLC^n#(Tg3?$?BxWz`G21^imoO z{e|vyz`w%|^O&DwZB?`@HwvEhKC0AXp+{-O?E8L4hO-K(U)QgM97**9HGV&hnfMv> zZyFvNDFr?|+2a&9E2XR_-W!%`Blg>2z&l@rl1a{*$CnS=5&`4mYAh1?ijL1kLh)Y2 z-hu8nL;Dj5@vqbWp!1p&Z*W($hmeOHnn590zNnX5?TGmv7sG*JKR@=MIfo$8SGOWUft9BCNGb#dfnL@ zOy!*BHCcf!SJ)OIA?R%Ee{JBdrcoEP{ zPiF9?J%QP_JVOROq6J!ETX$OU-lmM;&|!i+^XNlg;q!PDqT2!J+thou4O`^!fJcs+{i;+ zCZ1F|K`*l1$8d>JQC%7HM-@DM-p@zj^wZD&?281b&5-iEM4CGCmcuo5A>? 
z_SXgS%eibSGwxV@<>l$0+w6Mu!TYYyw*yxEP~-E}KfjDeMiU;XTy5yjUtcnvTk?5S z^@9_4lgMK$G7L3Alj!W<>Cq?B6w)6pvD!RGBWMn;rU`jw?njYYrx6Dnc)kggym_)i1&BV1ooyeVI?u)H&+3HGbeDwR1n;akEn@-n+( zj~uy!L5XU50p6=?Y6rHh2X+lJ?(FPG#UxX!Q-Tv5ifkj}*O%RJk?*S8dW4RH7bfbAfx}J+U;vNPKT41t zsLF$%3A4>Gj~E9$9{I-GkU_%SUZ9q*&h~VqioDS16NQW2d#+{Gp2GRc(NPY!-sHYG z)K_D-#lDG|8QC48`ZA}B$)8i6Iu+$RK^slnww;~BQF%IoXVfCuNt^B6j+#|>u5hbm zJ@D?F%Z$|>9GB6KJ!F|F8&Z**nwp~Y_|f!%*7u8QJJJ{>6?zy+Z~RW9`?-6+X-Rgv zvZ&av>88j4_rVdlF)HU|6VxLIcZei{Y`#C}$dt}hL|0Y**`{;z4c`4{!`FF|=s(6a z7U^{K#f^Z5)2ar%n9Mgjn(0UznA$hxlvY$fY43i<9H(j@rO+0*xiGF%%hXu#nOLlei=}HpEsJw%H#&y z3vKa4fMnNv;TYqHe`htvuWly?cO=N5fYP}+Gu2g>UsWTte91@*vco9tec*b3+s}RS z>RM0j{?YSGuLAnloX=iZ%LJ&HV0`He`19zx=8WT50+cRZMeT4?OUzd&kk_iW(E!xpr(+(SuJwkPU87(>9mrM zR_VmIPu}y-p({rR_Vkrh9~WD_@-5;Vc%&y}@&FwCz#(dQgT#zM7{_kkjd|nlLrlNS z<7HwpcyiY#zc%(aD2NVsgYUzQcu0`I@PuK4V2XOKXxXFMswDeERe!Ggh=J_th4gZDGl9SdTd-oR$oRc>4xvz+Q zOHwGEw6bE&&F)veiMS|VczjxY>yOv>^WtjB0-6G;0vzZ|iC`OFcl%y=a?I*WY#7-W z$xa=Tuq3d^l*=6ky^Qcxg ztVGjm;$KRf0O~N_b?om-rP=VPK2U6blSLMZEysOWmel3t;dsR~!pag)bxHloL^jy^Jl1+&CRq*C21^>-X_w|WgMYv=Z9-rFUBS?|%;Nf*Y z^21Hlx6ZX`6b|d}!-Ui}3~Zxla%PrZ1~A@YS0kgA3Q$3{(*k~pqwF5dWw_`1>}S#j zOvg3)_+``-CFSnpmAmy;-_QP!=1O9%8KTW`{)`zFIXENa0HeVxIJ7gy9t|pm}E7Y@PU_> zx^NC$F6P~Cy7b4xpJZWr#E|@_VF z=Vs9xH{SH5)OFo4T(x0~K0_rHYs}{?q@h~|%HF$cB6NKT)pG^e^oC_d;-lAnTOHg> zmb(1s&+EkhIOCo?M$IwUn1Sx{Jp3gyEv~~&aDTd!N-mq|;qL8e+94ObnxmMHs(=)^ zMAwNI#MNOvosGs^$;gH7TB(V0=Xnb;o6ic9t)o9z71qWM|C~xPb|^_*UZ}ig;We7~ z6YzeMAftf2tK;PiDOH6z9|4Fe*|LrAg2po-*!&AR%Ca!y-hep+2Oc3)%g!)gy2cwo z01o&w?x>dm9yio7%H@H0+#gps+jz)_Imht6|fl@%%E6Q5VYiot4sxERHgqBxAd{nCxUOpsPU5%@`S6+#xL? zAztr3-}lxgv^rO{;e&siLwvR!xf{~2q@af+rYdb-O45m)$$Vw3UH-XoP8j+2w7Zc? 
zLUD&DX@)6k3O^K>Fgv<*faI?Oin#lCK>~#tsk}~txnbR!cYW%PKaLdoDAlvwSIhb> zkuc!;F;T#;%TI!4Gr6=zVbb`G-5<|PfX~4}*1gTHBkt+t>dJt=T=q~cVTn9C*{O}@ z*l&H8!p-;ZKD7OF6zh2=64C!fQ>FVmm)p&e!C{n-4|WRG>vz79DJBM9I!&jI}}2)6;ceZ_AMpS5nc00^&0?Mya*ZaCp|VNet->2(zr*AcGN zp2zU9IE&$2`feaRo;c3Xzro~&Cx_EdC(HNI^uPCBHgTr^h=bbhUWE9CS7}GY3%lav zM1Tv4S!S}~C-)ynHz_*ja`Q^_c^8N|_D<@F* z!0#V1wzq+x+`J-xl1SFv^0d{z0rnY6Jl{bTRL`D(rEwb@?cy%n#;o~&$-*{#tlg)h z0Mz<*aWH777c6T&DCgMj-(bOJK-zIJqoACW1J5MdYQbTG+(Kk@l%vJCx9cvaGGa$Su=MA zH?49{Cwk@5v<43y6nB){;IBJ#MRA8TMFQCdq;r@4)~KM-Pn)q_O6O#VfM<$x8gKIR zP3Pg7$J;-@RRb9WN4I%4FOceSI5CZM?f+!>RqErek&ykTvM4L<=H6!GdX|lFi&>;- z!gZNxkBYv?d70_T;=S3!w@-TX#PB=Ez>zjP31z-5vUVy@I4mO#aN8gCk|RlMRMq}EtpU_yU| zQl9oitf9>qVapczcq8hK-sFq~N&Bynj&{Gd0iD@#;@$rDKBqM;u6&;i-vyg zgMVo^xqoWt4r?>q4bf}jo~Jh0y7b$6C|i>$#3*x(=XPXEny6E0^jatMgO_M(6E`^6 z6-_ZUQeJ0ty2Jzzz=-+=tiC%;Ew8Jc$12~LRJB)~zTZ}0lo~5z!T&n+rom=MFk!EE zkKNxo=%!b=pfX^StQo_7akx#<>EXjMi<97sV{m#VZIhr!lMVA4E+9f|?nHfcEkBxe=s}}FA zwZeU*7BlZj3v=q}hPnGkE_TWn$~)4Uka_VpCpj-#b)J7>3A<-xDyNX)%$^`KYr)T{hs@@z-!M;jIiGYH!o^FCebGg9IkeuR_2El}IYcJOkX0Z+W6g_g zS0=Kmq*e?~cJ@BG+dH^@b5|<-t;F;1)>RyA{Z4PThHa(XEM5RuyOmwZ#gss;2RyvPhoyMyCMLW=N^eV|OSg5T(mY zHzx%sR{K1*w(1Hd9o9Lx`O)yEw6{&g1pxC1uOr4;@ja9^xp<#qUR&32U}AJjsHE`0 zIf6()yVxb%JLdQP75xaOJ#L_k%OKt_T}V+Ls47Q)S6v(gEdFF z0k_w0X&2|;!sAk*XzGppkLcvMosB zrfdE&)h)A9v%l%UlfPD0Ucoz}bmdc#Ta^;{T2h+WeI#YhOxZ-1sd>pj9U0?_sKs{l z8ayLKycLiAGpM*6EXwtGbnWJY02MFZ7!Cis)2a+%UQ3@YbU`Bl0>0{pt;&*0-23av zGucLrx+J}sE-uxgo{YQs<;5O_)<7l>fDSKQ@oT)11@+xGV{9*2{O~>pxN=)1ci9U# zSh@wHE5DmHc_yh?;!&76?9{8Nk#HQQ)NUAe{kP*q073sd^&+tR*g41-%G`ep z#57G@`Q?pBRkf7i>&PVfIWNFo2;Ar@;Bo^(O^!gZgg=L=g>3^)EbT=!vJNuaW(kR_ z-bs{PKSyqHzo(67n3SE29)vB9jB;RDMs#$21m@+)+kU_6*}m340l4$RK(~F*io`@k z01NjG0N{H_=r(-}f0)XZF>IB=cJIXfsz5=W&i-ghF{SdesX^Jxl6@7|YW^3^xCxeGc zbgV5=<)ycb0)*#!(HU;n)Zy`BzN9BNQDsZx^>dt{ zPwn%bL_Hx_i4D)K`@fw4wA!(E^G$4JB_!Ldmp!Je4jBDv@SJuJA6Qae`kVO(y_4J! 
zynaHt(WQU)L#OWWZl9{>LWfQM^FV{Z?S(QB2W(4+ie)%`O0Bd#s{FeiVM|bhUj-%m zEqUPDW`LNaSOl#UP0L~ESIW+W_ufh-qo%4A-<{sPkDu@P(^%9g({^olK{RP|-72h1 zA|InJSlIpsP!-QxJ$S|ckEyo~tLpjQKqUm}2I&%zR$3YX>5%ZyA>GZPC8SHbySq8m zp}Udp4(aZ=o6q-m?{oiwXZD)f6KlQeU9-<*db0XkGm~K*uZ*;Xrb-Gw=!8htKap_- z=B5T|$qT2El;R;QEtO5HumJUHsBU+lrBR9 zO>43VrD>aMTp>%nodriNh1!4wDgoh8?ke zbd!KvFc8#_0@uSPr@H)lrY&}Y(FU6k=!5|KqBSAAo?!f#SS;Saq{-I$S_h2~mt6wL1Hz{-(L4}Y3NJ|{s~o)?c# z9z(dym$zu)@2yHv@O$?`qve6_->?0;#vg~w*plpABqjjj0!{vT``nD&471JIb! zGc+Y;=e%(0R0MOn&&SPEgLD$T%iRDK1oKMXYovnU07vGs#Byb`(iLNj)-4jY>7P+~ zPs*>+R8b}QZY=IhZB}uuI!MEydp_610p9pAtGKq2%>=bV-_np4D_qn5HjBBO$B*m3 zb!v`7Z-F-sDVg&&sSla`I2;pnAukmhw&%f0-1J+Pgm1Ps`lWpi25mx)NmIBv0eg2N zdS(sqg6!~O7{z99HH}22jSa*M*4bO1$efUpEak+$=H_x)TB1zL62C{wGf2V4Ji=zY z$CNfV5DSGGF8tmr?qQ(r1lE24zWYFc6F`ZCeOSjB8LnAEi^MUzP8Jta0cO8pX#8=@r4P_Buf)jreo#GOuk3~0!g?Q=?5a0Uzt;TJ5Okc`PH$cK3tw=?&* z%^Bn3u!HVmM9u`eOX%gjnsc-9xYwe`+PO$1vSCs|@1&Nk!IsuR>I6MaoQK+%*G{&0 zG*}Mq{UZRy!;+I;ZZ+3z`-fC3L5&Cf1|CpRm;<1C0y~=hhUR|vFNyEbQ-|_@x#bw> z=V2G!7#9RUL+yk7f+=wtMb33rBSz%Xu2PeFOy`>SQV!RMVuP)DYyvi240#A-Z3Zpo zv}0vSWKj`Em*c>APL?c*miO(_9EU9CjHFt1qMkmT_@QthKyNaxD2RnYm#Lay?@CdR z?3q0DghAvDK~I~nTHvI(d}C2gah-4VB=1BYFLxRCx0nKeScAkpG$Y3PAvs`mP)EyK zZEh2+CruBoOt75=W#+*#(^u(Nt>JFgp;J#QC(lo%RT{}4u8BP?Ha^kZ-*2Dm%f%KW zGI7*n0_R4c%|v=+k|^#tdq&uIytj17ywWIv5O$({DGFcv*3e^UNtR{BuAg#fPT7>b zf`oV@?l>Yi{V2ku%Q=ct>+!${KCa3?|BiVkU`2e!@!wwk#aC7+L8Pd?TsIV&mnFd6 zqK2s5M9UeyObXRxczh4^3hfu+f=!iueEb;^`%@P-+?wjYQYH+Pz3 zTc#&i4A1zcW_Glp&spMj$lKalFrj!^A@NnW41t9~cPrv}0b_%1d>Vj|3uJ0TFMs); zr+k_KW3_K-5XyiG__n0Xk46%cc=nc6zAdMwT=iN0ZA*Mj3mw`{Yi#RXuVNaB4NaoL zQYo)*a{>yr+_gaQqvZVk4JO+d8;KOe5ldBxW}Oi8-cc$9fYIOE#Q^l8D(K8BE=_*k zdBx*rFBp&)YbaY3U7jc$?%+&TEjvuw5z%Eo_X2dkHtPsozQJOr-CKbFm$SE#t8%Q- zidkw#0JLf0!zoNK81U_6EP&kJaide=wTqEQjitjpw4{U~h08lhMKQM0r9n zcaBq^JHZr7Q|5Jqvz#)Nuq-7#Bc5Van$=2jk<{xMX0N$$9*797h!dy?gP<*;$!Nd; z0I`YxOS1fUC1vnKSqcHg9Ag(p{~lC12$AlN6&ncZMgO2QJx*h|al92}Ht+7Y%;AfkvLs6#svJ%Nf=5`jF%8)s%N3D+G3k0==1z9SO-9L=52$gvjK3D(Y^B9ZU^ 
z1X0g_TP=JLQ_oFjlR?*XA1?5$?g)nVX2RM zmtVW%86;4CcGgEqWvZ78<~mKX`#TPv2qhc8TB>|UxCQJM(OvuMTLFGHy++^WJ!+J_ zCfd*e_4|0^`nz~MPlDw~AVSOBzJa`xZm8dsZUDN}zMOgMc(oMT&vdH3o}1-Bs5M6= zEYEPyax4n(C}6}ywXs&yDJ8zzqdnqX&f6Ar`};qb-`5{1A#Ir1A3S##!G%=`#hqss zWK2~7sskf9)(M6dx4K^TQw~7(5Vj?^e$QI-ME*+LLVhqB8ImY~CD1D)(Jb(N+DJ&ouKzs# z>rcLgqhKCaX0M91(RAW-=0jkY`D4Dl=^0A^*Doik%ujr-Nlu5@>@}N?u1ks+FQo^i z`}U0WkW`mu{yB+oMxit(44q{=ezyB9zJ+{SdaB*m8hV`J?d(p)hJvCDAFBqT%Wh-O zED=x_|AqWYFyFHf1@I=9iO$`rs#M*E8Gs$jdl)My7d3hBSr(#Tn>2z{r}l44Z8xsP zreNS+lbl+2auv-WG*DC=fX<{yA6SY5V2(bg>3*>11k3IOWoOu`^2o7<(D-B!Th|tQYhzwri zissL05TT|g$-WQ^fJM1b>KL^s|R;-4@a zJL|O?EM}mABR6{aup9g`_wX~8*W@k&7WL{=cRyuOPd4EClqXQJLEV?HMtjMRjgA{% z!Iz=h33KNK(Wds)``6^u_z83Q`Tv&`W52;4aS=cNud-y?pF!@Slr6n31f={h+E?e4 z{M;FMch1cCoV^j(#p|NCG61O?(HaW82k7J;6F|FzbH|~t?@^*7_}k`{)U>E7{EXl@ zJrvllaSC|0b!r}Fl&{im*K{Cr`G37sWhLLRc=xL2Uuy>N;vHrI@Bh@=Yt2xPpwPGf zlalWK*NxVtPQVkM{rUX}70}yQ)I#rDlNAh*et#Ds19tu{A~_Y=7W?_pUNg;nObkdE z(Dai3)}Q)GqP-+kF5m#hN{j>}0KJ};iN;ngQ;dGKepswIzo=o*A)lsWpo%hVU=Cct z?O3Z86S1?QcNiF?a2w{qr_s|7bELtv(Gdvn-hlF(6MT)Fm_BtF3uxNt_*LS?vYDb{ z9{o^G%_HN-73YYQ8aRfpm5Jl|C%B+ZAmmzv1I#v85Px^hGWNNkvJ(rssl}&HU$E4v zfH1t;1o$q#ez@M(Lpy@`5GlqEtLz*zG6INSo>yVCy^K(rQO1S?k<)7`&F4y!ZRX{L zlI7PB!3Hrn(wMTLh<0^jMg8?fYYuYKv;_!ye{0gNk^-y4{w+qE6REvRr@1o#mK#W} zOy}`&rm7PZ8)I#hC8SlShno9AXY)b$pT&gT`9HFG-K2Lp)DGBof`tJ`AoU;AM1++2 z{F-z`fhAETxRgD^PL4>U+Y;vJEcjMu5W0QzB!g01j6Gg%mi@A|!bl#0Kb`4)?V>pK z?F9OF%M^av)Ig<3a=12awYYzO~;P6faZh~8Oo0E@sb|`)J zl10z>dkp74!kLKCE4Iw#6WQ%3_0xLJs(Y_qsUcqX6Xu#3`X+Otz%AquozV0b^Tgl=A(f!-gVs%gJ<~!Kg=Oax!FseAIJ%$uD zsYn3!^+0KbW`IGBmaZ&)WT>gK@dZ4j%;z%i3t|EjiFJRt#yEgS}gWP2s&UM?RiZ`;L}}t zU%&sF$|}>}1q3u8s-z<-u>-+81#y4+dXG&ht56Gq0J*p{>Ys0b!H;+?%F8bOnptCy zri)&b9nZ*u%1u+cpFP{Z+l@|EO3TjjEFRSrz&&}xp!p%JqtCy?VzpS4c3|mvbyX*5 z@WdUT<8NlhcDNylM$)hRnOt*dgs@-@gejQOKKj52crY1-&7=&3TNrew-5BOs%u@R$ z2XrDlKv2VYdQBVz$pGVx5Q9*NA<(D;v;_<(zvhtG_y3IqO+57(b5a9aD|6|PgZbC!K5@0U7e{i3wKc%`(oN>7P#2heJ| z&b6z_jhY;>XUo(yIF6f|TP0*O6$Ty^t-cr&q}>1z$KxX($Hds&Vz7_&hp|;sgLT8B 
zgZYmVq-^qoZ5q5`sP*{|4h#+K&ug*?v(mJJFvnF9Kof91uhfoLkWRLDIhQjh7kiYvk+2I(aRb4X{lHo-M0z8j?yFaR9K5h6>7 z?kU|eXCeR;P#jPKLi=K(J6z#meUFIK~7V-Z&6GAZccE>RUDg z#!5cLef(vZV>91Pi5K7?Oh@CqXZ~iMTC~2T;H!*r$z3cmN2Z1*rfl@+-dB7uz zC@R@=-(t7l0A98N86btqH<(_7ov+Y?A+yB4Aw>R_?BI4}S)Gm7ys<6~YzQP9pHA26 zF^dIGpx`}WHdcCn2;`~)7N5I4o)hSQBjosu z2(Z4O?|d8op~cqq1$z?Nz|I(cF-!7sYXz~Xp`eCW7}K*lvhX0aMp-HVOk9b8YY@M^ z#6`9rG;h!96_1Z^Faf;pZU$9f$7`Q4vy~};kc5!G|9>u(rL$koJlb6l8FXZT>CWV0htn?1u-G+b{yt>LME;RVnj6-n?CcMj&uW_~A|KDVUadd!(ACspH@n0$$ z386y|Hz59nD^RL%qQCtotfYIM^OYAmIhu60A!i~_P7SKDcr|wE>jgKcfYpGE4@Cg^ zp`!{q9UE?dz+K`M8CBye=*R=UN5V!fa5O7nga2jgdx}9b7>{n*8^~@lONWs~^7>Mv zq~ofS=^Nz$WV{0eyFk}qa%?(=D%2z8{p!<`Z)lOc;;L_6Y#yAg zt^&i35?eE`n4;THVT{&ZazA2^1aS3au;>NUZ)sKm{)$}UwXZ>&1HPPn&d}O9`Win{ zz!}|O7)y$Wr<>)w;fr*3zT%5!a62?*iBNaP%eaMpM;}JRkC7Y&?EEPBe}LnTp4EOsiAEkkgbNhb z)&h4vJP}6=nU1YFfae&$XErv7>^4=hQVkre2nt(`$@omOiU$A=7;Q#Fh1cj;CuFuU zF1#)(=sK)l*>LD}9;c2^JIkOn1S^+HOO87LvU8`8LP0UTDbGf^9oW&e-&j$-s|9AE z_Bl?LSi>gNMyU7QtbgvT>oKa|DuqlP3W(3Fyw3a8-$8ms4jV(M9v=c+fcht=3K5`DPw1ne2G{0>dC64IC5?!#S-`~FsGv+`-| z<(X`!JFBOiGh2Tbnzy%bpC8{KMu$22-XW`V@lThO8b` zfY%7HzA6Go12XC)dFz68nP0dOQ+4v7Ug#nZGsw${xUKUiPw4!jH23mWMOlLxfi_>=ioIDT%E+@34uU>>>;uS*%m%XQH_?IP zT1om$00A{-wB1={qyUFhlFUR!g~VAWMr^Q`U5@%zl$;Rnz?wjy(hZ-hVn+z-XM!Vu z;_;37*fh%we*XA08uNB#c!=g;1Te(O1M`Q&>6t@+-n(Cf1%16p3MU{s-#D0JhSZ-KV_|9N zeY)n#;Rn8m#Vr-mY`0EqVH!}%FS0+_?RD2FhI6`tDy-kR=r?QmCZJrrs0i5o*<0cx z`Q1tJ*26&exA9Ocupe1clIyueD_hQd~BoOL>A#D_z+-Y zNCy9y;C=$Nn!$-Ul~VM3%0qm-X5)H4MK%8X*fBaep~!2OdqA~S;JW6JG`B$6g+gw~ zs!v;~6TYL2s|vf&-I=S-0mIGivbju~e6mEr-Aa5<$ndTC$8q$fe{%~_MgM6Bb=_=Ti5}uSOU(-eR4*0rJ}WyK z#8)BTT&a2gQu^_iHOF@9Cq$f_521vsVm%|^OF%Wrytn_Jq5zJo%NaS^`> zla_Tl9e#ANv2F@2?H=YSYZHZD8xXn$(!AsSudJEVVkzPQm;~qvzl}32G(0yPM&B|2wIq z>W1N~jUcg+72dqIpu7KWbp3Ts06UyiO6SD_7QPpr3ArHS5)Vn&?ls*;38#9Gdf3?h z>GJK?^{0)JFIwU{X|GAQ&Qsc-IA_!R6&pWMGN#v?ImfK5x>L~V>Iklo1WUR=JTrm` zLqXt1T(pJ~$X^$%FuV`T{1lf&= zl6Q9v*WsF^JLI%nUxBQ+fssQmaZ9FUf;Cl2y9t+D;AVvfDK%Lk8$UjWr>f>5n}>f% 
z%RpHMlDV)W4nf9k7~|%@4{;qxd-eN&hGNgmd3bW6v>1qed&Mr?b}Inq2^PPAkB+b! zG^_40K)%5-q+bXK+biIqTW-no&#X>TOLer=v2-XPe1)W2D{3;Ip_hg=(uTkx-@L+hOXlO#G2BnN{ad*iaL``{ z%F{$Aj^d}fSMPsqBV-uNBjDjWDQ}f=+1&2gy6RxK&}s~mgecQy;8~MItXJjFZ&XJ8 zUUI+J!!a=gme*|f*WPzSvFN%^wV8t(g>|K)HC<=3Br6{qc|wR66N!a1=p5&Lo!?zS}2kEK|IgcIuvUq1B)t0$z_U0b_^t4D2Vu{ z*3{xf?icuvY=u^M&%RPJ1bxHW`h6q0dt!?%kWy5QKmvWy_U1elPXsSlY+$KBW7o5- zEvtpp|1R6bO{!oHrLN^#ThrS~EL&EfZEcIVw})w@96yX)vg%+?G#Jk( zG;QS0jH#F+qB&v`sQSz61|KdP{3CpqLZt_$sYChPp)hSB!aieRNdY=68(iXk1S(Y( z8-hL4HgH-`nr{qQCyILABtlQV!#hs@DQVzqH@VIRy>K0Tpi}h%SBZz?_*#BC@e%MOau8GfTt(M6xFkQtAaxNOEf9@`VdIsH*-r>KeGodYW)Xb~>$a zBJFdS#Oy{``<~~lnxtgoV9}S2;2VGK5j~E|Sm>`IPi@y*M%Yr7^UkQ8CP?$`IXa-( zfM0>$(O2tE8BP8aq)>LIPTJaW(O`JPr|BP9{WwVL#LqW85UxQS^NGbPaJRbVq3mUQ z!}jqlj8sDFzLZE@osUZHY4I9qM4SEhx#c8$+&}`#-4#*EmruhiUnK=gEH>j6e*bpI zIJ@8bY2eJ{?qGDNsXo==YfUkdI%KffWb24VJ+E#M&ca&0n9vMo#t+Y6y z6L~2AkCL(OFN8}80sI*K|s_?C8~B&`Ky zd98*4iWZR|fBjFM4EX{N-P#^0)Ptcmw85eE?JYc!1A+Y}t7}f4XQ)#HMMP=vnShlL7~!6 zWQ!WfEI&iLky3`GJKd=_;&wWT?y>A~w0YZFS8Y>}!E#&2iYgRmrOpCivqtAY$IxG* zs4i8Sa&MpdKJKWDPk{B4nvk3Hv$3SOiqpP2N2a=N$&Nqq?C)uli0~$3q0FT2{x!5D z6O_+9||o=UOvlaX}+aX5!wCz`yEyMaXqOQ zUQtW$eGl;c!al8LQ&B$Ja3Y&}0{V@bf=a3z%s@w@>t|vSE|L&M3Q$IbGhZ@sJeRFM zLyl@?dKZ>dk})dA?#UJCnHSMeBUblehVxYXQt_=zO;vsP{LbBt6NR(2!P_QgdpO;0 zjE|Q$mln1|;vCv>fks1oKDSu@KAAlZ*`Lwj%VP*^bhTJEsb57^o$O*}UudUBDL#P`T&NLt8x) zD$oy{52*XX`l_U6*OAe&tX9+TCsNu>jd50RJ$cejq{-wvlBY*Gq<@YP#=u`lSfLm$ z_Ktz9rmpghCrg-4-@2$^4xP`NL<0F>ipw0}X?2zuCeq0cG5cs>swaL182@`$!Y(`i zSKiw%B)KD;Ro@BW>hB1-BP>%|a))YUC9h~Xty&(#%b&pF5|n2Z0OqY|e|(@wn3gT4 z=jmg9px{O8P@>!8OVj*}*+3b(-e6dSGpDspy49N`9}l+m6<65=eC2S1LT%$cc=I?7xqz@KLgD1G@q}p;HL+E?pU?7oN~$X`Cvjur zx(i4YKOErm-Hig4%6(}rl8xe0-!vB)o5Hs|C>tv>ESx8kBrxf7Pow518U}-k22rDF zP!c6zTN0DAaBh2xmgvKx5dBFYt$x_8!fV6@e+v?`Ql#mGL(5zOfixJaEyn`^l{Q?Yt~Oh(zCum43HV7XP$VEst|cl=bh^ z>RToy9H{X}6Qp}V6m@3n;ugOVqmmE)eAo@PGMC$S`MP!>E_6c>65?i!PhCy(HMBuvEsI8!jt^7scXBJsYak09u%KHQS$_ z{fADQ1$u#hr}?zS#ey)v(TAgR=U5%PPTi73-T}|=;UeeF4+|6!(-W;QEt<& 
zIehNY%a^&XhheS#O}7VA;YEVomC|nO!adA}r=&!|>Y`{|-5%Tf>6y0VjjWoUOSa`1 z_xH*5l(>$P7)8GX$^kdgU6i(o0P#yvj|hO{MO^Nl_zj%wJGK$m)D>kdEwqz6g&hd7 z8Rqmm@gBV~G1zet4$bc9ea^*7Yc42Di%2~wX^L4h^=-Jw4mG^+QxVmj-j0g6UdCen zkm4?duFf-n=XUW|33$BhAHh{u55+yA_As{Zq(sp;%**g4x-Tw=b8>VdOxXkf7uMIE zCPjR)qw)}jXyKI`8*a6ut5r~c&*uByfH#*_V%Nrh@&QJ3bjDHe11+KOlS z<}_=#go~5LzQh=H3z65Oa{0tIWl#Ho^P}CBrNNgUKFyrEDhBZ6=q^{v7i+z6-uSAl z*{~=r?v8yWCwgF)JF0rKRjR@I?JT?1)BUb_QhD4s_wUTh9;RIq9%TIX+k+ZEnsasP zD=p&#B#>$lsG8pG6|Xw%!o2*83*6KBw+myVRdaF|Q>XuheGJWazNl=fY-j3CZ z4EJmw`UM<-0Uz4%?3-YH(mpg1%WVgPu$e4v7ni8}3ARVc2A@fa&)T8YO77zyGfNi9 zAT2pbA|d9p#-In{kE3_dTrI_L2o3Ivb|E^CxJThTMoo_HX$fz}mi6_Z!(Gt7!w@Ky zfj`B~ZekWr!|18k&ufJ3bE-Ta2@7dIf-gg(_<+)e?wA+S$X{9ht!@x1yw61Q}xwqWb8+dhSc+GohCaL zHy^$4WX(%&w*)WaUF?97rmaz`(JJ#mWO0jhjuVJ0oM3JR;dgBMR1@|X87 zLh9ozd1$EdY_>Q&hz1e-kN?gGc;5Y`&Ye`aFcOQAiwb$CG#OSF6bsveAl>;F0mi|W zEEG}Qj4}~9y2*5UT%tuMvC=KSIx?&cewFLzdb4!!r#UEf_fv_6pk|=H;iqVK;V}#RKLhoZ z2esQqhYU(zGqq-+&7?DoCUEHOsX5`EX7G+_1HFE_HziRNpg`eDMb6XWTkaqmLBBxG zH1E2Fu*^?ND;)v|QtZBL$C9?6M1Prq%%$3SGDG|%$fa4TsI+r_hE&UlY^+!tvedeynu;XB7~Q&5eBoascMaWHx-CdUM(H4OXQ zb1U{oGb8kD%-V1qGEDZ`H0udO6S=&&^t$13F8L8fqTukg4c)1&6GU2^*j0RzH9a;k zE~$b_IHCBvhEHZHI+?C0QIg$$inPD$3==icm1x%)a!}?9V4wuQdF11iQwcG z!E*ySl>OH-EJ5-VJ(`9X z(jv-ExNj_1(n0Ys3FamEA-fu;M5j2bMkTVygXQuwfVPaw8cR_1C=|ZcPMY-NUoC2o zNlVM8B^Vnjqd|)3M~@%%-k=GzQFnu+_i-{t{^aDkEAKbE5(x2K8;hnNkT!`=twEzo ztFSj?p~Xz`DL=PK%|DTLA8n>t#6J z);d+|$BZe8OnyOaboCewW^LM!CWkE8Si)4{L7fk$;msfCceJM;7 zSbwJ5`|%ekmhbm7;+qP;GM^JBha-*JMrW3! 
zd?I{(n9Q!iMT^Z5lT|wR_lliIl!nMBlisiTGm19us!#4xpt^7UWG8GX)5}laxY^Oq z&vK587fEKnM7x}J+-KI_8sVQCPk^a#JbqY)t}esrlO2nBgcsxso+u(PTPxYAC$kmVlfTD!Jp8(Zx16~*Qa)^TjO;(*8YvELQ?Z1Kc`Pn0 zX_Y+&NB;Eiupl_=2oJg`!+Rnvfynr{uP%sd?FPB5YD}b5--9SWc`Rz2F@A*^D+$Q2 z&Auz`7StjMOt__GS3-peVXEc%>M!?xgvl$Z0}qVF>V0ErGE0U(j9se;I(%bnE2_EE zNfb9Nr^}03D~Cz-dqTq$rxP|v0@!eSO7M0gNM588AM;>j(lSd#%DX8{i3vJ=rCwJB za)%cr?Ruytk&_!2&gZ0-Er;SXEkYlr{*&g6-q$CleMG>bGSt-+$?2F_N=Ed885>G8Bs+*> zTb=p4{$59nl(~iV3_ft|t)mtB2TNPjikZK;bCw2nw=Cn59JvSdr^EVs;4CcHj3WE9tNe!K>M*L|BO$}8PkqhTy#5lBK zRo_%Syjk4#=(|8RzeT(2ydBnMqbVenQ-UTfQFK|)_G0b&o@=gM8efB9y|7rY?XN+U zjyt}@+emoSTe^2H5uZkqn%hu;Ydkeb$_GvTX}roahyNVVzLTZ7Lk|}w6-0zx@C7i|6VJB){6SORZ_r=6%ei z;UR#=_MffgrEoza_oU_aiu~Sh2zOr?9W~8xQi4_Wicp-dQg0zrzQz#M3NImFNW(jJ zJ(NkXsmY^WRII9%e+0Jhw5iBZE$G{;`;hzgjRmmK(zM(We2rOS!HhnVpD(!xjLhOQ zk801yQEiJE&G-alfSkWSKY4*(-V8H%1z^tNSe+fEr3B+j6uddX7k#8fJ+=Nxq%%MH z#^zf5?I(*+iKWMELNQ6FK*%%*WIf}&9e}74gBX?CJLFb5dC|e(sIK|p?j`I;Q> zktl5u`U6P*zHdy-nP{om>kQOJ|G1ySSn_g>>Sk9`UAX_vdyye-eOqT9KS_^Gzq1ZB zxB^S!m0pTkfvOcMNTj3sYX+U$ho!5_eAnWHHVui!z9KB^S9!rNBS%#qWb*i)qIzY# zKB5I>4XUi3Ek01%uQPGFFJ|!P#@Yr5;2ZNluoEd7gYxYbzxpgzZ4m8W>|xotYJHA) zDJ$NOVmgYl(F||zXET?yU^jBv9fPQfM83%EkPY%R7FaLgdJzpoBlW(5@_Y>o=&;XYo2imm0JoYKPT=fY(@p0Y%?=IvA@|Vjy z*4N9BPhQT*U;2KD4&GdFFK(Y7JvZB&WK^^@FQ*Tkq^~~Toi`usZl8@nKE7WnJ3a#u zV~9RpHIqMI-mPYOWH1niqf}m2Wc<3{ZnjD5y2r?PIbvM<75MEa@+f4rs+Z!$8U!IH zzn#x2NTYZr@Y>&eK6^u8a5WWxGBx%j@}?+*{8sy0{Xexc3jX4LmnOMh}-}F+6U!es-wmwQe3z{K!vEez>nX zHeyp^B?ny+q$U!xLpa-5$T1ERIXe`F!()#7o`k~g>obyw(IQ+XM(}325zKfOo1zXRUUYWHz$$)c}%p zf9gcd=`C=kCUBsVLoin*+U`}%5>p9Ja6ML!6>}FalKWPZ)d8+q;|C+j9>c_84s8-p z-8H=-i>q7S`x8W2tbDG%tAobX9Kj6|u(Tk{yUeyjC?_Y6P>|-ggVPPs$U(p$dr|wxpM!?|roOHDD!3&?xk!9F8X{()O`P~;?i`8l- zsR#oIS<*l9USr*PLagT?#*ztXVoz_uOma@8p&iPUz*b$vz&A)YnmorXpykkECbCa! zAKv-CdDnTaCIa<1?9;@SVkhLh@14k2?!tNDiVRm?d`nzdwmNN*yP`wxyq(_mbbsbU z(RX`IG9TZeK=k_*~3lv_#6*#J8cr9U36X;21>oDpMxoaHi&QSfsgWr|au? 
zC5o;OD>vMTL0j;LqkeItgIxw^6dW>FuDLx#x51(mKYr(W5Wtg3bW~QJv4-8rgxwwV z5!2wJ36I**e)YdjtlGusR!ekK4ZAysv)l5**E$NSJ)^OV;e5~O;{W)*p4a2%+~Bi? z;0HX~3tt~t#&9=@rk*p{^wbX4fv<2{CR=ZF~Y2IS( zYVXMQ(fb4*MRLxV?~5T0QqU<`y8YQm|E(j4jpF7q#mvsBYBh^Ez^URU42S{?#E2N!pT7?)|qH zp0a8JK=%zqm(fc4ghvVbcRC%73%K-^+o0uKMQLtQ8d%YgI#{Z0aK$O-BE^G^4JN^j zhTe6S>5RekNw9-80_AQWbjxQfy1Cd>#GV?;38U z=%5bKDy^El4}b}%&Zl+jz1zMQT(F=6exQ5`gg<*{Y5RC|^1Tazjh?lPOsnNL?60p= zi$r|S*VBbj3-{o*4W_FzOWW+$Ev@gGy8F#_X>M}i;#Xo$9xv8v42l+>+zph>Liz+f z^!QVSWM^y-$N9OPVj4{3oj2GjFh85d<@ULix~)X`u5hGz9QSEH~Kn>&sPZrk;FqNN!#R!?hqn{H+_$7WEv%8(?O7&WYUj(DpGj#Xp+^ z0>yDdu&254A8^F#++B5rtf>7d^bHrr38g^^pCWkxt5O9 zXSmd~KGN)&_z_Pbep&S@4T?*)a+T4jClBN_o+3|u)|@K1LK`(1ZLcIXHxn6_vY-WS z*j>_U{YuF^_qJ=HStR_;Y;y+FDW#BI!Jxzu!WHpMTQK76``n&R0~U*fsRIm^M8Y;J zO@`VbPjeEtdTT~&AszC~53ZXKyM5s1;bHEuz{ORxA9f*}>l%(K-c6$ZhHcUJCxpi%S;A^<`ICA}EjZOJkmY7sd9-4B95 z<4EWA@X5gT!{cw>wq|L0;L@Q_`7hUnX&>;MQT{erJ9uf(7(?1!oIJh0m@Cq{`un=2 zysyJU<~KvMZKcjzJ4O~TqfLEUXZ_6G%`Da9oKZ)07;ltwuuFR-LcRG2g?#W$nF?Hf z<1BN|xAfj&h)41nfA~GA#PwnV-zW56C;sT1^60u*32mOa zUdKYVQ*8>KTT6vm88(aPf9g2QzddQ(Z48WjA3FQ2XwbQm6~)^2;1c4wk-=Utm$MAJ zPv5+4%I@PfXv}tDohlc=_)CV2uz*>{E^sY3 z-5A21*78LvJBwose4J%Um9!l1Oo-vZ8E+9zb{Oana&HB zvGsUoaQ>J!Mss^qRP$oy>32cc)+Gheo7&-DQ?C&Cqs&2VA>k>g@Z^KU&EDrsmE%=$ zb5lhsgMCaP^xcOjUo7%a+uZOmF$Me#=-e_?_ag+(h_u@bhX_()sb4#vn(Gog)|hVd z-2>X5^qpmd)U}?jH|gq^FvxsUiKO%)U@FDUI?4d7@cX-qzH^ur8@D3irz+@sFY2qM z*q1^JXj}Zw2;8i+obc;HES;KcXlo24DR-M8BX@GCgzjE+mHN zF80VJL{Gs!0w=EV<0h`VA8+JB$lFkkjOLZ($uOULI51h4gYDg-gqiwoDKsXx+@^t2 zW9w#=h_4J=vFl}G>=~5$!e6dLI7<7!a8?>+H^O6iG>n6k)IxM}gc zg>K~ko++BhIItxnGu{C5ny5B1xwlu`*+rSp|NnBS!TH%mw^gx4KFWs&PY%JRDEu(I z%5Yt=)ndYEt8$T%%l+5`gbbMoyPh`?%Y;zWxKz@M-`NYASqO`0EHwrZNnbKo^hJ8W zy2d*sHNL{o&RsmOd}4KxFVsj=6{H1l5Ih^h58IX>6?2lmE&XkXL5pLrqF}c9{o%=Z z(1{j5)^rdSj`PuAS&*AGXBf6?oz*DQ=idJ=@+2Vef&q@%q|Jv2!qk-eRYJ$8tz+;g z39M#di`y}0<-RvGY?J01c+@OGDh&NF0n5d@0Kt*DI((E{z2!&Ezpr*}}U+OEL;n>=?O^T_8Z;s|oX22|iJSTTy7W`cp 
z>==SA$y4|52V`h&fQ!eW?17_s3)i^8Ny5}Sc8XPcd*;ekeQV0?N~wJ`^B}eu)Z0bd zpKH%*-8JLECDV!*++*1VKNOWTIVezbFtO@Yvg;zF$zl{15!DrcePPMAW|!8Gy2|T_gMvP3&E+? z;pMS?hK(T#*hk=Fk+hObEhX}kB|lz}UZY6b$x?SjXNZER6J8uzG&ty z%v6HTa^ZR;@()qpms;?HGFD0IAf#^p0!VaFv_7}UJ)L=7S?=uA1!>05rc%cuCe$^0 z-7St-tb<996*tjom;ZEGR5jAwqF>c_t{?M#8Z%%dl{nQ{$`b6q>-feOc^_MdKKC5LTVoRrB8cnmvVOv{p!L0^5Rs9^qjQ3 zDC^k{i>o?`Jg6r^E0k}lzkB}zZ9jI`PR{ePz?XMBQA>IAJ=_a;tJQ!&_Kq1`Xoph= z>UrZdSk{;ykOO1(D=NdB@3jA*F`o-EHC~i)ZDR$=jGp8E5&V5~!EsEn1NcyCRG?!v zb*bWyT;C%)%$lKE#SlJ%bWtc;G2Hfa_i;WN)G5{B0(IX3tP zndl()K9xW_W7)puy!jvJ9%yaNc=Y6bt{7$Eb~WFd`3$xSbQLoNxr$kN%hgOGHLY;% ziVpcHlEKYtYE=zjnb14f2ZYbT@n68(4wn*}FrVeLVNW`xSp#398XfqkQ0%Yic%P zUTeenc#G@AX1JjA)VS#v=;S3L*(hAI0q#4bKJ{L)sbG6i8|*1RAW1d;UgGui_=2wM zZb@XtO4|BKe9n|OA309tO=G9%i8_3L<7oZ~x~fb!5R!SBZvY^HN@m3vx7r>oU2Bxy z_QPw|tdAhm7q7{4K2H3+mHnon)7Z5}BsK&i@p5DWEv0Kcxz)A_@C`b`M?qnR*Wj{(5_V*nA zZ24xOrAf4Xaw6ou+iqh6FC_dj9dNXKEY({;Mfk_;*JFy=oqqS5@5_r%T%RWJCpOFz zk9vk%>gfR=nWhU+HlOX%>$Nu~ku!e$Af@=aG0Du4Uy69|%{8dhbf@|AhPE+E>1je~ z=)!rI2fB&GU8?T0YU0x||2qw{4?OW2nG_7WtIOoAUtf2Zh_~S0>$$q3HCteem;8uF zS@-T|i&;a5BoKNiYEGruQQ2vmw(hZN{rw}&D1Ajx`IE@eT{wSk!Zkdw_r2nENP?7#-@y-oDIC#W zoWFFa9R8#taliWREd1D7qU|n^Rjr7K)S0l6$jEw|HpR*b*GqSMrDdsP6kWGRyZCTa zp2#{foAw?9qXoUpE!2+Ay_sstz8ForaCsQ7w3vpwk}hF%2fygqXxpl6;& zR~`D^+gW&@`2&&mTTw?S?L`0mF;@~H=k6Uv^L(2F-C7OnM|0XI?+xRg3NFH(M&ZU! 
z3al9%(S7Dr}R-z>br4}kPF3nYf{*yQ@eIN>K*)J z`ikau7Yh;mXDP1Z1Q+QxyuJ@-w&wNU#(8{3g9mXaF_m|QL0XSRf3A5b`4-^WPM4*i zMi>|D{^l}ym%0Um&kYV14UQ7_Y;Oi^-`p-5)H&LyA^YUScsBblmAD#WgaN}!FpFXS ztZPFlbuw`&CK_Gz?3G*@cv#M3dgK~ja?J4Yp{3+*=1cvO&itvoX7z3i8Xr@=+gTOOIIk)>}AJmz8*eDPFnnQn)(b2 z&c}ZZhHd>KJFhV}_x20;k~BwrE+I`Yw3-2>yj+NQ_ic#CWGS#=lkd54<`r;d-DHH5 z0yh&b@4NO7e`D1a-F};GhmT;@ZkS7q5`8Coj2Z8(HQri#R#cYf6So%*2mT(ALa&1rHu?ZT3@HHKdP|AU~gnX8UXte5IeY(AB?AXVr-Tx-@9_;aE)8~f@q zgx%3*0-tsIV}ctHUzo}8WnQjlL_TM|jy2e%Dv~=FDH&A;2AAmM^ign}s}6UTl83C7 zd6zy9mBl>x_hRB=zLXqsRaP%w*3MM_c)R*aM(*a+aCqgXzwSj~{N>{fgAlGFx#6>q zNkgUNb$BNs^1y?@Qo-hS1|dDVJ3+jdi57*|bphrrvkuoo#>)(Z;&d;A2csbW2F~?; z2|mj^MV2*%J!d^dmu2)|n}q^1(4S1(fm9)c?=DD!<#p`OG!ZNq0{ZykFL#895pw@H z`A6sfZ}81YMr51YHE{ip0pP~_%ON3R{ZL8AgWvEHTTmV%Xxv>g;o|d%PbOxm|I<*~ z`g2$>7i8_{WiD)t^7lZq!=c-&aaRnbQog*72kLpcxqUi8gd{arK-bmGn&dGoN+&n0 z0K9zyUB0(Qq(fR?11T%1_O`DyBHWSu))g=qPbFGVR1Bb(}{4sr4;u!k1rpRs^ zc*ffQ(b%{GaOud-8l0Ju&FcMq(-tIS7A%R8nf#$)#87EFOZ_kPLx2a&G@Y2{<88F_ z7a+(BylrF_j^TB}OWRCd<=(>U2?5XjH!aSxxs zk;p;8?_69D#?!Ck{$9%7fIVI0_Be!WALQbe*5?F%VvK|Gt*?hmq3WBG)3fYAO%|DG zvlkmUU7vM?h+DEU{{Rj4F2L2hz!BbIs5a#v%|IUfMetXZsIZ)V{B zoAQkG&;nl%AUflwyoGG`P-1aEyVVJ6JDZ(IuzQj@sms6XDDRKicWf`;_O-_wep>g= z_}mvDlEZMxonBeuVbo+>=G%&n#WY(`MtixmtfXVuHiabX#i85}44(kiLoHgSoq!-p zy;&L|V{~VwWX-sf@SgyE{!L_oq-Pw!KZmaD044%(IW`eWe#eUqDROTBqpefWEQIJ$I@8~MB>2=pEdwc3Y8&9%+kTJGkw!~#pAMz79PM#I+OYk(e)U5lkX zW@7amvpsD|`c|td`()!k0!lWnGN?(mdCa6Y*xv?fj3#|Unid$il4wdS0$_#u^1V+s z&TP?ZSRVi6qh%MB&KX}t+Gf0t?8YKkbo3rHQ6=LK2I~Yngf-6iYKV3A?DBPl;$~%O zCI(6*TSD_fg2w$Mpl6>Ky%>(vvwn*J&^@O<#Rcg}$=01KD-jiY$(}cL_8A&7&Y$A+ zHpMavD75CY4Wu4Ek8zLLm9*I(@&(NGag7y}$kK3c8M^*D%>DcVMJE(@BunfZ<*`s- z!_Z^Z?qqX$y>rIr#@%ccwjMlocLD{JarWxV^tqkp>Jnt16y|q(u zm5heR=K{C|AOY*?#J!nP^4p`bBlvUH;4=fKP<=pkyfL8AJhlp80bt3{vyat%fb)@C zcM{(23f8@7`iGN6z~1Gf2l^*NRxE(YCq0PBlzvT^!#B}6>rae+Z1K$zF3%ERH`M9aizN&&voP#?St zJn?d)|A*ZF*xU>P{R(FLZwgbLR|u!DpI5*`_j;@n{~PxC)@wL?0%!$a?g4mQg3g73 z?*rAX{+|#6py+_I-SoDTnLuKODx`$mtvHvH91aX-Ijc#q1pIWPnAidR+1Dp}a_O&Ak 
zZ*u;Yb2TDxK=RJ-HU?wZ(yI2x&hxj)AD0Geo00oWDh-Q_?K!C5wXH99_BKi%;EJ|W z;<$A8nL3X1wp-qal`a=E7R^@*1rwP^V>bIp>vsM>j~7P+c9N-3!BVKWQ)Jbj3`>izD>Q@YA5pl9cMX1*XEBHfPl zE=vfg4U6FC-+xkk)~Ar5HW5U6ID~q4SJxvijG@|VA!HAa^T~4r@EJU`)!2};4UW+D zKrF*U*)kc7imN36#rRu2Vfv=(I0Qb9ek~ZLMl%X;mO% z4Mub|Ctuo;ZBN%%9VMA;*OtB~5OJZA7c1qh#wMGrg)3gS|6P1Ibwe$vS4`9wfKS`Z z6EYvmn%*$YnvZu7AWqAY!d%=Eq_X4dq1*Yfy{!w1e6%EYKd|5WPkWn;oGGd~U$yXb zN9D)y^ZA4OJ`?x*5XXwoU%t}UfBP|SVVl@_7LuJa-ni;p?| zXg1Zp*{687pyD>kXin~mr*`BpH2gF>Xa&wCOWCR<`lv&1AROgW_&WUWQKbh>4||`! zLdg1?O0|s(8mossCAkv%aM_*FHT+t>pI0*K!m;RAJ85y0@QUJSlF-&qmcZn;+nO83 z$>f&oqV9F~dF!h`v`-`Vl=>NH?sQ8%50#mVnkij&gRf|d^+P04=gzUh?TntkrUy+p z;sZN|juKK(C30;#K%JgfY^vj-H}m-+9S*jG5$&Y-zloa%+8GiY3cJ<<^{HgBUB zJ(sD$1)p6*_)MJ}Ozi?{?s@dhJJC)&7Tsr1oZf7DiX-coGaFyvnt?hY>bV!tRTph? zZ=|`erYDZ3hGVJUuZM)DonIGJ!!dPZgc!Eo)N94A>3#&QA!F4mIv|Rky&G`R$l@JM zy)n-sQu7@9gnCZ)nBfa`)<=5LXknP6GnbwniNuYVOO4EacTqwoIt1=Ox?6qP9sl=~ zrhz#y&uVR9TcfZQn>G3bl}j%=bo71ifm@-))S=nFZErjSLuzk<=5_?ECh=ioF2^SI z2bYO7mvo6dQ_h*+Be1n1g|+K|53QyJs%^i%FPdKCPYft%2s zU)(RWXteHKh6+#*^<9O7B)+H&OMp>}XGrZ;-gJh_r<#IZmO#wE#Y=6XkLij9Fyouo zkLwOn-6emE>aD7jy1%sCy>-eV+UB+li#?j@`4c*|8Nf>KBtLgF;`hGg?`uIU-w?VB zFF{FQ`(_Cl^L0hrp2001qYLs?)5sSoAb-<dXb(speD$>20PD z2~M4@`qX9ows$2J$nH&O%a1JXGuc<;mhKTL?AnR9f@baRZYPyU&djk8rM_Ml8dzempOW(& zB%H@$8GYoTeM%F?AdmNxJ>+WK;A5L%8uc}7=UKw#xTf#%`v;k^_zyhw3Ac&KXMadL z%??Y1_7Psb^A`xhu+q7SKJo zpiH5fN5wjOmj^{1p7h!vW0q`3z9rwQJAbvV_K0=&e-J}%y#u~@g*hLr#jG zHDVk7r3d+L?Pp7Dvdc$JU)~XagqvLgN$cGssCEKlZosw(Q1$jiCsg|LI+^J8J0H|R z%RLKX^)@Ba*-k0X(R;H+sEa9&u4}lsxe(7aM3r_G`fCT($g1Rd_e}q_^3Q91@leL* z&{w0*F3QY*u1_C$B~|3xl-!b?REO4xfBJj5iMriuzuo+MEPjrv{(D;;db2A3cPw=G zug!IO`%QaN`(J62fxpn(85mB0aoWhC=1Q^iv7IKVbMF-=-atFy8Bt>}^`tCSI|Gl4< zLvS8rD%}~?GhQGNF&0>BN*zS{x(!Q|iZLwE!Hhb{x70ME>VegP zk#EV1OH&*yS-JaA`Zr{x9bbbQ_ydp4Yj!RqCUp#YjE}FI zwnyTX4kib&M4#iv*`(?+LfN>N|K8A%;R4E4N2b$ zJm#lz&TDVZ<~va{GkEvM`rG-}Hw@i~?F@fUroS0RQ4H)V)#U?J0gD!wG!t^%Pm_2^ z@bS*q@lI;zb|?w(hINYXOi(>zfxKXDp0b@RsG2Z-W-oM?c{`{_AYWj5D(j$`$P$}j 
z)x6rp`-2{y7RZfo!TNr_Eza%`q=u<^>aZDqqPthF9MnnXJ0OMkm0Z|b8z@l;Qmy#% zy#Q~SV1n4s|265BAGQm}c?D^^z7R!Qv;AJUi52b?V-T$Yg&w1oT|SG6YhB0OUGpJ@ zsAdZDXxKTQt8Fz_W%{zCtt|--475k)`(w%FN29Mt+n)z!Cry5-%X$Tb+DB&99(gm2 zm!kP-b3Da%hHl3l3thLUOKWE8II{Blr+0_m+EfFhOOwhaDdS=qxDpiDNL?i9l_2S|rTl?P#y5suZ3y?l8O2gXe2^5VMvL>Nr|891dM#VjQHn7=^t8zXO z8eKnh=jzRamal8!kjfo}xGrPGD+2a2jpIGL>ly)w1j|Z5e=Y;JSoFJFGhNpCL}%ap zhFpd@y3JK5eib+A!qm{-Ko{q9a{Jy`p<0^_Lp<~Buo>zfGNhhb_xU50e|&na9A^Ml zYyoxdj25CPp5~qcKO;D5ubV(Ut0uXRr(a#yr#sC;o?AKq zXXr>gRk@Im`beIc3`s~)AVw=GUTIvFrWcDgZ+tb9&-amqCUA@(-)qUza*qW zC-m*LcO`0)O!)J$-3p_|u1e_&fEk(NM0PV*$xb<)jdc%vZ76Ex1$aCcSgox>#dpPV zEa#>l{T8|jROByYa$&>0eeHeTDOb=hS+*roEvWHuFhwvJ%43O@=EzGv=!k=FLyeDY;+zVbo_cf zxa6<(O~OF7qVj~<*-H_N@rlhWywZ)aLqg(ElgI~N z&?)Law!h=-ImPmhRo9WG!(8PEucTHxk;Lt`C)~Bsp{C{E2uF9?P_ ztnEhl8p2L%#JrfSk33EC=zgh5bh4)Xe374^a_V;If?X2m7$r7H2J*AL^6a|Qp!tIO z-YX?y64LS5{jX#n83@y6gEcA5dXhxGjr<)ol#)Q0=}RxROcJDAvs~LvA9wNV#cTuz z?AI)y%69!5x)R0a>AUmBl0Ot_mDh@(cYd6_ix#Q*#bKBcO|yJE!!4rPmig&ba)Yme zj!R5$sysSNz(6Uhv+VMVaGelPNsq=~_N}22>(k<{W`nY#NQ?NL)=_bO`&Dog|7`Ck zv@>Of`qulWHdKYe3h~y@Qlr0pJ|Kfh%yalS8+uyb%QhX3q$)Rdkhdolk%MQx2G=un zB+4x@$uJOOGqXECNZUbNWkjbo_RPhUl5R^yBc!EN^_1ONiaf;SST}YAFA_h&1?B_d zQhQ6WrgbXtDAQj*_ECgAGJVlNn;r3hs+MxVU*7PL!r4`=60es#Atekn=am1}gYs(^ zlrWtak6uvnU}Tqa*}|T3+HQQzg_(a;eK+QCM2X@gh8`Vq8KV%r_Ib>q>5NrOyV!8{ z`)q(eEE=zdyO5pb4KJ}Tin7i6!8@*6e+iq1xORtzIxev+=Un}lAw<)U=i|{4AXjc% z$tE6@apuZr-kh9IVW-~W7eB0eJM<%J=@mwsNb?JcDcYR=JjwVUdqTlyyP|W-igd$Y)9Um$+ zDfR5K4sww6gRy9WKu#EaM_6M4Pz%eJee1+NIMP|@2pf{30ekP^T?nh*1xkxz@6APl zUJG#r=y6<Ts%v7%Ta*a^Kw;bGI#+i_OI}4Ua-b`*mG%hiKDviIzUTjEOkp|Au1&MTG zszq!HtF{C6oUfjddGL<*f^q&{v{a&j;+QN@RWZqy)(JOk=0pDM<}ys*-4IngDVAlQ z3y{uY#Ghu3t&XjhG6}|~sj#K`sWOVNkZ2n4sr{<<`-J<C*02*P)3EO!p)iYQvBHCF^ zQu(qlC_yYt2!Rf0$*X118!i1K9lVOufH?R@%bm=!@IyBx(iP?!4Q7$|dzqcT8?`l$ zm&m@AnZQ!*74f6k=#P@%SRKwXVkl8}7jN$cpt3hrKMn67RT zRm>~$<_~UKc;>^w5lYgRJo?pJPghJ-|D+y{)a*qVuR^fZ5O&2e*N*~i8RkqF7U_Ci zoU2Gr7k@N#i#hdR@eADH@~IF>K7TX5dyvX(F(wnMmHUG&H*TCg@e70S1xU`N(Gv&1 
zdCF+4YIbD>XRtV(YN&C_f4Er1;p=Qnsm2xMp@nO-(~Hw*TjnO5KG_y3GNitAb~w@u zKYd2vuBr|b8wn`I#<1>p5UD~c}oH=o;3EoHCkr3^zyAZQjBUEz7%oG-$a`s#HM-JAwjGj5}!|=OT zGq|SqIR%$6Vybu36}SbZl>_hC{;;Mr(#P}*$3zsCJoC3 zy;AHK9?K$7w+!vEd^<{A(zo?WuAo4}H>2{5c*C|ruNO6GOfP>6blBuRE6Lu|K5yD6 z7ZYVNKj6DUl>Ngz=$UR`@ds&a zD$`5HxYxf~M*jHHJdx{M_9P~rtkt}*W^@ulcH)C}J}0d7#Lp*U|Beo5@2`t*9e6WG z^-z^9zg9!}F}E0AE6xuP>S#3W=MO>8TWejzh61YR=Vfb>`k?2j2q7;gapK*dAEM$q zq6Im={O0e79K&CF^GZOA-B9+dMlt{3)ebXk5Ar@Po#0VT0#)CdE4ZeyD=nM8<%@$Y zqw2bUSmO;}k(pxoZcEXkOkhk_u$wDXngN|hpyO6I5aZD&NNMRXPss0=`p`xRPvT(Z z{_XZw?m_|84$G7(QNhJapJbu`%KbC50%gF zfs7}HsPGc7@bWrEL6`u)dC5T)uZl{8KMyJ9VP-@>`x6kevjIfKky#uCi!UT(`Me06 z>M`+=rj>KT=8g zQBk^d6hVFfH|gaHAOo`*(G-;Pa~U}1pQSiqXi`DSIa~0KG?Go_QGp#TabMRX8S?69 zK?+ZI0{47;79!TJC?_@Ix^P<72x1K>PP?l`^pvJAv(V!rKK<|-PXl0?m*Io}>hm3$ zFv(jc2Od#z3C%O0mf*rqq?^s&e|%anyF!wMbP-r}RyvD0;a9wNdxB6y4f5@M+1LU* zU%3N*26?(np`NW^WGGCZ#*=&ORo2Py^?L!Ulra;>D)ogsQ;!kT^XD?NM!HPSlUulq z%t1RdROPBnQTrtNHIzFF@v6#S=&AzUiSobp3b-Hnh*(-Qz4bF{o#C=P9ucFK^LuI6 z7R{dpQG@9%5boU#*VqiF1~;s6AZ@#BbV(rw8i*ar7i_4E$aKbL<2M2Kc_d;GJ&l=2 z1W*1#{kcX8XNZH^bgccdfd1a+03RL=iepOz{6 z%)henHtE&A(uxRH>-Nm=3BwX}Z|**HxqH?- zXl>8ku6!yxPv%UsA$XzVY?UtP+4lxgiB7CqzoMn|6;_C8pmr1r$6ib-7%Zl!y>ZBQ zy$DfBd#Wb5BBrZ&)eQU|HP*ZY*Rhv^&i+>Exds?V>?q9gqXCz&AVD2vH}${;e zXLa*BQQdF?b^O*fyYXhGfV!!o>CV#mx*j;JFfJBx5?J+X!^G-#;g!bS4|cX#(;8pd z=;wdLd6AI$Fwv&ky}wmR%rBx_M>09Xbu9-A)WcB>0k(HXRAI7>4G5+B7>d=n zXhmdb-jjh4&Ho!M5iW#EvL`nkj$W zp#2dNK9@mo!~E?&bHe8Po{Wgbq{p|K_PsTg2aveDdiG=SIMDth<^*xTn-{ddPHaS| zu;I-ELC37}$Q=a!YXw0z^rHf$`ZAJ;eoxzUChFvc{aEF&M9Yb=pnQOt_=hqC^!l`! 
z+LAhWQUGz&e`B-jIQ1ng+2|4>kXEI7vrEMWfz>n-l#x68!!_wcKWg)>ul@BqTRqv& zIk~?ewQSjW!mG{Gw%*Su2o83DW+QM89;t+!JvwZmW=Jvjr24eMoDhYN&Q~&td9h() z-(Nq}3ped~{;{>_puhN6H%PEg+D$B(G*LW8$GO>O5ZHnML1lm2T7=-}J?=+>+cN^3 z5;pT#&_MRvni5V}wPve_rB^55lhvk;X9kN{FUVl~jT}I~j18B>!K~T6tM3^VouQq% z{r&;U``56od8a6gOvB2~mYzmCROkE$qlDJy!MX^ z65_4QT=P;7aI=Q?^MF}CAQ7<#6`AxN(?+`=vI)>rgBiq6JmGQw*(uc6(q}UO-#h?}C)IggzVBX} zkWB)6kKQ+_1pY@SzpGl*Uwc|C_r~OuM9^(YM4~TG=XtNT$uW{s;h_p1M8G)UUL)f?RZfl9s|lO-JFKzyf%6y=swR2T1tyJ_5DVG1_9aw2*zG!hT#dTt7QO~ zY7Pf4SnwZHF-0cJ^6Jj;2cTtQJMd0RQDksbdn}2bE0M>LFw)bVVyTj|73_NM2s*Mh zDu2)lW^N<$AP!nF@k@$+X8i(~L*OD6!$H;TzFz6Cc~JFl-krLUR*VdMk9j+*fP_;O z6)*{JaP3fU4AAZFd+air=;dcC>XQ_`@6HeKoepW;Srjc$G9k2@0mh1Y+5VG8Ru0k) z^L7i$Bm7-sg7`30V5GF#ePtO5NCy~?o}yaf_nYknS6qaVS#T1S2`(?1e}dR#YA?iM z#0g9Q(DvU13~vJ7FPLOD4L3yimji4=)VF}!Ee)m^vEyk@&c!eW+rO!2Z{^oi>r426 zk*})G-8Q1X>WTmhh#Ei+yB{u<1K?@@SZn1cWX{yawA?YAA&r)+x3`P}z-eJ(D(=9~ z$OhI`Ccytm7g%Nk&AZ;s)#FFC(Vk3Q(B5S=`J?>VG@vyiS^MtGJ!ii@V+8LXMP^i?<{ucDAvVM@qQbajFrY`Ys}2WVz{d3Q&z8APN=eDE0o?ze$+#zD zCBpX`fxTb3X#l*je{qft?C>PdP;zOixDu8KS^@z((OQ8>f7iH$O8#r^to=L=p9;<8 z#W{=;e#0gLEOx0fap|Ak(Ec-7F)l#u*F;;v_W2{h{I!I83L zHSH3ABQp}i1Z?N4(Rt?#@NNKKw!FxOz4yqYz@q*iya7HRz@pGe5aN?|0C)<- zW;$GH6qIA|z5m#}i>^_{eF(Fhs~uIQwtYDV;Clc&=mK5}+yS$Ys7@;yc%BLvzwci8 zpT$L1F>E1?I_Vl!|4e~qc`@Ks33YH5Tb94K^@oxYFvC{xyaMq>;=(_nIxzodE${r} zEWBDH|8o3it_+Bpk!Bde1elYtPx>D}z~afj8!+R`z5h&Ys{a^+6_GuZw_g*Nx}jyh|Jez{e;DB0 z7)CboMJK*PyHR@jGp@cvyu59 zUzJr9TdL+F1>-SV`i_9+^w(&)C=<0>IU50x)Cr>dCqt=b%buVBxcMZ>a3b&0d!%FZ z{EPo7gK-Zdo|LeLU-IuBu~S)O#)MZZq!ZGLJ>o?+4fvH7jBrmDf6$-Td1TRetvDO0 z+LBwDmUDL;RQXP_oY6~Pa@L(v&7GVH1lzc|0+HE!kE38_k=Z0Z6}HR7 z^X&fRlO2!m(Ev|02mxN>Pzzs*jTJ*!0u(=x8t7(5@K{59W=ie+|tv3g>w{$!RMSX!$g5(0}vU&``^sUeO>6E`mt;_ zgCPuv%mo481}OPP$5;Eu81yb7!N3IozJkjJ8vp4tVCNPesDW-az$sX<>GJ&NRseGR zW;du}iy1mFMi*=YUeL0S={`+WL}0-CE7_{pe{O{s4Qg@ACb&p+t4MTw+3%$lk5zBS zPVVtPiLU9NGEW6AOf3L=3xI=yS-_QHWW~{C#0R_i&o=_NQ5{VPxFR6PfS=t^d47oa zXXjwR&N-bd*W4k1YjHNdDswC;V$br!%zW?&P%R|~RAazv0kRP;;Whx4Pl!}8aJq*S 
zf5e-b9T9wHngd|<^5V8irWi)gS2=*F44ex@vHxh8it{bfLa1no1F_^MR4IMRD|2Pq zWQ*?ZSw|GTHKn&`nAqo*2KUL@U295y`?b5wSNH3gAO|li4fnoJ5w);Gg$>*0j7vam zQjJTtQsX6IDz2KAq!E_P;PPYROVG0vhv@LHFNef-CTkm>6tgex8cw=_DZ$4KGx|+Z zvo zI{ekpifWuN@4E7ZgohLA@A+jd6Zp^FryU1kk_H6+AUJ%IuqO)Vsz&NO)X<%S^jor>`%TqVqfH)j;yO39ddJ5xL|`mdczU!7s>J+M=< zHvglgbmoUZIn|zv$&$H)qP;3wQMQKLfE?#Jo(jvl3k3gI;G?->RAC+IHwJ3=DWFt( zi-bWVaj|k=CY?}Ds)_QGKsxTt2(mqPpr+F}H?X&fHaSVvq8;9{SUCD6J1({aShnb7 zm3bY;G>L_g$(l99=#kN1QkQ?knMLP79;)T@dAnr=N{DrncejZF?RxzpLlEV=EpZEP zuo6{xI?tqKgLdcO?O-`9{f@ zRkeLBl7?srT^LJ;D(cN7*GmW&6IL2C{#4Yb^e1s>S32>^)eLz86sH)DAtY&Gv%(HP z?zgMB4CxLV_r%K#C>HIHV=~ewW-|zce%S+|q3|$2U;N?2@DC%0{073OO3VH%U+mq6 z6=@q7!S-FF5&pBUDw>U_X9@oD3a%Oncep}(Ro4(PKb-vq2>l8uR+;LjtK+|xS79@) zR`A_#9_)VyHi6Fo&n#cZmv&2d5r2x?fA+?o_QsB$Tn68C0pC{s3*T168~a^0(kO`9 zt4dOT(BXh=oY5k&BzkzO!H81;yLRM!Ys&)1f@*Gr)F?U{QVA#mo9RcSyF$$~{n|T$ zSU?tZXP;9`4FH->iRU_>+@1BSX8dI(+e#G zYJ#V#6n)?(*PsXAB6|HSy==jB)Wer}u#;JC2TrlKY${H=;h=J0X*_pC?Z{2|! zlcY`gcm0aF()`uN+pruy7rp0L#=I;B_t(Y(_8S4otVbdb=_@+%A8 zxVK-h)sX%?Y4v62EL$@csic&r?&a^55IrvR&Tkk{wxZpfZN|D$(CI|WG#8Kh)y;yS z$mLHdHqbajlGAQP8Sfi}NueB)FV!Fs9uqY}mI}H8?Fh#v`NDXcpJ?nDKdOlvByBdfs*C*#$cX&3DWZ^fuF^XY>{kEfCGZXBbyv zUzP6ygfug4nZ9Jp@s<(k;LnCmmp=e@Z31khsSt?PUH%V>RCCjuL;EyOP^=3DQ{s&( zX}$2`@Kvk|t?(wtMJjmORdGb6crw_?S`#JsKp7V1&wgL!cJ$mll z@Aas;A^EkIDM><7y&Z7BCC5)#dgzQlKU><+vf31M|8-|dIbjV&T-B;5L2Li%&tvB<+nK>7rdr>V0o>zi~0DbE@~>|nMSeum$6BO1x? 
zbDxm4_I+!e9O2@Q^`JIP%&*1+eG^ctDY8KL*IEHt%{^zJebspLHv3$)kjT0O0p*^J zWR1YsbA7+>KulOXwK1sf0K^cE3IEb_kcgC;e%~JTfFN^DH!rp+ozE$LKz|!KYzWT# zCI=}{+k!mBMAV0n*w$-}>C;LovG%$tqo5bvf1Ckg0hEor)!BOQkv!AfXSuNLIIlt7 z=&sljjP9%+Y!nNxWr#?~MFQ8odNl^O@;Xw48C6R*GowVsJ7e*z`nMfCKg3?$tBiF; zFFLL+M?+V=hY*m5Ge8;`ghZ(JA=uflfTi3;ze#dw;8`faG8cN`O$wtxh)^w(HeYp~ zsJT?hC4L2Rl`CXtt|anDV>3&~JWOp=d){f>k!8o`_aW>wwK2|_21wINWcGX;5~>Yb z9qwzUaX_EeIGVS49P)p50nTk|CHx0sFlSDWv}w-f*+(iSuYkDSACmvuado)-pfT); z4xV|%(6hE3XCsZdym=WZYsT7lTg zPBANmE_;`ap+hX?eNGsj5^6c(ZOB1#5V`8((L2YaXM-#=i=CvCrdJx=U!V_eHLJ#!i%2lB`VHp<^C@#!39T3`~b0W$U% zWLBk6)9cO&qO4C0iTkOj*kaUq7x-V6${#Z^yCXxDkIhA>6|IS(@01glAYpo;y$H}{ zL;?3txe7}li4pK8@TE*RP^Z*8Fc|?%@WffM#bNkv_+)@uYc^(r?q1O350B?npVK8d zN>6}%XWmD8Crr->KI(`SDvN@ zs;zYoNni4_VYTHye40T007WT?-hw||`l*>k)s0YT&BisICwNyy6_y_6c=7=J5;V`_ zh)5@MeMf}Mn%SIn1ae-*vBuCo^%kQ{CscS!(?cNba09dY2uac4e8Ij;05fCidH7(YNEJK-o|5fT_OlMwQ@3&3#_ z;_Att+$h@(e5tA8+2Hkz?&h`m54$Z(#eq-bjdk;a<0dJlRu$r)A>5>79YQDvIWn zAzp>&r;EP%k$XRfQeoydyWj*kY5jn8x1$-Xoe7=T80LhsEsi{yS;Be@tI-O2onbti z2aSVryN!f_PW=T9fAU^|QJqt}oZi5 zeBL-a)w|g+|2uCsl4zjfcr|`f=CUB*;K1+v{&!y#qNFX-=(`xWXDfn2p#QsgG6tCdqV5$@R? 
zg5XB~<(9rKRVbw1iCeKOf~<;2Ql@tTGXj8IorOe$^lN-Uv{bjXD2WDHoytLwxp*L zn{56jGM-k-J#MEu0zQ`595OygcNR)7@<>O+AEtb76gx^ZjtrreGJadCx9@m!ClOv` zWcDl3oi_9!^()c?u=Goi5z!3rn@_yAK$IeCmixvz9avTT1UM&S%keONXpQ%WiUN;uK~;6&}+7>F=sp`&+ghgV`6Vw*!^q2H5A1NOboEt+qFgth8E5r{**$@zf4RPq+bThw9^OUo+#`&==~Ks4H9)teIgA59s?2;` zjCJV15pGzmGLZ1Mw2LDQdk0Fw1!St;4=?1*`pd%-6tC)j%pbUr>qwmK0hjh zI<$G8F?EF*ufJkrubpgufND@{vCzUt$#Sc`hBII*w~)xG$Wk=|Uonl%8cdJ?=Oo(X z#t(qpW#a~>2?jpW7^F)BQd-B>soC_=R==sPXhfA}{1hHpnJTt)^M)EJVx}0O0JbZ( zd2|et1s3e`m?p-2u8pvn9U4C3^A_CjozUEayRmaGNYpZHiSurbk5!F)!#OsvF9mif zdvlhr^MS~f>_(N78(}53NhR?*CYlrw`)s{Eb$yTvLL&_9F_Q8gb>Oz0t*5Glo|)1$yRKPm|T-7X=fjhxdWatz?@K;N_V;@ zV}dNF2L)FyrK>%HQpONefl|0Pp^D|8`WCgfd`8a!Rfv6KLv?)HQ<^dDEmLVW{~g+- zA8kd2pAVHte7gO&WJ?isXUqpV>HT#!jP1fmk7TRVe``o}0BN@hW=ylzF9KdXZ!}i= zz<$ZeM3q~OP8(cXlSrU}0kTBZzfmw^w=gpp~UMyC*>gy5D$Bi!YG)2wB4!d$hD zD02Tz!$?fX?5ei#j^5c4*?0gty^lF+p4U$B!daAiX;L){M^ebn4GgiqGd`=spKI9A zX0^=kp;o-1SsZL;;4bVvl^8qQe5gFt_ryI~M*C*=4=+_wk?_oS+uOK~L=7wEauQ$a z(Ejc3wuP}Fc9Cse9&9APqx1ZB!INK4)>Yz3yTLEyp=#?(;!uzRqg6WJ6-9gGkz;UP zHNUXcu@(rjw$$q4>NfF2Shee93B16@Vr}xh)8k2RXRXJwGm~}tHGJQut`eGeFYH{d zLp31joH)CE4ky7yFFCrmC9&uJSsTQ0$m ze&49-e*7T!bPzAF>!bDjWSkoZ^%uF-4=D~4zI<@?XKN;JJ)>!)QvjmHBBssB%?tr! 
zIjQZ^d4@(>G$Mqo98iD4Ucbr&woGp-Yef1uPD2U;z>=HsA*Q{~J?dL8!_QN{Bxu4O z0dZM$_N?*sr|hN>ecv4LzrOM%`l z-^N*bnsM-*WEYm2Rys<5?}jCHRnW1X#g~^c-%wzuBddPpt=#EtIToHVq9>(o5G@mrjI!g1Va;`S$B9W>GF|W>?D*2PX841CUtZP7g3IDPna2!ap(W2EU4gdZqsl ztUy!05#Uz}i5=+B5Ngd^TkE@{Ro@0OeL_JeC!nt+HvBygx}7qouUM6&C z2w^oYbg0@YW-_R)dEt&?TboPo`ynD@3GEtU5m{JlYkh}45fQzF4t*jVkPDN>_?2Ow zh)85YhdwdKY$dT#{q7N2w2Sb_vaNk(=o1m0P3W*sl-0|WK7M7`C&K=IM&-yYBGyl_ zr?JiW?&wYE!Gv~BHsKL%U83*C5@c`_I|>g& z4(O+F5+}54pG{aCY-@cx`YAly3GEtU6LuBbTHlU-iu#xd?Q4D$_8{9@-yL1=t{V+f z&ZVQDLcSB)HN+-tO|~^o9Q_pQGND~VY{J%LTkE@{YYnl5^+*Ox2A@KoB(!UYO{`0{ zHBKD;6nZeBV_gbca|>IOT{gb!^G-s$eX@yDG}{{A9SvbDbnF(^<1<22Pq9l4bc9Vd zvk=R^ZGo8#2Kp)XOZL>zp&^8=xrMFCE~$Nl8*mHjv7?{DDoE(iC&Jd;!g^$voJ+Go zsxcYp&?myy+``s;(yTH07IsxahlVhFvgE|XnzPf`<6sdQ}En(UGzs5Hbt zhdxnzL)e;6Di(4gR)g(#l#@U|l^z`E&?jmy4qKC5wyzZWWS~QzD2pR%Y(K*v`&2v0 zK!=7P63v7Ewp`#IKAvqWa2bydLo6Lkza& z#m&CNw@VdSD&*uDwbPy~FJ^s<%(Ri8G|=@bPH5j7m{L@6!jvbI*m!^KDPqF$Bn6Xq zY$`>@RutIlSf&XTYz(k#>Eh|>6? zGJa15i<_iO2`gs0rHkR~0xWdz!1|^u&g}#1+oDP_W19&lw*o9Eu@w!h?}=hd99Y*z zDjkh&WWvE>A0JrXgv1$nU|le8rMc0pu#r%!!?!}3H-@bRGi$lRR4XdYzB?LAqZ~nBHe|rI?}wR9m*}vqRCVcARN6QR^h0E!C$uZnx9~i(&DJHm zY<$-?ctVE)79R6f_=0_Rv`{>B*=O`aSU{~z2GOpuSrelg@2Ho-F56dPIbWkgLx{-A z+uGU)hK3NQfrJhXk>f|d{^8&M^{;>Y=l}VSzx~^P{m=jDZ~o?Qe*MEg{q}GFi+}(1 z5C8I~|NYya|K0xa>mUC0w}1PW|M=HGH{7p(`j5Z<<6r*v|NNK#@jw3ie?!L8KP=mC zAOG<4m)~$i`UP*o{`}>S2oe19{y?eVp|i{P~x^ zd_3O2;Hj&}FCQO&`O7aKfBe(yFV8U|szx?)(_}l*jP)h>@ zEdT%j2mk;8000-rZIb{0|NsC0|Nj;M6aYuh zxTt95i|Y>8UxJQKM?9u}xZ1KC%xIL_s>hqioZEA>Ip*+1Hqn^p-UnXe3E0!D0vNX1 zTlB4)Qa8@*9=^qYREYtfGQLJ4JO6YyfziRpYI+{dgXwni;GL1dL#=x_1m5)?4x=l1`!cvWq2E;G?cu$ zx>~xpcu}#!DeRh&kygx|u(RG%+9|u%cw?K#82|JX)pz~)@xygK3WFdv7r|lJM3t77 z{+RfL|5&lPGOudRrsV49XW-_`|LsMyF#~$+UfaN9QCHi>$Tx7co!`49raviLc}dmI z&u3eR89z@?|Jc{w{;Tzm12h>WJ^?>X*w>esGYu0ScC!9~2j8%ibA~uK71-@;Ow`85I6<;thGfTaX zk1uY|4k|Nq^FBqs$ky&?7&BHM?Rb}6Haxaoh=lK_hIbda;Tr*)Iyp+{xji(mufU&h z=s!WTK+&xZv*)D3xvg(hnI;Yky-KOU6KIyx5%Yq#Ynk0$4TmMwt=hM~2% 
z5yS_kd_dF43YK)8jIeF5X#z9)W#hODsqjFxGU3xh_!Y4`4NbDyIZt=JQvqf-edhjq#41Pp$WiPdge*Bm&_uW%Ft94l?#>INaVs;a7Ue96D5S!>L7FxAj`I^K8!J>gM`=(}uvMAK%A(>Nz5r%JFz?h}&m z@)oQhHtTg>N(s6k>Nnu&2pBK+$d~>fZk0{V&G-2@cgAEZ6V%di@c3seY%~}IDSPSc z9ufwgZb#>G34WE_o?1Hq2HX$5@yO7&WzO* z6%{2ve7H+|yAifS@ZELI&W-vSj-Enkg@kWGS228%S+UR^Vj~BKfLkj4Yt3~`Gie94 zL_~QhbA~c}Q|krulDw>*L{SkW>(L+y_0D~CZ16Wk8|};pbv1*7gHuZz3S(n00*EgM z%*-C&k;AJXU#{enk!{xjJw&O$X(lD^<0-yw8BE-D9w7*{aM1WE@L9u3$)n>VaKPT0 z6i^fa`DY%NyWiUGFh#(x&?S*GO4NG}- zN7;(GsUn3dlRXy`MYV09u+bWNW~D1yGAf8bIJ!QyO|b6*hx7V~FL*G>MKyG5ZO*`4 ze(y0Kw=OC)o3-rgD9W?0*iB`F*%j=rofWvb-G|5xSaB6uz402>Wjfrw-_tDLdV70k z7yN>H__^V$02q71fg{sfTsK*fW9Y7=CSQ>Y&UC8_-d=zW0(73i?M-IU(cW76>2gy7 z(FD)2y?0Kgg!+-m4Ak}|4#6&*nA+kWD6t$HJMK;1n3q0{5cYgA=EQA}Rh8Pm^-1~=a2t1SP$oi;QZ?{YzB@kIZ;sh}(3w^1<5c)bpLxnL83=GU| z9bbABA3v3=JCkfRt{fi}ka*XXeJ@28tNDl)9xgfvZVBsCj@Q3(OFF+mmxSHeGJ=uQ zZu@mRRT$xMh=~gURr_NeEqhSZUctl|2(*8P$3%r-S^nMJfs!lN>30e*v~L8dzyP+N zXz->%aD1BA%xgY3td%jTAR=s6!DO)Uu*%6-@_xvXGDTv?mV2 zRwLziVfM2BxTK4{Kg2mq10wD+V?3@6CV5ideDaMS9vWqX<;nP;Fwqw~^h4ax)GtwE zd5^5j_}6mG`<-dInDc9^ZRpc`??IDK?ssh*Q9)Rv(MAM>n7VskYo=Ime~XEcY=SMs z)xp37V@l*Ge%v963DficwB!7rfYK4%8AtrVAEJNuVUodkb3r@Ww(TAaVnfAU06X+J z5cyQ}S+Q<@!&i_#YId&V=a9&kvGy0j0f7`uoSfG3r&&A24U;0dn$4WS?lNn!c;RoI zonx6I&`bt+=BST_X5%RuoI`xS&1qR5Zpz1ek`d1O{Ft`i*z#qbxpZzt#YMN-K4nwR z;RQdr9q6Ft;K0Z_y!R#xDktf4D=qa3EIg15UB}wAE?IXSF{V4p1Z_K>fdxyT?b;ZB z|56)?IxEhk=p+`9qN(ne*;C-p?}OzH2dq&Xx4&CSnHN| z^g7+|SNiruueamf#biYM-FnPJUirpSIBYiWVf${;{>S=6{LGcvQ|cnP<(9h6*ht~N zMu;={UG^=S&CS0E%lFy^3*LhjHWKI9^Fy5F{3R6v?|YdON4EDYg9+9Z`PTr8vt3Aw zqsuzRNbTo8Cx7xJ#G>c9t?j3aqvHaP;(2gZl?>qWpO)th?TU+!7uzEEdl562UP$0| zsH?+eBT!djlZrlKGG7|9)R^8OGaw$i;k0j|<%{gLzhVNPWNTtJwG|sppCy@#co=L+ z-)!LC{_Z7=RPqx57r&~(hqNx!WfugMl1?1=`kl*NDct)pc{+Cuv z#@q(^B8Qfig=<-*@IH-w@{obr2pGX;#=McE*k1i<@O$8(w}0?^g#znm{)eYnu$o{R z+mKBJ9IwW$SDhQJhi_#`&ap9ZB^?@{;SBX(ySZ2oxN_GDZ^z_`;9ju*z+_5qaUpUO z7UxwMYzB?$)E4`tMK?x&S>vzGTyfTHEVdz#M$I~4pp<4||BbXSqI#yaKkn!R{QMk` 
zT4b#XY{Z`bj`JMRb^1?yL;o~3mwQ-MXKzU;C`i6JNN}OXAg>RCtI=$e!)r=EIY--aTmsUQc>V{beZRv zS`zEWrV%b@KES~5V3p-O{$wcdMHZBllq8`BG73)DhVzcwbiB|$%x~J}mT=Lvo1VHs zrnT&qiQd-UadOrnq1z8~!;55bs=X!J`rXq(pGBz#^-P2a`nIehY^++t-5dFYmoLjOHyI%;z5LKL$I^A&l5KX67zM z#)ay~`_g-!SfTZX(cu4%Mg>8dB@L7C2jA@POvmeb_Js`Y|I%8#G|`=MwWIp()^2{9 zplOyG**p;f*f`>eMF?XFgRxbnO$xx|i!eu>>w zh>1Fww|~{(E^yWtAreQ<_NIa@r-#LH(;#=Fjh9B(4aNLdetkIr+aDV#H66-j-TtlR zO>MGbU=W~~jxi6~`+UNGp@M(wcR=&ezxAK`GAu>pv4vBfIexB2&e86SKFSc4A+t8K z9pYrcmHW3Wdy`-OYEq8*loPH(Pm2y0-!S)#eZt9*yB^ z)vjX9d88*{E@|Mn3A{MyYWM#nSUiduPqck1o;bg4K&S``l3mJnPuWxcGPl>reE+{c-bdc`*Ff4uOq|HcAOzz#c(lS)Gb<;S+$KR^f-^-yk9BVqpW zzuN`^Aer!T^fNprH>Q2kz*h4QqsIq?DPnG~qt^U_2#TZ#*I^ip{$+XA4^w<(l@vw+ z&nWuZgupg{?BbidJ@6g(9QRtBo2eiQ3FTNCh$ZdBTTqVBtYVNTfdaKrNW&Asl{9g~&F-E_)ttJZ2wr z>BBFETNqPRdjUTjf5B5T3>bIMlYa-M9~Y9cPm$Wow3|x(e9I;e~|KL z+twn~5ac7ov;T{e3ov<7R;wR*f-}MLW<--jYy#VjpV3eH=#;>?$iwD%xxX>mQp~uv z;Q%#aBS?ctXa^-x(p%&WV#U45jV84(>w6dlA{+4`Z)GK`lyP2twg1}qv4aTVfQa6> zGLIk`XY`E+Thq_Wt=Gl-KSrL?HUicGND4cRk{sq~UzMZJJac{avi8Gns?kXOs$jW( zPiHec;p<#=j?C(ev6fau4NwVMrmzCx@LFzKEMBXV*eduL=D=Zc%7}V29E4HSPnqfKn^o-rnOfZWdWO z@XB6ZeM)GA-1Qve$A73<&w%Odx{WkGDNshxz`^>{!O7`Fz|PkM`;_3-FyjKN>oeV} zQ35YtzrJ7?Cp>Cqnd^S)ReHVdl$TqIP`tQ4xWV`7EmU?3$VD8hMA@&jgOD8;fE`u& zuHPM04l;0>?wW|COe7MYk2`#3!jrj@$>fl(A9?1HMg|5uxfQJt2D88B}7}fgTgwL0GtKhmo4{d~$t}8lJG;?59bG zXxh;twfAo9Xd@%3-`*y754_4_PDG!7Eds4SYO!M~FtyChtowdse&IBs(m%|o)4<;5 zk?g@n{pb^%I3WhAOe5-NvorcHi_~=gxc>Opk49fPG6uO!US8e|;7FTgAbJ`EpvC(z z-bIMW*7)CCT;9(d@Pt;^f%ZGzE4^>YKX0p;v5TCF**;JfssWy0s z%5>%CHtT?SVpCWV?94x86xx*G`{9K1&!!B)sB%nA^MHMlz?&kuD54EwK{7u5A7>85 z#c1y4Se8(Sf4PtMU8bsH{TFM_s9%l^rc7A#-@Vr)o)}WunQGRes8!g#xTJ(zk#2N1 zkSo_m8=~_n2jq(ugU)B^=Nb)W{l^+z2P%?=Drphi!|=J|tmf*hcznK)m{5C8Txaj= zYAX_#*yMv0FQ=>CapY;4n$*LeLpg@X(>0EhfBq~R7#IXqcu)U0q#nry*`N(A(k?jg zfO7Hblnb>f2Dl?ODXwSJ$L&0F%5;GgdIQK9TL25;BH0Nm(B?u-|JO=^+H-jMgh>63 z3396^1qRr7EgH5J`nLrY=aR&0KA`i_hdnT}O<%CifgRjA%@XxqKca|}z}-@I*(U#W 
zZG*>lqP<9qLE6)RK@2+O)fM*u7V54zlgS0vsPrt2hr5^%J1KM3_dR?(9LZx#JwBec zYL)}^-jjpsDA7vQUY{pYF@&*wUl3; z4dkDI@ERi!+`1N@V`BCTrPYA(JOu6DM=?+M8!99M4LSegcx3k?pUmCy1YSiQ4=)oW zN^jwGt@9WSQ?#@0_I$rUQW^0JBFrG3QuVk%VOBce=rPT5W!%GLL0LHwjJf|W#3wu(`RmFgj)CDEm-FZ- zaU220st_9*8k&=1aiD9;EcD~0{fZ$Mfsq(B)6f0&D{YcRx?+A064h}p;-0~HPG^$5 z@5B{7Ex2apn=^Wx)8`gdTjbk1w#;6lhuz;)@=FAim(MJ|Fk~oz) z>$~gxV_KAseKB@bpQYiA{^9yN)JOuWHRn8JnVRj1o2wlFM{SN$AtHDGaWWQtt8Q=X zM42?)pQ`bkxlV^c*tq$)+kPg)24(63sBwg>G0}+gU!DZb1a0=yAR13ll)n*_T%f!Z zq0Vyn%CB35ZJN?JW&X`GpbW~vKXv>}z7k8qBnQ1RO&SU|bo}r)4lD65TQR$Y~|#c7gaMJfRK(la}U+wfsIt zRyriX)$jjn?hI(?l7zHMvw5Ws-?k%Q%eJQY`%Ag5eVP{y4aYv&f9x=#>2{X3tde`) z*7iK#Vt-VJ=|M06LqoBY-E^PmF|)t-gZD~W=nAuVov@|_sQd)vZ1W!?#3vix#4h?N zR72sw2Q|U=Q0OLHn!MRGGW+Q9S9?|1ZT75yPW;cbb6Q%4!ESNqw2~^;26Mu9;QfB# z*_Ef;`2Edo&)F6A*cP1;w3klQNq_&GNF;jySr3ajm`ly)>a4MsMs1TCwU*2}Q6XIb zej8xuI3f(e{Xba^q4*$D`_7|eVW?5RiqYb2wvD^?)fX>v3Va=#AGZqg?h;<~+;22m zFJFSx+C}v5=H1`Xc>`lvF8O@0D7g3C^(1K*3z;jt3nYw(cPI^~@cy|t=*8|VBtE#z z-H4490K{q1+K|&m(}-(&n54@_l|Wov+*E{he}3Rx-aT#N5kbl%1M)|@@{dpDpIwy? 
z`AFqT6S6{CMQRYWQ{FHh)Wah;2|=n5Cgjl&+OUTT`Em@-YLY^yyzByg{yn`HJ~-*w zi?5A4@PgDxSUN5x*heeSI?&R0Zct}0mBuoC1AcNB#(|Z=#s{3yq?a2%ieeoB70PyY zb|$fe%~srh_jJFx6XlyK%eCnrbfGE-LRAlhUKZSXvI}91U|i|3FI#RB4)h-Xy1(40 zOHgFTUH%g!%%Y|oHB!2hMnZXe6C2xp{IFZz8K9+!qZB zcD-A>!TXR(abxs~Sxv~yawGZjE+t)rmDFul*63Q+{L{T*((77BCbN)`P_~_ z7anp+Cw5tym}E=kI$e$R`kAgZr5I>c<~C#}MVNfyl;#>C&)G*yrSa-*wg#l*CBVAo z53L)xZQye}gGt-GmDW8b;c8MBzN5XTXOes0z8nWf$4TjccL<@sju8>q^N&ZhcM6Jk zd}I8r%nfg%qr);eyKWg;r@D`yU&fghr>7TvFCxOw!s69j(D-JB#~3iH4{&%w(4z-y zIv>EUnIrE{lt8Ws?#ZParE78RCK$r)ZX zEjjZNDKVI`uxkhdF$_o zC|8=leY+6aJT_I3oxN9bdkFv~eqjaQ-}iuWxqv7mgV&U=ev2AT=Rl>eRETf?ItLOO zDkPL_hPhy2kug%Q3-h#M?Z2i2_PDJD==%78SrcDy^Zjw}8qX*fYMuJKYv9&<&E}WZNJkDdC z_=Ep>TH3FUJo7TX<1k}n1VT#3-ZwKt(7$)hQu5Tar=tG_6{!UXhu&yoam!q@<*#cxuQ_?8aLkJMzhAjgi~zI;&g@{p_A( z+BM|vAk{6U*`VEJ>us(p5rr!+PxA)ec~E z0*7DoIdluO%2a1oDU-Rnn#J*R+&)YQ;&ce975Lxb-4WfDsfmTyfUR)x>G+h8b3fhv z^f0&_{q%Awf@7IK6#=!qSLM3Oy_*%Yc^1j;=CvDYWy~Vpk0&gwmHX&^i=-bJR##Rg zSa6whvi2QyhtO#}wit-!_aYD35RIqTwz|G|M=cuu2|d;CsSWUOs@!^$1Ewvs=bp%n zZt<3~8I^>RlDC!t6>A^R(`*E83_+*RkvOUhpMU+j94D7VjHp=`wyX*`(;{CcJ{9<} z^hWvDiM21ltR0sUMOf|suaSxn3i}$9xsRgw`l@GLiW5aX+FC7Syk^VZrDxG!^Xap< zVJeqG(_=R`j1cXh0hPn-h(Hn5^Vc?)!w6|ARru^9lXirna8ahaE_+PMFF%2e;PiIR zP|epaPCt#?8b@r-UDu12epO%$$=RTMklGx-k{lQ~7kNuufkYzZqXu@h4~I11X^<$W zLIiF04UtKJ7~;fiHUflJYdkMd-|AZFt05(tm93H+P5pw9D1#k@LN4UgAD^D_qnM*2 zA_iy9gszIE?ZontFt6RAV3?`(oqt$M{^|1Qm6m|9&MI>>Z~1{k5XH?uMau8&9Zf%9 zF0g2|r^wTL`zmB8QWlgQW_mY|2oC&)m;#yDk@LwHpHo!KnvYJfL^<6T!$?G^3GYcu zp|`mWq{!Em6%;Uud~#hzY(&6V(8M-TzrR|{uuYg!vpAQm2U9EFFVmIDR`c3FUH#S- zZrRnRoh7{`o|XS%+cEwv4G1e&N6TM{Dzq{;{tToY%~>pz1l&HcF4nha$0!GBysUC#yI-cm26J$36)|IY_)Q2 zSIWjcpIHaAY&6PkgqS>nxv!JRkF{OJWhDG`j+|eaTBzrfnrOmHvfulnEhlM6$J#n6 zqNhJXP3+IvmvqY8sGzDkvXD@0BYAD75jCO?#`|_W`31^1f8SUXMuQ&^Ksa-A^W3&8 zw1@~VhH$2&tgOt)P^`7vaO%c;r(Vj}zzPakPI0w6{=_1$r)CK8@rT=o8pGfbV~M@m zB%ggT%XQ!-ql+a@<8Oa>QC+!O2i~aBECVtZx6RR zn%` z3oY1-R8b@jWI;&S7-~S;BBcV@wtS3pGlQ*GnhSM-1^2TzF3-UR_9_}}3nN6(%8ObV 
zsrfd<4k-yg%fJv)mx1wkQ-kMTp~i^&aqPhy9tUV_`f#K1YV09>3TxCF@o?%CN#9+| zeP_N^&ILBGU9{s|c%6d(xwXLb-m(OOPhbYeabU0I~=whBs;b#Ep;zw?C~={TRT zLpA4+-@SAanKZ+(x?+wL`8mhVrfafEUq1(5=@1C%t4-NYy72K(dXB{#$Uy8r7kYT! zeat98R3K<)TfA|uE4^Xe+m>;J-|2_Bn{Oo=7Cg9w5s?otm2KM=B`NoN8kCes5!<=S zS{AauY^fxdSTDs6(qd1@3uoNwFircN)CY>7=aFJob}%(eK9O4eF4&+n`ZMzbwbg=Sc>P@OJURqkuW#mG;BJx3V6^Y_poerJ?8*E%b-Ub0?af%22 zpy0!Y4`+h$MK{RE($8y!vY*%JhKF`H+yP1pXuu>{o^L(XA#riChDN`?_Y~QeqUG58 zpBV`X|z z&|Ks&=lu5l?HMQtKYIu67d^x`7?4E^d;9TZ?eAokomL&;`c(nzsr1D;nE}1QMkzwf zyDZ}OJh_m#n}lJzMCIVR5WDW)KR!ZH>gqx?jGS376nPNm7~h;mAj7~0LSrVG1>}O9 za$ReOdlsZ|37N3WUB+cTijQlo+UCWx-@Wtd4ZeXxwINX;bMP0@H>RX(QPN{6 zPb6#Tu#@%;qZjOCS8Io*I;=>MTs$=QBg4LzPzb=tb<8*1ZBwP*$@=>-yUgdU9YuHZ z>HI+4aqU{aKEA;mL#Z{LkgrqCeSp#TJ2g*`{0xBb^u=955Eg+vg5)5rU z{W0>@uGG=Esb=xTB{C=eZuzcBFF7vCZCSyJeWu;Y|}2cuKh?``s&T z$c|AF%8q8Lw9^RGVxp{I;tinKfdaHs%aoy`vJcRIz2s2(j*xzXvBedWqFJh_S1_7? z-DY4gKfFhH)cNXFa)}7Gm!HrH>!$?G9PKcs#4;o8QkuHIPnEMWsMSlH@)Mmqhp=6j z)A|m_%Tm6~aiSwpP>iV|G=&d|_;_vqiHIR-3C@29A{elOU#ctlJBWbAg~!EwCsH!4 z+N|Y)l^fS#ueCKW$_M9z$lhkHeTANM$w&m-s#ZYGs-#ntOWh3BnP%@#)a_XRJh@0o z6nGK5awh)-rS!(uPU?*aT&**NkmY54wA6PCJ&7$~DjJ#ja1u|iSijBAN)9@?;(t;7 zw_&OF{!`)ONqu4b$&UoI@{qe6JTxN5#C|vx%;|TLsi9IGb@dU==q={ysU8?b`+Wo( z>E+`y+xV%I!k+c@TufQiWd+4GEJV zfNV5pP_n2rp-F<*A*A6@NKMU)91nl9NbV{krMc#2(-0$7pJ9bj|Ok5+SAkDO03{H&<8Qu|ovW$-02Yxl+rrZ;Og_iO&h zrKCj45~)WQSwslS;mnv{w~%lN>>4To;0!~sx@w7(7v|{ZuQaR{^ZOomRxd#`VhpD# zQn50Q9)wy>lcMDWMHr> zl*Z;3GV%)QzB&}Vl=+u&wm9Zmhc~BjY1<*cb`P9}VNjj62aoyN%s+)3&_zcQ(cp>w zT5~>0F8#)h(^C}vlje9r|1k(a8%9*DG^6*xl|eZXN&~3s6c*d&Dh(oty006!+7kbf z-eSHo%7^-xj0&j~t?BDao$_;hz$~edC{&;6Fe4+WjfKdSn@DWt1*A|wiFH`jzGcb`BG-0nXM)Sev^14^bBvWULnh_Dr*;}sI%+3Ho)~dPk;sVi z4mxB5+v7r=UU?KlFl0p%(87tP?|^^thV{s;6R}G{rQe)R(GSUIynpX>&X-+1_bHSS zyxgoglaqw~ktp5YebV1RNctVvlq?Zsy9tw%CFgxfc-Y2k*POo<-g}`L61wAvjw7#! 
z+nlUy(&1N#MiGc9D=Sz=yMe>TLim^cQsQFcj7O;N&>^N{f0 zCMbauEgpP&&CvMR9!6o_hy5z4Q7WmgRepZ^G@fRCDYnHf_T$f}^yfHQ+E9-@dm1kJ zHrS;kIEH;>)ZMyhixOr%N65uG{rlGx;FTpE1^c5XXO6d-f5Bd~i`c1N+IYO@a-EEK zQQu5q-%NDZxO?{V=h)xBzYURRe3xqc1q6{9^M*^hAC^`E8d6miW7=zI57{-%f)iQXh2;o0!nikg zn){o+gwpDlzU}hdCTk=nypbl|21?dh`eD0Pj<(;9o~86i zfK7rhcCX?|gwP0qXlgR6TEPjAfW}}9M4`a5p*5L=N84sFQzHN&qN_Kci>juVFuir` zFw?%I zC;j1`!fL8s=PuhwKE-?a8*|K*k`A z!s5blmHw5<_}N?ts<4H%J7MWFYifq#!iki&;e_h9qyxWI7k)K;QdYQrj!Vp9bgHKD z(_>l@>r&@~4BfduqxI6m=zx{bz!N@Q#eh3PZ2=`SA?wC+$-<}meAE}`(S*@qfFx$v zUvx6BUNK1t zLA#B43Xy8@06KRWdkxq2>N&HI< z|M3YM^dBeOi(mvdNu*Uga^Db`(Wh~p9(nnl8|29EfvfLahpJDccbouG%P}#bi!Dvp zP#FavN6U&J@5SoW)YN-mK#!{e*OX42JZaF@-rnvqd;w##&j?A-@X|D`n5IB{hkAAg z(Ys^1x3Frj&F_1Z{v6CHZ{mG1tF44wia+?xaABtITo7YE;NwJUZDR(cC3-*ZK<=v} zHV4~Y*gnao_(H*f+nnNN=xMuau4X75u=q<-D|$b`ae#8+Yfz9nYgZ%1O18C+wvZ6adg0b1Ux2F#Y91&4S|IsKS3v8blyK zAcQv9ZZvCpB&XFL(hnOhbft;$!*Pm~k=XqMzizQSbYD?!1+=Hq>I7db&o zq~mOg3t~mrc%PI6 zt;>IT8c1lg(<&3NpYm5fboG_9^07BX8D2*nr9>JS3$0< zl+vmSc(ezH*suXkIm{kQ$@Y4L+_n$0aUCa85l3TlwaI8cf-{54$M(`3gi501n=R{m zf2b~)mIrwZ@)@ytt}8qlZMC!YGMFPTEAFFQ+PB>UWFN?V6_9wg$xyUukp++R+e6)Y zTg$bZBBZk_=+rZ`!0$o{EBCJlu%!jBp*FO9LXV1dC*5JqIN`@H#tBTO+%|~|H&VZ@ z(0YwEA+h)`$EuZafZ9N*@eH~kJW?~JhT-7=xS3c4QR8xXfNs5UE6ju#nQ_MGp2G9i zyqvaheS4JkGKu?^R)lO;85@wTUVSP6Rk0 zNbIqHTtmn;-w%ZZ2UeUZ6r{0;#0`*57-tGjwH4lc?<6DX5p-fwp8s21t_Z3A14vN8 zF=#^#c%^sRO%Jsc)0U<{dq_rC=^-{d9cDnH7{T#1jB*kHPSVjA*#B9#b03=`JmlNF zkSLRXWm1__wBnv^*H@Q)#AQJgtJ7*iRtrn45h}|2b&1r!NC4%6+>meSVSVGwHQ)#% zgBF+oZd+D(j_1$zm#6pn&Q5(zs)Ra3%DaK8Kp77neq#rPj0>kSCL$wSW-pw+lzK1X zakii4sF8`u(aM2*cN4}iRUp!%>wBFG<#{277xp+aYB4^DbOO?B)W5FYdj@dj(%}BC z2Rt%+7(64K=|v(k!$&-~3ZO)1 zc2O#$yqwO;IlcrRyrbjGrgKR`qN0W-mj(i%Hh9EHx8L0G$$SF+&?fG@{f<~X$T~!9 z005%n0zc(V)K2NI^vSjxt5RoS0WURn6}8Z#zoILD{*Y}<4@0>YVfV>SM-O0DC%=a88e)m;+ zUR_-~>M3qrZLwBRW!)TaI6*F1Yj!Pa{CV_SQ{0+kV+>noS7mYW7GIpWWlnqh#W)eu zw3{DPdYq*Ps=c++-|HnQd7HuAjj8EreTlu_!;YMv=pB|C*t@U2CGlfytD)J{_bS7< 
zoSdACmIK#%1gd5zGEjld2HFG;%`1lg()UyZNGTSdftfDh4QR`~$=6Lb0-W8`3*8#aa%RtI#4C%sA0rSz4oY&8BpFe+wx}A`)a98^at&-xP zm*<+BiUszGbpP46>8zc8=n!$aL)Uwx&U@f(h^5S(J12vd^dH=Rcui#QcVN+B!QXxbeYmTX<{8%0GvX92p zJ{eRZE!Q4kW)9A|caMg!!v7g$zvaYlx%B(j(J;Wb-ZwUk}VI&_Gts%nL7kbZi4I?>r#yUaB> zv#)G!;%kz8Mc~>)0O0EC>fRIcttTb?9yBWNM6YMzDj?VTSKt$XjJ}=tA!ZoYd}5V1 zF0Ps5t^4)&)_6}pgRH>TNs{VW0Zk4L4q|UtbxvJaX_H;#$xY8wys@!Ci3dR@QrIY+ zcYNs4yxLOjOLy$6*9TJ`eJFkX0bu{g$Ugp>+=XzP%L*s?9QW*H)REe0V3PW6ee}Be z*;~Mjsn_H8K14mOmnO&ub1?{{if^6&%$QAyKy@(_3RyY6dC4QurH=SYS%sX~tglnw z=+7S3tbAINP0t^MgKds)agEeoJyf(F-dL6~Auwnc!7IjVx+<<2o2z%3pJ!%JS!yi> z{79zJk^7Otd#XNblT|yCl_fXk$Ym)xIUePbo%$FJLuXcZ5S{^0-mX0}6<)Heg4Ce( zC=&pIZm7&2U_f3qLm5J%%mEpGHes8x1H?$MU@If|5aE~gL+@nMQoO3tDRX1SUKM4! z*8K~w$?JG>O0nm-k7!kvf}L=6!z%vLXUoROzK7z2pN#9Ew61h_<7vaFQpUo!^XYm^y2N8tTrBtHIyvpwXkO#0}s+r%pDwLT0rWkzAcr6AYWd*du4#u4YoY3ll06u znW<2{q5X&oJE0%Sr5Z7y73Pg zwqQHIO&KbLT}|qXP2u0a-v=#7pzJAi7(F(E0r7YS)#=u{CADT`7ay_b{^lK}s+V&? zbB4^pa<4#%!{HL>H$B?92?{lL2aD7T#j7C!3=-b7*7~vQ-7iSjKOD!OHz)HOQ}~TW z87akMl-L!De(L)d&G993Gp-P{x{UdJvo`zaY%QEwCHNSj{D8|S?`W9Vvfc@bw25l~ z4zQZk)Lq46OFYYgHJkDwSI*L7&5vSgGjf_ZrXHAM=8Mw#r@{)VQae*}+X_NR@h3xM z+8vd7O$(G|->^Igvn_fcY+KZ9#~(jnr{8TC-?H0$Q#n`@AH+>)u}iG>uqm{)5mHDn zGGb*sQ+J|#OSzEofWjNb;r18J0V6GC*&AoAPyw0K_$ml`T2X|$jeO8321R4ysx@Uu z)SRd0Tch+19Qg_@?x82#QLu3r5pFH9TM@FXRLUp{#%Q(+D--QW^CdFh-+3lNN@UvO zvZ|3()1vgoR(a7oHT+IctT0lSZn>V`JpVB|yedtKe8QaAcUg&1=lr6Ss02fOK6k?U z*61Q7K@?F&Kw}x`ZgJxuI&jwxk!%XczbPB}0S~t*(ZeX= z!}H!j7I#XahZ@^UBhha^l71P?rI$Qa^$vekD* zh0t9W+?GpfySffnUONIXw|_mwoR?<1yLCx_*`#eh=CTiKLc*5rDno*aw>uPu9L_>E8g114jMAO<%iF3tou0k)UiFaAq1DA@Mx?IM5pP6b4>Y{B|s8L2ZlkL2_ zet&($h?g8BeR87aMyVXak0E>1NnF<*bZ%qC9rxb2E>Cq|_ayZ*XU4rlJ0d+#$Zrvi{DW&>cVnG>z7m9(xHml|*aKAFOl!uF%Sk$d_ zV~1Dsj-!_}&@vn3LRXi&k&H0Wia9xt08K!$zj%<;p_f#FoDQEg)IL&GtY9s%*I-S4 zZnAnRa3v*V#DAd5^Tm7NZPFVCf=a{Z z>d2;4(1J59(}vH~?%>u5BY*+~dbN?}A=F7PHJ*Tzmn5uQTSb zme=^I3-eBk&QK<<<)pbFV?d)T;ew8RC?~QQjC?t|mp9R2PdCrYx&9JWDd9P(0QmPI 
z*yRTL(A$G0m(Lj5v?eQ7!^H`o^m-bb>%721!hQFp2~e!w{+N0%r)f-6AzXp7Pj&Yb7dRl9rsOgSaWjCG}p<+lp*u3E)-Dmp}=XLmW@gwZw+RIJnSXv}H zm$EC-k`^R0P|erVd$TPYGI*8t;ULEzPKT{o%azsS&Xg_9#6kbHEL1<{;~b~~;>m~* z65$C4Cnp5VP>WnAZ*Q9m%JM|?Vz(iv5ula(e0)f)nvk)Ge$uq(J|;Th>NQf08>+M2 z35;5Caj_SVqBO#!x`VI-ZqB`Mo}pniG%Qzv#PixvA4L!{gPS*RetLoUBsxoP+N#!S zxP+X#eysMBE3(jnup5fsR%x@O-ovhN`v{jp&}ISI%zBepaP%Ahg{7}9-#4kRKl{oi zHP;U{a=PH(~oY%VEMK?N(VZc zEU=i{rZ`0TWQpkTy4s1_h42@oHakKG93y73b2R0)AdyCGnk2QGDrjq=p4f?}%&i#X zSGsx?+GwkwoZeKx{F?FKv=kTeoTiq>in0(HLIF{bkx2C$%WGp8SV6NAxJdYQZF=Gj zJAlO!kE`CcDtrUk;%02DNw32`#>TEUP@D^U4hhrz*>(2IBL3_LNkT*F2liHLRHDYg z#Wh{`fQML*4?0t`Sy_`c-BsXNUmE7HOfPV(R0PYrYZ{a@sALpU+uV2WzEAvwc$NXO zGoBw^@VkLq)j-M09EHrEoGJnrS)U?LYw;U-isSOuWO@iXZjH5V<1Dl{-#@3~Qu+ax z6u++zbI$aZTl&uSKdGi2S|4cOB*gy2*~ZKzI512cJ$gZJfsKQcL-!;`Bw=u;`@y{o zz~DZJ)QUNG?wr?jk-41Td~h&jQ~4j!<>O822oOe0y0_|~TqQ`~p#Y`=*KRa=<{Prl zr-Ywm^eVT2JHFZk1lDi8=tn+^sZvDjH5H;b7*;Q%D z^W*GIxN>okgP+FB+q)dS4g!8ZA)F0ipO9Zl9p|_5Zz++@Wgnh=kMr8`MxYA64*A6& z)q(2Zvm9$8hS&AlY!h$1yhd)oyF`m(1bt{|Ta*Sqk<(_QQ}TNa?WbvfrMT-7*G(X78Uge zFSMFhHd>}j96y?c*40b#?MulXiFBoPh`;jd9wmk?i!mbNs_!1CtsSb^nAAy9=Kogi z87$|!`b+?%A?3h-)@SfHqyei53XdGhhcj{8B zaoey7XZ%DR9rBYVC4vS65+ZaUAnct^elccNGNlCX3-SIK9er00B1*H5HTaAXY zjEvr+4cD2O&rn?gt`#cT;d%7Njgww>KUXjs0&rhtO`iTpGnMqK+#?LY=7=aUIDjKu z^HWetuctNH4{Q#v{FYG{g=jBc3X5&oq|Hw-_#8$pO^X)j)=gj_C-?Ej?s=jiz?L@| zuV$$nb?fH)H;rTj63%UItQ+jEqJm|reN6Zo+*kDl**wrVd&9u|9_|mwosQ!ty&bEe z^pwR7ND_L!>_mRE19KD(dXR9?4g>x!27KH0kqSxp2IbTJ}`N7A?n#g z1|Fo%l-9wju#)Rp8#gJO>uecpHe%Agwm05(AE!HgS-y_YQQfCGe(i{2$3UZ3%%qp{ zxI%04i6VRZ)kw{`qCj%@3B2>%q`!}kolii(@8#vg1C!zPpRPO8-@1;_Gx3xi$JP?w z9LKufzdg~dQG(Yvd`^YRGRMHMebc{}8Kb$PDik-h_G2#C`pH=;KVh=JKK1P(`ph;5 zheh4HO%5lIWxta6HIYj4n|k@c!Estm%jt-7DQ$U}I>Q0At4$B+-ueAP3MIXW{G^Gq z{LTXvD_-269Z%J9J0`*SrSKL%E}Gk!Lmzv)Xj~~wJzWpCpWq;Iy86iNqXWd1>FA`m9l?5- zQx88rNGv-paq8x)FFU6=(bh$HWSnvSPb&Mqe2x(9tRc^TjISn6sO>P2EY46_7rPJeSmUD=_ebvC)__;~4bha#&yxYC8@139OEK z%m_yU+jr#Ki_FvlHzfVa$Wbw&s6dC~I4LRyJ*K~pZjaY$kkh&UH6~u%8twA#Z7M_v 
zyAu&8u#OZpFCG3pxA{7RBG+dlf>88N9fAK z&O+CRrCqxT@A^f%bHqQTeT*hYSd%Lz?Ugrs%=a*01w0;6Jy%JTu~*IQ{N+<~W;T^o zMdQLg9mXkb`8RnVG*&RM&1%{SoUgA3$CN1{FKAiWl*};Csg#6~A9?h-9jR&WcGhgn z)PK8nQbSiOF7U48urbwo7Zby>r^H_By|95LuF82uLfJEk+}nDU5JkK(!A zNyNF1W1@tpc|8P2yG4y?UzEI<+tQ|_x* z+VB7v(Y5c=QBm_37zvEp13doCa`Mx$RKp%}KfUCllauc~aTE@4?Z5p-O?>SF$NhGM zL2uvoYij@49!4&SlS{nxLLW}$=5r3@`f&FfRX85TR*EC6erFW&V!(*+s>j}JVm$Pa zuQxIarjH)8_TC+dE~jTxcdIbyJ@H7#h$o*Kj^za5J^xl*|HwhLV=!(x;R0KGJJ%0o z)hlu@s904Dbka3XtCC#5Kgr2y2V@-M}Q*X_T*Epc>JCN*CYO(g9tDZEwGwY=+KWE(Y!&8XeA@HcOLM7j{7!` zjO*oENnU@w+PBYy>&N)dX!NtwQDxe{nkc`{B;7|kBxC6ZO;?mgS9t{-4` zMa1OV@e9q?GNeED%7GD77=bnk-m^~CakIouCQ)i?Y8?%;uoa#XjdNCEMhO!Wx;xWec{TqkLQkE~bOu zV7^@mkq5g3z|%RTkAU( zJ<1vsPl7iQDugpSI_7%1yp8ZPx@|dSYt(I!3aPSEew>_4j{EZQkecKkl)YtKR9)CNx@YK+lm-du5DWxqDM6%>mJ}&TK|o@L?nXeRL}?I^ z&Y=-OP+CG-K%~27&KmFMdEehT-_AEaF+0|}u76!?o1?DfapBK2j>RYbaFa4JmVc=q zCXS=CVru+4m^%bl?%$q`2qnjTK2r~~9lRq3RshvDSS#=jsI8F#2fC3)!ZhxJY|kN} z$Db&l>zbVPt!A{q8^gdtI0ZqyQ|SDFy-*K9?2BX%35g*GJDNa6Qxl>0o76o-0gro1bO)m(S1+DvKRDCzU1rcMc*Ad1S#mr#smSeH9u`%P+ zRMYp_7t|zi-G!|0DCAS*!1l5UrF=lX`9QgcySmzQ(PLj}@;R{BV!?X(3jSYDbT2uZ zfIRyAP0ki_0$Q(@Z85fD`5oF_1APdrMBOb*Y(Zei)QFZCA-nid&X9K zjW{IO0V-?@_U+8N0nSjGfLt3)UEMm~Ld>8ZG&cF>Owh649+{e*X>q>|TL=XD;FBMBcXpR}zVy#>9Dm0Gr7wWwC8RasttjIF zLu3T8myGgp1d_fv*ihG{r7r~Zvu6`=}?nsy}8y6e$T{WoBg)JrgRtH$Gdg_w!`ETkuTh^$?0& zstX5XS$Al9R+cJe^8sf3AjKd&3I=jID~JdMp~Pub5e$JZlJ7q5_LWg@Sa3S`+QK77~b5D>L{${ws7b7p|9@ z#$TreT7SrWdcw7hI`2=vXWG$WvseTw9}y3JW4jA6bb~r;+6La1gL4%`z#hPm;&ih$ zR?0h~7vr8_m(Xc2AAQ2NYg#13eJ&sz>!6oJjV_96p8Qn2LK4y?8q3|?@|8~j$N!Qx z!?_i?cMAu=#pP>^>>(6yX0tPt)f@?)AK|rkWj}CwA1TEP$#EL!>j%xu2mu^eOo&S% zPnFbJxwb#|mO}pq(A?N01x9#;kZ@QCMN;~S6y`ix7jT4bpiase^9ul`0h0scv!B1{ z!*=wvS=lDD3!+H2F4!*3dAhHpP;|SA)84!*1B2*UvJEMr`Q1RMj2jPNIlyD&^!4+H zwgEPuoU14BX?T~1Q2@K~%U9VU%$IiM=jC_4N(;*SX+d&Vu2#wOet&wa@eG>%Z3a43 zPQ8fvn?u;u{TXWct`5x;+tRA^%0VB*r!fZ$ku@0EWb-`|=H9tqur_~OQ&&MNOG_Ad z8qk(1ssgSP=Z{Fhmc``^^ehd 
zy5QhW`fAc5F(G7mav^<*khRvR98qKgXpIMlCiAX>@+D(d{i)BFmw#LB{}y~cIEdfo z!+n{tir6})#(_w2@S#o-3wr#=lC{>aUg7JG+y<}R!ov>^9zx;m$3ArhVDrBZsPk$% z<|*J60?S}A#8lSw?+YVMnBDno0P6?M_fVOQXQ** zOjUMGPG!Jxlzxz(KXN+j4EAp~rbJJjC3?#P&aFFmAULVB_oXlwfOf^3&c`O-CtU8| zzc15zI!pwxQ+-Bd<_Sq&{-2%SOyvt{s57C)Z(>@uQw`u4H#Q4n2oDaJ9>c}-nRb?J zBmqE;4O;qDJu)h68V{k0&rTL5O*}$5xmVv_EheXEsWB_C@PyfgLfT;r8-k7BS`1ME z(nnHtBttT~;asapGeG_Ln4h;OITt{Uxe!Jvnxnc46p#rbLOJDgi&t=)y)UptP(I=j z?@)G^0I0{`825#^HCC;$r6Z2DC#SoGF{dUIyq2W3lM-}yx03f#>_kQNa@TOuEpAM6 z^}!t}j3187+1i~0rPD88zmmQ(iRH!v3d8dfk=&Y=kteJ<14#D|f!LBjbvH^49v%ZM$np@Y2 zy(xBE>QVQz-|GIMa6aKD4t^*0un7=O3VExDy$jXc901b=X z@x}>qZoCRn8Ii34Zj+xKMJ>}HMd_{vfM4?5&v%9Yg3;D;iy6{&{ zld%E{O+tm^V^S3{Z%Z~gL)M(n%~z+O?%If}K`pJ-jL3+1^F4C|+m4d*mXN%>JY+Iv z=U8eT2bba9-34&A3MT-|DC-`H=)aG3{^uX7goDfE$c3Q9^!iztzLeln5xMK42tIze zRa1cXNgJ(n4VV?X$FEkOMO(hYE;%z}$*@qwGY#U5HVY8cef0gs8DBe@5) zEluOc#cG0gDer_s#Lh_!o-Y*x9<=y`SySBUuB1of%1o)dHPK$73^M3DKg1DNd!#X7 zxVjU}+DZ^?DPa0ZYwEn)VvfX?nlvG?l1RAKl@zmCj)+;zy{HI=AT1#C{4IU-Z~ssN zR5sJA(NCzO>*>8(CYJEf2hG_I_UJb~u{tU*(KgZjD~~i}!2G%}h=7et_Ln+xPj;GN$|bK(Zc`3Q%I-S6T}1xxau;@Zp1ZvK8tY=(?5g zcyA08fd5m7Wh2h}R|M}p@x~Br=T>)Bd@Zw)pi-{*IWX|(`LurEn{|7=j`|}O{rgy8 z^3>^~ixoQhd&^QYVFjJir9!2YG=syI82Q)3pk*P*;%iKv_kOHF`)5J$Q=NIW0u?_u zf6Z+e((=TyD%&XCB+y3YyUi!;M1S_YIg-Ix7cj&3Mi>yOn z795jZbmqy;=|L8bV=LPfqW*4p*BsrCkk`>kcXN0%C-9#6$LFdlFnm641o~t|t<%33 zjdr>;%e+3>BA)$&K?hqZk9_Dx%3x~-=4Xp}Hpf>@WD zX5;R$Ro{At?{*8zp*7!OSzCNbkv(g`nI=Zz^CoUFIVh)rpyptk$zmMB1ZBsK4m`;Y z!!u#nvHKMlcjLD6?6t0=ueX2%rIZWEiqz|lIAwQP<;sFKbV&GP487x1uysl<(~QgK zAMM{udEH=uvu5bkL2=|>aGzfppb>^$TlNvkwP77Rl=D+yym1QPDN zy*m2lrPXskxc{X>3Xm=%1%M)AJSE|Va3>BuZ4f`4BKnyBhjY$3M>9*fabRujREmZ z!k{H58y}O?trBDI)8Bh}@}-XNF$+>IC=U-@Lr(Mank`}glOI_vpFW6|UHQ}G)LyV^ z0)s@yB(zmX{{G91rteWr)X87^eg=JbK-T5Q9HJNmhZGlb9}!V>QNo|d?KyP*Zx&#a zwVHIHYq#@PH5Aa@hO{x1K7<9mZxGY22fYrkc`zy(+}Jpo-PWD8&2&prfSTkcDYP?V z4|~jeYT}6rJb`%yF>TxnLK46VNc9~AZ-!*XXSuk zy!riN>kR7ANqPMmfF9LSn&$WXVUb@JTyx4=NKEQzmzFom6?at}H*a&KozlGlS!}0i 
z0LuWCP{$VEC~ku+1RG0`6e6HOmfb-ChV-8Iau!hIFI~G-f5{rT|d@(n*91Tti)4){3cdijL_j@AIcVUbD_%4a_PKij^#!x z9%7Xr=7O0LgW}4E6l_|?$UePTck9UGyZJPSusv-f=Z|MlZqB9T4}fA8caypNj;CF4 zFIwEoe}+?unw;Ogm`lxk6XQ7?N>$sQJxkPd_)*Tz%$$|*GmJHQ)C^tLULl@@G4 z_Hu+?yTWkZ=S*K@%WMFsN{Fbb@v=0rVTdBmbMi0Xl(9D&*h>S#s?6C*DYJsHE|5Au zSKTVYayo9nt&S^uZUv3%(QY?;M(te?B@%)@sO2{fQS>?7+l8Z*pvw)MilA!HKbY!g zUD|<6$au7+gD^(!oF@328wc z5`xtuGl|@MZSy(C+RBP5+sv-}qB)Zr&G`vv-ed9^Q1_)2Urfw`NLIAPA&imfh(k<_ z04YiFeX@eCCO=br#BcB&t(IweIa&^4=U`;YZj13G6GrS8f?vZW+wvd`mv?|l!?2a> zqX_UmQfoD!1fK@fmDRzzg)M2BAq*21LotGTGn&!7Q2MNV(T0-2u3cUMfkV)6=gDvh zC5gM#jpcI2Am*!|UHBY|;6|aH3vysX(5!HM8S0N*uVqkvW3SyItsiNC!TCq-Z`Mzr zdCsGk@6;MFx}8{j5L`;nu3H#)iuP*2w4O=x3W-KEUXjpce+cM06%YI{iSD=*QFaD( zR6~|*|I>AH<3cG!*Y6(77g+E5jhHorQ{7$Hn4|}SUh$g?5-ibH;Y=AFk0076pr~3w zM>Pb5bYy+fSBaEr&A$x{79FMkBW4R)8YZX}(8B=^q5=OpB+}|ENozlg*BE!t ze8M|IYXhf#8lSBFdF%U6K)#A-L#(5gZ$9dadqLz0U5Q5cBU8u8fSRS{eCHqW+0xYb zkXNGsbGm#Eiu4kA9;Hr~YdW~vaIkbEhko8>kx8+rIfMLQZ*E!h0X(u;E`yZC{LJKd zsf~#@?iP9y;Rt$w%2=g%1;S-s>O$Q2Pv_-}89slXq6vA;MLv{-Rhp77r_@&_;m&a4#R<5AfZe1`S$IP z7i!Bqg5M|#uUwB*1N|I}8(+IzGrA08-z3>>=o~P#eIJ_)K@?I#|A`WdwBsBhyva#> z7q6$rptjZ@)Pb94vnZda6~MJHa{AOI=Wc-5y2yptouR2--e> zl{os&K#)ic-VWz-gCL8JBF!Q{lqEvp`pvJCiBR0_qC!lFjr|YKiL7mVmI7ums2ZI6 z9OFrQd;DJW(cX`ox&;oR!O&B)m?FX+o0`G<=6Y;s!>b*jRpI$-%e3A(#v&=80&|k_ z@eP?zj%$9BLX?XR+NPYjDKD~ujRR9p*JmG&VIN3=+LCF%SJ3?ETxlP(mh6!M2+$)|fh3$X>;q3G`~K_@D*4XKfiG=>P>}d5fp?AY@fIi0EOV6Aq|j zdiwO*rs=F{J{@24zog<9Vd2Hy*LS5=~5Fk@vO>9r# z^>Q%BDQ$0y23e=;C&NK?yL%q=e{~IQnXmgiikG0vQ1ut|z0ah$9bg`r;FlbwHHH5< zfCZ0ALI|n9T0)FC#F+4)7=|~gPxG3`yqLQA2Qgmaw9@5uO}}t7OU5iV#;uSq5JM)0 zuJtGhW(#P|Ln3B|0q7fs%B~3aD1ZsHMHbu<4Ls;5>@S9bUzCXS`zpwdsadha$IE?w zXWkrFWVAMLT1o&M&4lNhxPHld2i#*Z%WGc#fliQm-(l+64!unHHejf2tMgrFjAADl z-5LN6_$wI;8griL0ZxyEwyT_CG+!bMci$# z>gwLc5_r7NfZc4X_kY=Q$6U;`t-qq$|F&N)?wVFc=kwqy$KQ#Xsm@S_ z2lFm}%yC`8*8hL$9$e1GNKzgaKq$trJm$lz4A%}f2me#>%?wx}4`vRrh^qQyPfu#{ ze$1iGjTv~{dtI^~#%1_i_|gy6YOO5W!GWmSkJL4FLAAVU)$NY44K_VYtJjEk$pR`5 
zQ6w!8mLHTAG(V(k7yO0Uq9peChmw`=KM1UU&Q$oYWK*ESpAS7(2ezP|fs=xpcKaU# zj8cAvZ7R{u?4Oh@%(kmwZuwjgP^2mZqyPNj*Ly78BDGHL)q^4yDp5KdENJKCTF4GK zDxYt|mcXaC{-+BW`Y*CZM6@GMeV8N~2kJei;i+SbhZAPGQV4lPpz6`od#vl|HaY`c zRt3@yuu@7;Vrz~@P_}`>?0zXh%<16|<pGbbBfgwH#m4jzea9T2|I6y6duvwt=dp!bBV!MRK|9lF7=7*)V^0f zsGmWYUtUW|W$f~?kOw_~6yz7i^V7`4*G|ht#?zAtzhq}H+xsKyox$VT+g7W*Wu2+; zZE*8D9bH?)vlWbD#Tl%=yqvW=)aNFn+8Ve7k9RH>?UfPB%bs=)Q{ltIRKTipy2dzrw!`5YH*9%ykV^#m7= z{#Q zTX1#E_3ItI>LH?dm#5~NE>2BBpo4z>`mp&BcJc2zL=);g+)06KYEc|uyk*(-oxj|G zPj`oN~-cF?;DnHwvX~5}?XKe>d zB{el8V*U1)khyy>!Yo=W58b=Pe^pt9tz`8yoFfG38F6zH@E>s=yA8U*F4l;(gD46P zuzSWV^!-XwsGUC(Q%VvO2V;Btqx?+X+l}Q%pcLdS1^Efz=d&S-jh(}R=ns?(-SM3- zdA9m~P8Ik%I&1da?)mh|8A|ZP18|boY4gcsH6M!$>eA^dZPZxp%_GfH5sL5QM|gmO z-n3{4zP$2JH49wZzqr>vAxaGUq;p#YYL%3XX?nxn~*-Y%tr@*bj&w>C_=MESKjn@ibu{4hmRyg`YX}7BN z%)=V&9e~!!0<5!V6o`_g3ZgqZy+p*0DKuRKqEBmKIRH+AvX>!o$msA$a_RGv>or$6 zyS@PjVcC%C;!!{A1rf&2O|Zf1*$iQEX)GVl}R)E6brS$@$>UW zUl*G!)VEgfJrfr3^L0FWwJU|6K2dSuA!P=Mh`>J51LyO^4;ro>&=B5ndEaUFH_{bP zJ2yJveeIR2;{9Ja3A)kmr&CWYF`q`4 zL9ZqnangBg!F#i9MMvLPwg@Bz#Ap3rE6T?X5Kx8|DYI9}<+@hT3$6&Nf$6{Z_b)t4 zX?x?Ckv4X?(i6TnPuG3u)zms}zUm|KO)m>v zNPLF>YONuT+dYI}ta2wg%vcLw95e`Tj>nt9(~V z;R#qR47zn|*v1fD-WvS&rymZ`8Nb`zx%o4zbD{0$&<)Fxe2?bS-y6JqiRGeMI%Yzx zGHq%oL zeLkr8@+G&iuvZ8x6rqruOzv7#!*cf>i^PxUsSpM9W2m`CdEx(nj%K`02gl+?F(Hc@ z9(BGd+R(Rve;?l-vz*f`9VT(`_1T(Xcv58&WEkRvM(+uuNmcv6!3>`mD+Eb&ziFf} zsQJOM)r30Beb<x>lceEWi?-X>@}1JGkxD zq~(6v6ISwvu8k3~{ks+`M5nXg$q$}-&yBlTzjmuvXE(iV9@;w5hqusRSzo|_hTV4(0(TM zjl#??&pw0@L#9nHXC-&bMIV2ty?bHPTg=wkSgo>wQ`qRlMj0!0iuu}0xX57e$pa46 zK2~88H)-8N(QLPz>yO6M6|ld_IdR6m4BlA4$6RJ>=VwCiLOZQuz!}=zn7d$dC@!%O zq$wYdNWuO(C`PX*%wz&lm*I%^g9FlAx9<8NuEp~>f|q0aRyOq8GXr4QqC$ib>PVH^ zYa0Il*HZS=OJ!QBlR=^^G)tem$W(q_?-aAn?0nJWsN^H)rcWUljb@-cmiAkD$W!~6 zo=X@#;k4Babk3bi*-PrVyNCrD_JJrDsxT3uwwaOW|4i=r?j-_1U%$w!`k?(xP$1oQ z&gA8bFmS$u4O^v6OIk26f*e~`A+T32HgHUxw~>|AxOsVD5gRyKAj5*aoxguckxM%z z=9jeFC`{Z;XJW{=XI`4}1mQU&l|Xu*&`Vks?jWkFc&HvD`&zz?oXzwmfR$7upxX_J 
zZBbsjbx&`;zUj01_ks%2GO|qZkaXJ21TOEw<@ST~Hx=Xisq}}{MyX1jt*aKrMe_SR ztFwnNJ5eYS3le67A`yZLsQBkE4}7MuThvSnePwy&%*=mcy&BlfOu%mb0bF}c#ewPL zpJXN7KoFs4WDK;iGXxZ67om6kDHp&JDlhf9H8TD1FqbZ9G9x1f@7+-Al3!*F1?}&f zp0&m~PRw9G3$~%HpI;l9M_2=0U?c~^V~#o>(G!AJMpouzp!G~p$;c=YydO+dU`Po$ z+tszcxSveIN1?>bWCQ@2K7IBvde1v1DNZL#EZ&eOBs0_8>Rned#`#6L?;Zx-#Cvni zzZ@+PCZSnrl-j*(O#sZXu7M~ijDaSbsOKr1kmrQ|Ffn;(f1=VDsKccVb7^S|Ak5O8 z{E1EdS3pHIR5?OUsEP|-$(ze_CK2n*?(QKI%R8xZ<-%qw?5c$<`KE%b<@fXR?6Tfwt_2Ip5Cds-SVmcq+gG8gnHD?-0r&RtixR1YzwF zuq6hDrWM|vkgR)bS%9P+D4`URF6Q)v^#Lst?*b~y12h%wVJTE{RFt4@*gM+v!Hl zo)ZprDbR6#N)W1`m@R>*`EclAyy98`^Lo|9oGkD980J5>ReUG*GOG{qyl~$*-~d>M ziBXzam@4kSzi3`%p>|s6Hxto`-{M|mNnx62zIiXKiE;d@186z5mMr7)03MTvKdNUC zX~t>+2#kwAvWg>*mGh5V949SUP2Yci`+2fJKG%uB^vx}zE)OxWAm4)EJ5K1Zs`8yW zd!lP)B8wN5)030VYSE#J2JTyRtywz|hu$(CzZwBtpp8GIhGh}SDIm4D+yFzSs%?!u z(s^>ZDu8wwqg!eVzRD297A%B1P(ciy<&Bt5QCv%v13^isV9>aPu zPp}>K!s{pD{>50!jI17qe+=)uZlsNorgoc9R&C&_-!QP^$tjX7Z0(b{Ug9@uT>V*d zJ-m~>2dT%GV%xVxa6AWK#Z-^2&+ec3?kwGU`JjN)$Bzn|7VqFP-9ntm;nY5USoP4Z zEI!^2Tprg0f5^)iDS4UG*vMac23ciD^6gyDpb!Qx-BkY>ck5-Z%e*HEcE8 zCB;`oCd3Er({NARZIRHURKk%qLLe#!`KSnzN87{voRAiE_dxylKVcVHw+);wQ{M7` z96QB%UAl1Vn<^BxHJd8T2RdJj#)AoT)BQw3!_J;0NuhxT4p6A~{-{8bluRTkKrN07 zJZPw5^(W)sUp7&FFE4RmRsfXqryj!MSnfg5wxVrM)7J?+4zs%?iJ`8**`^H<>zWQL z2K?TJ@00bKTkP6d2rYCKjVGe3LU6;3SW>+DkJCvUJcV|?7qdcM{SeX`;1sK;C)LVT zkMPW$2*{%b$vcqmxw`Pf?;bOm$2;FjUjrdxlKoBuQY%Zcd)xY}GDyYATAHAoUMn-p zd(lmOIOuTN%zYtYioBx&haHX&wzf|%5Zby>$4~al8@7FS z&977}49T1`zdVN@Ngu~>zWaOknB#G;)$ymP8UlrOG_*CGZJTAJ-)d0h0&AV(P41wJ|SH}(AdU3T44=bUKoogrDBj%p+}ErxqJ znHZc~fES?+D7v>Ccvw~Tx)QC&E!*=`HP=iVJs3csxasAXYNLA7Y@Aw<6b8h{c5B10 zx8r`Ti_4)!KGdF;6D3TIXZyh|-%vIr!}`k|D*9b$?&tP+FAp;`x~UkDb;74-Db0GP zkDRH*#E2D{y#`#|6L;Wz3YBGM)duWoTu7F`b=#;_y4ioJ0E$Z`%1sP}&VVTtBqr2m z=#bDNgQU{eq2k~MJHjYof9~Mm&YipCo$DTpdn>}&XttTp3Z9)}YllPK^i4S*v^GVh zBob3F%zqP)%RfPTx{VjtcbAix^q!OebnCX*0(mro(2~t@2Q_Uhxg3R3+iIZ`N4&>7 z3dQ?7EigNb$c*c*s@bi3`>=brzoJqwIxK8e#EwB3Gf$X@*F}j{cmr;LxSo7UZ9t;d 
znL9?yX9GtiofZl)XO-g<;3J~M0iv>EPO!E@r`hv4{XoQJx696t2L;+@Ths2`9UFrX z?~pkohhJ%kw|?2jWETgfLVfKmCGFYF$bfT8C&#>^b%sak-&9)vnHr%IA1nCQ#VNLf zYOa*t-URb5_qP@jMSzx#+OEZ0y-^KA7W1Ix+o$ly^$gk#`G1{r4P!G8AXc+z>R`;T z(Tcw1j7HNoHa51%#vgPIIoB=C4f%nzMy~ZmD?IljZYia%mJa7MN=D8ETUY^M4Cg-5OeOdWNZhP6r<+&>C)_ z#(J-Ea253t%ZVz_FW-I!A>b(1P3cr&&YCLOzc0{ z)uvvYeZ3b5Dng3W%cJ)$a@_io>LE8FKGXWisL)WnJ1X@DvX6Tg-%Mpjd23SbR!Y!b zInN5N97WdlvxozqmPQkxRR21mwff?W+5nyC$J25wCEY)+6WqLW2j2hPRmC3W5yR~O zTrSb24#}Nm-_(M2L)rVt;nHwy>jvpkG2%3FNRDV=TZfd;##GyLnfZ|%Du2>cuIwAkc~ z^nahSK5!`3NUijE`R0dP&4WW9bYfDKQx4(`HU`eMQHCLL^{`MS|CNV%Q=D4aCOtE= zysoaahv3EwHYR4~j=b@gP);ObAkvU60U=IP2A>B`8(y;8p!o3)yQv0GYKF#uu`S3# z-2_UAMjFYD0%~*rLqg`>_7(TQB_NIyl zC_i3w_azWixDH6Jt&%bFi|z9el{h#Q*|GifNr81Gs`46`@&jEr4k5dbRsFObi_LzF zmlqXfO`Y{=e1r$`Up`$N)It4k7624Bj_4c>$Q-B5Av0KJo)~Gz0dL!>1=_(6fC{pe`24$i;vxa{8N0lD+A*kb|3}awm`0h{*($LqFCdxX&p=@B1X%~#e z#Kg?BXa0~hG&Suy+z{JY?z0{kFuW-v6Zjzhcgys!gXm;NW^9>Bbj3YMCg{7*AtTnP zAKi{S+a_IpiNXU`+tuY&@AADOU5`X%VvTwOTxQ9agtYx41JW(^nujo}o@XX)>%U2e z`}SECd&jO-V1=M-igBxdUTO@|(#PWaxgSXx8%t`NJ(Fwrkx+3z*Kmx05S&d}_@FH^ zQq}L(`^PK2zPb89WPF_7(r@z}wknY_ZajTQ@SPzCa^{)N@@oq{v2#lop+haQ(q6Rewaj2vta*78Y#U^= zSBE|u0~YW;|Jh2^^;8?TziammrPSu_=Mn%7M4Yud1P%blWiZpF6C|a8nxynrBkL6v zXC-iJ-{XpN5(Ru5sl2nxQ$oxvo3Q#e6PS4{g!Ru_1E()(fd`ypxednS628ZH4oWt} zje-aaPAXHqrz2!8{R(`cXf7pgYKn1GS~nbcnw(r*?0kGgq@DH0Vi+7M0@o}{SLS9I z+?AQW$6ppdE4yhX5GSplsNltE_POnidT~0#)Nbs&=9|R8&1rrSS(o5%&b?#8z90+6 zDR4$mpN8J>5PT_JMQzaJ;*Dud&r#co`dvn@WxPm@6@3;dZyd||<-n%-V?W0zq_46* zUli{8Ea>1+Y;lJXh*My6i&yw|>8m+!0GWXR+P_XPHue|>J}R^oi7A_=_3(I;`1b4K zGB8WE5~9fw(}Gj=ghasg*Sqn3aVO$5)0J2lUq?hP(kh#ZZ8Mdl^FQ%;yqenf?U1&y z+?{c6x2n&#Zdzv^qP^;SS%8uh*iLD>*-8d|--y<-o{GF25@Y9!ge9lMeEi5c^z@f> zrMaU*#wzBN4cXt_z#5Hdu~|# zTE7MyA*TRGK)An5FtcWX=*$Y46|*v8#cEI$-ugI?@Kt3&9>JFf?fddg#e;nRSPBrN z`0^RD^=$tKxBA&gf@P68loY~m%y2KQ_cj*9fzF*X|@ht9J0b0CAuCzWwf>Z zk!z=QhrXAutv|h^|Xb~0$` zjUP@>q&UH|F%KM#o!LEqTHu?CNo{J7(iLivB}HlSCWz@0$()Dd)#2> 
zyXSNIM|w$P&6Zx!{2Sdz80Sq=@%GeaU-57n7>-QDJRelid|{pdpVz>l&_GLL+AIOz zpfj1-M1L$kAr}*>g0Ti4 z%#CB94<(z2`VF#v{S*Te*tiuX2syZI9OVoRh4cnj?)m~R;#&M~r^&-dqFF_r=Cd!& zBfW$>Z8KtgfBNerdHs|{!I6}u@c9ww#O5-?oD3;RI9~*BJH?03H?-72IWHOrbNbg+ zadtkz=jRv2rP4DxPNU0`G|{1InEk!NG5*N73K_ZmpZj+9n<^*cuFdDkBj8Wuw^p|E zE-d!Os`%abLr?6I{hH8+Fby&DM<-L1U?3RiogbQ?3xIy-rk`64L3#_>yuhee84dA- zr^v8BV&1z-s8{@7KnQe#!V52kc9OEOj}|YF*d@x3a%y{yL)MpWXD_^RVI9`-Jo)9N z893%3)qQx<*FbWE4kygb5;Uizq-2}|*vX}}xbxT`&MylX&&et})E5t0oq)B~xSyhI zT+*2_#>2Kq#A*7|6msy$B~S-fpo=A04C4oDugax$VrJ>l<;>@|2Uk|kFWmjY7GCURg8D+1 zgx^MRMT!KsIazz|@(o6#{$kG7H{<*d%Jmf0goEri!QI(U)h8t-d;htE*M3lz&GZbV zJciMltL0E$7^V7ty&cbyDV?`$Athy1`1uBpZ5oWdn0#J9A4D*<83FZv&uijA;4RmF z?3j@-{FwbUm1VYezaxMw9P9)I5om+jkz#>Vaj zW47Y1s?78gd$=rq{Kyfa)nTfn~h|Xa6%pE zWCe4hUABN4b-HFf)8xeph99odl|7jhVZ0c|*k%{q4={_cPap_R^`S4Fru&dyGJ<^B$$@Bd|*njcjV^3g{9Z z-v~-U5iK_MY_pG@jUI(u*JA?6?t+#P&e@ocu>bA#4o#DZ*Ee<SdA*jXw^4(ZD%prf4T~%>b;UF#n-;LkGF(mALxnSL!&1Fh=cN zvLU%WR=Y#{X)g}%^8x|#p+x3*CauVtnG%N$h>|QT0>R<>*%*d~iEzgs`6g?a2Z0;m z9md-6Wc1)n*B>e3hUDhL&W?a441>IneG#Po+JR!(0pMf{*M<|K1`_5pT#^2=BZ$tF z8K1HP(8Cv z)HmN%-mb-uF~; zi~#!DPYMP=v4Z0ah>L&a-CB+{Ih2j$2(6?GouqZIX(|Ro_k`x)ZGf^|vF!1_`l$e* zUdWXRzu895Uk@>WZT0#5IoVHs1qfJJ;0t@APo8Qj4>k5wapuAFepUd2S)3DZk}}N^ zA{-%=I$lVXgmy%sGXngbB~}bBqx!=EZVX5vV26Nr-e2;5844ZQH|65)_K)IQA_KgNRUd&yZe5vP&8`7AL(F z0Z&@X>LS?Y)nPytph8`4q855((L~^JxgLB+r>moW1HO2PL{bSnSOJW;c>v3kXqbAzt;vokmFbH-Q9%RpV z&iTX3l%uU5kVoye-C7!f15tzuczE^M+vWkKQT_`2;9lc3{Hcp1ga!j%!P2H@`FKJK zX~1Oq8~XgJ&@cyBS0csjlgj{_ujs;Bf257kf5}SKp41qT{02RoW%UYv}y^YjyZry6hB5e7iDZ~=@kZGXS+cogOe{+u`_)+dF2m6OiE zpcg})7pK{nv~;sWf*BNQX|C^jR zrZHuetDl+6Ts6ObQidquSO+)nx>1XlY2e%TSul=8IwB)Os3QCx!{7QT@QG8P`7+~~ zsXsHKJwK8TfBTjnFf>2Y@i;;{p!)8DQ%N{qISw$bFRQO#Xug#fl)a&T3Uw2v(k5iS z4s2w!Kkq7uY~^JILgmSri^kIVc0jMm8o4X#SO0y;O9<-eo`4xTkF1(5f?E!@Owzv! 
z4{^1Z#W{AWXtuIY5|C?;J0eG5Fbk}RGfnI?r^4l@Nh!W-fP{Z6C|HO`9ZF<STVrs-itS)V z=j4er7{mEq;YC+-rOTZktA+(!z8eugor(_Pzt}Pcrz`Y7AxAE>-Q40pbo8~1G%Rc2 zxPE%!g*!hZ00U~5P#x|;K4m>?7nAEwE&M?0_gIO+E*)YQwOCSqEf>saJJD1=Y56h! zD-sN9w4P_WbbB;e8Ml7oH+5qtufd}jQwF!EUwydW@Q3wL#AAof6D=x@sTVJ5=k0L& zFGI^<`|e^(p&#aUZU`M~a0+$WO&(_~f|1PWw%1sD8Yu7-ZiEK&Klq{7SZU9yrLe>C zw!<{5VJcq93ryAY-_g5TY?34Xo-JU40i30LGU9~_uGY&GV?mO`>&3}bnF$8Vh1gcz z8ysP!Ch;FVZ@8)uC*ZdgP;&YVL-Q~4va>HV)zuf45R$Q|^K-<`@)`I*&M8QhV1xeK zEg)ZXb=h*gkGk+CX?`nZvK@G^PHCuwJkgoUK0Yu{ObA&%y6B`>jFSe^3y=v8W^3ho z9TL-;7{K_d>!~{dAbV|L-&xHj8MS&HfXn1^^62g5`49rA2f!U0TB=G!hbDo81_Vw~ z0azzlQ0TsW5%wIF&-_3fj`8Ms%Awf~r-{;?CXL6f{7S;m?EuwmaY&>pSudFx7CPm` z{~aJ@zBd{If(I9ybC}z$n8CWlx2eRgK@hS(k|%>i)s)Iq#t8;nO`=v))0_##NuEje zn=8Q>n&HhG5FnS3*zR$$ifUWw-~4VjQ$hs-eo`6yCoh?b1_l1QJIG$w2 zo+Qax)bEkub)6nb(Jp>2MO^n$f*qF)uah>JkAFDc;6A`OFQuJt7z!PU{Z;mP<10S_ zY-KNV9jz5h#al3nw@h97)wFpq2d zX!LSR``5X-&-EzH%0lw_x$*6qwD|WTc3EHmvxwn~<^>Elqq^Mi$cZ8FhaJcSbossm zQisgNEDxZ0<@a`v>o`y|i+#cd7PAchB`*Pc3U?KS%Yx`wl>2k5ERR*Mq+ah)U`6myIgp$Y4Wh=2<+2Mi%8wV z*ZoGOx9!KcAyosN3_>x{yA6#)6L6K6pA>X-Zi$mw;3`4)=m>VnK7R*%^yb7RI5&yMRc> z@3&L`y)$rX#F5w**()(jq?x9s@T*s^VD%;EnltZ6>w!nwzSmzQgWTI~7i_h&aVU1~ zcG}Vatvpm8)b>hX%FX<#%c-yTzH{_}D-ztX14kdj$B=@Mj_||hVH$E{f*~v-L5G(P znnVx;OpVATtQQ}2Iu#pIPM)K#+FrY-0}h%3Inp*483~~BnunR0N3cu_B&1aVH6?spT6aG6p$oIeN({!0Q8mTZ!*6tGCwD( z?hQpTwt*Qb027m_aIn*8Ad?z27v(*#Kh31LC?=YR+m#B^hk|xjKq4drh*PrnLQ}^z zCjXLffJo#7mDc3A@QiAcMnN~aXSVS6CRSF2kU*#O*TK&P{UOp#k7Q8) zH_{pjQu)p3y_+BZ7|r|A(o!j*2qe+J}dd?(XiA zMnnebP(nmNy1@XX6=Vjar5gb$5tI@TX@*8ZLO{BuyK9*F-FVJ>zHcpE{^MHDJomHj z9oKd3y>BYz*C@Y_rL8WAWD&|U9t=GQyaPZ~;p#-=&k{N)S9YnF-e9Sc-@4H$h+E%o znQvQiXH(3`0}E(e&(2V1^Y@*5{@}5yhOQ9CH^(FWLx|ik z8R$^QJUUJn^f5gLnL(xgc^y~0QZNqYSkxv~uugg+N>LDt4#u%P zE_}9mp^Q2=r$W_6^&?_l;ht+1a9UBOHNv|KEq&d33y9yULp0uV4uw6?cpPv1|V8ie1bcfNs1BpyvNB0(a8 zhe#~!s`C}peWUu@sBZaNM@QgX4nb!mKAU&pgYN`vAA{@WxfZr`z-254nZ!*a4$yk= zMfgslr);iIt+3IetUL=f9P$hi>FjOOflKh;Lmp)aW#B6h4~I&|;<~x0DCHuHb{* 
zpzSMt%ygAeRsd`i12en!tq5+u&}=gqEhP+~SpU)59@j!3rAvnp~7vg_~ zc6D_x1&-Ii(4zg6Otw;w(3Uq^hEunYqNi;cnKgX0VVxx}vW=8*o%fJY5= zWT#}KRcH&CGqv=JQFFebM4E4Q0X_IFqnX3P!&9cH{@;~F@wO`v2;SraweW!3qJ7{sj1>ASv|Z{S61Y7)uS zzu5=tIIk(kAZ+fyEKHQKlczhT#7i;ta`6Z>)y^5Zr>XB*v$ecvUKExzGBp5=p5j0^ zn+t3uL;7W#C29qBYi$X1in)`%SLs2I2wB9iYjGROqyoVwD{40}_~%h<(&Lv{%ff>Q zKq_OU#OJ)cM!`9NfP#N%g{BGLKp(ztl`xNicupW#~2O z^-{eC5Qi~4C~FxTIcXbPn6V~w8->9*(lAm*E?n=(v$_2>{X<@Tf5LmJmp%1(vtZ_1 zl5jeeeFOzYuaten1gqO^j3o_7`p>Pa2sZo##np_9=JX4N!7Kl4^)E+845*DRm_oZH z8FqMH`15gPUvb7R(0qQ@Y|*W%A+io(<8E=L>`{B&rB?6POOijl9AEK#{Q3IkW(yb{ zWi0(Ilkb*Z+xgzQQ#aUw7E4}TvAFj)`Bg(tWM<0?*&yA4%`~4Czu)J}5lP69uoY4A_u^#j1<6?BinNkzp&6sc0nNX(qU>O!0vXB9soL8x=$X zd4#=^AtFxDg5mT%$+gD%_smREKPj2rWJ~4Yf_4A--u8>``Rmzvs{jkZWy`%bpRRA~ zzYC-u{??mY(x(5?>xQ1aTVv~G)F4gliPuvZn7@p(g8MCF0BUK1cp(St7{A5|8Ow&w zSO)VE#Bt$^qy`V}>R2z0_{kokU~(n%Ci*oc|J*p;8lrQN!MgmQ5A3?{c4X7s`PkU#u(X& zn6B_3FDedSNIpBlnlM?LudJk>hn;1-?|tkra=}XsQ#7wd@F_->Up9aoK7kPYAZ;U; zmc27wx07r-d}c^YYtQ|U9dwatYSa8FmdZ&n@a1-VlNPNKb7rdm2yvbF5lz`V-ePIZ zT*tI4Y<7J$DLUaJiF8%U#V1MH-pYVttepw4UUc|jCGo`}*2KfLa-l6CmDyF^>RnAb z^Y?b8U}a()$T`xbLk0D^!PtK+iE71}+5F!3spHZ}2#!B8a{K#}ybMLDoQnl_QoJP6 zL(sG5W0a`j+vaMZ1GBJXS3Ux>H)EkuCPAy)s~h(;xt1;(b?b4b8>W`eo$4SrbdpxZv`9v`Rkg44q+?Z&TYc`EcK9fp!5p)pk&6AT zB>R=G9es-=cFUnos=pkJO=mi98A#=e4(z&t3n1;*{5A}PvY%>!}s$` z#cOHjEx@lIr3H5-5f1h6VI8Qj7=QL`Mq5i*yU>;+B*M`BE|-yz${HzTy5?ou>B|hg zw8e6wv)CF?dUrwk_AH=M}1C=mpbK*IFq?pR)AVt8W8 z;x(8atF|E_Ax8^~oSB3cO7`~Dr`wa!<$G_HYu-8s>=j_R;w53PcZ2I2SUUhy_25Kp zGx`Z-S-Ti<*JNrdQ7@ZXz*)VtY37w>KQ_fyR% z1kB1l4M0!1m*&>aoRO3UO5i}!569lEed;Hmnlw;lxTnKF+fSMk)A1ECk05}eUWuw` zHW;ES7m_fD(@>bwuaAVn6FakO(L{@xzVo{K?c!IeD&+Fr3v%LTVkJ68_fxrl;qZlm%b{AEX}RQM|VG0?Du`!;eI@CQkl%)>g;KpK|OE5x6@ZVasM3 z0M9CKTGB94v>8K(&^|rbaotll&(ieZa+23`WVzS1%&U7_+MAIKu1@N>zdlTqH_YeY z;BZ^ws5WP(=TOPmyPtpI7Fp+Emp%B+PA^^zokE1*lzXBbANagb-gswF+Qz7i+ymcL z6l+db#@8cV(XB#BB*w|)b)x+>4LzzN8KzIQ=&rf{M&7CDY;j`$BG<@go1Wo(pEKo> z^K%FZ#MDOQ2*Jdc5BW{$@G8i`A<~V6TC6ck?iTPzC=_WzCET!7ka-a=&F_*sR5!D0 
zq|kDydK?j&b=NY?t~w5lnU?-K3P?k&k*6=CVs)+jZ`xyw*03PwoBsP_V{JyJ?=LP4 z)3RgRwoY1>4;C(t*5d4CzRr&MqPFj_Km&p4QGHTT2 z%cn;#yt5U{g7x(}G+t%B-G5^{pamQ;tgAA>ITS&qd&t{RtZ!uWK3UL|$6~J0?+oP= zAfLOuNSTN436Z7XWX6^o_`8%b;PA+u#Mzr*o89~KGkF&Jt24w3-_2K?-#S@xMXjpu zUp$L;Nm%Xgm+#t%xO;O36%_{)T2M%KIZcL)tCG6vOD@af^+Z z@^#UDJL#b3GQYi04|~wXglB_#e{`*n1&Ptz+F$k7itlS{Z>gACwJ)xwoGR?Zejt0B zg9WEkw4pI-#t!WGOl5rC-$Qx)q7v@)9MgEL{`dN-b60gnjoaEtgYV_B_KW#u;_~K8 z0u@aG=ZwqPEMy3ZLOwmK^F)HP*7Jga1sW5wKYrmqQCYwU_SQ~N4!&f=!&K1>=37-b zJzf?#4cPE*7@o6WmN~Q~3EbGgP5X$#^w3>%yXDJZp)$RLPD;(`!4NoBe^v}102q?=Z(PHt$%5o zl9q1zM&zP8+=nn0ApOpet?B`@F5rm$?2X50&eg<=O4wBoLe1a-X;Qs(kxj zM@W}A#W-)Psk`PDYd>GEXf))^45#k9HE{2mV%#+pd=I}kd-~Z~#tM6FAE8$U5q48b z!)Na)K6tFo;Fuxqgu2OM#;SMyD9za4JS{CMDG> zuX4NVbM)58To$iOx1EefqEy%fQ^N+*p!d>V3|Vf@+R5uU;5d4LwP|#1;ECD@g~>O- z4DIb7tj$l!Ra9124YC>!y1{G+K?u5)=iqJj*E6!O(Y~O#n3A)VP)-&?5QlAdr4du} zDvzaI-~uD*GQcFI!nXS+(H{M^FDg^Dh_?%J0T7cV)!OWWkVrAGc8;u_;*Gi@bxWqOYPFLMSztHUOKwd^vhSIei}&B1XP( z=SqsLdx@Oad8(q$k2BzK#L{N{ynB7}q+Z9bCkdy2U3ikPfODY`dPNIzi?(V;c4bb? 
z-zV1(ANEza&YSN0%BwP<9B!W&q!SM=uCCJ!WHzKBr~P0DBzBY0PLKP@pAC?U_PO}V zi_6`eJ~-YmKrW@CsdI4%@zawG2 zkJUnJDc|u8a~8q(V^nSa{fSLru6i=UCyyKJyF@2_)AkK zWQ`W|uxq3FcjKH`!y!b2uiI~;b4Tr>sFJ%d*SQBj5xPJ-C8w;6Mp(F%veI z(N$_{TkCc}P=-}hW)TTq)Oc*_QG$fk?ndg3)0X!==j)unwUIjjRtEM^dwTxpzEtCF zM|>}(L2bCn28}olE_eM(%SCt{-D<4#!otRrm2ekB-(rGqzp{m$nd03NwULwzMfG5K z;>ZonHnF2(j{Ou~8j)a&?}k zRbG^p*V8+#V6WX@w5=N20F*3qZEG_wLat_^pI04j{#of`Oca#yHrc_2H~2ui_Y~+l zUdU!jB5PbsTyvItkZV60{IDtj8DEpo@vPh0z?f(wl3A4x<$d<*Eg9zzjJSkln zwvy|lPcJKRa!~@sObjcU4tlxRAh%Z<-ft;zncc2@f9h?Aa6^UlwYHP1Yy0)pNUiUN z!0PHM1k%Tt+PYH)P>{O1LP%akHmPF*%%j0}0G?{`G;Afh5A?YRHGgBxp4#x;6JPpq zettlSJ!sj{sci&wwnB|8q;vkZxKD7W$PL$xe}%nf;b4i)^uz}L<$)aL>E3j498MA5 zp4iqG9@Oq9l(cq{qG>V8bL_e^1(GKW}#PSg*OV|s}a%#Z=?K*SM&_2QY||^`4tuH+Ag}Sagy9Co8#%}^qKeWg6SaB zwQsLTo;_=HdTAc9*L39QW+d8qv^Gedl`U%U{DB(&_3pvXiRvRMPuD3pxTWh*xi}i) zxOMBJ4%AVA5&?Tt$^34XWtFE16ug+K{q++`>Y}6ACf?j5-I85U4_0iWv{N5*d!e3N zpcuSR9Th_uRfXnxOcO`s-O6-@&Z5iyI znCTrgKU1lxsVCMV;$c5$3~pl7@jG1Ex2q&&W=&;octY)mD}6*RuXu86-*k2J-VX4? 
z-5X{NSuBQB>(|TT5X_m%yC}K4 zohSDG$ho_U1w-sbh$#>^6 zRQh^(Kndg8(RbN7&sGHTk}vous;6j`AgCtAVp*BmTXW$jWiQh48f$mp<;~6-M;?md zssPH7Wqf*GKg^_yXZ*1ej*s0ri0OfOn?dnjkX~H>PjmBVX|w<+9on=-OK~7GAJ3^q zMo8T@kLyOy>TTbQ8wbPTpI4$miDnl|6q~88!(sP2>sr@W{)jG*ZwbJK;Dx~=JzLvl z2;8)$%V2KnXsm6mxHH4`)XL+GXLRg>4Ua=be+O?-FJXS}P28~OV@$Ko zxYeR)`4{1xo9e(JKkqdPiCVBv-?ElHQOWWiaKLDgJVr~^TDH$v2HLWT+HMPj-AKziU zz4ocp8s?{+l>xzapbWRMmi*Z9#aMoI^P%-kR4_8)RSTjea)VH51S(*btvm~3@*((M zQ^$^{=;y~6TpOiHh7Ftxpt-3$JUr4eGL*T*!!)=OkTA>@#{}rIb-_mY^S9N$`Ax)L zUJ9dQs_fQDjsQnUAc+G2&u?^ zb}m|fi%Jfr|9}qVjE`DkN3+C6h03u`$<>EiZ0=5$f|B`PCd|cXHa8ahYnJgKF&jmp z+zl16Rci5k{GfvHF)`P|{t&B4ObEI=>bzg@GLNr|_}rMEPoIK4qT<6~#rOfk(7|85 zyrd$0=BsQ$tV2rQXht^0-9BZNMIEuLNmBq*R&!m#%CB(}X z{_>K0o6c3jRoOmWV-qCYnpl0-pB$2&L_QtGXApN63a@a7%HW5&#P z)MSdCl47D#l7F3}?q1O*pY#pSZkro3tu*Q4w#;$>QdLVeB+rvR^aL;F@f(btecn)lej z#J;3p$8O)$p;LFCkb0HPzOizyme1{LrCfrnL%uPQWU_x%>sItnPJkq2m6hqtFg1lN zcSE~k1J28ae+}a_oD)bg_j%vpf4#`uqGi`hI@H8}+Mq72@OU~UUR7GPPODjNC}y_d z2jlFg1Pxhn_YEZUjt2|y2#>?A1U1#_xhRp`PtHb&(V(8$_|08(HfCe5-PiFU72*9a zU(N{aO5t*SKT3R}><$rhzN z3j-zcTFf{ZLA13bYK(ea@$B=7n#@b`)8 zH&`cvHua>>>RdZ7JhGSI{MqxNsVTYPP0QJ}J^%DYRIn2^_$HxA!`KG;;&GHeaO7Dg z?d&lOi(ItV6)C6h1%FyvX#Mbz1|-ld=2^^YGJ`$lwohLe{?QHj|1wJM;49IjlSCE# z1dd{adncl8C1{*++}otw*$C^8OU93*6iDHRDv35V$@>&9<;I;F%W+kO<(d!jn(5_4H~HN^9jH zi~>_nz@9XZzN~-x-KUI&pk!Jt3(77BDo6H$1Z%Y}{HNYY_88FYO!50lE!e7|RmJCc z`S*Ln<5p-nOPzSv$SM{BU(1Vo@Awm&2a+(cDnx+v3oLbxI|O*0ke$>k^S}W z#3%tT)C~$gF20T|w;){{O+VUyBff>Sg+30xdXLE^R11}IgG!b4p! 
zOBF{BQi3^)V$f^W>U*$|l(D+X^RqH4KAzC{;8#twOH7&Z5b727w&Vg|& z)%ub$GS=wkruTif7`E}g?Rd5X<5I`i4PD4|dZFG_8!fl8MvEY$R95#hP_}sLtKtkO z$bQd;Rsa6r<3FNv3MY%GS6K+E@r#C~Tr|k^dm{-(|Gc^J;hVHIOnC#@afgbykXLka zT9|2sic-K+3TMcGp$MlwX_=WyRs8vo$H!)YS9dC;|i!VRQMGXIQazK)>_4@LTat0_m~GlZWH1*^ZT*<%yLxg~AyWdsM8 z*x0_&CemDlwvmON_@vr+{(1@+hUKS4T@8AW(h2PIms75!t5saljLggggJU8}FB7sN zKVZap+WO@7hti1I2Tm~*RdHWlSg{QSf{K=?kF;etmo!_qU<27_fBszMvX<@+f@~KJ zVp)aDOSTpCh8)@laTKj8hn^tyl0&Bj-f$OS*&ri&7qAQ~y(tl8v^C88V=~0fUfaQB zUf?z|MRj#S^C1L0`ZR9;N&eH&SPq=`9O~fykeIf3+$MM_i6t40O(b(TxQpoKn%fNZ zAy)eom%Pz^q0SRPDTN1tKcp zAU^>RO|W|1rWL{QhFA>7k(I@={D;Hd#vMClPAAMMNW!%faj{D)ZkA0IJ= zZXmVUnzAVo>KydFm!%KPhX+3$#VRe@J!ae$B_Ipw4@LBGcV=NyG_uHSZuvXyncW+$ za3JivB)6deenQ1nJowkEydUsNp`1DjOqz&ZMT#epY!0foYXm#Z@RTAsTh@^f|K=jj zlAocj)a}swpB0{LA`=%;1P<--syFnmPPE7)`d!gTOck+;dXOOSdZNb0wDtFjTYFRf zM2v|xDFH{p^X}gnzeyjO5Xw(3j8rjc@!+K0cTC|YEc$+YXXgi2{f8!p>My^hM2g9M zu<6oS6$=?;esF-N?#0iZzi0KIegtT`YFA!re8MVvlvWmc?%oYwSNN=*HR6oF?b_bms92~ zLh`e^c7O02O{P_~3N%FF5Vl_Z_!#;_EW1tp!m|YN8Q9stjxn>7Ougpooc%9g)`-!cY!^v6t_w_M7~}#vPnB7pL5V zWV^bn)gzV}!IHR|3OJW%*Z-4}4w$!=(~P?$m%LVO0A zzeq|&wp%`QA^05?felOGx}Q?4Z@H$du0F1}R)2%w*UY_ERY(QgM*T zBmM_EI<3na{f8vf%k3`8KZ7g?=Fl1@;eI~J7Z{hZBn#*JngFgL!}Fv|U}X#J6q(M` zOF1!RcIw{Eng%R}K;=m^LlzpKpUYDxdLm5;95-9?`J+unzcOe+w^+`sv#nnk-W^WsDU0?P?TdJ7>b5%?j|J8@dw*P#u2VO<3j00& zRbjdnjyD|n!XB%kVLZRMD!=5VrVMH)$Pj?Tf-8k8(F5q+rY}+U*Ko}C>rMdOX}M|I zsI||pxVLU4-$a7Thn9DbmG0g=pZezcSuCe3xk$(Uo?*cHp5`vI0_j2_J3fZGnuryb zF9zgWou4hadoWR`+hs`!9ObpHr_uh^N2K^i#(EIjr@y#(9}(e;i-~w4UEe&qpW7$n ze2RS_sE*-ZX3PzMWYWLTZeX8)0cm;v9Fu>q$KsFZhsxe^o*8J$U)=|qyQv!csT%*t zE=$Y*l3mCymrW#oh4jr8A=5F?XHW)3hD*$xfS;10eJ$)&D2zla?WL*f=!Uh{Cq5I} zonM}5mHHhIeC=escYLY;7tL1$8J$uWzl>g#JGpe!oFbg-Dg(4A3X#O&7vPEnw{PxkEDr#q$v(?Wlz=G70 zi62N4uQFw97;CPTt2{J8w0WWk+F5_H?utV4*giX)=}G`vNC_a})lwFcqvO^)+NLC> zAZU}B-p7SUFWL=U%1+`hfHDCXiN=0V83|ma7N+zj=jBPDypeT1sb=T)-HhoT0m`#X zyU2u}mxD8m705caQ?+y?H-nVnq+->)c1hlmQ9|GTKRUMhudDk1X#sLyf%F>i+i#s7 
z3T~ZEHt`ejd7{1&p}#W=v|mfmqeP6%)Qlc)~geBA7itQtccw&H!B+J8h z$QCzA{QuQff8^O0))Eb51CX}C?c~fvXitLg+k_KeOMkMgWOsOX6B&k;CWvP-&+UHa z-Q6*i!wiMW1^y4DepjG8T@AXP(Aae%lv02n-a^jyl=8G)Cx#*@RF7f(`?PG471Sut zc4+RAkCCXVxw-Jur%!q6E&NyhAq})b8BK&@gYn=-r<` zv^-C~d!JV0{E?W+Cd&XC4R)Hghy=I_H5}Zu<-z6)@8RiT^|f*_?(?{k$Kwt1rxltd zZi$l05%?J@l|y5bfvcZ+ln=4!?&*|(FzWESlnWq&XLFOWA0*d2TEP)Hi{j`9a}mUK zY=@$Q2Ig^7i!n25_GX)cC$I5eVc(nqp@TG{B&1Tv%4V!t?|AH@I2W(4`6v6Q>rduz zcu|2`ReWe)o=es52m|6h*LrroftSdz;R}qYA|)G#EW`)H0PYR@Yl5*0tc^vK;^rfC zTVIqMjGyLEls^Fpj%ai1t(%dNt+#YgZ!d@LE%uHCkUE6Btn=c#J*85-!3W;+5gJ=$ z!4!`7B)5_YM=3vIc-S6aR^n*PY%eQdNc_beGG!o(>0gw&?tV~w6#MzQUtJ*!4Gy>P zKZ-8jd*UR>v9pC=IRN>zI4|>0Cpx3W(Z~Y|A#NZb4@HD;762+edi>qhNbK#qS5(@m zPpCZv@YrcWNgzr>05HU8AmAf(iW-2jGJHlF)zusllU9}D5#EjtpYbwUFgjhN5aAkL z^iUAT@k3isZ~gjs2#0LwN=UxvJFa>E`ca4z@|{z4Z7*U2Y5Lm}1<1l%OZL})N;f*j z&$bnL+M%a!7X2vL4cAyD_GuHYgh4(M1-mK8&4taVscWT9=~vg*7V{Q$@i4&2OuYNv zQ)|Aq+FJ>a*hc|oXZ{pDz$J&a;S*=bX=%Y|h>2(%`ZEak=IyLl{wib_!PK|!SiqTs zUWZ)9cG+PBDyo|>VHCu?qy82BRH|Sbq(BG#*V7~5hdBeq&c%StP^J4F0{kbkzm0&( zVlnCZ`4i_S3w?fm4RF8b!=&?4fdh!g+Y`19L35C|i|1pc_VT(xPMg`w5twIt)22^M z;09Sylb$Fo9sa({m$TtUKMLfE#zL=np8yx%?{u!65FANB3k+uer?>;l;F81oQ!@^> zpfd%fZ~zQtLiVl(*p#UhL!qWDfHGeF#rVqOm8;Gg={(B&wX0d zaL5gvR2hH|5UTqWwj>w;lU9Dq%%bonD4uXUf#e;|_6%omoYKvKw12yj)P#P>$YATM zBz}UFur?Zx4k3rAOtN_eag5h!OxlK)}DxSQRM>B$uHwjUVw} zi2zq=!?$68LCn%a@35(67{^)gNRb8h)*WvOTlj&!UuFg1J$x0Qv0;FWZ4ZkDGAPde z!9iO9p-aSOFZ3+)h6BKogCJIjlOb9u#lCiSEL;r(=tIKQ^mocar_DbK9dS|K`3o@# z{c6&C>nXnd4NZkPakUCo3G@^l>2jUX0{xa^-DdU@eR%2#C3^(u96<*Gj*jQNxTr|+ zWhrgn=3Ainv-X$uq_~=!BG!N8dd~YTr6G|lj~l3haOO6IgS+f8rKTywLLc0(Ht7me z3*?%Veo0Cx8)HEms;M*D-)|d*EviVR8WkPA{HOGwWUutg7YwqH@h$Kt;tnKFzkeW< zJX-DGV$C32g@Yct<=w#EP`sD|L|taLe7vL>7#Tg~1QO^Ayy$VT*;QT3xPW%d{H~>{ z$H-x2^~AQ2$krM0_mUSnHzbGwovd`!q}zu9OXqVey$Lh{O&*5$CXzeVrqdGMw-VI7 ztft&Vy7<$8$ln(earq1Ktg%zQEqy^+6;BLMkeQL1JYTq&qmSoJ6b4*sh0EGU 
zZCej5irtwMHx+;c&24oX4`|E*BL})0%(F;MYh2XS9dS4P6-Fbl#q`_1fcvE1<9Gova)Z4hfba>K=~98+B1jD5!gCwvrp>j0SfF`G(bO`1N2~-R#yTB+>nyU<`A}8`M8oGe-f)ea6!C7oK zq(cZ6o(IwFe{s(HXJTWTpdL*{(4`Y5m7#Xm$tual0 zP|%_^|KY&yE{jyR$=^D@_CUfn4@Yno4wk!XeGkODT!A)ZTN=``Rva>D^j>Cu`@Xs# zqZ}498Fl>vADP5D&}K~@t@)ymCS5Nbl~ZPW)*&}9pbr`5o0>)f+x##14v5R!_pTv@ zuCb8tJJ=L4aKOcW^RM%I-fS9nK9;q{;TQnNIZQsu$ckQ=ik8em6R3`=bO9_8nz*M4 z5N=7NIR%%P0}}KvMXB*v2uUDTI))wR6@WE(Fql?OHAZHZh(y3d+(n) z>*DpefD))+?ZW_r2Zg~9CB0F0%s2+Ie+1|AR+Yn zn>UJ+MZfu3QrD+WZ=YmA zzwYtspR=mc$ZSMG;YxPLD8IjoO5;1%tWVCmt#=Lc&m}SeHd?r?=ylLqXIIu25U4@wWQCkyR&XmW({I9mU=x~9he{QvFzz09|7KnU*gc-lR-Ez>N76HTCHvT9G_sFEo zxXAoLG?Fkg2mF&uA*H*zT=RQE1%GuJ%oy&(&rZ}WO93qe9dYy|2UJl37y!dA`s7}@ z?d;}22#khoH#_erb;%mkUX}?ej~_qI8(0gO-su`0#mba_d%uvAH#iOlq)6%fY{6-C zs)4IdV-)OZP3fuca2)F}TIH;R#Wmt2-BW-nkXA_Ncs3X`xAwk41ai4Yw;zMEPJpW z(5K<%zhO@A3p(W#=6`(YA7}33MX=ZUC|c_(I{i_s(*eCZ_xx<1Vl8({G@PC!pu zV6Hp5iKP6$Jg>2tPyO=;Lzw=Tqtp)LG=#Pm13HgN0%h+b13k))!dL7YSg zg_1b<-dR0~N3j!u6j4b+@8F9I-t5YxYM}lauw&W%Q%|`$|8#j!ol88`e<$7^`yDMg z1G92Y@w*s!!cQuGY(r|yr2p3Tvl-I=$^2+U2y6XZDX_W~BDw!e|In1-g)?Nqo|zzO zhAZ>F6m6pjLtz}FmbquKyyvEsp9oHxpFVC#U1}>}1mKhs5di=4cI)o zbzr0cdk0Y@Q`G0?uAH*e{v=Q1wb#Jdly&!3V-^e>w2i@St&foprdg}6ikR9NfyKF}x^Jw2hN z00f>V3UK%~D=0H?za=HlZ%duvsEL@3Ak(A*YXp^;aG4=AN`rNGcQA_$J`Kln3rLQb zI{lfT;`JIK*wI-py3B+D$S}uKKI0}W) ziz&(eYu2=*I-&Fh8<_o0u3eHUOk^pT^j{jQIxq<>=!Fs%%-A3BnTr7gP>%GBB^g^J zU|kXy5mv~C*)gZ0;?Ccj1$6ipt?lW&a!g(kGSXEuB6#ffVI+{s@J9TgEUVkH)YL0W z6DLpJHKh<&UE#Q&JpC^8_UR^l?_8I*Q5hJB`W$59fD00KzP-vxwyx-LbVy|aVa9kG z&u&u$w_XKq1s&Z)*CZ7R5P!(RZWF&bF?x9De}aH#$M+wtW#_ai56 zg7u%5fBmYGRZask+hCX3R>!l?o_Ac7@c02aq@j~A2&)=Q!k3gde*7Yor(o>oh%HVM zjs%uY12Z+Ok>TT)Jdw*z)_V69IT6c<5GC4-MN57{@Kx{W9cOI1-zW6+^3JLIf)BmA zD7Ps-c#ipj_Y<6vxKLA2a1(Ip5r1n!#M3xzDui&Oijbde-DrRNh|b!~5!T zhI@xZTJY&yiLl!;Z?My?n`$hQ;-Lri0V)Z5f~nACyufDYLcqVK%c8==L-*BXKi=m; z)*Qj1K%l7?e4PP$DR518_f;Pg{;Zv(&^P#_OPB?AbyWrsv_s=3q49qQx8iRQN2EU$ zYnFVNDEfT9`i}9hg~ShJ2_Q$H;dsm4Y*up|HF(AG1Sh6q$x1Q;nH&L}78F~E%_dopf2BO&Eo8Nv=$EQ*P 
zm&$@LS<)zpHI|vxg@OlQLF6l=z~t?Q`rQwppXhUoJ=%;<#$W;G-W@O2cip=7!dt08 z0{z>jdJ&aJr@PPF74DAU}J>0LSnvF2prSJVH2p+b(dT}}5J0u2V9Or*WO@OXSQCD|xD4tWD zzlwSy}Q8Z)$u$iS31RDjp?_Rj2(ah zpJa);v;@9JpHCz#_*eOa?hjzBRBVy2Z0D;%I~dn_A{Tz;HYiGkKi+%4^sZF`^mcSp z4B)Z{(}9|h=JQPAcFFVJ?}tK)_*=&ttiKZ3Zrh)Pq@|hBmKlnyBlG*$Ulom`s<(3= zP0Py2gn{AsX3hT80SK6l+x{BvO~+r--p0<(&K8(P2O1#!_VyRs!+NPdigx!A#e{Z| zF)-%~W+3Aow5J4klF$<9FIXQ8i!i~d{?zRH624;hu6^Ae$Z|FQpIQ*HF% z?!)EZ)usikpT3ZvH}Z7c2UkIh;|os=)WbvN7G=AB*{@Vx3Pne9e$vf{OYRJV-poMp zxV!wfXC%D;*WyC!yD)cp)COSeaY>kDf5VR7eRNOSGmyg!YJD%O;E&n5pt?PeNUem7 z+12LL`Xw4YpaYSnY7+?8Nv#*!V+SoBd%DkQ2FGg88|*B$U}Y z*83eH93=gb6DK;Vcs)R$@X0Ays64`?=a22B7G~U)aEGMvr&OUQ-g%!sudc`ZMfCcM zaKzuqJU{39ysKNdg(T$Rp{1L7sRCM>?pi7X#u%^@LX5_@>2GC`316O8fbKEhCU>jfaS#8qo8QKlx`_3XH zsAf&N67P=W+%CfU$R7i8RIud$EY(M`O`!+yI#zRS4}ME*?)*a#1;m zY6aS_RxV|`_WUh=AEZ$zWaG0Ffa*l(@?qWv_BRFwEll^vY5B2mr+1x7knbuFf54=u zeAz+&4teVWjv>I%#8d;|`YFUhEV-ZfAP_~#NCQVvJJ7s#7AG}hnZ5-z`J)RYPR>0y z^4J^2C7>RvrHr5KDLWxWL@b#3a}R>aY?#3#B?W|KmdGL{hY4>hu4A*+>B%H9JwrO? 
zE5gDzYt#bWWTWPhpIH~(K3QeKD(F72=J;XN*W1`*UB!~*FkMySM(Taz$b$Glv)fuK zVsA6X`~|wt(o4a!7~dk8cmwRiVzv-aOD&xIypg@%^(lOX{M*^a2=2`df?J%|!<42E z;R@`kKz={gOwTpVfgEO*fQK3s4lHy<>jU=_0WRF0$dffoA#lBye#Ud+US3|#`Ju1R z{QA;atpHn37TJe+d4)?Beit*GfuP}HFqD%Ys05miyuS`Wn|qGkU~FUimO$JODi~t) zC>LyZv!JN_%(^FmTlUpu!gBBBLB#_+Kx?wSx3YjF^vLm5Vo6}949O@;wo5*Nk>_1j z>YXQhZzf8BP2~^TAkdlV>FL6^4Wsc7Y4lHx<)C87=TsCHIP?T% zgozgl?mh;qSjYinisrK9o&o~-}*ert1c^W4q-?8Quqfvn>4 z5s%5ik{3}G)G)DM`>`QcMvrUtOqW|LL*-`bukVNiPziOjQz(Uh0Wsk;t)0M@3Z6V{ zLD9ko$w;WN{q+jUK`8$kNryn4z}mC!X4~w101a&;5wHtUL?8M-mc~!aIndB;45D%` zW*TdbC(wtswS*A@>2fFx8S>Q)rXS9I{@(G}rxQT|Y)&%>G)?5ThB*$d9yCBRyJkk9 z;?w#hzB+iXNIW6NX@?IAPU5k-atvAsoG8f?W~i=a4!>Ia!3m4ZXwc!QZ5f+uYbQUZ~sqH!$Jk7av-$#?8qxQhE$rFDQ zfYv%6>!NU9WkF|SgUKjvY68rcpRew4V|?cmQX^+hNpXxb*NF@>xZqOF1b)Cjd>)M= z?IGLViA**wwt76N2%3!v7N?@gZYdz~EMlX|5eIUA$~GRLbU`DZ(wZoj(J3tvdK4R$ zoJ_5mg)a2jTRyDbmV+t4@Yr>LVdqi2Wl7~9hJ0|%a*B;x^olRQ0{5EasZY}6j?c)C z^6#P1vkX8OcIYAhM$*=#9m*_`L~IuT-=1lbNe1}(RlMyORCP_%LFz_b^X!@Y&Fz`a z*0I@NKKeTK**{31K@sm2Mcyzm8&`xg?=06{1`SL+%{GkqSet>Ne07QQFxc7N0V#Sv zw#edX2o~(3OgIY0{Vzbext-lacH~C!8s+sh7T(8kM6+KT`F=aAKP}|;jZo`|mOMhm zbH0Z9{F1TUQS*>?+6eejy3|8U=08xO$SX+#X~o+vjY#Bci)#}C@8r&^fk zb%_efdcPn;K&73WZ+u!ZD_e$8&VM>BW>251@f6zhaN*q7n7X9SeNf!^f(l&_)#5nR1i}q86-`LSLHNIONueppOWFZeUKI>G}7j{`-y>xK6g+{TW z@zIOuyO=)zuf6wvYqIIug(nFmG>M=TsZkJ6qzEENNel{71Vj-KDN+=qN|8>22El?< z6_pNx3Md^>nt*~dDbkx1rT5;FJ@I+o@7sUE{^9jF(EBEnxn|~CslBL$#a`{W$s!=R7=|y_v1@jhY+6HcJ5rN^w$?rh$9wLk&JH-PJVy4+urm~33GAkOz5krIKc8LRuX#jWEP zL>d|4R1N;H4<@fPJQsp#EKI9H0(hlm4xZrd8PocC>F;bXLHX#qj>xtS53Opf`$$^K zP^hN<;msGrf~PmpTo(q~b^RRScFVnFRFwt~F6WI1DgE-R`(asBz4Oh3x^n{h^@|O! 
zAUS{M@2rfJW$AO)4a*;H)ab>XJxhV2AJZ^iIm|C0+4$iyzFrZ8ur0pVyw<#PJ{x*LrT=4PbPu!Zk^qBBh*846v-H#kA$)CG=D)VPz-OGXr z8lwQsferKv&i#L^@_!&3E>-+|x`ohCw*jOVU0!FSts&#_fCz(Q>(0vje2z7*Q}8Or z@3prse{v60S<$%YIX2-(hpnB4m9XgSxe|-j+@kf7$itVeGRd@EZ!LA>iWg#=H#)6R zw-}ld14zcjF15#bp@CPsWT>L8Sp^|H`}*428U*mUjuTmSG4wrH9A4a$CFevlv_Wrp zDJ)Es>(E>}!d!;Dhd=L3jj?QL_O!vkxzoIcF@R3r{S7HE;I~5oA(z8>z~@--T(}HF#l}hRhaQ~ z!+lvzp{$JS*|TSF-@kwIY(48O{(M=s<;8HuEDMu|y>4{Gy%H*)l=+uu#2RgOx6wv? zwx?@aBN-FDPXO%us?DCIJMN52a(f!gg75l1y^(rP{6;oqg-AJAJ)%^$l2EcV=vv7Z zA-2KnDL;2gI;I^S55#+jpMPM_l}cG)r@zm=RFJgy-svf6Z)d3bL&8Veh-grVb-L2> z1msCmiof5_d2Oj-WJiDOIOl^(I>_lmkD8nMj*0uj(-GrJf!e((>B4>`xjG09jzPUU zb0mH5m&@bY=>x+t4fMh-TVaE7aGe^u!YB6{y;-!EHES$sE7V`&S|TBUh>6+o8uo@z zd~at1Rv+UR0|pQ`+%@sy@()h?S%mXZ4!(6OH83&?`4i4&Qku%7f0!yzr?zi&B(1mI zJELrC;|&jrJJRDMG?WhB*q?xe(^~FDg&Xyw$qzW@@|Uj@8t} z2knwEMG#*#ke9z+`5f{g@H#^F`KZ64(!`igF1GiK(fL~}ro=YJf+x6w!kVWv41u@_%<-14%rHliO z6@jMEgzH+N_LZ$YC81o5sOR(K^STIZpLMNC`a*$992N&lF2^<(ALhScQ-SLYalumM zuDSHxisFMK(rKH^`bQ6WJ-u+qCUxZ^I*(>b?zgHb;9T$Ft@jto4nx4B_IWKK{LSG@ z>4zwHRrTboVb+6PZx#87mAkuk_ujhD9)Kmel%MyB>|xI!17O!cWfSsmFqADmMPb6G zyIt2fO^WQ1Kda1s*V>V{@L)Qmy!;S$13Qh#*Uq!mNpzk z@Mfefr<>d;2N&SXE^?}y3=+X%jV$(kdL z(A;1-9@YkJe1bW{_x~CqYcO=*=u^s0P98l%?h%5|J5s-WVZvj`+MZ9M`d#A18<`xO zsZSUT{xbfT^Vry{8%M}K#1oidp$z1*VKC%irO0tsgyW$mv!tenN6fsl+>_W)(qRYT zBXnr20z}%@hH*I&wrN!tq9k3%8ZtSpq$loQ0)I4NkmUBeDk>!x?qYpbJEH%m<59yyRyVGAzItR2K0Lb3CViB! 
zoZ?C6G?Z`mmO9DEBd8Xo+o##^BzN-s&D2ZUjA-n0NKG|XGhRF8)hK zq9z0Xt}U286)==+Vz)vXzNcSwz@zij;N1Bx^{AH$H=j=0;z}p&yS#K&s%ctxDTIG@ zAm`0GVfJD9GRkE4TD>S!?qGuJ2btMooO7{efrEyO-S6`%U1ErgSQQj_w`0UUdd*2c2@_$=X)I(niRSD&)j&nymz;_etOQH)kt)f>vjns(J8hL> zTt3f;FbQ+vmADFH=oD{gzu0j4b0X=+VfRyYwiiXydEn$Y@9EQb7F9%C*?p~k4`VIAevHpvY!VIkFi}vaP!f6^k(UhckAIH7ovo5wDPHwN z)SA$(Be7a03axaKpGDi4V9KN*YWRj(q*g~|_t)N=>C$qo* z%Sf+#SaDdGd;q5R-6hL4aY)S3wQrWN(4Rq^TvPt|E#BVCZ}|oF_*$y1CdW>JIIe07 zGLr97i|ewEGWuqrM1pQhF#R6?5AQ&be=tJbceEo_u1=j-+}hgFpwf+5uc>8p7s#5D zKJS9Ek%Q!8I~I>UUR7`oJw)F*Nn4btc24Bc3XV$7<7w5ej4MR-iONRIRs*tpxf6#C z_hYP&{*SQntN1Loro9x)+XcRj4DMA~8E!xR!CkoHGszlG{9;fYUD26}$Zl^Df${f~ zb5>Bl9XQ$mOj};c4^C+{Y3m<9m3%KvjIM{J@qV`84u&@# z@H1>jQd{DvO_>$XLRp}?k5(r!}ii)4URHOvkp^8L0%97vyAbe*# zm?uC8Gl%?CyW2@K!9pXa<7tSnXA&u>-i^orS=D=BEr;D4&YVklY|p55mt|uJ0-Wxq|X%*6#;uU%*0-J4KwhS8!>uiId5p;i84l!~Oc)yQ~KP{Nddr{td z6sKRvfR+_w62oJofyGa9Br9Z^9dy;AJn0%`UV6_eDa@tLmx*aSe@tReWKo`jAQ_Q6NslETc+xn@fC>Da_Vu#}2lD6~ z)qXd^6KxVXnxKAHc2~(0!Y?Jb!Kw1I=4yy+@WC`+`d}8<({f>Dz{+kMvivxu`8Lq-u^4oJZI|=iE z5YH$Io*$hncD~8-wM}IE&0<~lzFzt;tbK>S=!xyfWvoc$LDnyEnCKTsSLw2k7(%QJ zl7umDYm-nXMuc4Gc0JJT`;Zx7x3*?FkY_*F#c6%n=LU>gZomZhk`smSQEY5!kxW*F`1 zda&eL`Kt(tOHB1-a_y<}*Oa)@R>}LItUBX|S20|?*SQ>y!pL=BCYc;52}PVvk*Qcb zM76xP`EH7*7Rl=Cc;p|a50n+_#|Z%h&(LnNMDk@423ck2i3)olv;IQZ2oZU5V)Xv- zesNb+*ZlIg4Y{0ox3bF|gnDFG&{Yh98RmI%Sy@|GTP}tw)_95Nvf6X^Xpu35hPnjh z^G&Ciu+O2RWRa1V|CP30F3&f6&QFR|eVoB|FKt^MRclcAj>BM;OPg0nkIzg`*@6(3 zGdbf=RZZ)M_7=K46H@A7aPqsyod>tgA6ZuXyiLg0CjDsp>-Xkl{q4n65prx2f$KFI z+XYcv%;mRBFn7wm-PPuox0ER2Zc1Tfcx~WUm0+RoB_vGh3FD8U%aspbR7fPhj>6o6 zX(!F~+fEm$p-S1nkilJQ^)iQ*?4#GkOoZF+7xXA-?6vmp?r#6JYf_A0;fzLAf!@Cj zCvp#e-|F48dclM46m5>hT8yC#CnsmthwFUJOZzNLAoaurw?sq;-(S=oJ22mItFY2s zT-H|IB>rl&%{`3aG4J_-CB8uh`1YnDnIkct&&x}3mVK19FLJ~D37fZM*(#?!@u-i> zLr#;T+nuy?eRv01Ve4bCHS&6eU%_g)8`tg2%$EJe&v+tD!m87Nd=E30ECSMZL8SR#{S4y34{0eoq4G4`~e`WKPfaa~Z zkheIy-;f7#yGQ`RQ5bz%4A$=I2)nvixC;`S{)G1Z=T{p!J1kYMY}~xj_P9EQ?ZzGL 
zcu$FrZ=8>#GgsO_x?)%04%9g|*heZfu=d-*?gw9bb7r}1O*f}=hGMLtqE4zXt5B^XAx5uij(+#`HNhhBT0*sH(tKyeo1;e+9>(9^f~T~FBd+0*wdL6TYCk*w*4HJ!2SC*-xbIO z>Sk%yIdL{sT#A>w$I6aK@yqeb)V`v*|6ED`qeIjPoa|=_5%x+-Gk}!|{1CcueTDC8 zwy((XZ7j6oTn4w8?c^AiHOx6V)6P!hQJJu&|3fg!obh9+-1!~X)(j6)y_5RKv97t* zIbdmVe+`xCjxbc@Fc@ZUWM&FkEwa~zJEO&@3Mq?EJ^y~rOOVJjJZGwTzv~+D;KGEJ zSo*)ic4-iOa<*3(9>Ma&MXy)(F{d!Mn0~m-mJoIyG)clF6j_DqFxH0i3KuwP0>*?RWeeoQX(k)vF#LC6J z1*%NE+TP9nTaDCB*Y!^-FZ-vdTvC?KWa_3dZgCoSP8S%+XhqAn%c~Z8FX)a*QO|#xzkzxd>GFIE zzdN!9FcW}*L5|dy$6^+2V*2H3<3&ys7TI4O=2K_2o&<5cJ{A|v6U63U@yCC4Pc1k! z^>+F6?yt-GC-!Dp9S<+H%Rs_2^Y z4Hp*|%7AN2=y79Q-|`m4*n48F@24Ce{AgyF{LAfH#`_^?_0ENk)krTY2_*l%UDUVqk=lFuIb689k*=Kn*J|Cv zLeFz%oJ0LCS&kSZ(-O{o!vSRnOO8kpEto*jUHyf7_@|%Ao3%@0R5Eqz`|A{ok3Wx5 zf-!U;q+7d#IvGLn9rGXF3HlW1Xp@!jbviG{GAHqI+oa8ps!>7A*eVlV=t1?dqC9Sn) zSYO8tB`DpHBj zmJYGpn;yV)x9|Mft#I_yx6n!6la&d0Wupa5#SP^y_uE&i&s{|Jzo3WdLZ@a@!ka%4 zJ0*wB*`#+TCq`=DS(4!jyUq0#iOKJx{Pu&`D_PQ2mb>3dCWvcU9wTBnUr|ZvkE>gQ z&!k=)H&c_tgjg~?DRzh&t=x?t-IvCXVT`FSl4gOI>eg3J$+?EaWLijn%O~CW8%U&n z(XS!yELryy`RgCK*rA@Wi&BOUFxe6MDIGBKI6P@@wl=c+SNS5}bmu7@By&?V(6&At z@m1E+VdtZH0V>SI(G>qGBUI3LT<47mS1YC8I#y_5IB?lvv#pVOWOpVr(PH^Mu2mX= zr@K~ws0du03_xSAS5h2IOsBjR@L%Tj-8xrgd0o-MtX3jNRDv>gTKPu0$dGOkA1)3~ zYkFs9VanFn$WCpBe`c21SqT91aGfD|V}8)JEZ^r?eC47`((vGy+7c1rn`2Wfa7ArZ zRr`+di(BR-4GV;nic6? 
z@c_4}WzgNabMSWxpNj;Yj_qQM1X`RS(}8YG&4&OEcb7W<{t@#rMDEtmF;5~X;pk2w zGnJ3(_C@ye>C>(aI2lOd+{gPm`k56SJG++W0l-ZHaaLOJ4(RZz+8Ub ztnU1zq>uF`9j3gCqt35fZ6|amiMs-aFAs)>o*E(@^eze-Px*`N zqFU|seolBLG(clLcSoF9kRL3mg%Jx}$X16%QN&zk#poUUS1&cs%xwcMI!xZ-4Ms(8 zO1VKvi%^^l0=#zGuGw0Z)A=Zw$n2Va>%k;Dz_MO@c*U;*l~VaMDsMsOHbmms16jB# zv7`aC*UWCP#D36fv?J8=wS;ZJX|j3LI*7ya*!`sXzt365`!|D3cFHpzSz?q62$r7{(by*ElT5mDMN*rU~JDr)1CV$KXk*}7FW zBS{y3-ddj<9#-_&&RLxQwvpKTLdjo( zZ=wnwLvaqbRwF`3k0AR2c5tyZmyv_Xj5(rgXhG zJT!ve6rN?53=7yJ-*Z7RfLCR^$f&eeIE z-6{cRAddt`Gd%jMOjSDTii6E=p;*tI;(2qit*xqG$AHHMmw7oLk~QFRg7c2o z?0w0?g`ica->2b}RiTHUsZ?=<{V2kIKJ2kiB&tTt<~sil`$8wasr{CEyP|zgAmU*F zPq(kQAKSpj=4)ry{^HYdwI+{(bZ=<)?Vv=o+TUKyf-)R(1X$gE(68W@8Ns?bcc3dQ z5Fs9sn)EyBJ1+QA>6+jx`Da2J7upv}0NvB}F zpT70n(o6}z%iOpOWLN;0ya@2~PQyZ0WU;Ba;b|rJht6uHf!*tZocrsO>wk(E+b1p4 zf!wnI?*a|k+wPeHm_ED$Wz^?J()vWk$lAw7M&A{(gl=V}iqD z`g<9p-QTbJ-_@ZCPYxV5zyN>-;&EO1B**R#s_`R1)&3AiFi`JBMuP>vm9%`fn-O+S7QF?q#whgcN0C7Lof(nD ztue)pm%n?R!{EWRyCg%8i*%n2*PH+&aMuR-ch22HR>FyOFH9Cx8h3}}m>#rXT z2%l2O+}I9y!hRcsMF5hx=M1|{dp<}_l|S|=IBVi-i;t3^&~ung%1Q^$*`-s!AD`#? 
z#^n?5Hm-C0UW(+)EA+UMiUMgI0Fw~4jfyS!U^Tcd)-(0<`<3Dq{t=A~qd5KbrHM8+ zAd3N*oE8dj@H=|_liQs|3OJVuPeDm_NtJJs1+;a+l#6H`g6pylvxQbpyABPN;z;*C zj&HxF$9Q?{$TsdOK2;`+9Vnn&}iPMRve0@PDPCl*&jSD$VnwuQ0FkyhL*+Yz zJ19>nrbQ=J?VOk3CaTu{4Bv(q1qFJZ0`LIF1YE^)?0BtS6bB$#bAT!|!#q3w2WyFu z)ztV}#hToQvZsL;ZX)35_k${)*&ZRh4W95roz(tg)Hb{%fSUo{gGUEsH2T*WG050o zzjctS^z|Q4#&hifK(c_JAhuDp@2TdRhp(-a5N58KI zX%si1z3Snh06;5TDb6lIy4n7c$|(3AXrQm*wiyvz{F9b%=Z~MjNnL(luk5PvPkXJj zuk9h=?Tre=mA`Y%dx&dItl}>d^xs;5LEXjVD@Um|&nGVT&HAu_+VcQh{`HEjwO8nO z`J&%XiF~aLM2#~iQ;2HW}(DSSP2OEC-W_-GsE#0ZU_M7A@p!pld;VO`X^eUMEVQiUYsHC~!$jq@@Do2bBBr0|^1X~lu(l`sIK!hnSA5E521FF`w9`gZa87z;QM5BvLIOazU{2K!KEPn|RP zqYR1^--ZLr@?5~`Bpoo|fU#&r;7<1odNi^jnOC8qAe@=l&jmPH5!+v;Z!j3Fj;D5S zJ`SpcCIbWTKMEmSyNQ})H()5Y>~p2QHFiV%(*y0d-T5RPA&VA2`zIGS46Lpd0C#Ck z<--7shXA0R{SQbu_E#D9m5EsCpp&KX=x06fmO~N%saE|}ajW8<9(iiQj}NQNRO4p) z89@)6PZ+!=uBp%R??~eZcq;$drv9^qegp(N4ITw@uKqT5$1@r_qugnw~m`jU7XASKtluo zKj;WAeeAw90qd?82EPoOT>mAM%=)=y+c^Dcw&UBA)!hj2Dhg@w?PfjUI6Bu^`2MTADe)!MDz0M)Fp7o7tNj{RB#dqXch>tAa&!Ai zm?Sd=7K}|76ak(c39Q6BZ+rd7CoN0ZTy$zy1y$k`X5^BE$l1VyMsa0;B={ zC7fu~+JFKK8<0KM1-aKc-OsCmsuns>)dP_kBx($x&X#*GR8W9ZT9v4OFx-Ety$B%M z%(-n{2T&AQ&>k`pWI1w>=n-UDkR+hwcqB=;Akif(pd?{MGOXkrBuf@ha#8^aOAZ2p zut;V>R7CO;77^GV4*mZ+?|)O%U9YR>do$m3SG@_<@7{-NDgH5oU3`yCLn5Mvgr(9F!wZEVvx(N=}lEO3~e3a~l6)`(S@)JF4oI z?dXChW!LcZe<_`T;sW-o?mI`ds8P~WS!zFA(XR}m<_1#=Txsz?@<3G0vz%E?1!ZtWHsi*;lVa?lxdcH3_=EQyNuc?$;72- zeWm|HYA#v8EGZ<{?y_EmnUHJ_h+Uvk&}cm)GR-(fz>BLZ1Rx!neX-+)jT_V^;1#FnAktsQlS zQDxWULxn<83T(NmRKFhMUuKg2E2(b^@47-iO1~7o*||-V4vRhwF^XQ&&c-*4d769! 
zyop>WS>JA)JU#f4Hk2oP@>wGjb#**MT9EjKEPABf{`FnQq)t*%98lxi0y*}(CO)v2 z^)|WZDjKtx1b*J& zUIXCl@rH725%O3b@wHpgsAVj`u-zqG#1k~uF1aY!sC89I#=^>>qT!|Xi%^b-x9Ig) zR#%2r#3xSqZyF;l7|+&78YJJYbFb0BD4Va#d{*6b7}fUr(RdWau(;2CFByb^TyB$# z(XS&s45+bG4|q^h{k~Y4-kX<{K^jA-6>4vfHYlz`_MSDQb$1P7)w`5QlE z{y7fC-m*geQfdD6aPFLnT~TfeSL}OS(gP=FdNqCmL!K+f!P-6g>9^4mN2lzD&&-^2 zF>ObTSlL8qgsODnrN_KW={2D#%G_9jWLwfO z!|G`AW|gpZBWEAFHUb0<5pP{_e@tRF2K|cc(`lmezY)=Vx%3sP`fdd7bdeBGFMRs# zP*AWo2IN&B8Av0nH59B|RXCsJ(0hB6bC7y`HWytbZcLu%7ctc#kixVxC+Hym#9K{B zXdlVLP_SLjYwX3WOhnMQS16|TPGEQ)s2z-^b(Lw`>XTevgB8(uz_vqz1PW8FlXhoV z7_Mo&*F9bBM;&uHgwZ<3bjllX{VB8?LMV&HurnCr>M}fnn9`^f?D7K+UGK$RZ~8=* znR<}HdZ=teChzhL!T%vG#9{#*#2%?{ir*b_gfl5=Aq^i&lUOl>0*_mx!br@EM@;Ky zH$TK=6-}SQSa+?3iM?%Lg~xy0bnNBu)1j(I-#HT4fFAMZgr{g7(32Q{?(|uAWBMc4 zJ8a$(738n#W7^cYBu@J#+gHIa7-hRx=7V=q3jfrjvc$@tnmQawcL|)+*czW;XWB(d z>0wdIv?R~7V72t{EmUl)9NtU6@GUEDayY}p^m`U6PQ2av4QQlb_*Pw1DSKay zNxTdni3=lJ)@H`KF{`MY_=>q{@Y}{4=}N~RuZSIfc8&bbtIZGtQEY?V9)=A3yQL*pf(?TF~c2>UJMTDt9zXQj! zPinXm(L<~_Wn=~P<4sgGc;mP!>r1w@HrKC89ZtpfI3q~L(O2PL5Rxf5R4c9iO0IXg zyy=21Dqp^)9LZA%vewtHDe+l@@5g98$_#D-pOB3y6HHfnEN*s=x)SYFvFqEyOrFV- zh#5;t%pKHbig8T8tFVihdCJ_lpgp2aa8+1{+f9bERg=L7a=5T}l#~X(`;0ov-4v8d4sM4j6L~f3~ z1xGLu+vx{Ee*I5t3k@@BDc9)X-uC>W^OJ+u9VngA7vVF1$ zP+DHhpTS3(kX)6z`tp5GQxj?YdVjQwc|3*bU7QD%smoyp!f5d|wt>YePWuzvjMZ91 zQ~@aEB+Pl`tXxxK6FF*N)Lw*BF6rymM~`LoXk}zW`3RPVHn2yHPAv~pN8U0WB%b!bPKL_vx&hfYlhCacyzPGC~#K8?cwi- z*qaI4`ISh}t?~sC-SE6c=$r&{T1vltKxFXbepVJ@+3KL};N(Uols}A>-5%Vxaa)Cd z19dZjQU>*{F4Rpuv9Wn?ah{{S>nmq>tM!&z=zaDFQ=UbgWnEY{%E?eB{-@z~P`-x+ z8TUUhn}G2hg_;U;jSip(%vIc65%#VKYhxdGdrwP}aS~0xi3) zAi!znu7F0wzGKyGx%9C6^=q`|@~t7Wv4%6R`%!NzyQli6lfvN4Cv^~Wd*){bIo zYs}=y=cJ?jMVYX#;SndsD6IakD)TMi>kw5bl69dCN<|0CTzKP)fblH3>!=nSVl{k_)Tl^;7uylkkY2lnZ*ON8f=0t z;w%gIOxEXMs%LY_={F4Bxn++k2>SXk`W$8afxb2Yp)l}&anxdW(PySL=cV-U2I%oPR2LQkHS*HHeV(0$%II?rY;rvDaT-*W*=%$9z5D)tfBg1%*v|WXuJ5m zj{e{0+%f)R@>*bdM-P~zll#s8eIGw|?wF|p>;KA{9{%s=ST<&|OtLfWOR)7Bit0J# 
z(TNG~37*R_!{=>(H(T``i&zagCYWa0{`%o*NvUVFKLfO^&dN5QBXgOw`g8Ja$Ut99 zcFP(gYUdFZrFlrtYC;??jfx`=S0{~&YH7y!(8EWI>2d~~>8)O1LNHNis}u%mtG zQLlK|KHV5~w0|ZdjIldue6%$Q3!}G@!;Xlwqb+(T8Hr$SMU%tm^ikB&dTlK^>~M>g zp?R=3Z5(z`OU53pmp5&`$A=y5fKg##NX=>F5%Q4I4-I3?)xv;cZ9vm)NvJU9ey_Cp z4tLn0Q8{z}5BYHE$m3}9PkGqhmMnB@5XK zOjH&FbGFmy@Y|I(Sfjo)mqqL0QP8$b{sZfKLy-aVh%K|!PFSs z_@jLSDvVBpA`gjBEPc)xk5s8OPlRG$Hia=)!x{$!9zV*M~2z# zUW-2XX```6pT&@21m^C+G9F%U0}~HDya*53p4mB=s~}8v?oQhc(3sIk1z)ThYLqic zBZ$1Pe(2cKmD4h_kwFvD6t@Q#aZ1)eUikkRU>{>nE-+kDRYJ)p8Lg_Fcz9TY8kvyF zoavmTFVGvs2j~mybZ^F<7oHi^FujpPyo@jwht(370=yfyg5Xoe=Tq$V0Q;K+7p12a%NdGO0#3=3SVW18?&7V7xi+R1hAYn(-sldAk`S;`Xr z8IwB!>K!(71lpg4fPIYZ+-}t`(yO~=u+c9!xCFPN_M%YbFhcK{6P6@)H@X)&a&Fz#lnH=*&fFO6#9S+CJ4Ai~OJrozWN{5jDJ4p|QPl-~g0ry|6({jaKTxxN( zwMm>4%n#U%+TK-`&X39xc^RGkT|8sl-X-lwtBa$=iv3DlZBp;_N_S@tEq5Zo{}ShK zp<$%2kDqgnW;U12%myKe*|sIGLZ#!{xSck)J7AByL(3_P^s6SyAF9q1wtm)=5rS~q zYTT3Hsf&r&fm&I3Vy3)d;Gq)q<_ny0jVOQb7R?6#`;)ja2_u0-R#rnVo`u6zq16yU zW=qKV+(yh+o!X>5>Y8o7uje)LPUr9np{VcD*JBOhzgHI4ebn!~(A=Nl`_sy$vP?*- zWv+|e{-BJSxyTVQb});%#XLZI*Kfoos$;^`67if>C_L)Q>|FK$Oh{vIVGnLcRXyEZ zx+8oiByb<}BJ9X3Btk4%HNc^vmkaSxG&Y6L?E`0`!~|ILh}xiM(F#8 zR3w!uIVOE!bxh|3r{$d`kL5l$RA&IFrWR|ic_=Y9xhsZPu0Y}lwKU=Z$K)rK}?yxy%mMrAbS=H#3l>AeJ56>Ts} zBcWhp>n`%roN2h@OeH8TP@0)mrW*FC(PSeLOYRE)Q(;@^D>)TSK{y}z*Nc&MwtiZP z!Jff;GUd;rWVM4RGrH9ouMd8HKBYcF{%ngHWwz(o1;bPW#Mg@D*~|Z=<$uZab&`LD znJEjDZc6GgJI>tlb|Bs*m%2-$lVq_cYR%F-*wP)osB>%Bin;OSqBw+|ArhRnm>Z|5 z`#77Wk-Ey)k=kwb2P-DyGp!m*Vh#Rf?npbb`@Rmcw<-!P1UegCic_+!NE$HeOm~(= zlXQNja6wI-Rkqk%w-=YKa->@9jTP8CK9f-ByHmn~Al)-ovM+GModeLK zY!%^sa9QYa6);NW+5}f*Gr7qrVoR3t?OuJ*h}cq9X{H~ zmc?|TbPo4vl4=n`c`-5QWC;yX5ncKGj&5>G2uft^&%wg{zRaEjrkp!#4vF<1yXA|0 zAVF|oi9b)TPLcghSF9-CNa}6a&6>tH8Vi6%($el>N0~kY=a% z)tQdLkQr~lLR&1hj5jI{4=fyY$ksH|XH|^ApXqTm!{N01C ziv+xw;3O&J=AOPH>z|=IRlBW|P~1o-2Q-I#p73t&95ZyZd!m=@)~noH`K~sJo4}mM zfSzSUDmV)+1Y>`yKday9yS^NR3;oSJS^G9qUL~FTbdyU6m~DYcUk&ANq@7A`Ew5~| z=f(EE4EgGSf_X(P=ux3L-r4*1J1C`C(J-*$rU2hqV+Mx#)?8{o^(8L!oXAjjqd8OK 
z=57p+WQk&T)$qF49Q_Svimv!38&0jnnfO#Y=Vb#hL!}ju1d5M>a>T!y)Ljx2%)&GK z1{gE&z1M;Gc&7kw0+ggzQ=GJ!Sw7a=;C?8$s62lLO(U!ddv}YODK;%Qlbq#@ehE<_ z5Npohy&2gU({FNn^R zuix-?mDxKc>#!K{Dz2S(U+S9Ao{jS5J)0!NCt@pLH)K??x&F@a&i$o;KgA>ZC!?xk z=a>FRWxluvW@PH ztPA-C&PLGa{OFqr4YhcvV(XH*HU#+PRh&TchCt92*aa_8F$qQ++MYh$#| zbu$tZN!GCYn#U2Z)KDwj%zN`>Xd|w$^hCP$@}dViErEra^L1nxQ(M_@bc-4LE_OVu z8^5V|8R7%amA*^p5o9&+6Y{==w;XS_b%ISLhysgs&0WY6b?l!SL*JwAZ*L<=ha=f>DZGQg3FOEiIHuZ6%daqA``&Nvp=1TEkasGN7gE7|NO;uaZmKB#6 z)n_e14G=xuGWd#x#ZEN0SsLV~RE%ms1?_yR4* z@zp4NDdI|4+xVLXn#~ub#89ZMwUt9OCRsq5T(ZnXHB|W=FTRe}`fQo^&{hu_C}%~z z?cT49cP}g-DznZNVF*v)Tfz0)Y>YUkn&7U*+sW}G2VAFQvGY3^u6bX(=o#F!Y`UoO ziEmox6IbIwj~!m+-$HX5hpXp&PHEr1Ct9W&u&jqBFESalJO(^5WL{5WuDiRh>yq!) zFwgBJoLV=H*G+rLp#5s&=r@}O+4xLKOl%); zk-;6deHmtSffQ!fSriUU2%nd7F-bH{7m39iyLPQ}aoVX-kr$sDX z`T#^)HEX23&J@w8m5i|Mwhfd6&lsrVte(mfMfV?O>{QPdtJ-yMS{Erts9P)ZT0Czq zT3!D<1$F(McVzkC=Qn(-geD)A;FnuVK4J9-`jIz=G@%yd%<&hXqSN6 zquT_&Qx_`Cg7m+rvkGf{ai2O3qOxTRksoct>wNy(CceG&LA{X9xaGS~dy@MXF)E9Q zR8O@g?g|ur_;%=$&Wc{_H^&C#s^v3#uDa05 zwOE%)GOO~il_bM{aeT^PBy%e1dXy>=vTP_-vRn6BN>ro$4lhEW$d}-WE_m*zUbk^Q z1k4y8e=~n(ze3_m=8C~pt?G;lx{+M@b7V01FuS*Q?@Ty(jSRG=@IrnIrH1D}Um1(UE`GE>ArunPXQ`o#=0vpQ-7kIgkmLB%J3hPt z6pYT&5(AB-@)h#`R%cA|QJOeor2PV1-$Y zCtf=05Q*QHsf}Ji6Z8sgxl|PgGvYq#{8vThGhX%oNIdMN<;sPh7Kwc|_P+Q13jC^; zEluM71aE;5jI?49;=UANuY7e{WAaU3X6kVF(*0S^E5hG)eDliJgwPJtHXXS>ZJ$U! 
zKYp8cyZm$%6~xx7O&@ZmAG5n0!Tl`dGZ@mB(7LRvB`s{VXdYYp=GZ~Do_6-Ojvynv zaBq?>3?XnE`SAoAQI>SA9^=YSP&*XQ2eayF?nr#ZF|hCd^a@GHFss-ORX*TG=C^D{ z@|y0Bo7*Pq89>){w?{AbfKz2$4TH$4{1@oT@%!P0QC$;xamOom#EH%tF;+rC8Sl&T zk16bPbz@J+GRM|+V@~g?JWU|@X`~qM#)5)M?!Wph9Y|SvXuezdUweRNn-Nku@XoyW znhJ#dx=1j8{JyK4t*WMT zDXdIf@Ib$C8l_ol4WG4QAjLf~Ft$EquEbI=D#hqYqj}D!4EdAa+Xs-YG^xB_DBpp6 zC_@a@kiiV$wrq+@1G#NL!R-RmBoADK;E$%;r1f3Sw>G}YXkEop?=8nDww5_aqu$Od zeb4ezI|VfvRSLZwkBF3i^-Sk3dH0Uyb1Z|Fy|HZ)m944*}E7Cm*Xr;2bU z$ft5rh{4O%sYmjOojXI_K-xUq=A^!D>ln!7>fX3(z{vb)CKa#x{9W)PHc|zmotC1t zw0-&=G|vCS=w-yNO=4&wD%PNqC#w8XH+buw+0YW(rp=QOON+4Do#-v-pF)?BL?v9bc{v34#ZP zQ_rbypw9QNq=Fd@Xo3#o?*2avvw6JF@v|qcT0sjmvbDcA$;y_I>JbF_oCNf99<5 zXJQKS>PwE3y}6}cCUZ_4B1(fopqtkka9x8Dp@n64(=l289p7s;u1y(2SSzA8mbu*C zHQNQJ{95Y}sL?-suL<)~gqi|I*Q05<_Q|z#lM{h|7OZwP1x*Ya-RSaegeD zdZnd73u;uvUT`|=aA6AKr5`9+rqGWay6*VSCbBt+z$NHVIT~9)YaKY>WhD4UidAvZ zMLI&?{JP`0nGg2g^wUE*pmX$~P%yc>${DECm;BcyKA8@&5@RGkm{EP@Ti3d-t=^tgi7*w=*w~CS-Ai4k3~qUh0m`#R^dZVk zAAI4eKmfcl`L7$t&O{h^`2Ev_ZhML3s{SwY?zYQs@95os5DLp93lx1zyLW%@?tOb} zHX()#EcMBa{JXPf)OHiGJtpQU#mP`~(5()5hCGN^0(qOtT9D|-;c!y6DMwO7lfb9( zod<7BY0qrq8O3G(Xu2LI(M_g*TYQ;hn&VjnOmU~qex9JOx&sv7mX+ZrO#d}JKpLB zck&yt2o|e++N@irRQ2SRa8*_Y@kFa zRm`j`gZjJQwq*E9Q;EP!BYWxt#V?TJ;)>E?#BM=iHWiBg)w*lDV?%<} z9zD_hZ}aztQ;33=ALIG%WQ|bq#RHA;7xB|ZCafRv^yDrfrz~LE$p`z#i#}ba`A|O2 zlQ{-eKcCK4i~tp?<)wz# zDf=d&W+Py28`1N{GTQtJ_ib&R;B4OE&xCCN^pAVYJ5O0^>_K;z#EHL z@sB%$LX@g9uFEVgG`zlM#8Ug~1C;|Vvm5peWTG2I9HnzSxEiM zFQ~Hpn*zqY{{l2 zdJMm;(rxO&kE6WF;vF7{V*S{IK+25)U42fGx<9IPAXSim{r$wK%wVmW`0Geir?ONx zELh{o>1l_x()TNtxk-Qr=1>~Is8bV9rfD-A_e=w}Eg9v_LrODa1EM9eM*_i~L6iN5 zviOH{O(%MVokp0_M{!nHS*}w$a9EiGb6d?0YZIV1AmxvGNGR6A4~1#Zj55vA z5a%>gQon6NuV;KX2dyQhkQPL?3LGcQR{qk&hES4gQY6j)S3z&yd{}$v7)~g|2Ee{eufD^j}`OgInY@x+l4HQW(#~% zxYD;+eQIM25&@f}_APhH$?lfPUo`LArd#*WL5>}8K1A={C1syq$2u);+}?}a`=npz zsK5ev1nEEl^If^qq&(l*Vi^sN^rw^PNg-?YnmF!or{E6SO-iJ6@!qB2#}oBXe3qkl zr=9?QbeAieUhh%UMHZT8F5Wh&@LaX*pmhsNRJxO#l{Mn`G#~=Qe&fazDdW~onrVmL 
z@wK4UU^$liU?rJp|1t{E-bnyFxmrBRa?15Y|^YVu*3Q1*1Vl>@HZ>@xI6 zS9!kXtyoQrr(90bZ1I(*v!LfsG&-w$%1`#*czV!?qT*GWj3A1w=(|rskLNx!IKLpm zBbcB-8NLriYa?^88Tr1A0AV&gTQy@~Mx9M4KoyI=K!1&T34OS2xpXU{(5!x^BN1zQ zRvt2>e-*!*ha&#VCto+J?SkdBfk0&%3r*jBKeX_=tZT#-{L4jv3oj-&q^-m+RIQa3 zDCbWn?fT|q6f6H>+D$YN_H1*wxdaUg2~^UG<PM!BS$8k%)nR2Jc(_{qU;&k{_e4Opb>dL3-s0Y=?oz@;bC2W$Us2M%(cq z{ySG~>o)2~)vhu|?%3N7CrbJOYpZF(hh@W>=VN)c<_t67w%&zI_t|#rceh{7ym9nS z*I&&;Xit^so72Q$qt^=&~^Tq z^zZG5Ge3@oj(z(&$Oivx2FsQbIw)a#`0Madi50|NE<%SBLA>CB?rDyx)~9GE#$E8R zleSdoE)O@r>WF|Q$?qjTn;JXcQYV(muqiFC0d z)G9CHl+|pmkNGZ-ww?dC5GN65H{>xtXE~uWj zNt%0LLy;fL>*RsZb)wERt~jH$lO)hyRYW`O=BgEMNl@9R7IVN2z7ZlCt21?quZ9Ll zOVaJZhaNwple`PT3TSn^AGkg1w_%kVCSUxozT;!wPl2Aeqn&iSR6rT!AP!h$vngZ~ zwsfDB?t!M?8J`5Lq}s!->HdrzFYCZIWhbp&bZVs4Q%bE@IBQl+u3m_3L~vVEkVTW@iR=4LO^r@!P4(C8+P;Lm?Iqv9D=;w{&oau=y z2YtCK@lP99fD4kdvu4DNkmhhH%n;C5Nds|-!|ajVLSHUmPHF@7rr3rpsy*Q=|5OS$ z#7^^OD&8Hi!GUx?{R^_V#r#w+76w;7ZD8-e_bKJk*A8xM`E`i(bG4aDVlYaBJbs{ILHfzT753J+#Tn zE_O5Wpt0xD0jm31C8Fkz^G&!aPJ}1D@x-XJ2t9oe;#y{*s?LYeB+u2Hsggrj*cqBjCN_$=2_zE$!>Tt z{9NsJu~I9#>=kf0aZ0mf5>e=*N#)6z~S9_-~%xBNb9exAa2YEsYhR!{k(|C-N4hYOlOvriuyvH1m5Kj z6n-TYpEtQ4i)~F=Hv26^>Kb^yB3shb@R~Nn@p<&eRbZ{1621&7m5qM%^S3!9%$#*B z6|#CuXTV}GBJthVCB2RQH9&EI=cme6<@Q0Pyv_+?h*gBybny3QJ_gV}MfgDFO0S&+ zsa^~bTm8dcRo*%ACjzIZx!|?5ab`5$RN={=HP2$uHfOrZO=0LVX1)IJ2F-|4xNz*V zY&^oe$n4?IoTtn3IAL~y7q50<=%XitqHe(Y5V2-@4YnL=TJ26K9`Ede*BNwBarULp z`gsT&KaRppF_PSh2E6tq(rFhFkO=|#!g!n0EKpm3 zB2^CC>;NRNK)KGsU_@WttF=6ZBF}E+KfL4n<@f`)v`DWg7CSam%0w~+J2 z)hNN<(u{=Z+MU)sRLeZ47iMtJrsPwsPApcEOJfQl*E*D?0Q6^WBvb=E`z;C^Sgjw{ zZIV|W(=2t$#(2Iw%V%4N5xx}1rM|a`0kLlo=42s@<|3e%VMyH1YxOuiibwRbN!`Ti z`TFwfiWObuXRoZV-gUm%f6uRL5EyI_#=6OErg+Rd`)LX?-Q<&@PnB8!z^$d_8%(g~ z6SIvF)tI|0@qn9=U(PruKwzyruxrfsU3t!~aY68Bbt@iMVox^OEffIC4#gdYE~okD8Nksm6B(h3FoqSKp!L&fxxYC>5kHVODN8n7u>& znVn4lsOa|=^p#J++;WFs)PjAwa38SjmohG*e?^mMJ>*90eesYY70CGlJd!DU#aZ)+gujI6>g{8cJKj$8b;*PE`Yy=! 
zspR`A1$5@7tZ=65yc#LvgH{y#S=au~$Q@Sj_d!_m8@J4!M;>ZHRm$Ieb0NG<_1e}1 z6C{@SQl8B(J-mBXv~v$KzcA|0V(|69}9KPu28Q9eC9gWul&8Ki!d*> zxax_)jGvCa{KCS&az=?^ft2>Dmt@Looq|C_jYTb}J>Q4h={iA-Y~)Dc)(7_O5%==ucClJV>8CZX(wOrme8S8_Bcz`HtPRHw=Ejlh%mZgY9w>#? z0VR8Jl*9~xi>45C;I?kiWFaklZp%RLVe$OodL^+9Aee}ExXnu8l!KB`$=esy5>|w7 zw|fiHS>xfAL;gSEee{@p*%?`fUwy(tRaNEhY3(YimXS|IJzFbq@%y(4?8%}ujMwE@ zR=ZItpwAwWEr=EWv~EQ{_^D^&ej8b8^x3-aUV(?WA*E1HGR1!6bnYU^ewDKbYDESu zvG`bs)~gR@^mj4)o9*k*kn&Kbj^#SiFL0akdXsj961gdP$ozx$>~SxKg%n-tC@wW> zNp`!h`xJk#XFXtBS~?u-1Is(_tto^kZvfj5ETxx0>3P0J710!_ro=Ly0OQxv(6ToP zcLfhh48F!hHTbg?7n0z#`%s_Eu`y1NO2Aq|&2RJI$3LD>LsCHBRt;+bBSFcUWN}@x z!(_YK4B%v06>|{A_jLAwvEE$6ZG%M7_V?-KUp+3vM)#!spPzfa=9%-&<6dK@O-)z% zg{o}>rL#{UC4=eFES`XcWQqrV;Dp3HO@7l&O3Cu?~wB1e<^;py8@J@?kjVz%O0>Pn_D(zGUD8X z&X6jjWis{_2d>xmzLI#?*1Bd>`&9Gc;NP(Zp##wKcQF&y_iJe(gdb+}&T+2U(`6q! z>x>+ep%*l>{XfSbz|rlwfjNsAq4Iyzs_BE~{EKhO4Ak_HVsnVz0b3K53S2jm&0zC0 zL^hmLom-~+5@c+6t0N-B*)^z5@DU@^|LWXm&V$JI?6=C&2Yo7S{fVfSA(!9|+~Jv0 zfx|NoB+yz@2)Un+fm%b$Pujnnn$oT?S)JK3{vHRF_M zwIyqC{n-+RdltDcF)3B2))!b4mYNw;ALQjGa+7}Kmm5ucYM$uT$2n)yLw~8r_AHFX z4Xg)FY%FH@-W6P`;M%rH{UU=m1}#C93P7!%NYrM6$_DA*VFgw(QxqhcN@PQ@KEYTz~!uJ;guw< zz(lN>DkH_%Y%9v5ISPEY8_cQKrr~^Y$7b#7mns0v3Z6W2Y!P9J`C^fz1#QLsXp$$s zy`X)EYtnsiVDUfRqKZ(~^k>zd@A4vIGk%=hxQuA&VW+16uxhyL%>p({{>8bna61Vp z#`l9yRl*nFhojxzV&E0!pLjCG&5YMH9yK1fhJm(NgP!zE??{eR$CImYKh-%ed^_q; z0%#2H7YA4^MM~?|YPS~DP*MaOA-&q7F`bsZ5`X77>j8Z0n@)*`E;QxrN~0rM^puN( zyn8_;HjihyifMxu-CM9qUlh84xrtX=)SJE`yCkj{mIqFC%}Q(Q8Sa%9+bjQ}XTMNi zJ&&n>Av~hI3A4S0Joga@Fmn*`*pBA9%1D{=9N_EV*F}EJl1DOKSe!7}hoG4!!Pdse@jUmDy6!BvYy=$vYm~m^W?GNF#=D zbbY@vkD%Qe5->LXRgeA(rFn2V&4{O4!7P->*$Po;eu{cqRZvd}y9hmXEP^rRzf zk5bQ z#N+9ryYC`%_U-SFec(`NjVA3@{QyMl@8M1n`LiryyrdCPSaT`Jclywv@^a2lze+^= z{nNRIhiAGkrN3P`gLd_%eB9B84#u1?A>`YZUVFJ-(_@58!`gY6*y&-{;VyaL6Ax!l zM>ajBYKU(OmGHO;pEC`fMJNMc3v@t_#8hQ|F93ftf;^^%J{vNB`Q(Ghw2uC^<&Y4l zr9CDvObpR7vLOSnN^d&#$fs*o zrl}BV)*pGr-n5mL-yItNId zE!q>cD(mjAaiX|2_HgE3X)TSR%v=AB0-LV$E2aD9JrS+pYh&eG(AVwe=PJ>u|Hx!n 
zZ)YKqnu9O}IA@B6Dk&StPM(Z#b|-PhVhfM-gYUH^TGOB_|2XLP_WJ|^5My9=D68M~ zQI3(cr}H;jf4b^KR!}9S`>S7qfuIeu)wTE*VMyq&PfZcTFwbN);>n@pvM;n)l37UR z1RXfe!vocxGp4!(I=^#cDyp;%peQ4)johC*LiQ}SY2^ykmgk+{&GmRBUU9}&Ct_6T z=X=i|cPdsW;bsO)M>xGb1~TalA_Jk1awn8d3GJZ&amarZHJaV{D)OcSi*2{GC=-^_ za$BcaD5d@mwWYwJ_q@2W&)wX5Q0&lMhRC@a^my_LsA! z@*#>vMeGI-K0BoDw+_UlT?}0;7BZt~YP5Z4&lyNQ8Gp%7>Dp;S(Up2{_!50Popfh+mViS5JAK%pyp(`Mt{u(Y-ka^&YW#mPb1FU;AS zP1e30W%5jLaNU~h`YzkhB=Je=zKr;J363wSf0f;pA^)t@15j?kX^}`r^iQ1-o8O7p z?}Ppoe^tM~>qx)Msi)bNN?uW+VQecR?n9*jD>xVwoscQt5*-T=Uyn-L9iL43TDk2O zyUv?Tn6<+RL0h~~%Kiv|9lZtWL}gfy5DBNoMC9I}`~P7+!{#TJDodBa|B7A#@g#fS z1n@9GHY3^ijQ>~v3aIk7`=J)VKv_W6J6He534c*Alp?O&slnRsJZ|{-PYG4SB`5(d z8bBsb{qfH=q)2_T{u}dJDryv;wP?Oi0x0IZuP-GM6%PQ*ZhNuDir?myvu?jOz9Kyz z%28M?|Hf?DxK@W4%C9=J&c&wNTZv%q6;0b1*XDz>qDr^^(qzO-C)eY{^2>nq6^7jo z8Q_mW;ZkZVp>O>Yq{mc+QqMv+t~3E@h|Kq^)4400o#Pjb?J9^55{C`}@LA&@72e6n zZ1yq9?}v9gu2o0JV*L|pM&eG~$hth30T&18BzmtuMBOL_IxRfs z_M;^wgBUXV_s)c^ax^t`5FiCNTz^Dr$JuiOV)A{E&0x&oyR`+ycN!p;b=752|3Ido z>KFVl2ds}#w^8wlY}z|1^nbbJvG^A#%3|fwh`M0&lXAHw0%&_O<+*zjtKG{6MMtk> zs8rM*KsA9qY@95B9fW;u5?D&K6mVvGnxp@KW!*4bPl{d&U`$ia+07*YA_I42jyvz2 zPTw(^mS6=bKf?q%r2sH2>AmU;yaz797$9>6ubf(+x?dTrT}!?Kij`=~wIUVQ>z>BP zlik*y{^)`MWK!Ydb~qpqA)+sH>dbD;116jOqp^#+9yzt?a_q`|k_G(gi9gJ&6y>*l z0oTitqIAOGh`$J>rz8_1`g&OGngiKp#geAPo8H>~`hO(RTk?V(Pt?EN2HG{g1Q!l| zWR(`aURT)Hj5lQLApktm_|Dlogtr44 zJjcV*uFb(8ZrjA=UWK;gq0&A9SK%J>y+6+GrvfZDr8R|5$uaCR_|$c6pot%mD*^76 z<;~m57H`{pJDf=4NGcF4OH8u9s*LwaO8f>5aEae8zcMxasiAu6hJd$~Ss=SWioXDcCg97$g zcY2m~{LcDbLW0k5otJb2^6>d+T#^v(LS z?t}T35c{PzJaz?$W_C^7Z1x{&-KWyxDKt9dcAX-6xM zkIN$d|(A zOpkk(k=ywJT}e-?fK|M?6G94#9!40KMVZ&=1KgM!nu-rpVrvg%(;DTb$Q=W zn2+#csfUP+sdi_~g&Fx?b?LT|VqZ*7M0_$^fxW0${5}qmfmnQ|A;M@jYL@+bDxu{Y z(bf3g`*}|b7+mS(^{wC2aJ>qb9_G2?2Cj~S>#}BdUP(`^UchY4;y6-59H3)%Cgx_w z`o*nEcBAVRKetJ6Z<9xN?!ZAA0!riN)*^JSPKv4eSrdy|@K?{|U*Qfp5~QU>x`)df z>rR3ND1$Y}dg^&~8)fq>YH|avC+x?Q*qvAJS0}CkN+)Zc_59`RRy(Q7zr!1QMk?fVWefI)REnj= 
z_!cJPlG?hX6^bw9j7i%#;+|^B{O7Wb^CgsmO!<2T_1^U#{dU9us0+ocw9sUrkjCx`vQV`kIXO6{Z{&6!1goGcXzLfy=d5&a$X@86dq7wD|*9h+^HgV=k zU-*e7^*$1uw^VO(cqlEy4M5XVp6%B1?b&*{Qu~BZ#F9nM@ojeD13UG+`2MoWb%ddhaAp? zb^6!JQ!Q(Wd8t{xPVwPV@n^hm`IfocqATxi=p^b-ihvX^e{)~Pr1f)Cxd>CDH7af# z%>TyK`{CmllCZAtGJ1uSz3naq>UKHpDz*wi9|H04UZ?7AvGuA8Q9-8!I|De=FJfy| z+QL0B4@EW2ot@EsoL5Lrz?q-%WQn{dp%pD=*b`{cI}?|8H{r74tPms8Owu#DPP0$t zm9+IB+@cX2#T8Onr`T_i2At|~?cLdgkRwhopKYZiBW8#QC`ni7EF}@OwHu5QflwvL z5#K`*q-iC%OTXht-L@R??op@t<9D%-+yzctU3_x^R3S}#${Ih)MTlt+f!?fn{&c_- z_fw;kTNosAZ8;86PP2B19~w>Be;&W9;foHUKR+9PBXpPJYY3;vcm@jJ$?=%0hwbhd z`(y#HPG&l88loN+=6)aIqj1Fx+t#X7&>|KF9#hQ?-~4v9WVuAaDZbFK3SU&$mXZdc)Bwo2u9=0s_a+9yxxjIw9o8@T+`ftt(^y ztAS4tDo#sLfu5sEo zp6*UQvngx$xpS)_j&8HGJm~+bP%RWJtZ0L5@u4xFnQN4$ZM^RkaNL#jjFDHjPr@5K zJubDPL_au@PW5~5#U$_gUR^(P0|7B8mW!=PE7ZUNs zYME5$$zCPU0FwuoBm*33^XnM5yFTY0%d6{FNK0WCkR!fH!@oTk4FyBqW_xOq(QmXK zKB&d_KIeta&e8IuPi-qbpT&qif!o{*0V8cs_hnnNr&sxwXX-=tWqx>i0Y2$@e)T}N z==N@yuhFy4&WL{6d~s7-Y7}b8{4(;q9D`zG|55*;oFea7mg~Gt&GS|trK=0Sx_KaC zcR*(TApxcD!yiDm92k`q#1{|peK!%_?nJ8NM|neC6iaQ!>%yq7qpD=|hg8Zve{%}+Uc8<>q30?~ylgYx8Zw?| zn?{_+p|cGe&wxsN-20F$Ba<_>P46ZbJdk#2PZwt_bM7cO8)^t#69uC3I*u2M$`ncW zRtZ1?M!$FbLajEZ$iHx_R^nzq>s5z{2yNwfu>kQjSsVwiO&S16-->V&~Y^qsrkcFvkRN8WwdhhLZ|#$awMONGmX8ttB_*PFv}W`E(0 z&ZO2&wD9wft~{K$b{LrIY?<4Yg zYpma5Q!%};ti!on((3)77kTHc)u^pjyJ6Gulq2AxJlUk4qE6hTlCw@OJ9?{9HEb4( zBJtAv+lngp4hrU^p1KKp(0E2tA#u$Q-JDQj(MnPq3_bDDJ+nxv%XkGA?Q@YzSig>~l2|wGo zXsvqK5uWa?t^XkOqE({a7k&5cz^Lrkqc}b1C3n91@xJm3%N6?ByU+x3iOs;ZR{FV* z-D=9XYx+WDesc99*0ig<1%U#Nzy17~ODXkv!>$c<&n;DaQZlvOcG{M5jhK`CuMML! 
z-Q4b)?-uY9k;G@+!S5%oe=0(`^>Z8b%Ku$#==MmA`~=x$hDAd)2oxd<_^|S0!YXiV zdounvf4hw?R_n{Vm~$@x-1S+yo#>TNzD%E6b1NvXNa7iMgYxT=Vr~PV8GF&hiKVwd zY^q68(}S|6?{=F^Uf8} zzqXBiKos@j%nZhH8pNLQ>y#*)EFfvUwt4za`c&o5$rC=Us?j6K;EW;N4suV=M!R20 zVjo(&*?^BIQMIwCf`V~CK$E)v9WhifNCW*eX_=ZgP02Q`eY+g`P8pIpP1A$Id2Q7czQw-M@d6zV_LfigS#67loD0!1e=9fafmi!#7e~znPles=zOOUb?J0gMY!c1i zB7w049v8uEB51*{*T+P00bynyeaT4#an?oE-Tt0V0Rg=WhxsFJi>)eh1Gn}O8sl#q zSr@_n9dTzd1wvXnkFIw+)ivTRhyl4KWJpT9_0G$E9c~FR^LI(Exqx@a|JfpUt(nww zy}$9@+2bw~EStbH7Q&#tx7N^*a7^2y)&n9y>j+}D{agf_56HSj345A9p`Wp)%TPG; z?hpLkV2}%1BKf_(BHQW(tnGD}AtLt+T^XH}bu7g4v*t1X#X!1bho_u&ZuyQNGp=*+ zjp1zTOuR}!-+ZjP5pX_DAqY;EqA zolx7#U#9(TY8dvYAp1wWiFb2bdjjGh3?e!w=9O$#M`#VKuuY-VBpv3KAyC);N1Bc&V{!D%1 z>ko`Nq=5U)_qt4n1w}cFreABzv%|CkfkzYD%X)-g253hO+9h8~12#n=xRz)6xyqDT zJ#Yjef64a9Gfu}pi17$T+smoq%3aCp3ik=&@k%9bagbwLt|}Lj5t6C*&!XebxNSdD zv?V>Q%4$aQAd}8WcrE>QR&56FO8Y|q;c3!qy=$+Ca^Hu67#M`1;mFQk1Flx`qMv&g z#PEkL3+@aBWRu)Va-H0Ne%K95^wTq=hmlDkl|6$SB&U<_O4U$Y(x)sR0wAAxUifI6 z!Fk0VHTpD&8||<+@_%_1cb9BZf%UZ4#kusab;S+R;H>yNan$*_MDbd^erBtyZ>Ir} z6&*TQ!3nEv%AsF$NU*#o{Dd&HNFX}iNsEIvr2J!-e&F2=21_2W=}fnG;{UR8DCB^1 z#;-DVfhVNB`=-b6{^XID&_`bmReReX+q$D~SdsOiA{nRy6BGIXBm>lT_Pa(Ywso)# z=_7=sZhbOC4O4W+OzuS^n)bV(AdJZFk)*f9)U&@T5!(--p-^pBv1Ko;U=@m{HyZ56 zov$xMDa^-6$EvX6fVWL>O#!n1T5?yCsj33=DU!ws?9RB7m5+Lu_MNYT{q%BIxz7~O zAZFh*?m!c;FDhJ%>A)E2(AU|i`^JH7MbJdQ)ne`n+#xI{ivp@LS8I0u9LzIOY8uxrL()w3#n4=+-jBr+-YAzRnJ zu;k3^!8>#MSjc9}WE2WyS1Y?fK=y}M(&e3~-=h-Ps*JNZ<{pwbtlwCvtYsi)+gA-u ze@3{Q;=o7Q2QqHUnRo&hu=1REh@`je=XVxqf%4% z-Bhkbwb|{Mln9sdCmz2X8Rq`vCduh9kksQDFo#Tv)V#wWZLJ#Vdsk; znrBERYE|xsB7ho+z}glCHi%2uVS(#1<&S~ykQ=7Tjf1SPd@LH#U;(|f{zRmJs(S-A zfJDvxh&!u{JnmtafxY-U4NVkD{bwVx{1BN0lT5BXF{x=ZgE!Q2P9NB$R#b`_=8`uPOJ>%OYwn?F3l)ej=$+H&5-+NncZOuTv#>n*vL3FRt`=P7C zmf70;y-wFRRe5Bh?;lHeTX42vJaT4=V_D7K^dm?)PS_ze1O=py^3ndu@z>Lqshy=< zNO2~!WRh4AB3p+OJv%aY#QtfqgjCwDtFL)$7x>l)pju)#;QgXSZtg$2ro=0%&v~cT z2tBQV=kd>@HCC>dxv+rJ@oz2<$HDjTh_bUpxyi<%E%adv~P&in>^X6F3%~i*wwBdM1&zaikYn^J@BwtP**$? 
zwtE7pr%z;If?jd1hISz!$Rs3YB|=y_5Nue1D+YyFP}iHp7#$ih?70jSfMDx;#zpD+ zYw7?Z<5~X3*7*JHsqQIl$+V(}L!rQc%@Jz_+lw%b27JgxJ7W61lb=_vz(%YI2-jd_ zT>JNkJBi%|!R12WVI?ATLoj11z&CC4mrI&K!hz518;PBxzqY~NVCb!Z!O7>Qdyp`+eGzA^&YP?| zjY$&X&*X|coC3nczCCMZ;EnpP^g*SE8+oN$A?3@pwTuT(TN5Qr=hW<0Ylc*!kLl2y z4&TP0rJk;@Pj9!o?nKv4_r2WUyaDUK4XquGFUb0wt~FF)24O$#7*5E#$NLgq$;9XS z!>3}OB2G>PF5#xx9{b|tc6;283;<+@hg6|3HeYdTu+|Dm~g?esOlG6*Ptf!^!gszg-=v zqI-VDizw4iq)HBgfOe=lnFVCoLxmQRqak;|x@?5irBw!8LRz0Cvpc$rS+8rV z&n-_r5Ufk)g@!Z(7;cklWElzusXeBxVCzsY zvQVBE(o%ASel%jh(IP%`=KbM|fH6S9WrObTv7J=Ls7Zn4zOR=F5g+C21u!7Z$F!~o zzbjkN#IS%wipt*jt8iQykejIQkADeK5PBAr6@Rwhs+gHp@i2Nah8)^n>=C}FMFbC< zfI{>!!uC4LK$9c(8!iY$o4ZY~HGWWhQP@-m!cvB#s@!~iLwVT%)#u}6s!~am#=K#j zm~rs_%Tx|YEQl94q>Rc0j>Og^^bpLfb3wJyShQ(j*%1_?kiaXQwDK-@h_I|StSYDk> zZm3aTa0+TtVV+y>0dLiUoy`3kyN!z-RT^|SB8OO>p(EGd$rS?^1G<{}{VQ5QT`Z`z zDoaQ^w_=%u{-V_ZqtuL^h$fF~S9?cXwWApBLyA%?@UT&Cy-WikfjTcRv*sDL1u|o! zd>E@C#nSK!CWV3KXyfH1N$Oh04p%~x{7kpjz;uj|eUT1?&45GO zv1xm?__Og^0}N%+Yu~s~P(h^y6|dj*q{l;bCJcfKz72!$`T62;ZT&`?m~CrB{nuK{ z(uU^N30bz&V#&nv+8Z$2aYv*A2s|id*T=Y_1(N}H6b#O*S(#tXF+kw}l8AU+5qo@?Qu~5X#lRS09Egvo9Wni8x3yM<|EFz5ewD{R z%8iE)v3L=s5C3;|Z|n`hSvXe5m}fA(lGgXU1CUYW*UiQPss>&H@M!A_FBg~0kI!Ip#EO0DU1Sq@_6gr@3|J7bbQ!qI=^F+o2F({ebL4sf&=_ViC3FO!~kG@ z6Ib=?<}-y8K@ft+?(18hyFd0^kPxVUG&%+h+}(N*1>&3V$h!7NRJCQUz|+iJz1#YA z?E&M@F$6HfpVPy;Y)7jkZ}ivI+G4>aG`mGJP6x!{w5op3-6}i+03tZ5t4dCKu)RL; zbQH_|0r5w7ehI8FZztbx4GSqyG@jhHz0`5LJ8yrh#Q%OL7I0!$b8{y?d+$G+LulY3 zJztVa-TiB%5C|x({ViTj{k67YKN6twm&Xyal5|BL2~H~e4!vT=!SEp#q9A@U&`l1_ zc{<+O5mK=10XX1*GQZFd1wXtZ?0GQ+17u37D=$E-*Yb^Slf-}A7-hGg;OA4sXFPdT z{;0x^$Mh`z#-fCJgNfN9iQVo}Ni250NiW%ZNyU7v5|pkZx!Q=J9Yp%+wqAT6acemM zLsZj5R>fp7voZ)bf1=?0Oben-wio@NA8?TqOlZojNGJ$<0Z*5!AkUK!U^`d8m%V;? 
z6MN1Oj@vaJz97M0+6(KQv6AZpjT&>bD3s;@V&LSXsTvagNHPo861h*2@H8_9%y-%qa92PYD9epd4 z!V;gCwfCoZsYA^EQnH_=LO*9T9}U0317W5g3gkLp%SjAmS2%2`RtTXT^)@`AqN93~ zx$~m)YK|l6Xql7j5ca93{WC4pvvbhmEk>e5jq5L~^y#ED)~eq05E){csi0%21p^Bp z)E17rMduvZRHxG;R?-&pq@}&QvIwA?v&vTzxsIxr?N~QjyP!8rJvb*mh69T{B74v7 zy}t49dJ7_82Q0rwwbVQxwdBC9I#(epNod93>5H=#>awj909U)DQnl`cX4sKZ07gs( z+aAX{VLw6v1u~9JvSOxvbL&{k-13Ti7GIp|$y)#bof5&X#3it5ziX2792?%|zaAKV z&5Z+Rw+Qzkb<_{^x~}iUzJpcD6>#Iv5%dv)aiJN8Oh#0zpnj#V;(bZvFdfd<}pnp7ecq^2%QNICa0g4`Hym9*U zL#T_C76@v>>SQ~wmOSWP`bVOjixFr3C?9e2rGN_ldGw)HGVN$?tSUXLKDA*=xgDx? z!*2SmRYi?#x}7)m$=C;U200;y-x z!$!Z(ECd?Q+n$zKQi%nt!cm;-e$|A6!bdmOzf#3nmu>wKr#Nz zy!S=YSIf?4ydvf!J(8by3FnAW*I-=?G5tgt)#)+csk}CsaSg_W!azLfb6QiOD%HA< z`B0mw*;rU4uyk(m}s}!gr{AZyYhzs>}Qut{HhB?LwnEF zla`YnaGxD_Nrs*^F-mI!Er>|UW?cj8CWQMe;uke<=9ziv?yAPQ4se{60uGFa__d;j z*D07Lk$LyyVxL9~uRcymK!m)Jk)!B|^|?0F>TNL>*ms)^@-)bNZlH27Bd~q5TfJy2 ze`c+DnCgT*>2_FMow%X&FeAW;0N$in6&xr~>g8LeR+ZH^J2I9xCY85+npp+X)_GZm z4qxPY@GQK{aD8B0sb1E8&2+URtUX=@#EMsZ&92EI)8iQ03r#^hCL-=TE$_85*SiIr zDv&ywFDYa3{^dYgYK!WW`_hi5LC{T?!#tfG2zW42%v8fXx>=Vn!Y6uvBu!AtHDA^P zE|%wCIKOKmk~shA5_D`j>0x^bM#rzUTfKP;-tG^Uq+eBN3TD=xBxDJ@b}_H3LX6>7 z9t_I9SRo5JR%JUk?Y%Moq9?fmSLf;~=}*b!*3lXmP*}(ZF_Ped`p)}lU;zn|qk6T= zrg^cmriIa&qfTMXMEr@EK$8Rdpi-JJw9_s_#mjpGEa;YdUn-ey!xL|cqGyGnv8;5Y zhRw#3S#D0-RhKNp`rPJH)kDd*9OePVe_A}%xD}J%BA}jv4`5Sgy#I+D>kpK30!%5O zFLmc)z&wd1GPF{Lu)vbchxU!b5dench_zv>)-5?mP*N%fa2)kZo2&7i9vm^TAqkTM zac3;V{mT3P1@t$79twzbWm0%0lL%m~lFDC6j~}MrqlCD`|CR{n$LL&8@|iL~_FBy| zK)@UT0Q~~&o!>(nHS6%po;cyKc74-5`=g6ofORz@MTa+~_CIUog#+hS$p%&lFY6}Xyzi9ZH8fKbCEO8dElO9mD|IMadss}p0@Sumj-L}T(8He*g3Pd zANsR4d+0hd*r$E#zG1(juNB9tygOe;`IkKtth3pacPXvK{224QZ`{=V(Gr)D_gJYc z#^dZ~`ZqtAWRV+@&kMF-IT>BYRe9%p=sn&dp@Ct40i4cps~5(Kxia_;D9(DdK0Kdu z?EMF-_@ULJLP8I!x~XV{4bKQUh)rWjG05Bw@I;HhilSlkNZemY?;y~D+OI$zzR0BEW3guC1qt@j^=I3}*E4bhl+(x!#`MRe@e zpf>A{SlaBKzuQM#8%vCqidBHnk^j;e?nl@yu4Z0`WxY64-;1l}Y}ZZ~!&Y@gQ))$c 
zppF))&^`pxryZMfx;b@UaQ#+Ij(+`Bq;byOB|tftx6C9qu#^8ZWijBeS-UW9xBMefb`XOAX8zNze}OZCyjnb+8Wo)K&9rlMM2q;IB(kXbouu=brE} z+A2JwvYG)7Lw+Q`uWx6|ik3oBBUmb@GXhhpuMGfYq0^C|4Dx&n#5Fj~CJbOe+Io@9 zTa`6RFiGImnl!J~T~Ft_$%;&xcE0FBBz4NiFm11^-4B1*($z84Dt8%;NWWS6hmUF~ zWmtj-x<_vQ(tsBbWoh+!s|{_DGIcM_9VKcDAkF?s)VEY}>- zR|x|02q+#5Wh`C2z+-o3y739MiS2d2SNq|H){J}*Q#`IweD!*dP|vWb%J%JM{_Qr? z9H&a%pSECWf11*waW+km_+;=kcx6+Q_=2@bR9YT8s&>m2@DI(Z_+HFjyhqWYCt~g? zMlO=OrMA|`0PUkLu~FbKtOsg-1=b&HXMD;>PFR%yy%zM>{YI6&qSpS#&>~wK%Jp4z zkE_W8oP4f8VM>(!BkRLF8c)Amfxk~V9D!WZK-bh?oz9c9o{{ z`!2dq=Roaj?nPOgq4iGzBi2qa!?QaNDX%>rqJn-MPQI@;`CKKy>Z)Pp4Yew5ylRQV z*0^5w(MVZ9Z$K_OBj3+jE657Ig$Qz&?Z`gNh4s=9}Xo-ymxPV}2WLYXkkz8{N8-Q@MP+SSQLr_3Cev?9x%WdbLr z9@VoCUFj&n*aXBPNp9RwTl*19@F=Zyq4N@t)PV%;4Yi;lm*Gv}aL$IIF0(_EJL{7% zU}LlmNH3al8K#4HQq~hx*_OeCyyZX6T08w7$(~1i7=RLGv+L>kgL;%CA2fp_*3#!n11WeET|7A$x=b@#Esx zs0JKL#>lOcP!@u|bS~kimVTVLhocvFa(aGAM2e?mq5shD$p3EERAdYJ)HlxKjzo~K6~=@_ps^~8__Qx zq0#!0;bpC+K(Px&a% z1KBEAHK(N+{>ZNOuN%r1$ZY|K$-eWux8*3Ff*hViMxkWX8XoR433Pc2Ijmm( zDh1w^u1{Ue;c2TiR@Z~x7!t|pf4WsVGo`@2d~O8Ps^Vzn`WEaU34jF?vn7$;>sI|o zhz$@w16B;=tiBwv2QPg@l(=nx5Gc{iJ>Wf1yA12sYd~dhVSz4H+f&&b0}_w9_Bj~xip_6q9DFd*u;(8=3C2PiWS@DE0tgtSBJOjkJla4Paz^Gz~ka?6^+|e`m_P zd4orHq^)xVz;+~QVj*)*d0fET(qbB!^AEQdqtCsN!=`W==vIPE1~t`*A{DrBqDOOH z=~X221)&azuoeOBz%cMm0uZuZGAQgBgq>cckx5o5u;-5gb(kX%k4ms$Eh%eU$NNoJ zCl|PL<-mqF@Hjr8ujqB|m9P3>c*&Dx@r<9M^&@^B1bCP$JZjGCA1KyKcG%`(GP$uP z)T1QyoLIj8Qdd&!*R40}Xgr<;xUbp>$)a;=1h?yQNGrwSb`umiyr`1JdNXmqJuxgG z8)!pGR=7KC`KeZYz#p>NsJFyo+!9zhd5>yoPf7tKma9}mH>@ZJFUrLeWmm8=R=|tc zL&=+2&Zsqoq@MUV=t7yO_6e-t*}%}iJMkBMXQX}P-bAm!q!&hJNSPsTc$9i`Xl*_LAwpg)+4@WG=t?)Owemd0k!J(l4I71|N~e0x zlTv$`5cXs#Y`fJZIQ;Hvi9UI5i$4rm*<7e_8@W*1ZTot7#gA2&Vlo4G2N_LWULJzV z)qCm<*}6&1)rPQ3@(ktRWs`sh@@#r*njCuAiqIwI{o8I^N`;FE3C-0dE^)els|}3EI;B+l zWu8xzg29%U(S+THg5YH~V4yms@hOz zQOZ6Ki{3s<#|VRr7lf`{Jg|^En9J3r<2}hOqb8Ze9|h!_2qyr?X#$}dPpYkeA&}5g z4KC=^5$38vrb?NN?Z0mv*rI9E6I$XxE|^b2r@E<+oE#@ku$ 
zE8e#N?otKL#*UHC&c|ic0ODeJuh^-H>oAv_0RG5DT9?|Ik4i}3g7lnm*IP1&e6X6h zSDgm^Tw|}NM%)N9x!FQZq;Uv*cHJRQI{V4Ik4@^N6xdNfJO^&VepDxO4{!Nz|GGW1AnDOKTs8J+!4J&#kBvs$R-vz1n=30 z_WYUBWAnNXJds@*k6p(A;ALWF>hWw{!jKQ#Oi7q%B0rkqlq2xz5#I55|K#vb3tOA& z++J(PckbKJ$+zG=-Vj4X;rwW0_xeANrgyovCJ4OIipzeQ^CTy0tnoK0SoO|{cU!Y? zv#ameoktGnnxW;Odf1mi#1qNpcvkj~&o$dvAsdBhJrtKp^!KpTjW=Zeo{NfFKy!mYD>yF+Orz zdbFPXBKr@YuIrFT6B#|~q0C$1o zy%kLllYhcsbHF`6$3{FL3*3)B#8eh}J<-5x#fSgz24X8cpA+z6&h3j3PtTILF#lo0 ziVz>)Gx4cPJb?!f9kUV!VkoHKj?>IBQ?8Niy&XV|Ns=}g7*sS20!b5TUH$oLI^GH9 zqsdqJ$RK7ST7~9A$$uJAx(6oOMmPho$l>$V4Tk<_4dxF9wGJ|7Mz2KwU-24uGs@ib z$FTLfS@KsPLN@Lh6{zhT3mQ?FeRCGBVKMwm#1`3>O5zC zr=XDoezi%+7M{IA>OJth$1tMfiA?|HgpeXfB!xt^~e<}y(87B=l_c`@0Z0J z?mHE`-wM%P=steCd6va`GwpgRm??5CbTzXU=pZb}V()3qXTnGa5erFm<4Ob9DKd{Uz~=1lt_oMs3g zgjiRmH$B`F-THZQq^@T%>2%=8zx5Nb`?9zEjN5HnyCY7_6j%Q$DZ(7R_NzvhjlQ^H zz7I`ZalHTa;9!1hO}&FGO2E-M+_m(1oHk?R5b&SqK0ofy`A~%@m=4{$6R_w zLju*h3B2!t0Oed?!RZU0U{jUGw#n0WRcw2QL__`0be@dsi4T-tgZONK3a}}EDe3ZM z;NkW3f9bdGf&26y-?OGx!=B_@W#55ZUnM zVcQu+;KlKbfSmu)XxnAuW#@4c_>kHuc64J@JWx6i_eo6ctY>9LYvAZ+UDR@t72C`5 zy*bQp8aT7>rrkJi%$U}8qhO+w^XScjn?37}FZ?jGLyLV{z8IPkxXNYpU%KC!#_Azi zb=EzmnvpPaleQ|-iEOL`mA`)SmMJoKP?}H}y(o-(M>0~am5xD;)-yre(C^0@^b4!6 zU>IiT&(5w!a(R3|LEpI5)Oz*n8|JxR)UStUuWYq*2X&4r`J`T= z%~AG!5DS^`+M@MyfwwtP@WA4LwC+;^do5(|gdJJ!uGX0I=wzEk{*`xRpW-jRooZ-7 zqYlmIGjn6(tYi*NUZ}NcSiou{00w~l9b|AsU29^|>P%#YhWp6eV}rzoXArPbBc&xf-2S?l4NkQ!NaX$e>a6n8D1RnHo~VtVr&*#% z?te+!v2+@xN_!4uM)+J9_Rp27INX4WsH6oBuV=4Jqiy~^b+xkKC`bO*xIY|?q^lWh zrYn7V+D{Wzo$c)z=~OK5Uv+9T(gGRx_OmoZ%j0S|Yg0mGnIpA-E*DHa+@3?kZJ{sl ze=PkpQCV7Unx+z2SpQ@XmBi=}TxU)Fz;D96cFxz*Fg_+i%e;orahQKuNQ6G%V4|97 zj0lmR?CtWn+2AQ(`YJ=-CrWC2a*+2lui7AHTfx-3FS@EBc{ABqlpVp0_9L`xY9ZGJ zsGpJ}x+Iem!!}!uX;x8+{Z5#5j}6K6s&Q0+ih-Y{pQlI7#Yg0Mddu90gw^vnwSBt! 
zl+QB;Kc$uIe~F=k;If$Kr6804wc^W1n27mwtxB_JvmT40(GdDOFWfX*b*c(YUoBOx zJ4ERG{)8`@3UkunM`X2yNT^9y3&eE_6r)Ucm)#aB5=9VU-Lqa8HtkF8%GDNV7*C_& z*RnVz*yQ-v7h9Sg9bgY?6>}Rq%$NuH%^Xue!w^@fEM}lv7nAw>E&2Ez$%(G&XCvkq zWhjP%0U`(tDWGB>k#C47#U(W*3v{MSw?)5JzwfhS+e9U&k{140ZP1C$) zB)WnV4kwlEe_>Jb{GF8Jj8o%=1-4GDI8gbS#_tkb?WS8SL?pfbUj~GR{s8kE%yHhg-kTu3m%6ljb9CY(cP<%2W;dmC9=UU+eq7wlKP_u>oq)Zt@)G)`i#w`j;VFyI(TFSBmW-`y| zppZPQll!iU2c2V&YN;26^`4j2-$Uf!?|9j7HPBZx;JKP9mbU+U?jis3K6%{uU7b{~ zzwEi6y6g!$(ebC|MJXZqUF%;m{wf`TJE~G<4Jg?C*Ll$PE6;pAdkY91?X{l=vlZ;A zL~@`94!k0r@Me!IkQegxWXced7-_L^kIA5N*`z`{gCc&T5AemR@y}$7q5I=0E+%EA zX0IS*{UYxg=q73tOWA8o2R>2r)jT=e$?Htb$*iS0ud759ZIv>O6eY zR(lwYbT{AzL&7YfU#E>7W%X1bb(6dn+d!XS-^H?m}u!i~y&qTz30uhX#4 z2SXIv!Cm+|^3ltCHe%plGRz;e|68O5QKh)n#p!loqUvSeRx3)I2kZ4`(lq!ySt--H z0FNyc4NJo9i5-6Ys`C=V_u#Im#UpCn83J_h%zqlqT|H5!Q|4=reIv(X4`6vSjjxCK zf2Ls5sM29&^#m>Vhf)Nkeg>(Lb=$SoS|1@5P3cZ%X3gImylD6Wv||>m3n} zT6P1}p0-PToNuGh7?fbse}gm(mgSSRd&YjAOvaB?m}Y~P4C2k1Y};zfQF*Wov&g%~ zSyJ95U!47me2$P^pZu1zqoRe~6Oxbt_P|)I3cCh&ZyQz!CgoXLJYGfKEDvql;_NoH z%P{$ATxl;+U4$fvk3>GjMi|r2+nyLGJhv3MRzsg;=l&qF;m&}4+$}Jzc(tX~ays%B zv?2seyUGMCf_GZWk@jxk9<8i@^JW94GQpFUq@e|jQ%cl@ecc6g4Bbjg3OyWA=vCxI z$4d2{NZ!nUgZbtYe4W;`#$ox#Vuayp4T4E&9ygwMF9eAQCg1HXU*TCy1X*}`YST5h zt?*-%ZuS}mVmV5QYocDlC^WLnGZ!6Tr zYDd-}7|~l6rFIDBkfj@KbAnAI(=@-bc*OZvz+C8t^aA)dYS!+W(`v3ed+3Kxsug89o{CD44xJ%>Insrgr$C{wu!Xl!g z)I^(3mNBRxd))3 zS?O*1B$7G4p7UR$KQnGG&zfI8v1C2ORFKlYXI3ssk1u5YCjx8m+yuS~p)~J$iC18qbY5 znXXD}eq*G)2M@3p`)NOdxp@#0+3xtlVkg2_xykE+Toyt6zZDj~*|n)c5P{_cT-x`= zGROCAe|5hP;Gmx`%_HU@TM{1;Y0FLq?cVLRJ=M;(MbH8hngR{?Obcl`r(JYD{&qx} zIN-P4JjBT-);F}U;-}RGG~>kqo<>a zM5S2zbUZqA-&REOKd2@1cetwgc2*zhztmm$*av*dZscaYpP%@)LFE=q<P=P>1uB8uMcBBL$bmiVgK=;t@4pzg8RvP#PRK1&KmsOSGRV3Av* z2cUAi4TQm05td#9V{xK2mar4&A6x%;UqsXWz{#iypE|vky}JUD0UylI6Ylm#X`t0k zNnFJ|_bHKNO-gJ#=)ZKOVh~=P3Sfo6P{ST8g}i_UP zeED2-&NYv9z%r7bn@x;=tU7Ns$0gU#b7{n?<_{e5fFU-h`;(LV56Xtnb#|iAJkKTD z!Tb|b#eoQ8*0M)R|LW!t_3{4e!#CUYZ#55*iuwPI7_$iEEKWpeq$o4Bh^@a7Rl%e! 
z$YUq=b@29|HK|O!aUY*{mP%JyY=npswQVtd(&iN3nr2AJq^S0do^=*8kdBOWgcF_f z8$$m#A1vI-^d=i#4@Wo1FYz6L` z8T1WiIzBFkdy4i#PGCQS93EH{ERC!%QS!F3DMnpYur_CJlNJUP-A~MKpi}j zJxGWdM|OKf8FwnbWc#9#7p%hV>1Z^KH#*p?va+xL5?}q~OQ%tT+`4jJ)iE)C$2&CE z1o-v|p=|iJULXLI@4%PgA&CPh!z^(;Zk>gw5?=B&6amhsfycH%kiQ=v%2S zFVE~Xh(O|_sVWEUc5{@?N=9b?>=!gPnci1B%#5=m=_jPl7&N^%T&-$RFTnn0ziC`8 z*I~c*OMivVmH*FTxN7kh0}pV#qhDeFWj-3Ih;duemXXfQilShM4-pTHRhs)(QGHvjiCzXz_-Q?gC%WnEU_w7Nf?5cR8cEJAHm@x%out%=O_+ zCBFn(#-EJ4-;Qeq8MStTVT>xkGARr{xm$eih^G^s28Q?oOgqETXEEJ|iC3)@`SM^= zEnqO0;@+a;Y_e7!4i{`%r>NMg$z({l{AWfbpqIFW;#=t>59q|hpc6-9)vNa0>Cc5w z+@7=eflKr)6BO1UKUQFAFWylN?7!^S@t5{eOo@x>Uv`-4g=|Cb`KcAGSqYIhPNkuO%zrb45PHhKPbDwfb-hXwJty$j1_#H)1{{Q zwH{%7BWvZcvP{7|*JZlJW(@61)77Pk`2^jf+Y{jVYo6SRr8lWRWpCGG7bX_^Z%-a< zb{xWu*uYbW(s0g zq#7tcTw)FVcEJs;4=f;cc1>kwl@2q`+wJA;igbN6IH45hu_nbEIXVNK@`{duewt|t zHXm3fzbkeM$Oe$`x3E~C)I4U1hTq^y2U#%b;urwg9nBJzE8;XrquWC{{UV{aM@t_S z;~&XfNHFYm{fynGesN(sz@8NRKR3q0;kk*~q?=WrQVZF)nnk(puw_am$|Oz%;~AZi zWn^R}vyb8{Z~njDGRLCH18ZkrEZ)(xiT+T0?&*%xfIn8EZiX-oT5FLU}Y%35`)a?mLrmx1Mj847hy}Amlk7WuWb_z$_^&4ufc*% zKEDr9Eb3F>2FARXtWE^1su(g#79F?z(t%hTNu|y&t5Zlgr#~rvb;&u18M$CmeBDaw z6f8!F>7E9o%GIT*P$p=klk7f+#KreZGMB6v*``NtLF3|T)DnK~*UpO(`g4tue%ls^ zzB_yI62E{e?W4gDdgs%h8yrW6^ZzY5w^QF7U!AMn)oFJEEHq^mHG;wI683)U%RC5BXwcAF<*?bBh~1fHJr z^Ohk&ug7mH9i45t{m6D=R#nk1j??|a86a-wRKF;DEXtmIlj7(D2lctT2B;X_Q9?tPOf85X0%is%5goBq#nip^1$QRu| zH~*x%^X!a{bc<>x7CJR{^BH>xIorC4aH_qkH&-=X{OweMVR2lPU3ORH**Oq$SEPMg zl%K&W;}!j)wB*KTdppsO^~7oA+EpU-r2-o7?6pguYpT)9NxeyJ+IqjdPBdqnukk0G zR|Dkcd+E2?2wY9go@iXbH^VZQclINE6W(fRU-36na^9rioQ>eBtvPwjR`g07K{4to z-4x_SZA)>)T=dGoh3V)C2c+pw!EAie^ywxFeN2=Zwn{FRRcvW1cD?8H!98WY|1YQ4 zoir%EHE5~PPe_1=uh8AB=a_Yks;}weVF3l(kN94yBChWnE9SyfB~n0nbaE=w0y~K0 zZGBEpt*F+i7wfjMUfeJYFBn8jkJyKMxv#u}PdT-ctj-yYg@^7tKkc%uIXryRvFdtV zGGp+`wD;YQdGHQdKF2@nmbRN=F^@e!-V3O0LWw2lpO7x_)hF?oAbFt2hrsI?|u{PRe>K zstAK=bqzK9X$IqV@YnzY4#PNsLG|nUBff;U|IigwU;WmcKbWUtX{$Psc9dAQC1Eh) zymZ2%_DnqF`F9ZHif_H30JW{KTZ~^AyF+3$igx0}Zl5;YdAT~aM=3%w$mj5!o=*j 
z4Qh#eE?_2MCV%wUEZ7O3d2=ehD24N6HDDX1I5u>gm8hI2S2uq@(f76hEY!B z9?|i8qi1lHmS0Ak#_>Yh9RdZBpbqUq`wgdm^!JU}^b&t~H#smB*MhOpR!dWkH!z6d z`0(jf^P$a%+KBUk4Y_2-t9W@f(;uCLvz8l*(~nbskN9Sjfu0cT1u8_t=mW4EL}@9Nl=XBDTH z?Q2Bh1hwn)c)XLE*w4jcIARMV{e78!BNKpqrRmui7!3dS!H`1bd-HGOk!~%u#6l7O z3Keo?y)H_FX$iZ&k>4atytgEJD0@fXn3hiGku93_0FG-`KCk^c+tT<1W3Ty;_wQ{U z7tfHeJMfEt9z{k{GbUfXx@IclQx7UNPlcU?+2FgRyh`Hg`+U{s|7rlABX1*LsYHJ#}4Y+KCBj(>GYqFVZY7<=omD4*zkcxgci36YRcK$Mo2E@4?3 z=>|cTZUjkb=~}vE0TBu5?rs*OL8MzGq>Oi#&-eGnb-neEUbD|UGjqUmLq!0+67HQz>x)nPI%jP zI%V&@2>_#Xg(FUyB~&OAbLs22<;@AEB(n>}=Z?lv98iQ6%x&w|VmdN;a~Z`Fn+V2# z6N9_Z1dr$>;K<7PrwLQkZ)N9pPQZm#F*DnLAVD*nHLinlBL$x=eC;DM;E7d&!<6)k zL>M3;9f+!oE>xRgX3C}|{Uti__0mVic@-QJcO##Xa$C{Jhc(X#@MD8%8j<)8%BV(J zknS>R|4oa{$1#x)1=DV$r~S*~WSf9^yk~@Bv3`NDYNFu70`x?XUQ->x7I)(nZf6wQ z_cZbx!dZ>qgGPP|xNUF+8zH0fYoFDxzU*H6$itiqx1JVx?C9DMm^o^--6~qoPZ(Zi zU0XfC2)J7?VF0A0`#6}?z7yp4E@RY#x0VrY3Rvo6Gcr6LncELYTv;(7aKHQO5%J3f7-rUr#KUBi)z_0z}txYZ|pl zL0!q`p5o=?*28V`zoXAm3|=FFV@$!gdI$2Wp{e&~?xB#Py0Hv0(YYOM-UPCgr;_t8H#z?7NqLlauP z-~WS&r{(e{6;o0m6Em=zqqM$LBu(f{!#M<0#ClA=Jm&Xox{XiEjyP`+;Og;I!*u(- z$+0Yq>A=rF@zp%OEegECLel1A^Gil*$ZAQ^d#i4bZ~Ch2t9-~Mc4+o~&a=H`mb>JZ zO-DJeX5{kgpq(;Qw8YCLtknj)4k(?d zS5~&L9Q`X(*d_J2UHfV#)HywuZIHWn-e@yXb6}U%Ej@s8P~%JRh@jLEXVEwKJ3`i= zRc1SJdVLR$Lf#gOuHf7|?^7!BtcJ>xJ*gkl4Cd+%xrrOs>=Q`Qv6x0YV>Rw%DQdHv zF@HYN$9!{f*~|dkI_Yya|IYgwrPo1UD{XnQHgAPKV=c3MuIl!neH!$&kp>2Q@4}u_ zye|z8RZ094UioqfWmV3;qbc7sNqeA%Xf&9|<~Ptc8=TxvW({(H`YWkbAsLe_j(L(^ zfS>e$^Dc9ga&PpdG9^g?86z@@x&^UpQdT3g90wVoIE5mxS>4jX>*~(28sUNO%td{T zx1njBFT}13wMz6H=TJ;zRfn-D;X6b($|WQSNGYRBW^Q4T zZT4SI6w61n1bLUfGCk#RbokIdW)oCoHH1VnPjGekoSM_53K52zr*qi9OJ8^MP?s8sEhqtnp5-IHGIHwb9wu3l z2iwO>^;@SF=nl08Qp#usvrOO+Kr$lc+zb2yAE5N7ZWB2>zJ$yPXrVo{I<`U!`>15nqy zo8n`@ z@I(N351-n5TqGd2_AEqASRXaGii{ek=d!$^CY+GNh_j`(|6?tUpHV{c3htrq=o5$* zH&1}L2?y(sHdXhSxFPBcQ6(T=lW*N|P(IK`EduFVb*Wq|uGxt`$GM zNpD%RM@XmN=+sE2m@-A0zuP0pv0mmt$9b@sP%csrWD(XYWJogVrK1S`dNuAE|I3r_i0^3 z8-1qMb)EE#4)l_@IBMg<7&ZvGUlq6cfGLA6QvKrY;R}@N) 
zz;q^7h`7&rnHrIyy!Mortq_*}3M_J~`aeR;oK*$#TJ0I|g!Xo8CH#zTCQ|M`^)x8M z9FiKlSs^2n2*qFUGhO3&nn&Hy56p5BiLJD>fpSxtdh7|y;KL;4c3<;F+SOS+yj2hD z+o$e(*ZriXp5K+H`-4+jeH1l{Wwfv5w&{b5cLuJJNdg6w`R&2+khJ@WSw_m2sjnC{ zo`z_$(!>;Jy$oWa+;MaM_tfYob>F=>CifIK4^u!}Utu?`x>HdRBstR7;f}#QXzTh` zqV`aq37pr@;`V&zZsM!dyt!d#YEQGo$ZtScS?u>QnlSyVuBMXh5f`^yU3Al?H}IyD z8hwcC6aB64H#z=b1>I?YR@8N7pw?=Vew77vzWT(KGgeIeP-sepx?!h#{XhrR>&Ff0 zpJ4MyZ5IvhcUOjjj!%8LotoUDlzqaLl;ed8rf;R+9qUjyVACWKOLfclY^oU0-Bin6 zdN0txqfW4cNBq%At9R!66lsJ!*Nh;NK6&4$*0O-(148}$YB?ZtF;;~*v}*|>g(?i zh6xvbxC6>X_Xh+0FWX&qqO3t+WAr5%@nV)~$KHz6a-3tQtG?EN+SqnGn{Yw-7sgYb zlisR&D3^slZ{&ST>$=5F`R9(I$&;w#d&^P6QdDlRtNt_LU1Qt((z;2#gl>s6ACW&o zzW^n_>%pOSyA}Ck^P|_~xmfT=5v7^gAnuOz-hvM8C2pFXQ7RBjk_X3>50PaB=Zzugh>l(&>`VPiZp!4_wO`&dquI z4}AqqL^(|?SxWT_VSji0rdh-sSKVdyh8+`L z5pc#3z9{2N1|19$r!=n-Q}$7=OTo7ADrGy=kb01T+BM#5g98N_CaPM@@VBH5{|Hh>TXhuVl%)e40*0L;>wbJl?7W>{NWb%Wc z>+&Fn^my*|u=|lc5(<;th6S%h3{XXM`(3N_?&al8%HGQw)-OIV7cKX6ujaVz4Rk4c z3taF$DmM6*ru!R6Vv0uS7=1JP14lNCpQ?>KPjq(Xyrg&&P**?2+5buj5_V~MXAdR# zt!26C*3L02c9Ap+?s#zu?$j)R{s)MrGmgjW3-!nF7nRHePtCv(6fb9yTMbAm=+gTE zb9CE!;eXnLs_kGAT~USwBZi1-(zizeow;wTNcK|wsZ+2Ak3V-?C?9oyb2XwbZ0=4+ z^)d%mPPITYwCJE4rNiep8X=?6*L?6{k*tYuBJ#&|-NiH?S2Jhr@*)`u1!2_WSc^rG`zPGao>AdSk=$k^`@>m4c61DvhLZv-#$xpP{{ZK zHf`9d+^7kaxn+EN?I{vYxP%5WN2!li>R9p8pn(ryap0Ia(t;4Mu*SM%MA+zh-DQ?) 
z(#HO&=a-uP23ptVpW}AB0t5c6*9qD36$NL(-{7Rxfb=-*wRtM*E0?^JcKPt11Exa= zM9JM#-BqcI{weVH6Ymh%xMj(4!o6};5_bd8fN=FMDA-dcB4f{=!&PXh*DYXDNZ+KDB6|t65?cqUwrA=PBJ@j$gT(;_SuSH(wXlC( zwRSqxLnt0Qxda1lUA&;Xj(9;QqtJ9-Mx+-m6Y-V1b~1JQ={FX!a`W^9yE;zAE+3Uf zcRY_ofqM-|irKKsOt-FTE0{joVv?fj?9Bl5CnP2XBUBB@GsTe|_Gc{2?se7QO<{Z8 zW~AT{37n~n7M#symq>^pwfQ^@;#7JHEsi(?}05{}ocd{vL&>6?b| zO%L)xYYX>ZU+(F>e#H#W+|(xk?V3d0$V~YwTE_yT%*|cMU@LNDn2kkp^3IUew2Bdx zhGJM<|Ga1156q?BdA4Jz$vf~WYJL6R0s}Th8Vaffk?k==za8KkMSjCHMQCK6Hk%;$ z=qaO&8wGLT;TD_3qKIS*^J8smeefp*V2W2$FgEU-$<+307QCDAqK##J_a)11?<`Gu zJ~JMs@A$R@_{K~heJYb+U>(j_nzgVb(}^@JAZ&x_|B&gNRXGS{Q~`0>tVHhBF^^Dv z(9>$AIy!S5XD}BWWLdbY@4s{y zN*fTKOl6&WlPGY&2~XX;pBUS;t(JxfU`NDf$pyF5^t9r+bMRs=1$d#h+E3(b%YtIn z1CAZC8ZWx-skdwy)pHokBRJ`wpN$#i-r8UKomr%SV0y`w{1Sbrs zgyNC!5J}YrDV1LaKVR3w^xn^!7Qcr5ZZy6B-*?mahyi}#t_nM#5^>=3C60U^QnfM1A{4C{HDPAn!@7nId`(@eEk930{!O5c*3iW>XL2pN}`(jTmih`Y&ct7RT?r}*jXZ(Vi0$y5*ZKzF_@BKFGs zvH6}I4H9(rHT{n9ta8m|cL#ONu(sOgvFFJ~>sD$ndnwsEf`1f{)l@ zEh9&sa>Dn$D*iSQ-x*998B1;$kf--cCSKpQ`Awnii;ElxA7mJN8AA~k`=A?xoP@|ayj0A6xxE%YMh3SoCG9lq-K4K%b%uJjsY_>EVlam( z-H43TnX-5GXM{Gvz8PjZs|+9-t9h1|80IkNoct&tWHV=0EXH`$!;iTSUC{+Tqaip} z%$-fvD>Z=S#<;N06Y`*^8!zG323WI#G)(ND@$>IPeaZe)|#p=RZ66 zV$HBe#v^bz)r2m#d`fb=9^al5Skn=xn}56iBJQo?F|uG}c#rKs&F0Q|s?_CtfX3UQ z3^Vv;eQw5E4LAP3j&xM_HYJTAyf_{KQM{f}Uv3{WVe?76n?Slf{f)u!O~%fs>dc(+ zx&p%rbrKi&WU}vbgS0;@=hy?T^w<$QYhM@}*RTFNLD0rZVtwyycI!bu7vdpA>PPIs zGL%EUW@hT!`)(Q0D(BGxa93kCcuxc|S+XS6JUBgQC@7$KEUxqt6Ai1-$M4BpiHCRw za@+Y*-j6z;~OM4Z2f{mgjNC!FQLzd9M zN-iMoVCf>pQRhpU5$cE=v1g#a{H4iD>=t@-G`9ST4;_Bf-0VB#6{n&(IqYu=q z@@?VueJ;%-W9v@-LuoUJNHMrR;h^d~aw~1?5O>MTI|AV(LY-9JvJ3Oe&PBwwN!!^+ zp6DAJXuWj87(SiHc((C{W8R39!1WU*wf9V|5RJOcBS{@f^DS3p6h%`&nJ*{Awdn1; zy~L&FZD&^D4-#_+Ea@ldWT8uZ%Ei!K$+?tjxS-2VjXD&Pm~8$(t@P9$9`cKrOd4HX zs~2R?LIVHr^mPaSQh%{@rC*(4J^CglLi#!k{~&)K<}L16Q;lx4<=+8Ei^Bz-KW5>s z>a?30kH9Xb!@DhF6E(&&Q4rXLO{@Nk*?v>wJM^${;;;!{=tETFjNz5C+X(Y{-&B_T zZim2!|CNj81fCWeXy^is0ApKuBWU~_4fMXNRPTs-)n6J!bxoIpU>^o5*rwpl8NRz< 
z3J+Kvv%M7_@tI}QnSraPt628i*Uz-&L%QmZ)DRqVz@y)gGS>DD9=E^lPEhR6Qg02I zTw7I#n%fS!FHqEWIJPea);MD3Du;+HwWrk(`eAL#wO~~<&p${C5yrtVViN&?{hGbv zLYFFek!O-@s}r=8JA)A<;-0Q?l5fn4I%`=j*K(?%iVE<1d~a8dado~1R|EQrn_ zWmwPOH#KBLKHKHoxrCJzd^s;Xywjj+@y^L-)DkxKmINHVd;eR>W;~Y?Jl`;cKVPM_ z+C(ECUL@k_tPnOnKe}bvEU>|KspkuhAG885S0b0Z__r}r@%-?tZ?AGbkJ_9!&%cd9 z*s@=RZ~6mbU+3XzWkzHpnesLKUxsST0(N=TiW!1_P{@*5Y_h(}iSPY_iJ^h=fSQJ@5xuS190P)hEK64rve{G&=*`|25RJc9LdOv-)qtQ} z>!4w3347cUB%#18KzLWeJcoP53&?$NHV{vFXJ?M7hVR!|k^r$>?tefw*4=!<;P!3A z-6lz@j_&SphX4HW3d6bkcJ|hB87vzu*)vYlv)+1l;}U&{;&=jR0GuxrHC?_ySEn06 z?^xD!DJqH1qMZV97ANU;LH*jz9}IBzAiC9sJ1j>cRW7nUk1+QXFi` zAQw;ag^s;dgRwpN;h=GGgQOQE1Eg~f%o|Ha1oSGm7?-{s_j1ie}CxK<(MH~4_}XOWBas0fD> zwRzd5?ET;&5je48wSXzk4>S9PGk^}Rpu_)oqD;>jHjpORrqG^quOG3UN8k2iyZ$`~ z&h+vunbFrx4;*0nr4052se|MbdrssDdf0|KB@Z=9R6~Je-JE9<6s*&Afc{}Wg4J-% zFFrtm!znp?tFj<|Gvz>_>`@Z4YCVS3{CmR~#s-JxLT z@~)lAeaFPo^Ks!{d5^E?Hcu!m=MoSC#cSa7>h5V{6m;zX7Iw#fLiD8;xp zsq42v5eMk`ky;FQcHm}T^L|S(%rMN&!ZgYGXLv!O4Js2Zn$~iUNi!ybQ%GzCrwE~qsJEopjt5#L zSO*NQ{hKzJ${YF&yrA&O2v$VW+x>;$l_HwmnTzpuL_ZYDd3?r9D?^=d;wZn;x}^@@ zr%;(v&5F<2j8fXQgq`;1Sy1da$B(|Xp4{JVNGakh(F5*9KY{s{{YnvDAn=Y`6DC<$ zQccx?m^OS4uAyryDB@DJMNm=j7ECKrIj*!|<24peAx}K<4?R>boBT6NuCtak-H>P1 zU~^VSUYyeSvp8ll%8X*$IavVEWcq`?r*ZhegkekQ9?|bfuo z*K!>y#AeO7$+d*gZYL+d@0IY>RY`Fk+XC&*FpA(bw;%w4wHD?x4Skad5Wz)CaQsf= zf;3iSyS)uxO$c%P4|vac(priIsID0G+|n64k2(C+j@1lFW6O;mjffMlKFUD zR9W^|4|RO@4pXy9Ds~FIi4d#wtf0E{h+!B>^QuvSrW~g@1$Dd-#0}m~xipx6a|vyk zDqV;HlU`cs&tF(6fM$k8Zu^wO5)3>rr)NsjsKfk`+m56;1#6P!OL$U15m^f$0iHp* zD1)IN-eMS@a*%thKi^FQZsst#&&s`nn+Bd3XLpvPyxlO+XemUm#PtQ}?$9Itpv39l z_EY;`Fe6N_r&46WY4aQsF0j|12Fg)`@)%|;@VF-n;1Qx({1gnn!5)B_=n%K_(Eb^% z0>B3nAMGtPfU8~t_w2n^mJEan{!bvDkD$Sgj%dnLxR-8DFMZ^*eGO)N(br6xwX)}8 zl7Vm~NdVjim^4^Pf~r(1w2sukV4kTq!MoUC?kVt*K^k{Pb=nYOdLKQMHFz%3zz)B( zxETtBmu5^4Hbj5T_g6P5{_*oZYJWReO4!#gihpN|kS4wKu-(AN#}w#0bwJ_ZD4lZ# zXhlB&H$?C~%R4a`>~2sG{mHo#MI0S9g8K?a9_hz|!vY^)dze)=ZBnoi>-$`Lfc}~t zz7`w8Qf=^8SIyKW;`bL-Xj_y7;?;X&Y1XlCFP-qSI$s!XRrLdO( 
z_Xd>J*$IAWxAg&Q;rlg_Y?1MbTm^ncsNijllP2Kv(o1I!U7jjDGb-46i6Y z6SltL|K#0~qC_uv4qBzBJurrwyj}3KD&Bhzb~$Gp3MR$7nG`ajS@&HkFfRouU;Qdk z;2RSI8FEG^^oGO)mX8zsb`(d4z7TMp(RL&V%w?fVK&)&4W>;z)n$(oFHDPR6la-3u zRXWg&A#F6u{RBv?Z0dg7ffqpRkjA@+t$jyQnc*DxC_~Stq6e_%ip<@+UJz(=NO@o?} zPovI)qTelEfbuVz!*U3N(|Tq7Y7qi-cQv@&yXN8#1U^es3F)GGACba`d$erAh0=MfoLbpY*4c=AQ+Jt3zkbiM4JlBK93JqYive} zdDdO~^Nf!-Ps}OHwQOT57`aY9aYY#%UUjurMA}j;57SW`3$I(ZtSfI@lg&(q|1@#< zYM!rj#_afzXewlym5<`zhIT1R-jUgn_Un|~nSO|1qTBsrVL5P1^o$SX%+uyH!G!$v7*Zw|IPb_U*qTQNn;O89O z?C4zDnQ^&$=S;uGg>g}7*6(JS$&BS%kpceM>`K+1H&gIB^xJ-Yc$~4T9Op#VcSe}$ zI?VqrZtTfmF^3nK!F*4R=d#i-ul_A^DPW0$`OQ7ZjkuXlTCeu4Rv+EW>q8mRtX(P% zUar>u)>A{^xOwtBRGKJeeR6^S*f>W+u)#tmXuf14k<@?6jbp4uMJ*s2<8iiU+ zo~2LrKSBF7-ENId@{#*c^?NTTMKGFkWtexVzUALVB}0Go%FV^lE5Gkg6oZfFxccS| zR>^ezPZ{Hq+I3}eXRxX}m5m)!pS0dwSxKkowRYQnEwpgTW-HZ~<9DQjUj)4=)pvyn zr{`;xn9hzMOAc3oyVouV4%n!PONA2C7w3!&-)@bFwt|?+!FiJH-ts<4X+qyL!Ltq) zmhZ)z=6%!Nd$wM4CwbnJ7vC7&PNXFTh78_n687(5F)o>2Cl3b8e&q_j%pCYnS+wB7 z-Q1U?F$JSEZjALu|Mfj!i7yF(z*17U6ECIO2N0b40KyClUy}Fgl57S)wCy8jKGlfd zs!vKdZ)|kp-8r1_=5X0)>TAr*La=vNKJ`juZi%%0`d5e^o1obEn598r$~K;@N#HNr zy-ish01oYjpS$XmJvBD|nZ%eOI$mq$00HaA)V=;aotXyZS_a2yW+!;grStS#L6VFH zTwW)8gcC10TnuLwm;a$h&1+rHGH@ZA`SizG{1zH*Stg)me7)vF!iw}B5NTUyDbxDD zv|4`kT{5VFT=MiAP&O<%`hRSJu%1YPza2-gtaAU=R?1>`&CHD-s+iK(F%u zZwtijlEE)Y1<&y@SRu>-fJ0&wFHpMH7);U-VN3D9+&#)Qw|X^~bn(}zMEksUdwjTM z?q*qmL8Jx^4KnxKZNDO#(F4@jWD&2*pNnsg%kEkDMhhT8THFL=zw|$UKK=cF_U`{n zt6Xc(heSd#1SvzNNwa60Ie%~FzuuMqH~zjC)^~Sk5ZK}-6H7FbbQ6!q%QunMCk|to z>hsR*GmbRWOZuKv;hHRy{Wt`~47&XRkVou^RPNE!AHNea{-+VQ%l!YXc{o8-7yvHwqw;e5h^-kBJO*oTU`A%Bhy8eJnUv%q!=@;=^V%Czx%y!xd{$R% zgN!g|$-(H)h0dX;^XkR!K5Sg4q7s>zw@AyiBr*i;4VbBO+Z2Y#L5W#vOv%cMenZe* zA-C~tK+v_;8h-GYa~6fN)vz+>(w66a|8^4`9@Q`{!Q%rT|8nsa{$0Qf-NA-2Xf!E-xrG|lLZJ2Gu`pG#wE2^Az)VG9Gu1N@4xsF5)6cge; zo=}#D#M({M2I>ZObw9l;`(aeMd739dS97oVO;XE4^{ofSk@p))irua1G#r?AGdm*} z6!er!d25m%GpTRgHXeeceJXZ0#u?O3_~?pJP7#Ftb?Q0SealzSDKEd7RcT`zo2(hc 
zaVOI*ZL6(4;{*O`YOX|=Z;EaX0kRwG{7>F0&azE-RVebwKU{cT&S6P4SZdqc$edn< z(BG;lm{M5JDBM=+BvlSA*lw;(8jJWmMb3X4ciMk$#IS|YVV~tQL-al=(IaueS7c-S zw2d;00#+P@BnC^r4J;5&?VD${{8q|J;bV)l-IO9q+q{v}44mJ`#j3o>#=gFDcJ}jP zBGPDc!T&dvN}BBzeQlPijvtnIth(9|#AIOQv8PYPFFotTOw}36K7zs9Xkb{K6`_?f zn0uEnjZ)utKgEf!yIQ27&*OEXi|ie|L@yn)u686!PKet4`P^#5`%1>pzyXn6yL5_v34LFr7ju_qBJr;WYKeesphIv{&Xw<#03ze!8GKXo zwu$Z($M?A_ccIUmnTtwggDkC41#{K$&jSuLb|yVTipzRs#4rN-$NvR1Il2(Cmc zfhl(V6V~ruD7LM_LzZ--J30}wHgJKUlm|@&8AOGxkriKNxm$F;r_poAqW`T>)cHP_ zszql+n8hQgclxzsj%oXV%jF^524TPP+=F+spg@N}aOC99>hm3UzD=s`uhWqg12JC{ zA1vKTEM}{_Vi6t7&3-r^yhRn&r10>GzaW?W8T~hVYlz=thYFQHZZeA`)3v9442Bp) zZ=33~vvco9&5z}L2qzSB(sDhNImF|sBRAk5W{OlPdeRDs5xNs2;Dh(_)4;dY`l(2k zrKORl)?Jvvv%6hNTh<9m8W=Gb(M~gAIJx(2xpQdZnJRp$359;!2VzRvedW$*{m4k1 zAWV!Sz*|IFy0@ccJmFTCgx8+kU=Fh z{IegWv7S~Q)kS@Jqsx*)YFokE^4SxTu)QB-FnQH&BiU?ZF;K)$$TMx)I`}Y0#kLU= z@4E7F0)mY(j(75;FG>t$5TKyxL{>B={JvCtSj2jYB;+%O&ciAw{o5cV$oo?2#fs^$ z2d9`vJdXpcQTmhWG9%TO_wEFY8W3Kja*aF)aDA}Vp|Yqy8J`v(-Nj0!^KwcJa+$LF zZ!x`$t4kF_y!QlVFYFxO8qXHw3;tkVCs%vlWD^F|ZJIlWAs_nOREkS}z=4&pU8RE> zeai&6<=4}fSQ=5j-1Oz~G;PyvuQF~0n1s-shha{|%7>A@?%#b>i$PCWzVG85UMgPC zZcyVzXd+3%dsgm=6&!6L8m<=xjV(~v1L3)@^251BHCvTOQ^~@n>aT&VraHbJ& zeau5xV~8K>pW?RTz^7NH-FzKoxL6L|2DE*LaLjEBm(t5(PjNGUeKNt?{gW%P{Oq0a zuMVHGzKX$HnaAn9{~GuSxg4o0aWh#OW$YQ~yM+{Mp&K&R?Qp>)TvOW)C7Ra`>zG?F zLd3~e_&5(A1a#5bI@vZhQ;ZLJRSc99wtI5fI$gUp2#sQ{cy7oK=>%L|UT*8KFHq(3 zF9l3mN}SruhnJ>#T)PQtQah!;aj({S0WmMZtt%YDnhcmMcz)`u84f~|&RM67rIPWx z3S+QV|B+fN-8OT|Vj_X>+IhDv{(6GJ5nih9VoNPt0XLvIs^u)lWl$OgL5Y54Ii(g*H{`LG zLeEfpwK83uQ1-~M9BX=E$f3os zZ@*St{!~B;FYlkfce=F+t#U$5T)yUxt?4-QxF%K?g5kMspD*U^%J?3%E~gt2(9}e9X24$01HMsAc#p+du4$SHr@%R0R9;O?-fVi8{8U8x{6=#I8 zI3_GdSX1~@^d7*De#3BlLC!dKeNM`I>Q~siVg2X~3k`erx)I}0e(ED(XjEHVFYtRQ z?be$)wiME<6>*M7ZO#)JZCL#PDB7*Ai(eEzRN=zJOLwkzrnXZICI@|Ez)*({lpz99 zq}}%nI9h6CX;%E$#pc-{{(=wk*E5Cx&{<<;v&3>Cf{6A=Ea;Uy+#uD3G)H`MNXmfF zvskW5X|C7v8^h;hWn=FTR=>_pa6X}We)#a~`zpOr!ghnkOaQ24nFl(hE^o07jMi)D 
zCw#Tnzv2d6tAE24b-kS`QtC_nU%Kf;&>U6E*AtHO9!&Gq-koJ`?n%i%b)r}PL&Vz+ zI5Rzzn9tU_y^ARj$Dl@4#_p?Wm=>%WgE+B?jGr7rC*d^U>vTG_I3CF&rldIgOF*!D z7SA(2Kj)rfdlycdhvZ0gQ$S}?FgNk7AhSYc0sB*_>r<@2*D(nqre@6fp_rnJFh_;W zfV0q9YUa8^XMa3?21Z(6Cr{*+@O2ocgk0OAc(m(%8`kzNO&MEiz|juMYpvwAaT%-M zTjj3ta2RgD%OOvA#&K2AWR2xi_FbuoKl+Co$O3u4hf#=fjI-KKdVh0F?}?qJ7K#{2 zF9CdWT4vX?SJxAfxQkhugl+X5Rk(|`_nDgxrkU?jn{-pgh7=4xNU ziMb$Pc!7VH5lvZ3kU&66AvQnrIcS-&?ezfThckVE;?!p}pQH+9wJ_FN&6n;pilPFV zVDg<~n-#?Uyyo<@E#IkR=n0%W$Anfr{dKX*epCQjNtVBNk7G*|!;|tsJ2B-KTqEyS zG2fKL?VHAjs29z$B)o%F_R{rfEeQ?O`&lSf4o7`N7)sfnA1vdw&IGlM!vYLG&_Hat zY=tt1Zim>eRv#=c?fBuX7YFg*U+_*UC-t5SQ({PGalm323+LCBB(Ed}%S`%rKOU38 zHHbg;%qeN?zV{p+<+E=P1-jRSR?74ny{(ihhwtKOAm0_HP4`jiWp98-l+3-OH0GGG z-{=N?DU}Z)JH)BqQXAvV%2CvHPQ5$5qHP!=n@j2YroL`67E|(0K$oz!dN>RJ%J)oR z8i+CVFGeD1*P=78J1zS`*P`N2FV5P-p4n88g-wi^*|$HExZBHO4=LSgI*HGV%SVcg zsHgB?iZG10{QCYKWvW(h!y3@=UCI7-TuBChD#iJBqH_ZsJ-n|-eI5f%E^LPrMc4E6 zbPoQr7~sEA<-pFbJD91qvpkCW&cyX(LW)*BVpsXa7@XxUc(05;Gu9%!bX~tO8r)uK zrKjt^#Vje0cMWyz+kPBi;KR4$Bq7)*%~uS;$a;N+sgNH*J=|xD+~8TFfu4_f5RaZGC~^n~!&_Lej+l`1ZIW@XRdeCZ zS*FX?W5^62Txj@obsL9-f0%3r0_g6(qjmV1DEkoQjKrq69i-43Y2+d?+%D||$4X@{ ze-QB2>0?KC(OQBc^<^gB=35NY&JhvGiM3RcszSCjzV2@3lxSspyM)$Ri({D$dt;0%`~p95HBHD2vM> z=NT-7x3u9A`re88D;ii)Y~rT^m+z?&%MAQ;`VdzN(|Br#<*&rht_sc=KMW<8ZX&fZ znWG_ReB=v4LT;ibHjh${STPYF3j`ISsi{pfZ*@#F@k8n(!>AKN*H{DdzXe>f*~X^_ zEb6NTHB6G@wTN>2U~MY%Ov{wLgLIobx7GFbiPcR9w_fKE4wt&%` za{7RKj%{bJKd`T}^0)gmmuZpkcbnA6aT zN^|#d{un76gY^;%5FnxVEKl@UIoX<`TQHpVOna!jPj-1TrXXuGx+M(h}1sl8Cpm3f`gu0Ay$5Op52ZX4i${44E2f2J- z9c>n263G4SbIAF(F))@I`$%W6fdG?akFiaWhZwQEvQ61;*s50oQSPj|_pt*-L;D9a zHMnTS&sUYrKZ1{Tr_?ZvgpD&zX#636da&rD%CF8)vn*3vU#ztDiHH0)YP75u8{4H$ z(&7A+ADwQV5P#0YIO%QGmV&I7 zKYIld;0!E*;&yGJF&`AXchHqqIxxW{xrz9Lk!vS{a>5SC|Coot64$6e2tg@!pb>F; z+mus@T>~rkn)u6{h<8x_HxNO>ijQ|Ud0PYe)G_7tWU#C_)oC89=(Yd!FM}#p<9`t) z>#o3US?cA4^xQLb&1CBG`|?i6x8}7$&x3Bw&XXR#3l$IQb=uMUmgR6(iBU;JmM-fi-TT2krWTi}uCp z2Xyvj0V&m?KBbm2Ax>3JmBA!gVUvZe(1lSUM9sesS!4Z%;gr_3XaKwghS)Kp1%MPD 
zT&t_!)MUQhAi>vOPdD?lpXI?HsKv8u%HlQ%%S0iDRM%CuK$;Ds_LOx1EM^w+hY;kZ zhCKbdP-Ss+&Zsg(74j#8guF%K=1fn^;7-KWjj+$ks7B{=%<+TV3YFBc1}HmI3fe*u zh`8KmJTxABtpl9Ja9d6^@gW!F>OuK}--NGmho|3`zQd;dGjWsu7>1Q3I#Q5#K0}=G zp+R?b0WRyCsS>CI6oW3E;oPi-b|a1ycS^k6p4R7ZjH&p?O)3SfX)v=m1~kWsxOtai zPN|)HNc;zIqtg(9UwMZIe(Y!uRbbpn$+iDlp@RAIX!&lHft5Q=+$W{oH=Ka-C(IzV)f zQQL&I_pDwanx&)Yg01C}X`QnD&GgJuh4u-0Kg4ntgjMor$hY|(#5}T;7nG6-Ji`R3 zA|opa9Sn%V++%=cyM6Wkv}H&Pj3#$D>aD{M&GIeZUAnSkHKjHvK#?GkRk`lz5{pUO~`N()1d z?_g31!?n})Bnp)gvVtOVLXwVGQ{fOWcboFmxHavfWH_pcUoHk%SRZLf zlY40C^&i>}FQL|Jq@^=V=yA zr8wO#0d)1n@lmppsF^r^qMp9{s^8rMhn5Athxt+F!vGwYC#3}%y9_Xy8$;$jRM+-n!4IiLXqvm6 zcFayc$mREa{*RPkrh>pp==cD_F)?|W0;Dg=ij#a8`eGxO2YDGaw$LKn3eQ?O=aA-B zNC_?mQ5_b#FYj33Bwbji7C7x{kR)%znx7gc6ca;Qhb1$?yq6iE9ASBO(9Y12q5r4A^qLF)G~&TUNlq5FHVp9kjeS%2f!r>^3DB>BGJ zo0l60grcvL9Yj`>n>c3qcOWg0Scc;vU{<`*6g{jrA|rgzKR8Z5!*DYdKFR|U+{l>2 z2!(Covf2oFCr-_Uk>Q^q>#AV7Ap4o7$V}5^HT*FQ$1Sm5h94d5aJ2O{0jn^9-g+L| ziY~sV7~d(~=5dU2qxl)qr2~X*n?bo5hn#ygV;n6-Qq#6vx~c$A9Ihp!m(aRhi97CO zNZiQmf^nZS9!O-p6miL}hES$6iX(wKSs-ZPqOuX{E@P^#dx%@7WmTaW^|({z=K-Ob z^!@65#Tc8y1q-+zQ&$&IP13C zpGG`Ab&*e`7arz}mO8?IxQIz}9VDEbNYU7`O62@W7W?~>F4P?>H98Ba(jI6nGH0V~ z=}>C|NqG_5(4320@f-C3qKd-^wSkcQZ*6@CSQA0l?uI~6 zFo0OV&=n9x0fo@P2ukl3Kzi@Jqd*WTN|ho-kSd@e9SluCl-_&qy-JsUC+PRz=lkzP zOqkux?97=n=e*}VGn){+4;g7({ zGOQuS6PUYo7qsr|fUegcjzENM%08#9w8Uf3|8n^se!7%4*NKoFJK{yp=3i`7ammf2 zOqYwK64t8x9X1ZD6Xti{bdd#h^GgbV*%%_@1GA}9{>&~z1NuCE6BDthaKV(n7y1_u z;R6<+^QduLH)LpSD}rNPD9Yt(a{ErmNat#hL8IjP-Q^T9&x^z3Q)ve;@7V`RM%=1k zC)@H1O_=7sAMFCanYCH`w?E*Xg?Z$cdaKta!5p_sa)HO2Bmkd7S8?N+yHEER#Aiq3 z+IQ*Wx3aERV$Oi_Q|Lle2}v7%u~7MVt%7F!Wi>OpLG={mR6-orrbnRihzG@`4i>+> zN0GL6jNvHx&;0E|X8iV_eh9ANy9m^Cc7fajxOmBqv^VgIgr&D>2!IN^$LCuTUB447 zhl)4=2afd>LrT}!MJ|FH+D41wNZOf)XEA@rnlVAgAOfW%n*Z&i%SVoKn(+<7B&&hm zjTZd4&x^BMF4wAMltx0M(m&YnhDisaBEI?wmdW1^X2~Q5G^0(h&@QsEV0A$&WMda$ zJG6>uueDxiVPKZbXezMwii-!z*g;eF(M3yn5aLju*TTBEN{Q;7PRuTlt47}K>P6S0XrY6sCBdc<| zaBU@lWAh%=Sa7U>YQ!uly9d$?gal(WiNp6v$2aOx 
zzq1Q-!Ca6ERyM~r8T#>(S-i1J#E&A-TFpT&X(ZM8?)z&84-XlA^bqv+IybAsf-Dd?W*D2>!ln zs`q&p=DUJ`u7N;WeO;4?y0^4u9e=+{SO|)S5lLn|*h!OSj6~Y%lp)&>WYZ>0_inNu zk3iAW3gBi^F4r}*A{GTcy1=w&k3Mp(ENnKQ7$r6YfalPG=G=#&-#3gIwEtls2N5xv z11RyTx$Tx`l*v&KxL_I|iQqT_-SA)#OqPvH<63-$V|y2?>siVlg@JH#^H?Q-L!ZlC}yz^KT2uztGmTY{3Ow-5}G2T{4@q63U^N2sEqBTEmC>z+emqfK(@ zPh$_@4B|V#z_$PNHFcH=FXiB+U3hzA!8|C#MW9i~RxG8fb^DCf7lhR^r=-orW4LY> zpXzOG62D-l*~_Nq})8E@Mj*GnCt>yr%7IBB7*#MOL!2Z30>Cfy#oKTz`sS zTOvpH4QT)mQHy)uI1yAU21>D{! zsgd?=rm@9MFZLnoZ7yXkY)!UV_PAq5(K4*_8=o_0loy6LsffjxOiCetS^Z(y#H0yB zyvfm%4^H!19$eR_=#n-&n@T2?ar=x_dY_cP%LL?+w;pWXG*N()JwG=B6{UAobGJIW zc_q07hhG~*v{QNrxSS{HV>@fB=AbjyMIj#XVU?5!AjK=*HTZbRT->dcn}I}K97g7% zr%#-=$*wxwwUWZ7i9TbCW8MW=^r$dFqDk-O5?S4d2N{MYj_}S-wfu0w&H|lomIVBR z5#fi`f^)v6on%}L<3dRi4Y!j_E@uNI$Gbz9^!mBrHSCWkz|{ao)9hU}OZ5GNtaF1V zD;ULZf!0`%501es7Tj%2j)q()`(bwpT%to>iG@(|aykws80V@%{EMCbANd3tw;9Kp zx$lXvJ{o`mzAC&&&Zw)e(^P##+F%={UzBFKUbK%yjptADVYEiQ1ao*mf8g;$*6=C@ z|JM%M3nl`wi8NJT{gFyb)ovdXXAC`io6;0_w;BA{toiqZL;y@hU$c{uQ82jPlJzIrYq!QCxo%w_VGILW*sc#+YllB>Prt|H#gx%+} zVdT$Wu|q#~0jHrH0PyXcq3FI4Su&|o4Sv!o3-V$MWQkmReP|VB*JSL+0AGU4WZuol zaIufH{952AOXJ5&u=u1cV^b#eH;t}h)J&A;kC7m}IgxDW9n46G*c*UcL(MekD%hDf zZdKKYSUglQiC`@5H*b}l27m&O8eKyR$jQMZv=`y25{Lz%ey1`_7y!1es7sYFywweU zEiTG6TW$2~9v;rh(u_h;)5nFtVgai>eCOhzlnpaQ#McnnOX6<4poS8X_Ov(2SidRZ z$5PYkSbvAR5#8!=kOFHdiAp;rz)_!yAKyW|d=l&0CERO9g0{!!k20wnQ}L(~@0+pV zUst>G!-x664LvW9t4psc0$axOz7VGQlYH>9#!MIiQj!Bd^~GK(Qi(Htf?>T-reof|O65w3c{=sD&)p@7@Pajvw| z!LhaCcexB#_hOJrp>!huh?wS*OM_C#Lw=$-Jmkvvn+)Id6Gedwj>Oiw0|CytO4}}h zd1FVf**@c4#R!-K*|Ax`j3~=qJSa%G7jMyDSW?cyVhtXdj5x6;nq~Hsxo@-zVpTgRQ+R4G_s+G{A zQ_2(!dRv_^N~y$_98gRsfN}*IlY>1n`#9-CMnPb63K4l1>i2$;5uy|{uS(`%{mW`P zfB`7*`{)9mn*GQf$Lx|#aE_fmd}89~3%JP5BLepU=4j76*m&radQDs-o95O#nuCd# zI^sc4YG^CAv|&Kh30gviB4ICP zH)A5jflT#IkV?vj{zmM;ETP7=eY}f}ZZ`Gk50Eq_`?>c`OlbQsNuJH*yx8(9Cxk}719z`@*3{*9pdPiRmMtcnYgNPcvH+f!ZlmZ7TQhToSW7wArwpU zL+XBK0F0v21rt!r^+WJ~>A3-`0bevfs`>aO#kR$f-mL^43oxjw_A96_&E0UW1B&+8 
zH-$EolzrZtDNxNZJ-vGnHid5Ckvt(Cx9_F-oq2J~ze!zk0@XXb!Y{L$t1Z1sE*!sp zAlyC0K~p7ysp=q({+$o`_gI!mK8yMuA3sgEpf)3TR~Ig=7uS@ ztgbarFC!3T9ohe6=l+0mriHbN1`@W)!PfT91WBQh2sb7cY-IO{R2}TCN^BHIUYb3` zSyB1wE>*y+4Q#41HqfFRqQPR_x$vOa0jMa?3)m-L0EnnJtgvrxcxs*_!k1!R4*}w0 zk00-&Y7ksP?FhEY-G$+PHbW%QAKctsXDsgyH=S#jJ7(f@GR!!g#`fKp8OQ9)7x7s{ zV|RFW?3yD(4!D}8s(A~MNgmRbu%}cHP+9B(Qpx{k7abiap~iWC(J5V++rbqSXBCiL z&lqCh#jK>546eVG-M&e__2cm%!`alml$nSU|LR!tB=kOl+MRFwQC)ke(32F3&0Ki1 zv@X26WP2+7+G*u>uPm|duT~-uCbS>l)Oqk?&-RGXyno#G%tI3*_dS7rsxI_1$K=H0 z6h$nIkXP_sv*ix1KWlO3PI#N!glX{i)2{Tg&zqp)KXtJ^U~oQda3T*V=lSisitu4$ z%}fm`Ed6tjbY{xT-->jELgPR#xRKwcVE~n``pAZWSnmH{Y+-;j^C}eEfBDE zIo%aqdPrkYU#uepPEPJOe>=HYkSy`O5%FU9`Y~Bp+59Ty2c-bzm)f_ILJuyFK)4IB z+#W7-EEn_r2*ypkubVGJEnA$GSK^1HT6vNOnETg1Ay9iZSb<}n@kfbu47){vKhRmy z;yeu=iZjBcNt`4=x zRH6tU7P)ABZ62O&mX9z%i#mOsE`SZo@g=P$SA{5gB)yf4;KdFx<3Z@|+YZO=g66h- zzfHd@!B;2GQ32(!8eEW`9E@!&_Nl&dptK-$IO4cfZS<=BMY42MrJ>j1p*H)f zHtGB-ncAs>sgu)Svg@Xd&2^c8vo0vH!5kLx?p)IjztT|mBWr>nh7TarYr}TjlKhwR z{g7NafO)+D67rVyYWNA2%$LFb$u6g<(Pi%aeWp1O*`T!w(*9z|QgRL*5IvL*lCKWPSDB{oiFmUhMeE6hqZ+ z1@E3)AK43125qa73(jPkqpEo(aNT8YS7PEG#9d)SlN4UO+UgQ2)(cX0a@R)Ad1RJ1 zmIVS*7le;|T~Wd=IWNAK1>#vkd|B#~1jFKL)yOaJ(;5EWOjBPM`3yr2I{VM^{LVbE zBc}=#XEFzVJ{>@7E)QJH)xUFFD`$9g3HGuqFfb(yV2)uqgRyF1!w|Z!-|}AM!Dq66 zBV*>0Ve6{aw}-u#{ld${y1^42G@lKf0VtCY9b=Q!&+snx?EIw4Av&~^NGa9u#QEU`MNiq5<`v))ot;KxCJN5KUPhR11X5cqUoby+I9Dx zUd_)_t_OU^^I8+)EVrZyhDwIPjfwk-^N!H$rEkS==XrkhlJVp$6tTa6r`Q=pY>_?1 zd?9xhe_K)s&2Cq%-0(*qlYrREl5N=P7{--xY#5`I4dtX%VVD)SK}!Rh!6Rz zoyK>%6JhI76?!|FJgUt_v2TaVE-&~2eip1g=!KRamQ1d;4xB2=YyqO^ypkE0%kBq= zZdW&fkbo0rLg`$$ksbmC?DRQh8zdwC>1A{(A?y( zgi**ko&C?R7WRYRwrf9ukgm2snJwb1fZLZSwHvvTmd#rh;jF7w{E%ZH_2ZQ zONP$lotf??L6s||O(+S%Ve=}mSRxl4>#p58iyJ}`vBvn_QssjgM*;-dmk9S1zad@> zn??10zaamnvRAnMH+i>O*$=8J%Q7mANAk47oxZT4GhpdjIeqVUIPjtUhO@5?@I7uF zw&AYW!GW@#(L@{$!7u*Qr3vTejaucMtFU%hh5Gyvx}0LjD-qPQOwLJchJe8u$tS^9 zoI&H}*XfwXV{k=7=eZtT+wtFT6iY)vRItixLHH#p-?EUot+IK^xY%s9%Y2-?4Z!5) 
zQo+LRl-kkX#QJ{?ds<_oo*aC^A77RKrM3FxDmJTAoqO@!pENBc?4MDrU_1EDb?4&c z6hGwnhH;V=3~1N|x&TxsW|qxvQ$GT)QkL3K%o)jLOdP+pHX&Gy3&=Ni<&zJ7Uen>C z#QLd?zv9rtTKx>baceax#H^^@JinF-7 zF>4YsrVn0}CoOpPCo8uiE$Io&Rzmmvf+a0$>xg4XpGU{hX1{VO44mc*OXeey4n~ge zEAbVUxFwTw5P0VBop%Ou(r+F}Ydk6`Q;abEM+H=Z6<$>|SRb z0bpsAk5e1ZYdiVA8uj57t}M@{MyFa-XBupLw79Hq$hn_{>eOLTt*+9`K4b#H-LTx0 z%R;-q4DcI5AYvE38n$FNV zoN9~y1hYxYW{a_^H;=u3;Dj`c*cv|!!yFOUv(IWWoP1KW33$Fp;>i^OE*G zCw9aeh5hP_rY;^`>LCIn(@)1H@P<{-0#+rWVV|Qag>yU4wbr$+75+~I86Jp@U&l%{ zrLfu9QCO6uvx}lbGF}h3H4Mb18^LrXb#V!uNCq*y3#0IAEvolCNBLRn^raL|=i5+N{|b`kZ%{&w665=ci@ed;9Q!T=Au|Y>je`gT3T<-w0^j$XZMySI!#(Uqk@SY zy7icVe)W#ke@43y+>7w{4-T`*Q?Q6#zR<0!S)~`<3$qx&5a4G;3=ctboM$Ba2i9O< zU=D1S>cft<=R7HB8&DrwD+`Z?6^*PXCH+{NzMT-f#ces7tRWf4vk2`QeiG^LN+RsW@1eIv-mD;`9|S!B)q`ZZ3_>XT5=W=-kWr)m4zL#?{pHB_roPI|u9!G!++Ax9N6j?JfL3RCl-Bb}^F}RFrFJ@Jc zByO(f&x+ybR%$q}7%gADN0f9`UQwvasokU3;Ig6+c`29O5VZQ?YT?&yrG(;5Y{y*g z{n6iC<>F-{K!0w|{cM9HB0Mrt%WM;Gs;L+`iGl!UFhb`# z2m-0xu4I?I9$JMYt6-G`^fFt|^_B~X^k7XKxv7AV7T)G1A~O%jkLSdeNM{D1gQ%W+ z+>fkD4zJGhlh2p~j>t`+c#O(d_SH1EsQ1csMVj&N-p-hTkLB@qcA3aUVu)WLlciVJ z$5&jmmt+v0TF%+`$H?0dI182`o1UkQD>_~_-(VcX76%n0GM|3r9JYE$h1f?fhRt{W zC>}ePO?|-Nwfm<8Hr#-VH>cSf{x*MsA#z!KfMxFS5Ul7y!lM;+g`(?M#l}?xD{J+v zFnjeMRy8jA#)IJdGR1dH$AbOmiugxJeAM<+C2qeRt2&bCDUg98QLZ5ixN^m!-S_p- zmw0o|xyRy6Apc9cgpVYeJl-YS_Ot_aq)8Gz)Ls=LJB|9laup{vuaaxD|KO9!cU3w{ zot-S@+!hjC5Y-6XV9ppFK$ICZ|YKWg&c+tQ%`j zRT1xX)^{1ij>jJnws|0}uxdf&nZ zTP#>My4PeoKT)i4J-^&sF!td3YO*K1`8XG*OP1f&1`hRNpJ#$JZ?Cx}6nl_yR9&*~ zeH%r~+ptXD?iZVWRQ0ojs!bb*dm~!qNin|TPtZ;a>DoIQuw*PTTJ*!Ly-cCQuFPsf zw-N2AZdo63d}Ih6z_1OE16fc!Y7!U|Q5!TbuvNK6+Pl>%zSv{dJ!rI7 zT}pI%aIBLc1%vt|aNoUubwD9FbLC3jOX=88P6JPI#$=+;FDeBz6BW0%+|)TtV$ zpg@=m=4(gM$V5nhF5=_!aA_S-B=tp=Yf++2lc9P{bHD$a&gD~(klWhi>!c*bfrktM z$L-CpsM}sFG@%Y4$v(bQXBoR?R6+K;zCCQFv0l%klxC1^n$l`5^xKQUelxhTPg(L& z7HOYQRZu4)lV*_9MV!XBsW}a47%}Xg$v(nk#+V4M_$(Rkf*r~r#n1$xG)2e0Sevdv z-{?_Lgdggzi!HOxNB2%s%JH2AL=}QyT>F!IPwJyWZ^9U$&NAJV6?Y{;v^c}uiO5Ir 
z0ex36T+K+;j`}GDDl4Cng}+8?Ne6L8{|o>?U|3F-a*C7{DQ_k#>$vH;Ga=e z@B1&U069hdSy-h{#YZ*Vb~H)Zl$FCH26!Jb{?1f{UalZ0)|skAJBl_BK0ILv*12e3 zkw|1E^PPvU@W)LD+tDfL8o}ya*VM#`e9Y?3a&aG-V>c?M4BBsZyb1gccojfPWJDFv z7x!G5A`$Dnl}bKJJ!dT2MD=QXz0UWHPL%4|Yn4F1_%JPw2-Z5c%uJRQ{{8V_yB0?l z{qLGrSxE*K@Ob$ypV_+(W~x^&qQ;&R<6^BbApNxvhm!nsILfsVPizxpk=_Kda2%W< zmBEXBzhEC(NzwCz%}vrRNWk2+H#oHtHHGMBG}H?JsNTw|ZgBXy&o9yG;X)hsu>L{D z*E`w7Vd{NeBw>%FutJZMlwUQ4Rf{aY1?9$hQ7gx${Op{Aa42AUJE zR0R+f+tyI?-MHzX^|Dkr_LwYF^5Upb&=^M_%$IAIhVjw4bb!>NUjKG(IFDz<__ead z;f#nf$I^OsV0IC<-re{tN*9sVDa)nLMw9d+Dr3_x1;6q|fR7L3F`3x<8Y|Y@4!MAA z*q?sIjggUs59egcwz56;tpHo83Ck$UI%j2(ZbW!1S}Gt5=H9>(jVORE%G3hS47g2@ z77!}@8v8C*;C3|TOt;cRQI~IDl2uFLcLs$f777A%yD4`X800`7sBEq$ln zTg`m#mW6!+oW?9oQhuOiCW+KQY(sgn4NU_6JwNJ!h3PBY-!d3 zDp$Xdk?NDUAox7d$y#JZSeHv63oXOZL<&~dYfcfPm%$Nrai_tLz6r=GG;J?MyIE)r z^Vir_<-3V>G)XCS8G(upTm$}}t3y?sw@JrWljEu7pqr0Ywo)UJdjvR5e$#77ow^Fq zu@1f0ThzzMGgjcwT3H*~T#TO%3)^i7VuFlocalZTcrm=zpC`2YWEA)dt66rBY~Ssg z5c31-GR4L7i=4dR63u+pnm;Qs3>R5@x~#NBAg6RFOtEM~LeXnGc#n4?HpWh^}_KAFIfs?>WqL-ZxW^ z7;lW6J|E%=5P07Td>m}ib|cR}V*exg@f+@WtCn+?M177|^0%CVy4fV_t?P%2hoBfZ zPlFb_K+}0K2Ml&d2?e=u_Z24Bs)ta-!3iccjFv$tR|=Ez|8<5<^2`b`OwR}`Bwa)> z(p|omBz(P)A*Gq;eL@l?wn|Hk(0% z|HPD#7i+NeR6Lx}vPdGyp0aIUfSB6vE!Y{NKu}^SiD9|(A?g5L^P39er_)YsLAW^Q zsnEThNbt%|4>@{f`cQxJruzILJ7c;g)M_&$x1J;R5KGQ6R|9 zYI!H4Veep42Qo2s28quEUv5Wait;z@ ze(GvAS@IFk^=RD4vzsscH1S8dba6d3^79||cb!}^x0>}vV6*HN#Pg{Of9{Og-7*Ag zWfP5rFxT^^>SCS)q7Fg5Q~|e*3Dgtul(S|d1({yC@B^~2ZBb9TyEi})X3fgy2*=!N zIkMxv;|!u|Ixfy6QjAIPb3rB(4x{<=nkvOwaI_R+y(vsV5>OOug`o^tVN9H%JmZZg+Hr})hHB~hN!tX zgLOSI>z-yrq%u9coK|H$n&zn-70BT)Oi+L7fCR+!Gv{gqNd>^d#BJDSQ*1BW5k3Lp z0dLL~8%n7iRMJl9>=mm?P9UM)-tKhD5gg-TeqAEKV=lEakX zZSto9pSteV`aHCaA*%!mZ_k93bURaE`>1`2d$Uq5$`&1Df?*O(zTIrPyM0; zP5EjV)QD!KI`VX(&ob<&R<+<(GOg@xAnyfZg)EZ3WZlOy3=!ktIoS8oS^=y^svv3F zlQvL!QU6NqcLGnjdf`zH1u;lz9ar|cj*jn$O{Kh{9Bf{)oweKpWGUg9c?KBMy96?p z&Gh;oPihDrAiW5uUBUKJeppWqYAfoJNmqxvWQ5AOnm2P7)oO(8sIWLkhu!=gRImFP 
zmSVr|CRQ`p$guBOQw?lBYF0)fsHD01D}kh!b?4pM4u7{_dA4Zj-hgwB5P}3QbO*Ti=KMV?qD)X=3VwJK6jZ zC0DRL{?L|i^K{u(vhm=9P)=;ojeNc?KT4{X0(R@8Wn^lmv*$8J&rm2ifJZJM`K~XA z+gVF6fF@QL8Ry43M3j<)omT^o!3HI^N3j*L7WExHuBSPO)1?pu(GakpyO%SMA z2A{kaY7s-_m)`Kgox5>S8`hB)E4%K1u%KtFRc@qvzFbI2$^mJJ+g}u-SUoaG-xN2` zPKFiHFV}`p1*(A!AjJ|H(Vv1uZ($C%B{3aCBCGcEUoS6e)d&`#;|jrA$4Sn*UZj$2 zxApSOW1rC;X34eD{!2yRYrjRe+%qrDg%6X{QuEsp<_^IIZu`^#4&WtNrHxZ+0|9S+ zM$K88A8xjsuC&O^_Z$A}^R>d=?){K+y2DlnF)T#GbEmD_6m!L3Zuwqe-lH|tQz3GJ z7|YJjOi8VefBaI!3}WQspT};sbI4R!(8pViKt_AwToejCh)3CQ^ZQou? z9;})n!gzxU>}399q21Lnis+UzK^*5|36u^TABJ||V7aK5uP-o0HlsBmiy>syus>{(c`w>F!wuM1g}VM{6Ujjz#AXONnQA&gRXx(D(}eXxtUK^T6#k;s`sI=n<<{aTNkjHWrfNO}71;JD^$x9))99{{cMzLI z=YukI;cf<4_|9TZUdsQ%K9WTDYR)$UcsFIy+|ytAFYgm^8MB(#M>Vtd#1+>m5ix;{ zzx`d!2yi{YU`N`sF2UoUC*b)+n8f<#5_I9rVG2kDe+B7F3&GIx)bp8H@8ES0Zz=y> z&a~2i(`FLb%9O&b7>n16IX}bNCe|vEd*HxR6F6eGGBsZL+l!x!rcH*IL_3+O*forp zz0uO-{z^r9Jj@KUwwpfbVq>!Qi*;62oBD!_D?B$a_Wtp^wlw5g>f<%$+R)e#j{vgX zH;;@@z;WYRzcVur0!q!(#Telr559oR#?Y_9Tx3Hv?^Vt-`g~SLo(mA*iG^zpK5>+*WfCvhX^?azDJJ z>n;J`3(=1~xLERg<{I+P4#W?~nx9_QhAI14sVo|01de*WSy5R-u?S@e@Mh^<$v2%X z&nx&%5>R>zZMe$xPW}zcV*Pb0uI+45es>$}^lb}UQYq1?Og-3Sj@sjEXx^&)uN}ms z?UcTwJ;Xz4i0(LEh1pDP*^6TfUl2lBNVcZkZ#B}x!4tWo@$X;yMR&KmXdWhBON?*s zZW4`I<2TnpgO6x0K;3cKYy4|RAorbqa`~JesO?)iJM{|-|42InNe1)RBotFd{zb5& zboo~IZY9}5(Zl$}HG+Brp{Mb7ckA|UULoXuM9x0i>@rNGWHIRK2oG!W!&g0KM}_%n zHYIQtbGu_dWOFqIXv!d=N~#}tIsn~Vc_;qt1u7&)flBgIZ{c3Z5N!We;EBUDyQGvb zvAFqzjb{}0g#a`47QTKav0aqDB-HAAfdf3QqSk>DJLtSLm!l~)O_1p^=&C>fn&r7k zx*q2iiNd!*A;s@m*Ck>xOH;h5O%I-9{$6D+KlO-qj%Dfr4lBh= zo*yv!_w%1~(7|A~>R)OF_6amI_5u(??V>R<+3>U(UzvQ+FRgri^^wkM7V}4|?7>He zg6YaO!M0%>x&me{KU8xzB{@qEhCk1TmpEIf@@I5##08vTs4cQeFNAxg-amqVpAqBA z|F^C3Raf_wWG`M<+o?XCzk`HYKwv5Sj|=`)aZv4@g9zy)_{lb)_Z1x+O4ADV&;NO> zk4%mpBi|{ogm;Tt5x7oRe?=oh86+G2Rl;=TMtW?u>J9Kj22`Gmb7^nk5hTg8MG)X- zYXMe3#I=SVb%a@kR6pw~dp|jLyzOk2dMPS!o&1-9bFI_GUxqHlEi`J5$lm z`I0Fx7yU6xUBQDdN+aA!dDTz&PCt|N6=8Rx9WllV=dcLl$os($R-ZY#YwjQl3lr1I zjWy&2UEX=mJhfLV3a 
z+ls@{KEG7t)+LfT^tn`(l@X?LKUVzi(8CIU1;vn`P0a}fJ4|Anzc$@?o6w#6&3QcC zg3I^V7@pbQ>vjxkMah?+b^2Mn_32N5@@SFvRJWOUSN;1NxK_fx_-EOIfq%*h4l7Ae z)gO^MV|_<6Z{G&)^7O|XJ44FY)j~u@O!vdMzhBYJ&~03CC(f8Mcf)mNa+|2!d&T^xfrZGU zRWCF~PVs5R4`Tb}LeXaldqH#vE9=s0!%`)`{x|*^IEc`+o`oqNtQ)VFb6K|EnYi zZF_qE=`jUF`>$hI@Qy#q{eY9((8|Wp$dcpq6$Vj5Pk4wFssA5kjQ)3%|6Tci57hsz{J(~c`u`cW#*@EIR+2>^ZvAWhUzz+Z!2ccl&-=N5 k!(Iyg{R4cC7=KAE>~9(Wsw7K7YPrBEndifhs0?(QrL#TF^Fe3X`mMUU3tHArSW<+F&9{_ylb{C*_({>&ukb7Q0D{k~hg>HT^>dvtR(%ck7pM?-f!EDdL8XAkE0KrZ+8zDK94t5n@>qLK96S)OqPp73_GqT)fcpemq+g?0orG+;M()cJ(i8^%dj2Psj6p_rb^e zeY|S({d%ue@%^<}(C6*?C`0i5KoIJMhxcut=i70p>g&Oj&G)GH?b{>4*N4qc&! z`!=B-@Oewgf7|qVt;*?;AF{D9{g>nYobvuYis3UbnJKV0{&oU&B-aH8a?<+SSi85gVD30cdvi@ zK3DBAvj?kt*RH~EN2BhSM>ltFyn;F8%$+Z7y1FiJrrnLRdjb`nf&w|zq3ZNVkB@T_ zM345o{CY1pb!R5^R(?#yarvn$+$10apil z$2V?Ek4Y28gS&K~Ku&awg@9FqR}iYB!>)~hkHDs)aK~P@B4?JD=99PEu@KQ?Iuo_( z(%pjPv(qn~CubfKlj}B;3zJu%?vwMc)`KtD&TdC?Gb0bXyUvC=u-={m>y_1UR}I`} z2TAp4O~#Q(TPJ4)oAz6SPaG%#$t(yRL;=pkkIw$<)Yo8qQBYP&`@!tN%`T2!T4k9r zdROQNMg51tgVC%T8SHyYqopIacTew*HfGn$huPgkS}?rcMb7qOQ}{L~nkTO=n=D+V z(USDm>NpyF7p^xSw44VZK3;KYdVheDMJ{bYllx@|!5KN~FmT>P;(vrGe9OyGnXjj6 zD7F^w@Q~8gD=THT7O(CgJwlF`Ekw1qMtd^oh>zYFPON-q&j~(nZ864C>o>cLEX?7LQiO$Q#K zd=+b0(o^?01^@|!1=A!qnp>eNDm_GXS-L=HohvJ zslAcLXA^~8{l-@=A8(xr<2~M%L$@%!Z&eIV4xIFx+XQOd7t5#O`n||k?zF=H!EqB8?Cq==@N=EoqtF-(e-5j9ku6=>^-DQ z8ENH!PJ$lO&rO~J*?PvmE04YZwq<$m3+Ww1Xleq39P3(+?ZgJSk4nqkiGhz#{b`wF zBQ0g4dwKA7zSo%ypC`<2DXgT-NFFP-@lyN39{ zavot^RzM2o+f=2fP!3geIFWsqtZ_v0g4(nOZ` zu(CsS*tWE-4IGO<#{h%N&+&dO80~W?V7UFxw>l|<5Q2G!_(?99kbYKV*-2gTtpSXF4~TFD8fSkQ;u#>|DAl3ULdK{F5@P4!~R@^XCnu1^Vf?E6vW|q5 zFi8ALdy04B1OZK$EFpXiw9b{;wXyVp^ta1I>?x*7SqHB3erKT9$OryG{Z!s~x~x4M z7woZbOT4&;6>^Wb?w@ukhFgB^_Yq?%J3Pp~e1X?@79(JADY$sha3*5hv)5UlHoVKs ziNWU4F>sQcoOhr?)oCuf%VtTR#kMKYacH3)ekA3~mkkY}lMn-fcVhY~OgbT_a#_MX^48Wd4ON^9EmmmZz@&IC^5u zFQJ;u<&9@r!b&hA<}$d#G6Yw530qUQ{^U5i(&c2@z=kiT{)=8!Id2O;cw=yOgP01} z*(Qf6qA_9Nr zG)Q@ZDX%dY_iwMjWc80Lkr46j5jB>FXHR=|6ZF&VY)ZB7M4}0om~M^CoGB-|5d6@n 
z!)wXSi1fCEpBb-H|2(<{7=a`v-)YsCH_jeVJwimIJd28g=ut21nh(l9U7Ozc4ryS) zZ)2fXk0{8v#&qB=dke-n63+%3c%dE>oZj5vP_eZMo!)r&2eWFZq#$%M$nVIkKzt6Z zcghTPyMFSMBYo7#6=e#Y_P08tKbrb-<-DE3_GP!Oo+AgiAW$aKF_?=hsSmNbs4E^Nmj27-rMLXh zCQhBhZNmBl_*Hz58w(u7rNp<^In7+;X8nGYnNceOlkm0fKu#bFHDzbE{^^$f-(kJI zF-(45(tbQYh`H;=AEi>Uw`mXH8qGFl!q35w)@ z_6)xsmz05Ly>Kzgv?GANsd>`p!h(;KQzRF0??&&2OCkZ+ax{+X-$s+Svk6pXlN+=iju!AjK+VaP=ZTgdzbsPJUmT4*u?7%| zyib0~&=7r@bC~tAC;k$W8%E#ZBN$Q%mPbyWGpP3Cbqo5;Po2x!wkhR-Fif>_IDuNu z?=SZ?g22(FUs#hn?5sjIbx&e4Kx1ej1kodeYfTgLUyZvGhhP}DhkBrWKy%0@S~Aha zOZ%NBDd(kiuaUh*A7I%b#brM_v|M9FNiKNo=C*xYb9hsb^l6X-{w=NtU7W0ebedZJn}b&Z7ywtqhdW6QY8Z?0zHXuQz=q4l#CYg-Z!7&8iDD3=9ejkM|u{W%aNseolPD~8OEwMQb0kY&A)ai-~ zzV%;WE-Egyf>)RGK)u_X*D_8@sT+nZDoPOT4L5Oh75g8_~I0`s!VLo58!H^E6I;#`v-y?Y!vKsd^7>oEL6I zJ2$Gd%}QS>kRu+nwZ`mZaiE@?y5J(Z)U~pQ@vf|=^%0+0%lYFvKTz69wywrE<*(~V zuT7s9QJf<`4=))zDuSKPwe7_`4Z{i(msYya zEOAc)O>|2ZuuqIO1- z&@lv&FPq>d7l7M-9v?e(y7-qRX+h_gdr0$r9E(; zxl8|aqdA}wmN5szZHnCqJC(B?Q*TFgX zTbZcS3z~Y=Vj9O+6YBkq{>zY+3ZW=1W zr>)f$;jw~nu4#tWWK#HJDVu}?wCQ^fuG61Kq_^t6pRtC&Cth(4p}~bhlkMs&MPaQv zHt;Tk^&i)&XC!P#Wmda>&|CF3yg*Q%Kh*s$lrMJ%*L_^AnV~x6t-Jc93~iZB%Brsz zq%b{@8*FhI;_lA+FWVe6^g!AcWWmd8n^Ij{TDwK+uY>R|v-j=?Ii-=_Wp@ z!PP86#|D-u+5*kBTPCB6pWX`gzs{>Sy&p~qr?Qhcz%8Kv3{Qd@{@?iQyJZOPJ-`2L z=6&&TrM|HJ9^1&fO?r85xwM`2GxL+B%Nvs)yK8wMOez%tZ9AP582Os!Gad+Wu9)j1 zc@%l2Ue{0kR=Po$FEQ%k`lup% z#rU(cIrDKt+y6anMt5U7xwa{Pq34b6No-^EB9Kdfcu02N_A^;;6- zKmQ3V)|aD&eX}X`(VxU#`<_m?!x8v!$pG0?ZW9%3-`HXDsIW?7$SEVw zhZ9x@A0`sSKTRXq8aR5w7M!x0V|+Hv{9X3@#Zv*=tpk9jf^7Qd-5U$BKR#~|RSb5{ zO)aB~Y*E6Muj*#pPY;G^sDg2%I$w|? 
zr?$*-#__z%oE4H2gvwvOUo+Qa%avG4g9hf`6q_`|+c%w@VuSw?LpfPd#E|BKLhs)3 zq^SVm{7tBBmuphV!((1=J;jDKG@Uf|4Umq*;02gQ!P7WQb8};y{K!9hqP`z4GD!hl zpVc)E@vn^+le6&`e%=vp_bO|s8=d027a{lDF$hmGgA&odI~=A{M3H=fsjECpY1@Xlf{rl+N9nF8rXz4M3Lg z2+nSvEY+n5C#E*kzK#5{1>NpT5R&F$>GUJ`np)321bZq(8TL9IRT(Er3D7&ise~y% zO5LL8a#f)`jcJi5oQOP-eGB|qw$LsmP&`w;-6F-=734GjGowUnDR#1BKRzyb>`6p} zqQxL#Y?-E>JFb`ZjRXt_)%lemTvP2&1Jf-~CYV|M$a7r$(F~!4aUlo$ZwN%t zfU3^Ur>)0YBbW395L|Y5j=NB}Cj%_bk=>1*C93g_%DdE9vHpvuNDPj9QLRG_eyZ)z zGIr}H_u!e`7RgOCe${05W%T3FL1ge@+V;O>DpHpT_Qe~PdYF+IMv*h<<01TAMlj^W zKvCc#Vf-?-^K&mb92)Xh;Y}aXNZ&y$G4#C-gS%YU&(IAm|=^U3Th255$sBKQiLb*GB_ZCo)W}}>{1~;_~?@f1&dHW^2vtzPiMzf zr&ngVkvv7VrT_hKUrR7waW+ompsSs3g|ok_XfJw;&d%!-%PJaOGxxm*4L7nPFNPu~ zO?bzV^S-u7&*&Z3eP2%{i+GdpHd4aL9?M;#1Qh+%-5c|K^dHS3K77y7IlWhI$tGFV zcaijsh|bRgDLMHBiaa!2spq&DLg;CG!HE>2U72nE09(eA1w{>mLKpN!|0w*Q4FWhm zIVe8R5VH`Ar8?m!2z=h~o&Y|c*2ImrESe6EzeKo&ks7*@$oo3dzrnT0&3?JbRwiIk zOMWMsF6c;^IOCr9-dpdycq+&IkX~4DJsHEowU^jLD835ny*}3L>*ZQO@;Wk~W zdX5a>ST;ufK(N3-qNA_O&z7)=!(UvA7*CjYz=v(&s_uD_^yH_hs}I^L#C}7Jn7eHM zdM&4v@I`%64*L}?89DJTW{C|4qHXY`yS6ONj6C(g*^d-UO?uZ%TWAyLLF1Rid@bmkmFOKCWMCQAVee~@o6`u_(I(J@2%Lt&gVc` z-ZpYg>P0TiBR$C6M#>WMEyKaou*t$6K^ea&nz#bl&Ah;NCyc?D5Sy+e?DnFaW zMRbVKE#}EFfHJHnaBq3=2^^cidJKfbs>D^B;syUaOxXedm95NXW8|^aSmwe7h zGA$++XtZ_8ZBfZ-!mGbW7>;M4gC3s5-3*ph6-_Jed967vP&JDDynz{zUOR)3@Z`Ox zbu@6@LZi1@?8mrk6?y#)=EQz%TN^pTa&Y|C8+FjnG}SG3Foo&~{i~bFoxrtbaHjRo zX^M|Jv&1N)@L~p2(+~bhM;wM>+KSckkWhIK$y_8=0gei-4}yEp^RI$Ea($ExX)p!D z4=KzgCXIrUyHRY-Kg(1;HsLq2z1U#;%xtQ(H~hrJMZh~PGzejfOT7M-wnxVr{0Z%S zj)b87n49LDY6Ro7M%K+3eF__Hk(-4Q;0 zB&oamCItRLNMTutym?4J0y8>rPt%NPT1~{5-IY|>DML0SYU@>!=EvJQQW^Nw@s5y4l$s4*dO&Rado+zTyC8SlW_>#lRN}(FKu+Z$ST#6a52+2tN{cgx2;NLd>(4iENBtO(PXfQ|z%BPa) z?et_?Z~VPMX;L_uF@egU4|;`Ya!S-G{0swfELS54;Gc&R)oZMUU4C;?&#YZJ8r2k6 zpy;dDdIhm9`ct(E^SHvTQ95~HLx(%Srpi> zGYh*+N>aMC>B%NA^LLSwM0${urz!vslX2qH&yXmGuM6%}jL^>jc|@s8rc-t;w1v{) z0Q}s(+p$=9dMBMFuEI*$B?XYY{m+6@D|#1xpIZCH{`aq`x$_et0-?E5x+z<7D(3x{ 
zp-H)09=1ghXBRcf#1Z1>{fiM48FJkwu1rox=$z4!7yLl z5j~Z0H~M1jw^F;sTai)c%6v$!P7jz@y(41A(~^ z8fm}G2>*dbSOU4hF!GF1uZe1`1Vh*%a(@~{1H;p>s2C#4*`f*_h*HTUZv5Rx`nPn} z$q#Xp4k>hLn#q5eU2byMvw-bItm#^t?;lbLLeltfWHUaYUj}Ub*`-q-h^Yc=we27S z{w7_8pNVNyM3QQz{cYM!XpuT^`Y4Z>$|bbS@i2yob1i2F!wb{_MG<6oYs{#MpfSTD zTO^`IdTCY2{Uuj8lGhkYdgv3Zkz>o#kL@eqiGoVI}#X~ zq%UpMqNi!iC>+Be-8262!vqLNs43Q0o$8i#?|Nv$?m*n|JGyTCPBP`*Qh8CiDLT<} za}n?>?W7A=?&QdIfC)=M7$gZj|MT##toYTTG&Aj>I*RV}JeJS@u>G21ZIHc|duRQw zcvTgM^>9BDV7Pz0;6^8q8(j>Voz{Q-~olab@F!0P1A z_o}k;3YojciTh*D-3&6JQ8P9SV|C8$y}@k^U@!}Xq%qFu)}h&|cG8%c2f#PXLN1uz z8KMYAc>;OBoT6V>iJp?m z+M1K@!xRhE0v$t*lLCGZDEZQ_w{(>fF__B_&=t z)IQ`W^5OL>nIpC5xmi?%13&#oGVvcqDkyV!3Hrghc zX28i1ZVV^2im!t-e$csCeSQC~8qsI0%z0g-0#`rPASXoaz9&&q9KyEU^|>7{Ce9Jr zwrsr~WXc=v@&yPRy|d1M{>SPY5XG3}3knAcC8_4R(5lpt)TH^|VP#jB19Y;fyj}-e z|2Y$WIw!F5(dazs7JC(-*}C(iH1%*nynxLR`|(x0zh^zs0T;E`Ca<6DWp~?5bhT+w zRY6zWmbMpYN?5$G!Dg=||5dYJB{6y)Cyv5Td{qpApUX*e%w9|9YyTM#(Bdg>!CDMG z$$~rJh>+7CvZ~kAAoFX;pEMv-ywj8l$6Z-~NFNA_u1c9pFi;KPar|x`y2tdq3xSJVZyNwH3aPhSl11J^nvOzoSj zOfyub9}i~mN=YjV$ov}?!5ed!z#aW*Ovxpi25D2WDL^Dp;fGxi3p4!|Jh4qIb)D?~ zQb4ZPU7viNyyBYLH;r#b+GUT6yjlC)mW-w(H z=KG|^(`hj+3Cs;yqKAMqcN5M4j2yM>@Z#>e+82>JRx8+siB3`j>L4_6sp1%^C`BM^BE4hrIN^XCBs);p1+Yf0 zviuK^&tNPp(Ne?*Gy;5*ScYekm(+hrZJ6#%6w+krWBaS7F7{|*3o4U(I730&uq-y> z%-n$0XrK&An}GZ4=aD`s!i2getr%&?|MgrUBk8Ck+_w*g;V2cA{lBT!5I+!y6{b9q z5AZ9dJAEvX&KLTeIY)MK5s*~PYTBgQ0BW-0 z4?=hPx?-(JXc5}NgxkADQ*h>5nzzT9RhkA2ywEGXHHr zE@f#-#O=a$S1Nx0XyKLoCpnSthai}Ncs*HU2&;ea+GK1`0j1ZEYzBA@=gs^`eRZW7 zC3IkuPxj8FKDc9qhIzS5s{AB_H{iqNNNptCg?Ebb);b1TuyE}|zhVVZu*E%PE4-G^ zwd&TojW=rB^R%L$OXt5*c=91umJf8!-j%QioIEFdf`)x(UW)4UsvX*zzEQ&rNPV22 zPBL6NY3P}|Ne-x#R759KM7$W#kq=M3${&4U3T$gw3ki}F&yGmw7yL?5)WU7M;y-tf zSoA|psMG+}_wCnlx5x-sM$2UVtc5umCXXVmx<=fGZgo9F<8yttn5vfd-BvlFBY zE8*H63Z3wOx_i+cu(+cpSoE<8FSIH%c+tImSR+auirJy1M%PIDT42hC4=Etio`Y+O zO+JM*^M0!|hx>$ASr_eEjLsZx-QZ4`toCcyI_EHsiB`2ktdJ+!Xf>472jp97_oI5F z-;YIkNK*TE(@YS+a&iK!#I0H;kB=4BC8jU)$JGwRRZW`2@aVV4vz)!|ca@pOv>eEA 
zE0d55F@>I8BwvmArrEFs@S++jmRrMNW7C$B2IV65HaOC&`Vie2!Zjil+5RxxwVX<{ zG1l%j!mkq%Y$Ftm{M+?Qtxp1h+Nu@v-!;pOQ0-L%?7=_>!a0VVb7_*k@6+856$8!b zQ0kZRwR)@L3}eG~W9cVAsQP5QuiHO!s>$>o$CcY^cRegs)DVU0+igF7-MU&$X{_bN zFqW?j!zaSR2)nZ%#_Vu#p+oqI0j>nE3}S^@(4ymP>h0{Cr4*&f;(r zAn^%xSJ_A-Z*1LH)8(m zeSyT#yQn-o**KrFxeWK6bFkN(Qy3_YIzjN$R~k=lp10z54xnth7Xg48hb9L-ydE?76@(By|gE z9USjwLFRt<;$XXE^Db?iEW3t2Sq8Vhm?1a_;z^k`{WNN|QIfGmcMIK%eYrJ^p%cOx zHEMZdMfuL1nGeVy6BdnH9G@U#2P}7)N$?37Nn@xwIQ29Z4`V{D!%qvG7RTZr8~}3F z_7j}L-ur^slc9{0BQSh|O2}j1>>GHzdE0%0#PGJ&)7W}uCQWxMlm|puJXxgz_fBew zDH)C^Mq^I#^Xm)7c4_a2ebE?<<68|OiN%A|c2fkrl7n^)MDViJAqie|kwY9QkWjy6 zqmQR(CzDc4S!~IUW*;FJ1{wu6N0Q}ZYihxneybkB5lbbPl30OanqY)q;qy_f+_x}^;&Sk0kcP77wKb zQgh8GQ0O|Sx5+2QKO}4;ihSK(45Ko$*w4htMP?9eGhpsU%e+o(HhN?@ z0QX|5d-lz1xi2WW7ybM_3wtDf68}aIfl!!n9DqiV0~d{Th14Yz)ikc+_~3UXeA}Xd@rGcA5^BvwC^+`h?egrL6<03k(M9Os?WKsJ0K$ll|{!WD%G^MtxNEjGkhz zMurH<7%J-1UR5I&MOebF!ZKpmbl^wiio?hcw{a2}2Wqe_ zk*CJY3LsJ&I)sXEiALaE*woI#7%9&_HTzY|4>&tIiRVToauOoTwU5Ym7H)c0%oHcJ zk>WEB>dR6yq3$b!U>{T_p%>LcqW7RV?7~Bje#yiHt@SK5ov<_7+H49v&~`o_qO%lX z;QhB#o?#O+!-1x+&9Eda0ZhlgK7PH=GWx{AWL-F zDdbqkeq0=UvLkY%_B)yT5^lIxdLS?6vZu$E&_ExjUwh`kzYvSZIk(n7W{%@T&eRdL zdR2uSo#``jwyBC|0qlJ?ZX)auE0S*4hfqk-0wCg3FDck^k^Rz0CXXJ1;U6Q8(Ie>@ zd!`i<^G!_@J1yW+F9T=j>6I4_iCKJRAw4*dH;gu# z^=kTuwl6f|m>?`cL4=y$ut$9fSyN8-H~l`rH5r48d!?*zQo1FT3t(|P=HvElCB5bCG^ z3i{3FI@qu?6-x<^lE<|W4lIHcrS37b&{QmKOcK&35rmJE_$d7>lNyN4X#DvxhoWeh zMv&_dXiQtXv9swfu%ZB z^cOfyOBy}5ZAe;}XY>DMd=pnHNg=<$hpzS%3IgS(ww&bR{oA?R7IvOvD@d zRDLcFv%;u8OuQ&<898cU4H>xz8{^7IYgy|Il$X}oJfIRYXGdk`j8vH|&L-4+pQ<$K z$`z{vl(yBWFTk^g+q`t=s?z_U6DYLwA9RnfVRar>?!4-utl6@-^&j*Ig|7dDJn4=% zi32|yo<@rxxACHr4lEIeeAZxjz>-_S4*d@ngTmPV!4aQbU6@58t9c2aSs1o~c!n-~ zB3tXQD0cJbp@B`P9xi&J+EA(l8~!cq+oWIxfMx->aZp;N0S>O8c; zsA|~E;vkhZ44ukb`yf$y1_;S%rl+{;FspJjr!A=3I2fa64lk7EkYz9 z&eV^~@i8|E5L9|yjwz9lIdfDJ=+m5m#a-o*+1sTEHq@=qw^Wil| zB#CU1W!%i|ud(|bQNZN(toY;=XBQBj8D30s1Li;XcP>d14^&YswyOa;m#%ar zCHqq1pzZcQB8y#fBuV~By1`eYZNS1=A|E!n&`LTuh@te`m-&wS@ngp<#a4aku1U3g 
z7TvT}p$Rp0n{nug0bShpM%pz-ap`qGVT8*9`b6F-r4h)K3gS8mo~E~q48PT`;4G)g zc{4AwNJ9Bps}zAQ5`A7{KBR=OzWzAEcrfpmf38dgxcI;Sk z4rTf*;Wf(I5HsB;)8Kn-iOj&BZdlN z8T_egih-8!!;0U&)!$;+KL!baWITJ12xgsMTuUuZY0#OSSd0dUWy^w@vxcv`5cY`; znQotW8L#EC*=6C10GsML=E0@t0eRNHPII+0 z_rR#~8bwI|B{=G*MH%Y$gI(Z5@?Ng+oQ{(Vwr~LX{>B6Qx#2fo8_?rZb%yU=D&TX3 zl`%uFqJ0GRx8h-ep3-av_#BZw;MLBpKtGU_sq6>lm21W)O~(ri<+)K5L>Wg-6Q%O7 zy)v{h%FR4cp9q&CHt6DF82+HD?^+;XxCrERzirEcm)K0H+lr z`J+0i^Ie!L{6XO|ox0}Kaqb`Y4GrFyQgbZfdFWG~<51GyUp8J9RikRS12?MU8Wdf@ zPE6(zZmhSzG1RRQv(J=VJ%tiXXrzDKr@eA!-AKn#@J;Gy)QFjY#}}h7C`E)Kk86;^ zW&eEp!_ODvAD!+g!s?T6EyI%sB}T{CR!pBQMS9y2Ehm#7-H71aM@N%wyVE;U6@+ zX2j=#uUol)Y50<| z3aMyX3q0%FuC}$-V;1jm|6W59 zlo4|+eAH?SN<(Peu_t^a9-fUDm8WOw+Y5&2F_x@K+zvd zoDFSIv0uFpqLZN>8i87dvKBG=V+$$`;km_fz2`wWhSdK0RX}|oBr%s)uVn{Gr zYh5ZNy_^gE5J_E>c0d{kNuTFC&_03jhj<2*Z@_ruE!1PL*DusTHy>*lB%R>#`M#&qV+_MBl}&@jogEBe3}!xg4u+%%fVOyftDbwx>F}*?qmyqj%ogN zrOus^i}8+|m;*K1(0s&nvUz41q*_=@pDjhXplqH~^K_TZ?s9ibeRHsK-ke$6EdfX; z#!rkGIVk({flz!;S&-{xLv7p|LPT6YP^q0N!#g5&^flEqzlB`FyCP{?bpbgAaro0j zczQ(AKQEkhl_Sl^l6JkBHj60u9#&&pMsf2A^L9`Qf4PdQwt~WnLxoCeZQd`=;M+oj zDC6}ztUYbQis)!qj(Lez)@JR*3VI^YVnmGtyX*FArKNs5fp(h^xJXHfyBQ@P{WegCr)|yuRo;=;350U#)zvtk1qwo!0H}7DPAyi<=%` zj`QvGmZ?v{EQ6k6?p3AaH#BV_=vO7Gmd0`2K3NqsP2W7Wy_(aWbn69r`9i#6BmEPA z7+U|RQ^?bW^G$5Jzi)tIm9RzTx39`6x%+-}2^?7BfZef>PDs#P&OL`UGp}`ZCSDD} zbTDwAZuZIJq;{sRH;Ham+la^T1Ue)#Rg(N0+J*148I0r`qklLcGhK*Go*gVsTgJJ%wbR*93CqB;DG*^;oK6m*Y>b`cDwvnZa zda3X7j8U0h_HPe)V7jTKYy_jNR6?m|^O}t;5Cr&Qu_%9cwU?>oR%lGlfJ)72@Ak5W zHI)fS931p)-OgtJD*`y}=O}km0vOZJUxCYa9+wBrCc*`;YbONZ2&;rXzLF|Qk>wy-CascDemsH zxCM7BPH}g4cel3xJm;K`?}ztOvdQk9*?VSZXJ)VK7sz4&d(Ua`@0B>GNtPxPfgczU zA$Duha>2^uP6=~po?;0bOvTDdjYcCBfFqeqjN@O@Is0g*#n*iQ({J8v4&n|>{{VfW zikOQ$ks2Rt4BPU(m|t_DiB1OlrEv4{&Z2_-?EP-XRy_zFliHck2rP>1Wg&K&AR~ZH zbp2&C%IZq#9DrqZh{Ti`m=+aXc}(u#s9}e3SXvQE+yEgi#MU8odhHR9w>6CdH*I^! 
zocl7bDA+!zWJ7QtuVwjLCDTzuJ z{2>moqu&HyQqp9P?N1kZMsO{Y$kyZcCBM=go`%~Gfj87<=p|;eUs^}UT&%53l3MsW z4^OVkK4r#DSB+~mjb~kRqad+R_*JZ7A=Ah?f#dr2ji)DFY_l2NOhB@63Oo_H9b5p1 zchsuE>IKxGlG2KMk^8L}<7=fWO-y1qjs|DtNgHXV96}V4?xP&^7G) zd+0xT<-KDq9#G2jany23unaNAE)Ht9bs0^sND|lj-dBGanr*ye9D>gbjVrst{l(#AgZg`nK z6rl&1m;BK;Yn0xkX`dm#sS&jFnViKU`jn8_yP5frwkJcnG`PVSp}jxUe03o3{FBV$ zbZL+hBy96nrQY*!%fMN^u))8(b#nN+;CF{8E}c;!z{n;qj!DWp*tqy;!3!Do>;=!K zyK<{CT_j`4(GgSb<;6qJA;IHOq}96{>{rLUIkc*x2R3c70huF^?vJ|x{jjV;>-JCl zV&laOd}6`lLd7x{56+;+kcNIO&7ZsjS(Kmi^ zaDDKK={D0BFoxH~?vYj)L2M}vF&7o-%k|`!G=s_zDiQyTMOeI>1T6=$9};4IPGQvS zxt&$7^Vs#{{-zN3#GGttORB^-c#HINjONoL!eOFGUjuvFw?Xb$PZW{i_V3d)Q3aw6 z%oK)o0YO+tUBy(=yQaC7&cbUf`#Bfw;1hkeI*NN(n2g#*xPU&(TWOr$U(QQh@)*Fq z%f3wv;4gt-lhUWu0ICG>Gs*Gax0dPWiYDUq$=hY5;L8;}2?f46M|TnZ?BERK zxxXdy-*^)Rn7{RKDQ8i;PcjVf_0s_xsmWhk(@OtZii=O$&$a~4-?@-@r7s!pnYKNv zER5L{zkk+2lqOSeIa?eQoU>4;N1QgdYB^ICrS<5+5o9u}bKXEge>Oq|`K@?h{M>A? zPj&Zo9)Q;1tgU}u?FP37{DcmUA1S2$x&d0Ho+{KYI?y{pr*36_Z! z!y6ssD67v`{Yb+-jb!Or^$uHsq$uN}8tXQ?i=u*Ug|?wcmg875xr<0LV5a28W!HnB zDwGl`z+MYAX;*XP(y*p_U$;q9&-d>Fjjxhm=JD;)cRo^(%oN#O%HMJA^gQ5&P#eBQn@xY}lwb zDWZ#`j67Hnl25*;iFseumG1qH(2Tm!_o~$!k^HUX<`>s*ZPL1R<8yZk`l;K zI}~=ordb|CgL6GcWHMJ*$4EN~tz1Vcmcg@)d&{pqIfpZw*Z;z{DhKc}`mb|F(i2x3 z%W+`;ku(X50sJ^czOeQYS1IG+U&J4AwbPnseu^=9>XI2n`a|6HQ-J@AM-(1#$O;(> zflx7`yr7&C!+46xDdGmJOd)B|gCK7bL>I&O&^1Rhgz~j!n9uA6?vP(MvMK0{%2jX_EQ0z#pmx-MZ10NzJ0fSH_t)6yXl}@_YaWg z$&6aDh$|#WY5MQ^8+^c7vMkKr-Yhz4VsWb1qD5x$-R!U@s#>~;E)ehSILXOGNGlx% z(Mvy0z_OmAGF)ach{x)%v`#qgTlkONnFwrShi@Q`TXjw=IqxV%uwnuyzcuo1s}wl0 zx-M$CU@X%BmX@0~qlqpF$X#`n?;;0^&lE&;&};BhFo2~YO|)6WS^TVU8u>&Twq2B$ za~$2tqj=yYbb(PntIC_u7!jKzO8oem>gRWHRUa zVlJboAQW{CJ{e-%#CB{D%8U`Y3>KS4gw6dd_KsfN2?;Gq6T(Veg8}VsASPb5e!Mq zbCdJ#=1TOcAfqqi&Zx*Ef`}K@I$Jd@YH!6fNlT2hwU4C=zWH|3=%)4djoVu!0||l< zJIv;yz4&uG{?*nFr0uOyZ&}msQXW+ zy?jw=Ehf!*X{~3&%?Zv2(uloYKg;)@)v%UT+_CQ$jFZ0ITm|`mu0u#8GW6I^>69xs z=_w^r$c_oXso@{t?TyR>K}$7epX`<0N+eR2O|9Z!!c0U15W 
zRB+YElGH!7ozh+wXX^2Ks*L+&0CvatX_^k4jg+zr^2M_(PDo_>!pwhChc{89@=$1! z+Hx+iwv)%a_~n(6O!^^Gaur_gPG~Xjv9Tx=z{fiWo#bD*RfHk7}_*u z@VU}R_L|&jNaB<-nU9R1Gh2q16=eH!;(W~s>M}1Tkag|H!kEVx$I|1Z&n2QQP;>kV z+291i+sB+SmA8i7@DV%VEbUkns|c%&G{lj#Ibkbhd4o5J-sSL!ZatpGydp#%PQY9~tpmV8UW{EidM6*L7l-X>AHeYSKg;V&Vl| zpJ!490E2pjo z%5Ug*nJ8!=c`GDmd2&zLA0I~%@F{GpC>*P2EoWSEivSoxA>&Rk%H#sxy$RLxa#1iB zztX`t@cOc)z{#k^2abe^Clk9iwq||6nw2SaM{!O#$dRsHB^qLwlWM?{;5$3yvx z@>GbD^qZzstpXiF4aQVdmc5X_=Zp1}eApU6K)o=aiHWA`&l5%YJeQ}e1AfDZapZSL zDUAAc4IkaCJO#vTLp}IUO&CtT3FxakI{d~;1&)HdACSG zA8Q-VdS*imb9KBX`)0cwF95akFX>s0X@sz2>STJv2x$d_qv2JK8fK=y)16uiuES9F zv20LHxoR8bg#IY!rVtlO0vf zL?IFGt{C=Ijx6o*Y0hlbi@B}Dt#@%^S~9E)q$JXDRlJYduq)sZAZ2gmX8U|$QWre8g(D37O(;KdOZuo)}wgG|6HTo!XAyOU_AtMq2c zlpV-eke)A$XvCb@*&l!7 z6~UO?y3U`Gsm}t*YUOo}N;rHCxStXPTWNI2%7>H=Iar5Ag=~`&&c@}@b=yjLfYvgk z!#Uidk=&Fz(3AjfgsKxXyk->JT1{63^IdW}Y3ipCCvGcD2!-Wi-E|iee^aA~oJ}L3 zs`llpEmC{ny=QpY#>yUoONO3HX7s5&kYZGEok-eIGW#ng{|!#=TvTzvleKBoM2`zv zg$K1(uV$z-CwJP{PSe#$2^xouORDPb$vHqQG29RUXGruM+~CE0UV_ZRVAcP^E3l`W z?B$9_jGGl0VZ9v(m}!tDa*>z(cJD3<&*_ejmEizqeP0_$I56ZqSW&Z35fz zB8NuMwPs`##5jyi6_AcT2yyLnyd)BqpNTa;@&w^L$Jn~v0K8TTK97+%fkw!RQXAG1 z1xbDy`sNSPcyRDL0+ECfBV{aXlP;j4q}RurdMg34RkbV5F11O5OVK|FDgc%Y+t)eL zhgfLt-okvd8lNyN#q6OiwC$BJt=3~&?&B-s7C<;$f7#nvkSme!uj4t_nCyV_@6OIJX9#S<{|j^-RTByJK~ zb5v1L)g-_#GKHW&sH*n&L!RBx8tG5b6b_^8j!O;e9N<8HZ&wbE_);osV?KDYzM}k|itT&Mdtu3|bA&XR; zvD`yce~iqK^Z#(OEi&yNcg|26q9)T{iJNEkKbn|ksW z?ZNfy1NyV`uW+^dPZr;UCo67p;i4}NJfo~1Ek?%O!bYo6f%VSt%0S!t7t$^)5Z4%+ z_e8tRrK!1D7^*6u`0g8fR}}siY&}N`xyab&Q_eM z&s7TbGJQvs$!M?#`Kgj7Z#ZzL^>OK`IXL7`_d<| zp0`oW?oo;M&p}dAtUyyxev0uYt_tJI6{MpO1cL3$h-QuH;#tqbfZE!syGr61FK%Oua|URNTTa zqBXwqo;<1$eRD51SX}V=4mZ$Iu9iMC97LU7eMY&CeVZxKeTO*Gw81H7P1-65CLUVs=d7Y?eo03;d13V+3{m@C&;I}vp*wb1M{i}wPS0e*K_U9`J`~oB}Xi* zm-YbxX**6Kl}zmCP{`V!L$%@M+)tNOYd+nrAr$L>w&OgXHlMtEXmm!&m=tRaoE0!% zkG!RPLh=p}+&>}U(-KCguxMB~VE1amA2}i{8fU6iUk$3}-Z95k!Rn%n%plk&nMVk! 
zfsWjYo>DouZ}n|L6UdL^;LhYfk8D!OsxQhmFWD6DYjXXP^OrnQv9gY&_IXT2fxLI< z3)xUaaCnn9M;$IdUlqH$L@jwe(t&ai985>@v^khgQW*5ANziC$o7KV^W|4;L{zY#D zl`u_jVmA|8Ngu?uH}oBrezX{@-rGnVsdm_{`_6!0*&--5Nvww(4IRfx>j}^li9k_X zR}O3^%ZG@{u{Pr3y!wZ@qo{7~_aI63I!mZ&fR{(`Y zuep8#hGYE7E(5H%*O9-f_{=|VfHQv_+QJ!zevnP^wt8zzySR00bq(^BNd_P6jvuY( zq()30J@=flk`*FSdnaA9u`|C#-L4R?P3tI?=x?C_+a0SzZ6{IIR2FQZ!yFCet2dQj za-Vh2i8C#xP=H#>(d);)>MzcwQ|yK*NIEm!M@Nm}J|A^!LiR;{Zlhq^Q?n$h z#;DrrkKz!ig#rPk(sf-65bl_4>>H>qNK6!Fuv$Bfnu^fDmLUX+b z$r}nBG3M_P^g)lGhXr4R+xv9QsG6d=k)p|k@1S#%dYySup~bg>pZoSCe)zIHVx@}h z2eM)!b8fLvhvfJr@o*J+raAgQp!0D5ON}i$UxP_+{%VGlen#VprMQ zrWnB)&Bp?F_Dpi11jdW|ys#ZyG(VskRqMJ55|f|wa*C{P)y?LI z_sA1sYM?+w|MH;leU4p~APNSLXQzht1(*h#I@CMe$Lh zFGjR(-8XyV?}_(6V_jr61x1581OPHrLhv`^^fAZ8koFU6a)TOFkFy6kYCEvF#KE z?7rifV7?$Z6|}a*k6d?=6^|!+%BTKCCsyuU7<$F~MyJhj`(*boszBVv#Wi&Nm~ASa z`sB<@dZ^^%Oax!TRLXZ<&M1U2eGN8y3-tsb?+@m^i7OAoZ^)?`v5Tou z2tDz+bQ{J*)6D2};uX7WMnH`}qfME+CA=0B;)iE`e)-ej4c4gBy3mDAUM%dg66;B3 z@ZB|OSFF1sn@}ZSzy7(t*&)sr(JeLw@9WZ(wdBIhwr;}YZ4NzuEhD6>_bI6_*RDQF z<9o~kKC$R9@3gh?jAh6{c-{JdB~;E3Z6Uhwx#{mWOQ4RkPz<#$@3@DkX7~Z5EZ8Wq zj%nFdc2<8OU{OCYq$LPN)D3S0eQ&Chw@MDCCce7$wM&?ONH&?5 zfVi?n{X$>%%vUxUoj|NzJ@;%<|HKu&9PrvQq%?Hc7raBh#s0!esCliQ}V9m+nWsfpyyUK7!&P3nMK&F5P^pczP9q zIPo9!7R=LiHpTUC+G_10fDsTz|0(<~nq``@Na>$V)WDCj2IMSBR(ksEji8a`RQ=rL zDgT9E%TwgKuS-j`)QnR_=9bQE-FjSnh}hviG8a65bO$k=mImXxW&*6Iq6FY5924tU zb-8rSpo6>t;=IQG+oQoedJE*kPz~{QC|@#$ndQ-=!Fd=YRiYbyYNf$5dknGstr=(T z#l%Hmp67VwZ9D=()A7-b;>Ow}xlKF=X$CHqN)J_dw=c6?wD0^Yk$aiZ4=$c9KV7KU zt>ZZvG~zufrTS4r6e>H^d^Q?KUt#?K8`NSy82$uklY|W9LYLw$|0+`Qw(TXkr8B@&gnrH z^ent|R zN_QMnRBHtnQ0mY){i8qit90qIUUX`VaV4rI^XWaEh*0Auw>R+Ny9qlO4%ds>ZJPi< z9Blxq3r?(cLswo+h_Uib5(T6Rlr9aYLwlX97YKwleRRD_-&uQ$l#77Z_7D8S9#uEv zU#Hq*)2_7ROG46&(vF9@XwmSP*wZ*pC#K1tYn0UhV$_*7=-5r0Rf=&8+Kp)B>n@JC z=bEfJ+DX^JnyZ+wD?sRGYK`>VLnz*PNr@P(hvlR2hb>*}#P;vt!X4s(65{8KL=Osvhfjt4|pjwxVz zFUzGPk3}b&zICx)b-QZXQF_#Wu5DE1GJoZ5aV78YyHSG@<8bq$>(-HS}{hh95U`GRJ|s;}D_ 
z&n-hZ%R{!cTm*v54bIKP_Q281>(sm4x|%ZH+{w0+-)4RFNjuoziEgly64|<07&YhU z;*h)?&01q?EAqchMTxFdRpvWvBE)6URP&#O5&-u6W1)}R$X<6HJfU|Wa6)v8w4Nu0 z9FY`PbH7yb`^3rTSgD`;EyukW%JuO5lLrJlv_o7vO`;Ei-S^)_$>rkmc;gePEFaz4 zy%I?W{K%cC_=oM({%VBBO3E}-AnTN74#}po)~1xNn%LE{BmhCi$MZ+@`r(Gw4zQsI zAvR(afn+1zO&T4-I0VX`zY5h#!E-gA*&n%O86&@Bze~-^2gL!-%n!8vM%2PttGZAXQ~_L$EaO>El4Qnv3d16y58Z>qeo^fJhxMk*{V&+A zpvTmyXEtV%Ew%e}rxXmwUPT2~C{RO{<@zWFaEQFz?x! zXM-uxCbKUd!>X|g_9-BL2O*`FU;b_$Le$S&Cns%ejIA5AZ`%X`jxMuSy3M$RI;a|p z`5XF*88?5z@_1WH{g(=p^@z9>G_cRFZ6x5AhmS5DD{s-tRc3;U4(d#f4xR7PVHB== zZY{ZSVp*gf2b%GY8n`UZV)!F$WUQ|sGq%wZ9S@WzB=0JuA0Aa~2^sKcZ_vc9{9+2% z5m&H2f&kTV{VS?let83Dh8Z0&*ssK0#C~k@#%N}0N_Pt#@u@z*2PIwqjoU3P^>wU7 z?41RWN_h@)Nx|S?Fl;j2sy>H&nmU zyXbfhtNmn7v=VeVi6S|?O<^M*68caG43N+pu+;XrnnKC0srXao#}Q@oCr4sYC$dNo zfMbpa6cCKmlO9pjanT(EEQ)Rvnf-nWIEE!dE&RgTQ}=NRV9{11o0PVlCGw#r6#DuK zfvFix@^>vwSqxEBc4ci6i(o+SNnOMAeQ3IwUGERN=gn*Bji9Qf7F! zY;x4kbasxBZeNeeZh{}r&I3^l?-_#yF$JoUKdjbs)np{8RL~F@_H?>N$Carxse+4t z*KwEz{38DMqoiPr4Jh{Y5HH?oz{@!Z?sX{clS|ILu--j}({84q~&+|X> z-n+bkGpMBYPyO-(!S!w{Uocd@4BttWpt_yM)$)}PSO?#^JqT6zB00i5_1T~{r~Cbf zG2L3ef$D&lZ~iAF5UL>-?e0g1>U7OE{wHVyRyq_-3i{dtn*TqhDQ+uax?vA0?8^TN zc)!s%e!$VH zO!lqvYGVzXzVIh)6|Rom)-`g5^Z8i+zuM+F`sQF^lXWEL!2>>5Kwz_(VP*@I)p{RG z?z8JY7q`_`;A2~eo1Xpnj=}E<{iOZs`k(N((*HhLH%xa@`CC|aV!mW3RSkVM`hUfg z_`iIBBcU``c4ye9U3b)0LNTaE*xa;2kzo9Ll7%!V66R9EdJy&0brqh zmTgVD+2BHyWwlk|{oB0?)j|`&RLYK|Jn4#nYX8PlCoL`IjfT%h&k3m2zqeFuUT~)77rl%fXE`IGhy0l4`Wh!h31nv?4eA3Z+xs!&N;K)l`n3GGY}NS_PdZnK+bD9gYs|0}1wb%73go+$$!YjOg3Y ziBk&FG00S508-*Q<9H}@@{B z;ms{4Ohn_7g^m^8j5SmPtzB1v1*S}Cme@!OR88l#FafK@1}e{f?SbM%=Al1F&)WCX zO0lPczDdrAje(q!2#lZHUa$eT&{v^ZeeWS_M#STQe-5ivBGSavh=<@?rJ@gBU?x0S zSybS{Qt}$j6Vb}Qg|ZDoVM^3j2o&wDbXd{@ob|b43+i=hXQx<#*#SOy1oY6VMud<& zw}lty*Omph{82T~m%ToyXooXo|3(PXxo%i@#sKQ{8ZIH#j&4WC&ya&yLY@#*PivR} zK8Vemrqo(W=mh3DR%Ry-UAk3bCwr=15DXwSc7q`w|UM|P3&x}1;NkYNdaMEox z_fTyQLf)>P7hdyb53RCku31BY0|pv?$1E`cxGC(w>A#YYFx2|@qxgB_5nQFLZPtF^ 
zMudQ*bEi#RFi>lU_&;sLHKi4Cr1lCnUN0MAz?2J}nNk@CeQ-xs>g;FPiNf3rBdh*r?ivg`&q{uyZfjr% zukD8~JeWa)@PDHu?-wN_8S*e0whc{_1e9VnDJe2OQh5v*Dc`_wxG1QmNd`=1;rEY! zIZNS`P>)APYg6j4x?le4Fe{v*hY_|WM`)+e4t(MC3mUvb;j@-dI1BM1_nlq5;g zFWH$!pP1Ld_(A!8=y>O(hviO#cCEJ1(XdeFDx5o>RT=iL?Q*@1V$snh^At`OXi&?t ztM0=K+?4S1qq*V~=8-`_8eQkzjD*uvQoX<5B`%FYn9nTT6@J<{?STB#cq6LO58nUT z$~=P!;@&CNQcvf#@c|O8Wi$#K$LU{X$CbGL>$)J@b?*J1>a>F3=?#J z`i_kbrfYiP;C#SI;huEVuTyXX>E z5=&m`R;zdE{MH|$=tex68DW;c4(bHOtxGBeXL8VY&b5Y&BM|7VPzxYJ8RCr50gAWy zv=d!)FH!oZVy5^ncDO@h((7Hv^l}-93#`>q0*)&2Pi@&e%|Vlcx_YwTGi_zET0{uG z4#?Vx|ByD|6_^@pG4qpQA2~U_i9mqx>Hj;faQp-rvqLdahMwfW#*z%XvqXl$7+mtg z*xW~5qO#2*#nQ#Kw1t4ezM+-8ui}9*y_7@*KTU#(+;sfl%_m$pM4R zTvBKQPuc>c>Whx#3o|e75MXf?D_Qg_MC8UdY#17CMrw%UF7!41#xP630mO3S+l4o4 zCSjik*Ig>tvAakFOw6U7QUo!{o`8z)+04}*fgM6lG$F33ay-JOw|m%U0N_ac7oN3^ zhJ8J>)D~bl3>l?LLrcx?(TjM_SAC+!(%j$2(72gsd-_8+P7`TC&pt-<{tvDak?2hT z<$KM42VpHT;}TZi`kfqD*ffM3rl~~5{aJbm=VAUH$0CT}$(w<`Qi@`jg~H5wJn?LC z7>N_ncGC23W|#tJIs#mw=M|2d?WB#uQQrHYF=G;DW|CCRLSdX7tuA{j8}*scP~hB_12do)+YxBXV0% zXZ=eA_M}|@ZZ$Kel_1A@Vol#TDUZZm{H#JKH!FNSfRL#Fq44C@9IH7pFR9c07&h(M}el8okq1wKO zi&V{f=+`KCl1(6}-JI{*HksE*=hee%{gTJ5ED9a^KX%vF@L&_Cid&u3Y$^_E?mwb% z)()*r95;DGUa1GA1C3kcfJkpSXF0@O~RpFtYX0Q_*l9O>hNX| zQBFqd)TQ{2%>G~vsgOvMAbOw!#FbUt+%t`+m_3YYSE!3Qw8WmblH2$sz*Y8UpsQb! 
z_;L06vV6bn@ntVE?ObR0H$EVU9NlYYq0h`~F{WLfO2embU)-J=x$kvh@Sy zL$jukIXSkY%@EFv4+SQh6qH1|R~82qc}L!j5INk6DJcAiFO0X1J~S97$wEcFm>0S5ARxplAo2G|_O!bU}n8MOOOxPxBSz;=B$bXdRwI(OL;R*egG_%>=R9;U;zI_F!L+|jQ(v= zQYg-V*@v4VHKkRQssalut827SvjNVSnF>%a@4OdRaYYhV)wpACDj9it{~2y-8iYAU z{B|JAYUJ3!^7b^N?`I}1y7*ck%}OZ*WS;rt`O!R!@FYG#+dCsvdkFoZ_@q=zngBq^;LU*0I?!J%b0?0eh}h$Phuj* zV4_`LXsT9LF`p}C#99E4HzRYDiOHz@Bs(Hv!nV_hTw5AYI-j0 z_L!vT`ks`-%pD<_VTS(Q+X+twAozA)^W4&fwZrwV#wDb_Sv%K58y;88rPx+Hc9W)v zyHHfP;^6Ja8>K$6=g*p#8^QVOrth^2N%e}Szp}&Vi`VBGr2Acl3PU&<oxX|;2E8bU~G-z!+tHI-bUYH zA7AiSWJXYfloQ>3!s%~BDN{v^p?FaBHw>hK3G=dijvj0tjyNX3md*+P4AkddfvUv$ zi!-!57(T#yJV}Y!ZEg}{j$b@3neODfSTOzHDH6BUN9nYS>z)sQI2(OUPIwki23M%- z8kq^io|bxpQ&8(JVFpXtrRpw?tr=px-W&GA4i?*Qh#m72>2);Ykj+)es`u9&0cA54 zG@$~Qu$$TsXRLJcW?wlQBi_($Nox#P<~xwT%D#pEAgxB>I0hY3xt85@~h35*diCD+BTLr z1exmAm|J`aV7h<#asp_CFv(;?XEln6K7PaK@tat|jTvh^p z16rltFNZvs`o1B5tNoLI_^}!xJ0vEFF&}mkL^59s@ir2EBud!+#h1t9lv)4L0R-^7 zX9vg$tYYz+M=Uj$s>@rT-#}PP=A#y(<){h9=a>sq>BAJn*WS5j@lRzU(0kXp?Wo6+ z&@#1N{rGH8UDAm9Iu`9PKE25;(ZKuMiSmQhJSJY`(5P7=3}c&FIX!Fp=cA zbn!T;k#fq2pf&iqf~mufm+;|Z^$sNx!Q2+wdZCv_QzRR_K2aEi-(_)Lc|qn8o)4oWf7m#x{sD=YNV{Z5k*it?{V6w|F?5$(PK8YG!%?9 zRMv|lZ5i6dL6sQr12sR}7JdtLX*63 zwp8fvY3!Ew8jiGsF1tBWHI<|mQK1Pj=Rm*%wmMORC7md92zfAlSoL^X?R(5Z^pgm; zTvt7W8c5HMrq1u^sOyOjGDmV4>gb7}o;}33-~l6hrIcx1f~4o3=ViBXg76Y`C06Ar3dQjmYCIwE=$tCnlfxmH_ zmZjADVef%bXrfr$Y0&g#2%F)v7vkecxBUrH$cvyJ^(c9vi(R8Mk9Z7pNmlnPB6R1r zXaq-6H_;2xBz{p1m)NO*R;^~tU{-t%vgoWQCu8*~w5TYqKtFTzo9m;LbjM!k=8JBQ zi?_FBSikE5v;h>A(0>}hx|f{YTl3Jm0?R8jiDww_$>Hhu93rKIcVv^zlOniFOdssi*1)g!Dn3wo=I}B2s6{a za8St8E|%R1MHiq7W80^Xep#-DpoA4NkO(ODO)5|mI+Vdl=jq9X8`KtP0!0cq1g!!S z)7MA_yhK%L#!NX z!f_mBXiOA{q+T9cT?0vT`A4&_1lO@@)a5lZ%jMHq5+FP@(}yi;1I;5|bC`$|+wJty zhzzvq)PLUU%MLb!2NGs=p!C$R2r`{MD5-l9OD%aynI_C6MM;r|y?LUrLCM`WKVGt5 zqLAgtqzYJi9u|dvfuREgMq~2{ZcXqjqiJ~VM|WdOGo`rZrmaEG4<_w5pOOCF1>4fd zZ(F^vl~3$6e+htIaNfS)KwLj3b}0NL>Mz<0@f0x%jqn~(NUocfPH({H)mpNZqlliE zZv9j1yoH&dh5;<~Y~BK$xR$a~!RoI^rw03&)ia~~w07EYMlr4ML^#(4dS376l}U6D 
zqmB-&>Vd^tcdwmewG%R$A(1OuZx*a@6HPrASxuIA6Z0??Z2g(zI1}pN8_sr@FwSdS zNS7)?DA^|7lVdNEKx}WL9(_LnuYj0S#K*Xp_Mg;=wYTsiM(4E*Mga&aawCM+*Ara& zQC8EzW@ziA4oYsT^CnnEpY#@j%X6uaZP4{DUm|sxN;kgUXpT_5@sMH~VQJ+?$r5g- zXAEs{??)E&oQ;w zj+(~IwEu~qj@cZ(HQ)%;4vgVLKqC6Le8joTXQVSxUA4P5|6zBdDDU!SQ~{@HpN5K1 z%5?vSOC#K%UG<0*_m3iAZ`S z2hJEGlxnz5q|1J>Mk%-DdX)O{k&;>HZbE_H9RGFCbqz}RADU7f6nnkt}(d^dVYF%^uY#kHD5^D5?dz(=*;Oysz5{1-?1Z*d`AlrzfLMJqm`py)l zG^eG;SXC*B-qHj?4ts}kalp1g7;<2rsP`!ObuKM4OY=g~R}CVQ85p^5vY9HHbvg+2 zD$EofI4`l)D4CxOCsW*Z?^!x@`ZXydKG7<;PGCQq?Xo(V(6r$NtfJ*Yr!C|MA!}Z4 zS;uk>=_=x_^DeSk3wZ}sGd4xyalZ(GD8-MGhQPl;XiDVfZHVw2jI268@Gpscz#}32 z3(h?JQ%N=bTnfW$>cmD7?V`Y!;L7dBGZqz%FE_8%m@lnEOGP}rqi)DX<3;qJ16@Cb zOHttpFCeBN4;Vm9r8D`W()0@=CLSAcl_3T$cu6W;E%24iB$P8sFuG}iMn!*M-=(+> z^)NWBu(d`z;N(YIuYSRy8B%RTT~~a{W>MeGJnrUHCNKNbXHhV1hr~eQFJ(y@gqKkf z5%C6$q+_8;Rqz7f4uSh-fF9XS7Ph#AjRt>4ZK$oIa97BQ@{k9q*kTjv=wpQj+Ku

;J3(q|bykyGE4?PVyMSSUg&GKp(Hn^*@>7<f+~UIp3gx&7Tb@-57oE-#C1Y3 z1Q=l84hX_xgk*#G@YqIba@x6CLlOyc$ms!@LwlKc=o|Wo(vh8Kr5!JnF1Dxfo^GRa zG2ci{l;rA{j(8_@G#^NNWy?~X-)@aLlN44qCpf|GAr1d*89=%BYYZiw_8$>XLc5l| z@dfNG*y=kN=NX?WiS~nRLyVrfHju0y>YfZnBG)3|5dJgPI>4Jyoh9T-}V4y2uqk3>=4UVn;gL!myU2Nss&wvyS#XNz55IEUtYj{QizvZQ+4XgVBYGC1e?F~zv@R#*~1t&<&OV>-! zhy=0sCUS_BsMN}IKcmn4SxM<1>u{T;IP7rj@SbfqtRp2%*aK^NLP0xr&O=|C+hVJk zN@kAE94JDnS1^8puf9-SQ|KfHjg~MoaRyv)LEEf)iHC?bdRM181`t82qInrDt4gHc%5*0dR?EE-NVTlA%?KMk3$~)P9pf9SQfE50eGI2Z zMX+5tkb8Z6oSPq4L`XT?Wa=%O<3O-->G=hMLAbJ@dE(mQhGu0Q`+6pQ#_94(-we}b zWus{cVNH3j@jWo3=B%4?vlc=wiGB)Jwp)}E2&hz0eW!(V0?}n(g4oFAPg1yLxb{qhNEY1D`WCz2_2p zJR!XY+cw~`2I?lmR}}Md&A`P;_&iXOMmy#3K%hr>WmBR@c!|}iFD~=Q?^h`zCgFpg zwX&GSW+K1-9o>b4!q1kRE9|L2hN_62#cBccm1Ny4ZiZv)G|94sg2XXT5cif zLp?fp+bUV()YtTNmX!h`|B;r|-1{Dt5=MTEcn`~cTEKX;Q@(y|OM55@}34;)Nk?vFnA!H63+YB{SqiwMMf_?)Hdyt&i5=dkFb}BL< zEi}6J3bb)Feg7SCI-*}|J9yyS80OwYRfn8G9?H-L^t)eG)VnoNO+*A3kO)n%6V;o{ zwd--<3s{xuAK`V5@s%!`dmSr>O{Ni}lb^_o7g zIj#rSvOlo3q7~h3J4N0f5{lmtJ~jM#9kKoS?6%iNrTu1=t5}M*;{Eh=d{CeP{HO}p zIlEtgGFX&#l>UC*Oo#29h!8c0(Q6}4zNBCC)b z?e>5AaUOb(^gLbit`Pf%N{i%OwW;L^cg=5T@ugqBHWhb8=)7_|nCg*X*S(1QsE0Kc z)`&9I{oEkFce0x3c&+)1^&3GSWz)U;^LxfhkYYU(v|{GkcQF{=lcrpXrp!06>|P~s7GWua;iNm$|$1vB`@K= zG5`)WcF}hM@cF-z%D+J*C(9|7L<~Tj6Cg725fJBOJcWl2%?n7+%T6^7>|Np!=j8~1 zMnK{LfY;t7=c%ZL#}gyFu6JubU5@rZltdp#)rH8vaeeGmDWm?z1=OM(|MK60{97V9 zpdfhvmZ${)Ux6Swas6UJpp*bqXcI2x`m4KK0AI0MA5d;Ni1*(2*VhORP(1CHCbL0vPg#FFy~&21;KxW^y|_3LmZ7>YMx(=_~>^n z!|So-g(pxcu5ZCB!EonzSd~H}+>Bl2Zu&HrD&eKk-dp>0dsJzt7zu*OTCUN}#5XF7 z>9z}p9@^zn?O{EjwY0d|92+=F6HfEGwSR{7dqJ*#sC4v(SIZc>YO8<+>c(`@7T4!H zYwEi?;u>CrP);RdI;*`YNJwf%N&{|a`MlZwgpPO@h%}8mX6$uKzrK@Wwkel zI)|~kEM-X@EBek=WTn}ab(Sv%9Gnu#2!R-kv3$t zvE?C}GCNk=Key(<*UVYn;%cdAnHj0k=Fi2_n~2(kN+g%sM#={nHpyjPNfl0%ik_m}L`$Le-p zAE_Md3jC?JxIyN0l~a`kFCjW0X6(!8Fh?r0C*;y<)_tfxR7${N`d6XFqNF=HJ7g1y z2y9t2aAfcYThWP4YcY|&ClL%`R<5*e(A$+P0IkFCg&j6lNGz8$KVZEe>KuDuLDXGh z)*wwj2pCelm5!Es55?B-wvTox%eWVE1kSMx 
zGOIxJNghO(-t8FBk$Rcm$6S1M|DXivrZdC1gr7gX^Hw8&ZF*3I=@xf;`vNH!0|nu^p3*c-eC zuD?JiJ>7;o7ejK91_QLof>v!c^Wi)N%&6zVA|IEQNf#z=7pwM9Yvk(CEi+B_N zg5Cd0Xn-JI5*MKxniD`md;~=Po85sI0Zm{CDu2oVd95o=h&yPB1W2qTOh!fKik`gsW)P1s&Obukt^MJ6gC;Kh*t5M zJCs6n?5WiFl=0-6-Dvp%+lm1|4!K4xTd8<17;9M)2P$*6m)O$(|IU?g?7$Ezz> zvZ{=%xoCT9Vs8&u_`2BR~e?l z`XwEj2`yk(Kw>W1Ch0Hw-n>q6{(MvV_g}NtTTM*(a=Dj&Jx-0YdzaN|$K6$DlZ6mH zedi9FLbVPhnm{_s5CXXwaIHK6gp?0O#@#nIRA)q3tQ|AT#OOuUgD_q~V-5Ah{w35! zYViGVV)~+zr;(X`C*9&9d$D3Aj&2cG5wHud^8IsKeZjCGB?qs)pFWp&E& zOc-JoSjsC)bC~7@`n9-_xHz#C7wV>u2_>=|A97)@jP=Gb7d6QI%`Q(``kat~ty9Jv zI%7u=WuF&vWCC#Su`IV?(JlN}fRiGk+FDg&|AcFVSma5xmFO*x!)DGF{;g8cNcF)k88x+`E!vGc~dYLZ-8dsawD~T0A;5c zqC(j$l3Hr}IhS6fJ=DcB{a5@m(sGokUPP29gBQwE+P+>t9De_ZA7!BbK&EH%#He7r z(Nw28dILBS-07dRYnqADQ+(;>Y{v-8{cn}L$!0%^-x{w}jKTS~TYM@}PY`ozPJ?0q_^~;dM zb!VFTO*hlSa0iutD#$tlTUV;ptAFUr(f#cFq5^NG74JDtMjRRYhW?n~Ukv7XS|`#w z;SfJAP0Jl4`aZ~RiK_ouj@gUxO7FxLKPTd%*7Zs5}B&s?OZ7@yye)P7o1XGUG|!%fgQ zMI?LzzLz@ngG6~Ub3IaETvq=_4{^O$F*m9U;knYmh4+m6b%@>ZfYKnn(+Wfcauz3d zZ>ue<^2fvO@^!%K_+jEzPtFokjhOU0{j7(bw^qO|Afr0$6u&s5uwQ~Nks_e$fBwek zsB`4hOM`p&Vedtc9LOsV$l5DAKB2hKEgbg4wK@V1dL!^BSlR1~+R4borgI5#k@i)X zVe$3bALaZPxnv8Nkk|Q>iTYS!5Vs7HXX1%vrjQXx;9PrmYXlqKH^+PBC&=YhNFB^?WGk_YC;hI1^_cm8tc02=WG$%sc9%GL;vZuz-&&;jMu{Z}6h&UNbcMfuZNCtbDY*#xPYgJ7 zl2`q2BjzM%@wSrq5myZd`DF`1{h3c3Dj=gH&^y;0($lX2?e(@Zx2xYCIrsK@_xBR# zW%eZat#zHJC9eL#Myx*-GeN$tB+;0}Mn)*sH z;*L&y4x}5uhJS+HY$^}}kdN8QMymtHt&~fcq6xKcDCZ$oao|pSGZk65cGMwCDEGS(vDtN)mK#H)UFy#WrW6yMV4ouP7Sx zHqxabE2o^*Y5;pft*dGZ*e@GR&H$J#f2|7QBOw5o6;=Ifzx*|1KL2aT$o`Myzcvm! 
zQoE??CHH^rnEx0IZ43V|qvEf<0Wbsqu{RuVb8?i-nU?L(71t{M0*7;m2ufu(cHF!rivKk3#YhV6F?t+2(d-Q&s;u|L!= zmc=H=r|*f^uugQZ8uK{&W@c}}Or<$d)7K-|kl{4={YS80$A(}>8~%DiHB}GBii4`G zMEgd4-j8Hn(Le(dE-ulBq!}|wCo(&;_os`K?UqzEKEpVy!;L+U5hfuSNu}vrk>477 z;?d+BEBLJamyvK?Qy;Dac0O`1EQIuW<&w=4q6c>whQ5bqQxPin?ynj8 zbYA?{v;a+TmT1}iQ#FMYeA-t?Lew`(ac%vkggJ{`-Y(|#+atO|!O@QLX~pNc%i_Q6 z(}~Lhh@&dv%SranqAmM4EfuWF{;6=NFda3tiTx)ZbO#yMTx55t`K6IYVc}^4+&bqf zJE^x?{kFvmf->KsAW2FAt9EK$I#_FG(k8V!BuoRQ^kkocE|~Q(LIz{Gs-)F!x2*@V z>{2eLQ5yjU&o0?2ef^rR;;)pwr050cFG_B|1Bc`KWO<*(p60^#pD%_v|0=Bzxh_YT zVNX$Dg&=odS>K*2skmy%O4_#hD*0+bAR~4u?OR{>-fi>JB1%Pfd47BVVy(CP)iy<3 z*sTbS=4lh)BXZZvc_oU65yH!1KqQ2C(P>S^rmP7>AP+n>y-%LCIkO{a?!;SUfc`LAn*Qr}Apu(hY;BORY z)~S1J19*sf6GD8Hs_^!(KW*&SdLv&vdh4+Xm2$}U{r0QVa}z56kPPRT+xF)JSS!X> zb_d(D$=^o+Fff zOYF68VGU(Ob-{+}3$v=`{Li`^U{(Yf%@^pS>k*i#>xQg2r&rR(Yf4lf@;~FjGVC{ogYFmZZ=k zNr-Lrll`D?dotpKGy@dn>+tBlgU16*s4MY>Z#E5g@a4N__Qo*-2rYvFrj-RSxAxh+ ze}Z{N%j?!___AX?_7dDBkf+0_otYCNc<{~iPxpbNc|XQSrPg@5(c+1<0%vRAI~wLY z&*wD?ad*rSSWfqrZ9b*6?)C5f*$l-8!r2Ftv^Tl>5P8UU_I*nNw?D1S2LKW4O85Vz zId9|;xq6oXQ;V`t1*TsAJ7w#0h6l5ap!`eUTCzx{PhSZ`-wOER~B-^&Xrm~kFytjiwq^_l$AW4 z@tp9ZtVrSg$HfdJ{d4cn$duMNt=sIxrc4$ z?le3vtZZ!w^^{)Bgv`GebMhQTI0PLwBCcNemF;8=_}~8Sh>~u+kFzUMM0?a1(vyjS zytNHeAEUut%#w^=nKm{8rL^DiL}nfF=x5B)xgRejj2%bbF!%)hx`*_egy%~>t|M}|ZRzPiW<%z&pt#<;qye;7uL8kjNKR9YMno;#Fs+E99h6r&a(0Jin zHHT|rp5SwiCvNg!lq(D2TAePwyEhcgY-br^Xh5l$9pH1!+J2_8=@EG!lvgUv+fg>Y zvPR$cWwG!1lbVXJfkGQ2Z`(K|z2KUEY6As5(ECtQYYybcIih#3)*{-o%+LM=F_KF} zx}avJzWlNO_A@)dMy`koTT8YZ@2M5nu=a(-$ZaPqA<~u94w8klWS$@l?F3j##I<=Y zk-0T_aO~C5Ob}7mHawD2f^XX#dZSVHEBNBVcm8_4YV+Z(h0>pv;Hz1wj<&+B?jHs% z;us?inj3ZTNk&Z2JmsxA6K1H!2T9!(Tu2RRf?i=l>e%vfC;aLoa_*4JUNd{7;(``ahYNUl2 zpdE%e;*m!`rY~(4(5s)NuzklOa2^UWEzaW<>B=D-nLBADtQ*(9Sn-=kFaiT?q0Mb^ zjBO4&LRhDHJeQEh%QCSj{O&AT_TtZAakTmUg3vVnIs>I0V(6Fogbzt@Yqd4$yO^L` zcU>rTjj%y2u6teABJbmjQ~1O;Ptr|s>ig`()<5=zG7xcUzuqcax2+M=p{)t3(}la& zPpRWpIuEi2jK6~*@|e)6T|xf?ni}Q+>N-)u8pl5$Up*{>ybz_*E^Utw@er#cwxvFO zlgm$B1^^11sqeUiX{w&=hX 
z`B0e`q-@oPQgghnoH!hruLGPiDw{mGhIN&o_R|f@*lOtB3Qs6KHNRfElhkNLHu5fF z&F9hzW$|x5eB(u+|EZ=Mz5IS7Wcy|747tjydaAYvIGS&H$U&N)L0AwmPT}t#gpKB0 z@as$$VfBHQw`tBJyl1{mC_C;|pZ;_w`(SRl-jsUER{XlQPbxorK~}fKwI~DU``5tN z*jq)MT+}j`_l^@1m7?`>_6N; z{0V;8en53TbUus(U#{E>1911@Hu!$x;o{+5`o8K<#7)}6edqH1_Ct5uWNg%)`66m$0Z z!5rq)2`;s)F4|K0smWt>2O?C<$%P46T}GyYv(EZ?{$trHNs>pdVjm=nu68gFHb1@6 z>I%wyeu@Kg#IX%PU-o0g5M`b%1A})SSryV~KYp)nQV(h zd7w!48vD@zoM2n(9Qg|ND>ohlDa3hCxKotoFBc)f(B4xJ)jsg-0+(cRo9aPllqB4` z?J?O!>(VCz_UE7HiYO^ECmrSd*8`qfVtJ6a144^Ob94ArU~~+$rQG+>I`YXE2H69x zW@xfi7%iyxioBoXzyn-AnzPKN1m)9$4AHPK+Y5DDcgZz&gbLcLx*k3MeF4wn%Cy}nYFzk$9%{zW-cg9t{+Nv7*Hf;1!T0+Y%V(MY9LHK-v=`z$ z%@>wAYOf}t>DgnW*d{=Uz&VCixU(;wjk>IjL;A0=yW2lu+WymML_N0!HSp} zsxY-j9Sm)(^}>fA)?l$_S}&E*$je?xKYc9MfQ8X(gQ=f#Je17E6Ff;71Nxqj^(V9=f-*O1@ zP0BZSqGQOLepznCBqQRV)It;1n0J)v--`O25fTJ_Q~4Q_dC~;JbBczf)0Rmq_+*FT z!N5IQQkOk2o$kKWA9R{yZU-xCK1ldQ*)733er{CvTz?)@hH<09rq zzBE-;Q~RxUk2D0%X9&6SKYUp5b}<3VRiWLT_p0txhk73g0&nAqM_SCvCv(cAL;E^3 z1Z@G)l8QeAmg__dyrh@R|B%1nBs)@KAXu>Qq2~u}7#^7FzKqgHP99V893%48hvlw) zXsutbqudJ|?6L(*!I}l2@w$JWe1+B6*=_+J2zqig=kgL4kC?U7rs9m`4!chtLt0*GB5T|9Sm2;03X2p}Dam z!{^`25zH_L|DydMxsoNdd|-D3EQV)*N#Cl<}Vh5%x`LM4;)R9@&UiwHMIVo=9- zyvY`m!+U2{Sh{^a+UfV*ocz{0eB-MId!*_$P6r3*G{=3$U)bD17~CA)gFbv&%|p-SHSjAYnTBUi}82quJy)of|8 z(!q%#nUmJNuJZ4`lX@%U988TAgjxxYE$^Jfo|_j zc2svjrbTGDRP?XJrm=j|wcNO10eaGb zZk@dY3x}t3XSS_(p>S!+#5&`GkftM0Yp9c7N6dyt0reN96~=_0AQX^osbu=3b*$Ir z*KP6XpaYVkwwNnce@3=n42do^QPJ6~t>+za#kLO4VVWJc!>bx}+wby^M7Zh$dvz^M zd;z}RChSoB-1J#LtH5UU#-wl#hiB?3SD}lx^Z@965_mO? 
z&11-fnbLP<gakgwGS>c<0l$FLZ z;nH4yZO&`PG@(fD32x{sQsd(suyYxoI3xEk=+qQ!DZVp2WqHP^9T;<7XZo7{rXm1R zIO7;C(qYO>mrZ7TY`XIs({kE*OW;$c<45Bek}=E|8{F?%AVrvX+*k2}#+8*`A3Ha* zoR%-CiWlXXGoQo84{hNCTk!A>HVb(hyr0_{c8X)SEr=^#+4*gfeedj6=&nql`Bj;qV;2%d z5n!jSQh>D*M$nOHh*Y9hL^~YnjyfOe7S7?G3OyPsX=Y;kx1=WTau3GBk5{4&B^3&G zA>C20%WLAq3;|u37k&wutXv{Lz7UtD3YZZhT{;`f1>3tK4z2xtS2FAh5;XpnmY$@Z zP1ab_GRr*)cy20MXg;-!Rj{ z)lhaWYK!`Z3Bjl8eJj(eEbJ~?@R5`lR5#MmD?f=$Chhhv+#L#8mMW>Pu>vwsh{^22 zp$mxNuQ8czf)VZz_^Lh70qRiuHStCdZuT(xCk_{%oR+;~Nz58+uH|NgKDzX^ih1<) zG%>rj>)y8lNkmrj*U5J54h^eJG=B5lg%iEy7nHQ_z1#p^eZu=aTBT1N#R+jc`Zl?G zD^Z}&mrD~YXQQ8oxdkSEGTDWJa=5R)NP$GYkJkMb|1tUtUh4w%W<9^f{{~ZvZEy!d zj!iS{y1l=gDbi(}*5r~sUP<(oilPs2WDvi~<)4sT$&~|X{(@(=6(`u>W`F}WB*gxv zcoi?vfEtRTztNChgcY-ebqnt=cQ>965&L^AJgq;k=~l7H4QpfCJE+VEyu1laI8d<3 zO;zRrI*1$C@2q#fH=Y5R2SAiYj$T46Ru~-;;f)G0Kk#o0Y;+_?S@8HL8%r515ft2< zxwvD~Z0Fqk*g(yRq99I`8;rk+R}Im$u8Yo}420Z`JAVAEQm9zlIa!l*Y}xaLR9)mo zvp$lkj!k*ncaUPU^2i5!ZTtiOg+g{4zj-z|QL9dv_2W^bbCha@zwIw3r9euybh*Kd zz98{71{;NII7@rUP(U@CD&P1A)U$6p;zpBWM_+(ih2Nt*`MkKSIbD~ z3|=3V{@%5fD)UFLsC7BO323!`rI{{pJJ-lI5AN^r@<~h!q;37h@!im+Kqu5{OMsU= z5o>EytOL&}zWYL9NPOaqn7YFfZSRKXeCQk8`j=#;%JZM^@{RZwFHCo8Krq^=z!jS` z>(n=3XtQ*bkcOg*KndBlufJB~_9sh2Kj^D76l4R(LpOC%CqYtdV~j^MjS*us^+QqY z$m#ZcjvO9g_a`Bx;T|>Pn)I9|Q!{XR@SWentgw%PjA5e z!cSxUlIEXFU?#7c)SpVdI%`(_J||?5Ck*~0to_RG`{va zMObmTnT(!sZ`1Vnz+^07$KqX0lkdLAXIk+g@hOK0Wg4E@T z|5T4Wv)Eps=3&bF3D$CvOFiO<-yP-27RX~cR>LSUL3lBy4y)Cc&ayU>RcGbQWgKYF zqA*p6OiV<(b@}Z~(<4isO)&;gUfo|RdrR~x9s?!OSPkClj z^>KzCIO)*i2u>df99hEX{6Lg?3qj;x(**W<7hF?>oH+6RY%F?5drNRm_oBY?D0WOC z8_C1p1$0!sj?tOGp2zv{VDD(tVr1J>{#jwpA5P5~s>SIu_RKomDORwQek^(-dR4!A{d3Z^~=m zzhmVkNt-f$%H7`2YAfw_NM{%4K zFeM4rze@++66 z8Uc}!UciL7!(a2j+cNa1MyGICh{WUZ5Kg$7{3Q& zO5DTuma5ozd;QHJ6Qd}hn`f3&_O$}wNBk3kB(ZkTEqVAW@kEut$XoF++fwp6CTEhG zIN%Ncs^b%O8~fd*{H7OV>a_m?#ANQ<{S4&N=#Fm%a+R z@twVM1>fmQY7&}e&WWZdpoz%}G0_V+bY4*y?*~=-gc!8rE{I-9+zQ1X6IrG)QlKIY zsan5A5M9^H)jdNqW^s5AQ_5gAR*+>AeEgxK|80p~oq3X5eWn_al8p3c5$6p_VCYGV 
z334?-myCLQF2BK#JI2Jz!Y=V%SG`8+6I$ZA~KG2|uw=Wx<(MEdgMFRgo}L0oocS!o{?;Hy2@b2?|z?Z&`;Sdsk8 z7p}%g68JmRTXz z@r(3h05nqIdKjC0d@~M?IUZ`rvZTgR@^z-9n1Alfs@Yooxv?qjBr=Nv%pOFFF%?A` zDj&L*n|AkmZt_T`tU+X|V>Hyo^QVPT`L0F5obfZTxh)5&rFgSRLfR0+KTDdd+H@Nx zj(bstGq`mA*u^VuX((bBb+x#-OjTh#S4K`g_&`R&4W~nZ_)ih@=|~Y(OZ;J#Sw|@a zQ#4@GQJ}b@9rjxZQ!@qWD)7j>A8$8YzZHGu>F7>G>7Tqw>r}r;6D-I&`xJ}hr_yvz z4_$Rhg?=&w3@~b$J0{gbWJ2w{o+T)hwQK!s_3~NUAg{udAc?ED`}Q%6;E2gKLSUO8 zr#?ma3++v^o3l@(u_zQdX*EG{B!BgF+G{kkMSFl}R9IGqOB0t)T%wq*S!WFz9*Cz| zI6cy+E)yfhq9n+EG!!kZ`z#a<%h0$`QHa9dYDJwPl$1QeLIy36%@}yKq_^?@o`nA+ zKn9A495wxn_~mO2Wry?4((<&M9H>8I*4C2(hhGv>SPt zdeiOFRQ6P))5weB1uo^s2tG!hz5Pk*_pXw`jr5#j`No3wng=k51-Mjkm#WURD6jz) zVp?-1M&l@27D#U!E?xi~ufW5*#Gm1$i8C7K$p&&7Wx0&O!=Tr$a+lgMcC#|%#(i+s z^XmFyufVCj7GT^kBwjW%V@shp%wm8s z1)cnn@w+p&bJ(Diq6Pt??Fe=e&6%a#<|zBP2^8rabr7*`odWlk7EdV68t%v|{RRg5 zFD8c&)l3Tfi~NDd#yi7I4(~aWIR)o6a=w7(={Ws*qiDM+mX6d(?_y16wYqT-0V(q= z7YZ9ncyw^hAI>Afv5!d9>xbf6h>7C3Ife9nKqXPa;F<{6WPz_h?P%{TP~F{QSC-C&4t1ihVY!qgzHM=A13582izQ=LnV;2~#g%!uUhh@c!Qf zPmbi2ZI*QU0KWl%J-M$hoJMTqGY}W{ZRpj%+5?rwqr9>ZcXfCTV=*9~_+aNs(?dZa zw{Skxi6`06TY;x&Qk&(P?Ai)?{WgB-Mn^Zk)+vU%7BnIvx9c$yF`CLD#3IBvFQT6YPFh(!BU+DAm-mPEckvaD7YFO*5a}<=@OBTsDa>uHw zJ|+va4$8%3t}aR{k;8{gT`JR{ID58I{2CKObUD8&20K(|5ev?eoI82jn$+v(6FREB zm*=W7J|GrZTml-}3n{I4&oevnLRg0aQ$GXl33=+9^nP1~4=CZ!>ZtH>Vv$tXpliFP z)PPF0l?Vp%zONE!8OrD9amWg(62uoht1G&G-1=Mm0+V6amW}@~NO|e~Xr%S5|p7JhqH+b$GDY4!jYFE&2#5vWh^Tr?K-1;KMRc@`fk%F#-VrdN(MFu6Y}cn> zE0iT00NsOTUp$k6Gli*Zik%>6YVVU-;NSs!oXu~XI-M!*)fbHGW?=G$*BO&nuT_Ho{*TxOY$y!WxjNvyRsUX(pIEDEow#q%`{;u?%?KM zJz3MZV3%~fLT*WZL#KybiJqb&eHP>zlB)p9Yip>p7TC5+s`h>TU3S~))@~{9=z2Ws zPn21o33e1Kwqx&iVJwpN$N;wv85yD*Gli{8r_U+{{rhxbwxZ}a6yyXz6AC-L8~zmD zlkbuUxVyfj0~1Sg0PTZ}mOL_ra^DzC+yb4_7{~&1dg6o1*!8 z1+6m2D!pNq!o0#oSGk11IS$`CwFTHR&70+kCg};dlLw!j)ReLHx&*^fMV>KjZWIN` z+)-YkzL~@`;F~l@eR_`E_D?6=nTxSU1n10aD>0?)8S`uDBRc-r6op+pip2)V7i;~w z$^LoAnQPus4oAI=imvaSCmxgY)%zhiFCq7zQw!$IX6AY~?%q*96VdkT!C5Js8@28c 
z25UPt_2F3z59`ccY3$Tw87bs&AhMHx-C}nb#j1A5TWez=>>;y*Qbu2~ePdoCNx-22 zJhx|>9qWT^D*;QQ-71CGRtVmXJ0s!Iwks?!R5mn9-=$My$FNz^^e7Z`guWwh}^%Izi_BudMR=y zSN4Xr`F#xTI*S9RV@}#FX?l_UT#`6}+ec4Jn19TncS6{BV5&&>%S5+r{Wy0H)W@(& zy4-YMV09Q!lR%x!V%{ZIE)#*$EQa73X&eR<0bWm{$4b5t1V0)?eu%Tt9bB%c*^}#j zped6$HB@=q{(&MRA&e#ecNXQ>C}^Tb5T6y`0iSJvWlG{{p$j?JkdA4g;{2pM=7#t@ z9XZ$Ug5a1#tys0f_0h2+KQ2-ctMLF}8m{I%_o2j3n^kpr%KKdng?4f;XyFJ3?A8e{ z3?Xa{fw?)+s4Erx}n(il^!TMD#g}i^DvNnpY(-QLSf*APhd_F*}eFb z+qR#!7*)nT%M{~cPcD1%5XO(D1n*|*;?e|e9HoZU!b#DzFbe)Cb%*{yg)sSLsgoF% z&y7RRtMU0c>W3b)_(lDgRn*<99owX%N5}aw9?hgxt3csM47d3^Mz#s4TOa?nUA0%NkyuWxJ^T*;9|%#DJuMFMt4?xT;_i*$AAZ<{!Q&zg`#l#X z@P^EC4C_^;+=)M&D`cq}N+W*MgcPn7hFHk5N6qy=cYx$xM4VD9Js8?+e$j{|)t++! z6r1aRd8%7P$ZUyk?igjAb8${xbgd>K#Xw=s@5iD+a%%8%l}&+H0)ih3lETlahB}p> z``+hdMb3q+Kj0+4!VCn1lvlGXgZyPN3j@$42Lok!Gx-b#YgJ8XYA)7|Rbjv*x8guXP zHK*{62F6GWT0U45dP|zgd7D)xC^e zl^d_O8OxUbYDqsJ!9&i#qyS;qVG5WQ8WTcbFW4BXvtw3!tZHA*5eb?|szP_thN|&W zEYJ1>^vz3UB(zd$nIpi#)oEW148$VBSQf z*{>xw9BUo(-k)jH>S^?O0nbMT8TUwz_%eh}Li!6=igvA}K0AAnOy2^FRDzx!TL*3+ zEtW*~yvcyJxM-vuX4}~8c$<~|m`s)bR2brFwUMU<3EmJ4I*AurN zP1ofrTHmJ~Mf*MSQ>rCxNf>?VWPGR1O^ijoNtfng^@vYrzg0zN26I?bXe{fvCgq-# z!|f~av5yFQp;8qdvdqeNWLJJp)I8@NDD5f#2!a&Kgp!Z@3Q+N_-SG(LmS@FmiS?fz9wln_0U6mL^Mb1+7A_nHpVhqA&oe0vapU8!UW7+5~6HpIYbAvo_ig+}F% z4$GUS+Q4C8QUFQDX*Eu$C3VE{ur|2)BCs*YKk~XO)3sV}QQu1?SUR1i3nU~V>eV5d zdH6}D`=B*vW<$9o=GUF^ry@kUZWNw)#uSeP!#jDQRtDZNxDh12{?N7 zPH1OGOp&8#)qg zAGYp73X_i`<4EDmJj!eBOKeSRjhrzBgb&J~5n=qc3+A^S=6nunP*19&csktWLvaA> z_{EeJP&P+25||aTnK3=`@kGccX38oc|3Yu3F1e*4=Y1lu{=RqDAX4Q<4l9c1$JY`| zu2XAi5_Od{nSpaxT9M=s{=rhYdM_6G9^tL_Y1&Js-+;~${&%sALeV<}*DkS#i zPL_OaO`;cs=CO7spfRQSGMy$x*)a1Ks~q`QH{bB52RuOk50MKg%Y;y7o&m>L;$N9~ zxwV23WwCh&6PXfp%>MCiYiJ55hh0Jl-TSPYbWgENdvGW-E~pYTjQsWg0A+qU-U5W3 zs`i_H&TBpinh#GQv58^o$?5;x&}#{!^>7G6P`iDUa{)&!Gx;k;;!;R#6%?&5uRFm7 zrY|CbtR#{H;d?LmwWt4}oLV#;IAHo!?;}{=J8%q{p0omp>^ou>n?`M*==i%b08v_^ zJN7-nL>Z*K3v2^I^Ns!`IcVo3!!8-;qHxE;(Oo-!cDAeGdKH2?k_W1xS;en!hYum$+mfdzp!KxGq1L|sD4o2P|qlU+hFeWcz3GYU*e 
zNJLf1YpLDNRRaOMmyK0-bJ#phN zJ%xiMz9}~iAl?`3gdt5Z`qWrtOTl~tXE%5qYQuL6k65(E>R;#Wf+qNRjeS{AYrSrMeE`-rRxoYb14)2S0k&AFif93(Xs8H6q7w`5?(kzw9eQ$BZt}>iXe)G zeY#pNc}t_Th}fQESx?jd(cnvDW5mBGR*e6pseHx`29}K3H-g4<$5e#VW)L13aCkFxYg27X#s(O-p!dBBN8f%%2$eogw-eWscwb zHsE-!|FP1%jWSFLRwe{yiIztRMesv84BSAxkN^Hp<}{&Up`ZFRbD^yqY`lS8!|%Mz z(9vYJ^S=EPOM3~4;V{Rt)BGW3-oK10fy#MRPT^bbM_zL3JLkIb<_;T5Du`r6^2-$k zE46J=H>if66-1RjHGb4REJhD8M3dx2vasCwb>rmySBig+Zi?hA5l0Tg8@6|%MTIDe z7ggpM`2)l3CUI_ITnl4|3{6~i0Y|ELzC*)ba{{vu9QsT^bkhxVhKH;XoCkI8$p8@W z2L#6}-S|hnkr}zp2uDr+yALJYLdZgi2~%BL^4Ys;5RPO)q#ysp938)FugCAZHV}3e zze+Z9R>FKCO`7JMES?W?=oJ%>BlL~Rywy|-{NDhC9oHE!ikGDQ+LjgWZ zr8wBWAqn~U|9X4LU}qUC)EB=V1jyFm`9^i$M`>>}{KLJ;%HAP4Vf*Nb{imYjKYTCd znYfl<0p9o7a4EWq0lwYCy9o2i+}sh2K=`_+{%CW$ZjEbl6Th-q4B5d zHI%i{;d4^Cp?AUsKYkK2gk}e3L74sYU(`R`SEqX+_j#(3Vezt{P=!q7EBlGSSMyl= zX675)yWzO*FD=TD2x1`hX#&ds>m%Y|y~_OT;5n)gOexO1DmsJk3jkLHJ?qsEVwc}*MWkRW3CAL!7KSX2(Yc3OZlMy_8fRuwgR7KZqkxs)+5dfzx;QEoKX&RNQ(0$!F z6xC~tp=8w#x1-T98R;E1&<;{6XqjjAq7d2K>So8T9e*Iqy<{*p3V|Gfq-#3<4>sNM z2}cs(ztde_fCLlt=5kM3SAL=gYz{d{FbXWKIyszKb!z_I!nb?z*z*BCU+-e4At&3(16cGn@pUNN2Nh(_Hn1$iZCJ;$BAY4^!E;PA1%hXR!8P z2em|w^z}(dyH9vjt3dqA#fO|Ps?}+mm3X`Yce*v?dzU-6QmeXZrcE=FNRqS4$QvSx zv9Vjx+y#9LtzuCbimt;ZJB9Y4b{AdqgN)iFP+pRzBClHZQapSGJB8SPXSfu(EIIg& z{G5;9rX^?44YZCf!j;W;vMKYwUpU&J_PYG7c~ajbv0IpY6LtiLaY!kA$e}aL)6aGH zEC_adyF5e2kQzP5GNF>r6>{<&>F~h6mXjepyJGCFE}=ezIe>jv9?RkG{O~R0kSv>W zV;*AB`;Cx_{;t=D>qYvRD9vrV(9l{mY%p^9tD0JG5sXanSJ{rCZSn2C{)WTw#2^p7 zNngo?e%+7kSeYsE%Pan}kFlnFU5BB<#RVut5l3yjvCON`KjSSEX3Z39H^@H!!LERZ z7~Cf(FPa5_auFE!0+h7~E_x}ytwoB3DT!MnY(mpR(8?6KVi9W)GX6WsiCjD%X+yuv zJnjkj%W#EIZx_bdV@IBN;=y0_3;R2QE!j?l##Legbh#R@@KL*@A;c-|LC_)v zZRbh*`g8S`8>lpv#(xR#$Q>Ksj_lQCJqUviL1d{*+=`Ssa;Rpgg8Pm|!;%#3-lH-Nrg9}lQ`~}uK8$SkREZ*gB~iI&IGJ8vn@RHcg%@%RBSFzKSOwb$wRT2 zc?8ZPxVb&$q1KlWiTR}~npMb0W4*?XX1Q#B?Ad{p6$^^welp1TtxE(;H%s{`p4=c8 zUR)D}_&R7%BwTMBQo7a>?lWVX@m@;}PO&K!YRkhZ3=eTZ*n4D#)Kls`UrD5&x)=5k z(?DS`7kqAWI=yB?Way{~qlYo4xsITa%3iC`Fv 
zoyKI1Gw02I0G3mb;j4GJEfx+mK_R&!?)8d@mqWlGL5+b3G1d6HNBj_4e>I2gg%c=; z#7+jbIa>b*<$9>0%&-ehUrgJ-Kp0r2x`^l>9c|@BMp;wjscOnbtOY|g5fNo7o;!i5 zE{j0b!VR;Ffc}c*(j`?Jwin&U4 z#UZ+@)%enIH2`3jmoHu(8z$G*%M|6LDDt78=vA@%WgjM zT2H&9-st$dw}#~~UjNA9?0j&obbf2WKf8RwJ+EN)DSan zTHpyN$OzGP@ZP1GqlX-kYaHS@M1!mu7ed#in z$yAnXU25>R!x|m_#$-9ZIinbVuLy4R`PxKQ({!r zMWQ&gf*_joj z1%DqbZ_0JrFz}lkpD#S`WX;?cj2{b8XV23MpTEWy>uXdX)mF$Hzcvz|lbr>u!wY_x z@ve=HE1wyEZHq&^fy7zj4cV1BH$S#oNrlm;JF?i%kaQ41M?FCC#Fc!|N|JpPWlY{c z`)KP{|HO$Tkqs~Fgk8po^^N(#oD`{^`iSXb4cBjYAk{AZOFI#L?bgdaCLx8)Cm@qK z0pI8k`$Uk9tC7n<)5K|eI6>PFXK zzJ(gCNuYy2G@&ZUjJ#-^i|&b8#z{KRPM00Iy;J$_h+K+UW0();nRI>bqtnfc4jQw*-RPyGTY#;Yu%7J}6OP(RkS4{*=vNtEq{Gg@0 za)We1rO@*U*kVe+!`SR-f%+~i=3H0#v}T1+ZBSer*;^F8>Y*YW71fj{8b*MvaUXG~ z;XYB5-55;oP|}d#z`~`ytz%iR9_YSwNDX=O`dEPbSA4W(L&||4-LwKk{1OpcHf$-$ zK!wTnI?Gnj9{m(CIEPg8LcCC}sE7L&US_!_p@7VAI1{9JBcBF?M7x8T<|cScpPl|3 zFKMf%G_fJgR1aV96w!Eq;ZUfw7>KPN5)#|}QsFmCwP4+qvbei|-v}~$WMqX1uGRVQ z%pUVR=kJRSaWn&v8O(x0K4xdfbE^JHcCYTWws+pa!r>M=YoAc1d?QAPh#OyQq;e*L zE4*u;a3O=pI>&kHsh(zrSllzU9_5m>yw2AKCTRXaFcL!Nk#(qBos_^s-#l)_+I!6Q z$p@jum<<=cwU4 zjwqToE3O>IG)vd%j^UWq8veapYy((UMs1okZ~YaIjuV4vrLDDDE4ZjRAaZ*KJ?1}o zgi+@NCXo+^8QSKZ!@Tu%9xpBzpCBpS1ZQ537QwQ8z=V|Z2c43qKfuQuYPe$+E8Bvi zYgHEG((&~+?y-_lm7Z{HK5|~KXx4?okKfY(`9%`x_I*nrMkF?!GsEN#g=|Edab}Cd z1tA#(vbZat+&K7fVIA~H&U)~vB$!{yIk_QBiE5HRPn`HYdz4bg>+Ov1DOScL<0I6L zSElOENknMxar6%#@u$4pkwQOxeC*cBr0zp3sO*Tab)7Z6hAr9|_rHiU|-uIY#9rGuI6mw$-O(dou zjshRrJlSNoVYK=9N%K(8JO~kgt?!Ypoa+@QQkYPl3%~)-;p7xD#;phglxt;z8WoWg zX?WND%KI=X5I9b3hnPs=;m(GbxPHZa=iwT`vp|5b=^W5|s)IM9o<=I=%EwOKIUGpYlE4tp+ zvf0Gd2mT$54{J2fN>JGc(C^`V^00Dh{d%|G5pVWa>{&>7#BO2NRP{GcILBd~ zRQhzmkA$b-dBx606b|R0Y&jr5(H|$p{87w34YC=QaN(afalZ$_kamGGq}>nKx+MJf z1hV!Vp`_uzL%aI_pwO&D2*-RLlv;6UEY`RA@f|W)IBZfxGI{lPQ^R^}Ai5jma?HR( z+-92@7O?0D>X(>6^b~sWi$TO?T+-Kz_fBXesuOqHPcM*f618<}NHjMR^?Ga%Me_tu zQ#5>4WsPC)95A+d(IltpX)~H1+UeYD5eq+zNWc!n7XuE{N!WY7+k$Rsyy+9$g_9t=<*KI}}oag1=Ub|txUC`4j|mSaSi5#{t3IovUaDc#S8EF`OxM5YPmH9Fug! 
zW&U~EUm@j|O-$u1^W#qdnKa6vYy{;>!RYD#%(PkN1DK)Pt#Guz=1*?^r{Il-Jt3#O zk!3v=a8vf}r(5s1XSfkEYuk1o`_PD)m$-HqwjtG1L(v*^GtRU$3%M$xee7vwNoUwC zKBKhsWxT(@=ct;uZ$oo=%rdBnTNuA3H#6`Nrxa-W+7oBylye zu+gdL4*w;moMAYjH@-vWiHX%#f_bg2FY&XoTG!|P^(X^Oj;^*ooaG08vfNpVY8D=ZS;}_4;GxbI%qfOo8mzq+JKSFYHAsXGC0{)5MFV;?65R z2l;;`&NeT*LFb1Yk;27u7Ii@Jl_zzM6Y6G8trsKN?FY7*ZQuRE;%D=XfZTu>9~i=p zE)hil9#dBZCNaS~ig>_L$-8LijlEuorLeV$_mtl;IKTTD96$R~+Ra7ahbq#KX2kc+ zal6H?I|4t5fMDTov?e^!k(xvXd`+QE8)~XNfR=+5XsYl~s$ga73X}(kwowH|7yS@r zdLVUKYKsu2Q32kdN%K1#hEs!ly(}{>@yVf9l?zKN3z7B4 zPZhy~6Fp*7`!Y1!jqLM=(4d{uwDzmBsue!x71A(YeQOQ1cuh%D+8MLg>IF%DhP%m4 z5e(w*?mZu3mXp5QfDyU;)!(EUOyAAg92$jlzP$-*y8rv0Luy=VNcR zbq;!u9O9WMi_S}bD*lmIv2%Ns(gZ-|MNfvo96Fy%&6HLjnhd5Aqgo;P3*9zHYn`rx zm}tX`Y(k@xY_dn(tD0!vo9&7u)d%*1ZQo%6(BvFcL#}naY zA`Z@Onmeo(-rOFJaKZ7L%9 z0rQyBoAqu?2eY0$IAMvOIrlEa&U6Jm5Xk@`^F78kKKHEeDn0S4gDl-|&_)w%Yj) zetOgBRk12T3}dQ`DWT&6M0)KXOw?>5uM`a~H+CTynVMw%i*Y&qxXli{+<@EGEI%mU zHB9sFJG{V8KIj%w3|e@v_n2FXvAABY^80NHzR*=dHJ=@L(?gz``bUWd;$B2E6^jpM zikJD>t|e0w`?Edf5~+|Yq`lHaOHl=`+sK5iWPhem2GO&1O(cK5_x}yaKVChaBpSBw zN~h7jbeqvULyMDm;D0Po+$7g%!ASp5bwqu{KYV7mKGd*Ok7+0r+NZbbOFXLlIEFlS zIhZPJPe@zvlMaX`#i$jnzmIbDv=xv{XC7oNnNK^D{&JULj)d1oqN+a4e z%Va@Q()}A0(`!akx4LylTB0Za+wtT+RV8I?xEj-y>&gTZo(mytX*XWYdaQpFPCIjv zSf=ZKG?hX7GermgJnTJ+SK59keqkQC+TcDOAB@DLF zJqj!7EHj6?38_*gOUb5*KtV(6WSSy?ye4$u?{eFlKYx906^cOzq_K7ma>J0IRa5eZ*BT zh{3~mvsL*yQdmE;150F^@Nl*|#d_Pbn~y4_ z+%dPz8L5TMC}kj4*o#8v`UNJtUA869GIs8^z{}n7j3@IZN$x1VK#)-6Sm&yeGXfI0 zQmW~K_?jGX@AL(l2PFI<&g7_vO~m&3iZ*9x^frWCNjwiE0>5-ci#a0kR}^SyGT-r< zL87g46#dLUvw2_bqry&@_3EE|RZpY)Kn4Xaw z(RM&h743=IMStFz8Ir@K-%|5pf&@Y@=H96u_0T#b0+SQ@AN!yNA^M=M>eoy}YgFg? 
z0A-n_A_nW{RFTvQ5~0S(Xu6uNECepU*Ah}Cb&$9tLbP#@Y+b^2!!%%@dS+Ws z22ZS8woYWJOe;#bq=sES^1RAbA+(1LU*&nTS(XT7i|YY zc;dgzp?#HvF)@5WT|bUy0mme#=BDcTX-~g!gLZ4t&)GySinOm*qgqzNNZKdZUEt(t zZK5yDp5dO?afcX)Pn#IBt<)uViQN?E9b9_2^1@I_xA)jLm=3Mp1%ZhA#twf%qInms zo(uxk9rr0=IN&=CsrpMZ;fr$`&>VdWAbC2gr`sF~rEBIT?j0r<=wBi|@gdv%4bEE) zomWiOL!Ho*DTuzkTh>A@Zjwn3Qa6D_Vnz54vctNgJTxnuJKgizqj|SW*h(dja3OyX z8bLCapW_t!dG2{py^|uiM7K21O6F2-Y4!;)Xwz=wJnw6m_ynbD-6zbq0SdP}=|R-m zx-?KSp3yY8f+Xfc;DEJ&jb?l?%9X2MXH!JL8uxke{{FPDA~AvP5;XGvcLG`EX!GB* zPAMCU85`$!a{{hqxgj;daqsnimxwo+r_3}Nzj)PO&e-_HjMSKUgsy5$73)6g1xdJZ zDIuHmV`knzEQmS2WHF&TH> z>*}024wa@c8$7CA{A~O1-x6k^cUb&642wAaw_+IVa9|FQ@x30bFhq2ZF%E z2>mR}88OUFwLn3%Z7Rm|ODms$+r=G9-d@0dfeM}O5~T`wG!#q8u2>Y}lCwa%>hH@x zE~=B`^j8S ziZcQ0qgNn%Ea1-@v4)*}sIk@szqJAV;t7l2^Ji`2NcDEEpClY{%#uNZmDD?hCn0v* ze*F#}z|-8@A*gNRjS0y3ej}GlwE+UrUlXt#d!@PG4G#kQ50`!WBg09CFL9E&xd7Uj z`7*V1>BIhhsAsJjIcibDH1Rcfm=t62W}r>{w&2-9nNco1(JQbP4}8?2=+@WI zia653Hhf}A=ShgeFX1NeS7(hJ1fTN)HH4mxBRRRBKcN&{D?buHdnVA{R&_SyITq^O z$@pAN!{c2?eBJxD5W%ts_IjSb!TxlX_mZ}WgypMMx3v!P9+HsC?Fq_Blgk+kK`MlQ z?%8Q`5K~|CldFkdr0ixj-{*io_xH@x+%*VCdTf;QeD$k9?*0RNMOOHhXt5d;7PzV2 zfkZ%)9WK#x$jKWi8IIq`rFtSs;Qj&OAdS8#{FSRv;#nr)pj30;pfR*?x5GquLTT>Z zG2uBO`9WnO6HB~#6(rBFedUoEJ_AenrtN_&u%e*2#)|C|sD>4KL}zSOAR-f8ZC zI%{0&*>xmiBy3R_nN9D3`HP8R*NA4)qlu3E^|3LK*Z!7uSNX`mYxNIf2@jS<%xz^- z$5w414qNE&X}h73+)5*%R^1TwE~-$mYKyng`T>wW&f1@|77}i!__@kT-1eUg-9DS6 zLSv}e0&Wr~Yr}Gt>6t~6U&^YYoAj8gESWW%{S)QLyoH80gvDsDa`O{#RHPno4M42_QaKm=S+=?w`;@ZnEE zY%qveW^96$rKOXZ(|Hs;_}z%J>WNqzo{?1F|7*w~eLHWYMe*x^DQpLhMs7zLQmrWx z5-*okDCe05r&Y^DG@{Y#W0(B(J}z;XBipE{_%~rn&&+DtK~Gcp5wb{)ITkkho9WJY zfel=5v?;ehl2yruwEA8{H^xyT#h>X&2VGOs618J-^cSAx2v%zwhE$h|>VE|r{V#x$ zsJhX%v3v|!z3~7Hx25=)*+uZY1L)$k^P?4Zwt$)lDn$Kl^zn-eKEs&&>G+JoM@}9SM){-rB1_U+#Rf^v zFMGIhtKmU^y&{LnHwbg=@rc+5GkiY1l<`i;O-HlUIHR57tx;)dmgR!qtU@hy>F{UY zxs8o8=0(T$L*kL_=)^a;G!pD~dTxJdn?_-`K9|qfe=N^d+X=|xcyw+U9q4|CV+$h` z*YIfVh2|Q}{fmcDXS+r4#Y*easj&KyUZ~?sMEBLSqCHdy8(U0RQo|{0Ym%^C6tQ;i 
z{qFu0FiNjRpdUeXAA+^6U4)W&NG?*DrtAib&{#;*h{r&njT1Y3_9E-wL9_%;65J=% z4F2UexCHt#;`wJq7C{YDO>N=cT@U}|bI%rQ8zfS?N+OMt;2vA#Bq%q67!#J%Mee4n zvjUj`bC_yaL5!ZFCZ=NVChBQ*v0R!+5;O%k+E2bZ&}^5h;qNH#LDr3dIhTlPzBP!% zc<~4AybUXU<`%PBDw>Vw-p+%U11bJ*oChxI>Va1XLzlR>N!y{Mi-7t!h5=Q3t%VH| z+)h9H;R`J5V*hb^1iAPkyPoS|gCW1e7+UmhGRF$TcDBG9gE@LLhM=%c2~JS48R^T9 zrjIawrqNF{iTs>Rr$#w~u8X)9}zdWWtQnT zlRJlTU{1-^<>$z(z3FMgI8Xo=#T@ykDB>h5gBD*HE1CG!?%XNE8Aes%bl17n$PkHX z8w5b8lBJ{wSlotn$qQnKM;5==W_Lz<V8V<7M8L7JCbV;>(JwvzQ zU9~jdfQ_0xoP5m)x)j^pc64W=!+y07A~h8khj}iVpxm}!paVq31PIPGQVo>U$=F0X zJu_A#^$NAMKa@A(%T!rR2zY7%Uvw z9`GW_3pcgY^7rw)YVdXliW&q{>xZZE_;YqC?zU63o=xtDzYP45%$NWuDo2(VUe1hK zkA3O!DHs6p|Goe@jcTBfB2Ij2c)UvS$0CeW94j6CHiszcPkL()n|KjyqnZu57`(Xw zdYt%e#r~V^=I#*&f#S_3eECpHUc&f$|5Tn??k=~OlFK*Zdjoxa_pl5 z5DpaZ2#*bL3AOcvYjpX|9gq&;2!zu3M5?1P)LTbKSx!je)4rL*;8Oba(oXn;rCTF7 z5D&%6A_Cm3eg{=f`hzoCu+~>asiNc8=o&q*IkvEzDgA$H&U?1xb5$v za4Av3W|4Wnm|K1(M3=a1JR)sJQOB|NW(;}G5sjF4(-?AXT5MX%{}#illD|o>E2~5{ zCOeMmc}=X2OwP63JP1(`(k2sn{;Nb2Y9Z)$6aA{PL7IVJIg;d+8zKCCsgaBJBnfDy zkRCqxBkHy#ap zSS=yN)E3LKvhCTu$jyNBtxQS%SDkWZBSN_Zr*0=qh&HMLxMSRn{S*N8sE}k)5A|R5 zIO!K@Q?R{35*|~WeQH5Ti6f{Do0=T;Ea6{(h>i8BHXYSSOgCiX3#%YMu-te#BH%nF zN_~1L(IKBv`exX?BCDO01mfh5Wb&N2`nx4;2Xn1`Sb#Uk*zbv*UXRn4aBps@rkq8H z`|fQ#mMj>yuKKhvkBu|nekvPo_!gKB_zvii0c&MhYJxCtgaOr(e2XBxD zO$*_<_SlxsN?CcN<;#Jcq;JzjS#=-ng$GNiU~(s<@*@QsRi^ysKbI7*hHjtXrk4AR3Lf67vH6WLu~G1jQ-4;{fvPNb#tGEW(ALR?<3Bi&Ue^A|ZH%9X2|pCKP8 zJZp#wsBgGH2I=3KO_Rp+1(M!S5SwVJ5vRpuhh+#?3ybf{^7oM}3f%_=Bfv(gr#SIf z(H0>tCzY~^@IqT*sA~_Rw3|v-ab@#cWFoMC3U-8<^Hp56n?o9wQv|5Le?|*`{9qcf zvyY=Tc#?rc_BR#dM|5RNJLxWk%?GKdgVbT|N!T#G#&(Kz;P_-j1W8bsqUp*83-jfb zffNkZk+Fl|k}rGzNToS%E@M3wqQX)M`A8%Yy!IqiIXzCUJDW6#hv{5q*pIf!M4Vm| z(bPz&g)WV1kVq|t^S+k)KI9?-7##{Ov--LtzxF2X5Fltsd}ZJPF;XR`Scq1cz$Y%3 zKJCf!I<{xsv_|UrXY)UBCBCTjFSshd)E>6TSOqbxFjzT_fV7t?nXJ3AQ|624)L^Zl0Bgi!P+& z0(p=dttr%Mf}p_#+odisMF$>s>e!lLtE5UL!%Mf^AQ*m=HhOs~cW!xz&`t&$WF8$@ zFG4b^B=Rjn_%bc`aPtxxB9oG4jC?2;#OUBj=;c_VR81FE1Fd($#+@0An`ypkFskvv 
z&@)`}FGFWD&Zvw1Dq$rdhL$FQJ(An+)N(<6-AxOIx1DaTZx6w-`xO!)Z0DNPujq;! z{DiL>k7s>SX*AtpJoBwVGEQH{81(^DaH*QJy3^ ze5S3AohzQt5Y91o-i+59+_7mJAgZ(cPc|a6Z(5?t_4InfPJyg7YRR^?Z@-V_C2o6T z#~qw(ROhw+>OCR>eFV$PB&4o|P5g28ex8cFW=Zs|{52Ss|L%egoEwm?{ z2C#&Rhc+yn&>y)Sit9_7I|P%Y(HDE`998jSI@l|vE!T9Lk&Z_sRaDRmYTZT76h?NY zMv$NrM#asX6q(M^K+LpttD)$wOia9`$&PT?u z2)fyhnuKj|^8JjYc62Bg4`0lPP4AWa>gYzZWiUOo$ZRP}%&8~k~Nij*)Xd5u85 zh*jRSiL9Ju8er^49Ym4Dn5_2~P7Y`59Zcdry2NiPY|T?43qf1NXbeIFDh4ibCyD(TMeb>$t1xF|E7z+h-FqW4g}0Lm6Z@ye?pe+(uJzhi=)p~pRGl(lj|02h=q&^lhanoholV1p zI%s-~fwo{q;?od>mX61}p=cTtTHb)??u`%DFtTlfiA=e*Lf0|NEPwk0G1QpPs;ZBz zKQSI>SxiaYl((~bcx3TLk4vj^PJblG1B_Aa;cB-#$W#YrN*Z4_?^LR3$Mpax7?TPl zTFyjnKI1bHk%lu6iWv({pB2B8`Q_-zaL0N?2axf*aQg``uN-Sc%6vSF38BKhhdN#$ zX?aYbjC@e{vTTo?_GI@7>wgUOM54wup;#%;QE)ALsu4y7J{1N;!$mjeRIR>`(|vXI z%{v$v?nbJ_^RW$P7B-3O4;;k`>O3PTFubi(gzTZ|UQu4$E-tv2UskoqK+^S*-?c5#x+4*5)dreq72C_oe?>opBV^l8f#XF0HQOhv5F)K|aw_EqQhkJDRw^a(&?YcHzKer| zwguGBaF3!ilp4U>Nu~t1Zx5#szpY!z<>o4xk39TqtsR5+yvL24C5ch?%acxghZRDO zRZd2~gi9z1Ld}_Onp_=&_HIKZswWV0G(z|LC4f^!1{8>lBM-oP*H(WB`zJ~1huM_2 zA{}@0R<9iR50h56?u+kiK903jR|tR%AlFhu@&;la-{-@q*uWJx0{qmeMj&CnFF%&u zCRfQErr|jk@$Oq=WEiD`tt$j#H0ELh7IztZpJMtCuN!2N2l%Fj#>#!qrq>p2Q~=Ow zWB^c>41W(wLk#nbmfsH(p~uwTLwOFp!AX(CrR&$lyvk8!Wv|=BraiFwET@o^#P)^4 ztZ-rxoS$0Vf_N9XSgAq6iLM{XVXt0R9YJCOL+75^lhyEV_T~5NJ}yD@v0C4~G9jq2 z_>od%0COHfONg!u3Ku9`ya7>zF|IyLy#%y&0>R$E(YB=HmM4|Y+W*aU^4{#w)NDK2 zMCr68g@@Hv!5rr3S0t1)M;?M(nt9B5c^kyOa_bNsEn*C~PdQe|wT|Ay8o9r0mHbS~ zSoIZ(d4J7MX=N0LN{rEc}hpC;BgZjjNVInxwARXQN z(z($T6;#neWg|A`DwbJSh(JUoB(4+kUuA{QulVz>!$j&2aWvGm`XwM$61Ll^2_<4f zEiK+Or1&J!QuErDgE%Kzpavz-cV48vymBmqWZ6eO0mK>00vIS{7M$q~fkfJdYs*PE}>h3TX?wG{$X6F@NX)ew`<@sS|x!mr8|f=g%y}bjyR+d}A1! 
zZUwXGVgBl~a3SDfOQQPsKlKY^5K|}n1CG~>=*T#?<8^0q`=PI1f=>|EdJHei2i<>S zu4QyORXc80b8EXevlR#`kD!D3a}157w&Hx0BGbD{058YS!plg-KIP3a{cb3*5b{q!H-Bb&=8Uujuqc zoL7G*EKHuur{MV&igR5Hn1*~gfVRj~$LVhsqTBb2KS?DSBrB(%493+iUHP&7C+1F* zRfumjAB#9`T^ketlQV}^C|IRh>YG_HSLUp>4V6m2!Ab?#jPkr$2G$_+8#iB~{f zzWn{>xq;FTd<;=-BCpuJzhxIc8ud=Kx02l%x1^7S8ekKFzEFAy zmr%P^+1ZIj0jzM;?ENi_zd40tIQv=$1OIDtRP5TRp}FP-An5K1XTW>})QjvhX}#-d zG>ygZ#%FrlIS$4HKH~3YdQ%Daeus;QevlG@BDnuf<;E0^RA0?mdEuf*JX*fJV|T&t zKmi9vI49r{u_gLc?ucyJos_8YdQC?Ho{YPgfG6wl0 zzF}Ln!u**#OlyOiwi|^BIsBFzy!kV8$!uHaRiTkEx2b5F-q&-cOf_kd&cnIFU$~mm zm3umQU%yXGo6z@!E|tK$7RAWp3?#BpB&&%G{9y0A6oRT{`Q-uC+kDC&m>2|grNx=Z zQ!jA0t0CQk9-RPPpWu{b`O{qQ?Ws<12})M5pn1xMJ^vo^O{&22-Md*$tltSMN5e!?U9}9gRr0Il z$h^hjFY=A#)fYacd&8~h$}##MK?pWWuu3zRGehO5EmpLPRb#|s-`~|=UhXeMk?jmr zlWZ=>tiqpN8|d1zGMpNWAERz%%)Ty>DQ|{P@~RL5*Etl55&mIoG4y#K_k0_m24ON5|j6P z6Royjo3fKL{E=3oLd{0wy zKAoyRmG1M(1Nd19$9Wo9Jix0iHt@iCF;>m9IWbl(0d?GtfWKxZ)l@+lI+>;7{d1@4 z7<~dBH%;TW^nzzJ>qhfc7@Vj@nfW=z{^uE$m5em>3{H{*C;=LM!*TaruM86yE3B^V zg?jM{GhlVyPfq|(BbEKi2&yUG#!mcm2ruHQ_T%{7DL4hs4s27-gCe5izx*uASq6Jt7JuA@IlKUUqMx&^n*B^kyaRjffB&j`h+T1v8@@e};*qr);tYR=MlM8`0^0baeSBZro zP5w}bBMxo=ms&wm{wr>x12@nfPIvwH9FejiqhkCc6DZU{^`Yt`UGSeM+f&<9@73fn zeLt=|xTrw#pU~{d^ZH{uRu{0${6`+tXB2(=u#lI$Edk6nTZj33b?HRBj`ZD|1%tVa zJljLB^B@ub?c_s;mwep4aVn0-=;y{AWpX?VWxnsmV-TnOro>_J-NXSH8S|*CtMjh* zcUF>ASzI&pWt9r*K)BW_f_kL!7&~xzOQ@Y-oQ6b z^hb>8yGP;7Rih#j0wYfk4EP1B&%n6QC&Rn>RnvMKy~awU>>;;-_M3zk;i^8L%OZgG z7T_t;cYP}~;98%+|JpyNzFs_Wn+B)M0F8b%4NvXWjxuW2KYw{K6-G=Hd|%;o4RrC7 zF|>UIdo!uYI4Xi|-bBxWO4MK+GHCOaB|R7y2{NJ6FIr9NK47jiB7{xze1`kbK`qKk z#sKY_mrwNt_}Vq6P6Ys51A1T_kFxn@+3-gs@gEECNDNS|y~Icm^A^%jTr$C-2s}vY z)rN1}Si0n&Vb^U$pk`G}S;nR<;pC6P=W@Uk%jfUmh3xwk73l#zlOOmil>pXSmKh!3 zk34iJ=OmcNi)o~WZuUE25+wFz(b&K!?akpkQ_l39Sv1qI&*4XjU9VZHCZo0lfM71D zF_Sd`eTUi5unHXUSR+^TAB#hmNlW}Sb#Yg-f8`23ke&%$PIF*fv|E&&hp+9rfS73z zBFl%;K5eJSzAFEQ2#*W{#D=5*y8P_kNxKBdb6~E56R&+K?54Fn+4>m>Css`ka9=vH zSM^qAI`A#_`7O`sjAmY88Om~_P#>V|Bb*jKt<$5yVyh8}sK0HQ#ex@ooI${Kj~9#* 
z3hUbj1iB>g^9CD#mkqb!#ECBh3y`IugHtZ*M>Q8}L>9W-`^*dVR%bOMAa<>g|7h4n zhP?BH@oi3q&CQYWX!W0cfSD5qVDj>nSWT3=p(DZN9v5J}=~tUbgNV<|*KyZ83%Ch? zorp2r87|mor7XBpX57-4F|YONe1=kkg=@f%_EW%_uSThN^erWyis%u(4MXibu|FBhe4E_Y?JpN{|~dhtM#a7e8x$dLF6 zxH4Gz6OyRC-`ubH<+V2{M{yEY7g3LCXLli;Nfc_73U@G>J|4&lMPRSLe1OR_E%5I8 z6rdoGO4B?ZM-i<#@T!zUsGY2^zyW*V{9(tdt|e7!d`)9If+FdU8X5X&cytwBKf*-J z0&d%xD!kcA=M(>Uair<@6&)&L&1@vTwy}#gfif|(Q)DGDrK3Qa>Z5(L^mmzVj6Khp z^6T+Lq4oajuw53tK3rS*&Ov^8J3=|g_-vbJf$_q9P7}jMuePC6_rPZY)1Y_&#ehA! z{Y_cP>>6eQBL_I-j<%W7 zt=I?eKG12LV5P>_b~`>|(;E+WHs6dLzADV4dKDn(sHk!hn3+NShUAIB%jBWLY*gQ# zY$K|I!`4pq`)}yF@fONMZgOCOiU%<0w&%rGBcWxc0O%hic`WFI3iiwQOjJ$CLS*VF zlmYdhh5oDT%FQ?YP-|z$lQ_s_)XJpNmX5y^Ms~<9w#-)LkTx^FymyqnuDqUeV{h&i zY5$2xjk|08y4LMiU%lN`hn^l7%k2(Wf9C`i8k%~bN3-Czms;F+DT(#QS9^oW4Iqw9 zR|q>bPNzS@m@T?$+i-b-(+?#~>xVm2I3Sk%81OZ81(F^Y&s;2jp+Isz+)F|beR#D( z9w@#q#qEVVEy^a?g2b-_7DKm1-qi295-tR*aQ4xPn1Lb1GfsK+foH@ty|(&p$Z}>< zT-1O!A5coWT~&=oEy_$(i?t%8e=ov%(e8Hbqpy?85^4B7Bris4pbDCO%!cBr#8%z~34kvnRs*hBbU%Kdf2iLE ze<&q&hLS#MKAv>no-019W7J`I==zgAX37SmGAD7$AoKP|k)7v1z5@YWD-36(^s1Z+ z%W6rVbXkB6#g<0^t9XsW!Wg!}`yV(_svECqRCiVgnKk!{km!V5TDsFSbkox4%3UU9 z8M1XJdE60QTex2(dzBSYqaA(l?o4C3A>3FhWbEZRx^N;dx%)>d733&-SQ^>il@O63 zFW#@XoZY=rUp;o-f-fS^E#awFe79(Z4+_J8qV2#wWqrnP_xTvCsFZW>OK{Tbytc<} zTV%21&F$)YzO;`YE*e*0A^-Tr!FNFRRnCv9VmUg)lPg}^gHa@% zOO#S7d6OZ_>C0PjSX1iGcGa=NBvEfX4IMg;BwN4tXC!DKGcwenqBN?CqLgwOzQHi( zz_YeR3&SG@1`0Fpx-b(afT=#vXF@0}%OOVmhEv?r=iG!|KBu^%s3VvR$U(oX@>5a6 z)MUFs)6@t%zVp!ixm*}V7&$A9N&lA=&yNeCRf?$gZfMVr1o>m<#LSx&K zJ!O#a2Zk<>UyIt1G+-wfvD7d7celAtG+gd(Q|>z4(a- z@<^PlFZk6O94Y|O9e*&XMGl5cf#5-%VL-BZMj@BgaVPNXU4YQ2)X`j%zOfiCr>M-Hfw}bL;11xLF<)Ar zlJ|{{Z?flYX@Jg$``a@ zkK$ZmUE(>mnx9!v&55d3x7=o5plHaq-A&TW zV*Qt(sfti|irf@c3Zg?_RUlFFEZKh3vs{)x+jB}Q$~O6(?0pt4_CHnvs~gGo!A0Gx z3yOIs>myTJt)3IW`l-Qc;LLv!-OIDU>83kk51yT@fl4=T68_1RW23k}+fd~+;wTM;cdnO8 zel7!l4a4`lKTI@o5a0Ym4QaKC}h;eD{QS4?9eTD`TlE>?Ks{pU{U4y2<@XbP)_fL94+B{)VLF;G*>33$-_e zi0>H1eU;mI1!Akz*7yc~t-f1PJ~q5DzYw?2uFT5XL$d!zWj`7x;$Rq@m6&)eKA99! 
zvzq-D@dow75`!RLP1ZK5PpbQ8$_U@e;_~95>9;W}@^%8gsH%9~NNvk$R*F!5)=WT< z&!2jtvLrZvp$u1;a9@+|w0OhXNBV~#F_-@QJ0q?42LNeYF-aUaEFHSQhA&Jo-2!p3 z)cA2Qi9evGs zCe@wNP)ae@Oa~qep!!BW-7&>2!THdhZDL2^)maY7B~@WYFS_WX_{$UDFM|mlAHoJ^ zcA%v=wJuWQAaVKfEg|`2@2xx9V^4BK^BN7h_yiBf(@!oilly#s3|fO0x*d}B>Q+_62k)zb z`mzY@Ww);n<)1wd19r^|IF58C3(YB6%O98j(1$%Lqi1C+c>Ib=dyG!3bqB=l6^e+# z><8}PM9R?wJ@?#0O@gc9qq-x}g_RJ%3bl~mKd5tH8uDr*a~m+vaBuF!lu~LVs4jIS zRlQ$EEERNj(=ndDSZ=tQXcP-NVl>W`a3$Opj6ld|V1Vycr@2hCAv0=h2aH|Gq9kPU zQiHB9HMkeVWtc+vNTw*rt8YsXFUVuv>Nfc!p%nnpgkU}B?iMy|vZC9QZ6wym&I@5L zX9Wq@i7l+Wa00Csf0WeSuC2dM9eMtgh-3E0<#V!6n+C0p--WfTgnt6$Gu*Lq*Ug*n z?sSH`j(xDHM_>h=sUrnVpxFkEpyQ4Yn4;)tLlV=m_AHhJpdl!DUdoR~Ze1T^oYe*f z{$?hgw)*Y`|KrvxR!e!{r^R;${{i`-nbPo-3is$YC$ zFi|Y#i0#HrtV2loKizmSWTz%vSHl@r%;AcS&1j%$9p+ZHz zqffrf3r z4+|lX3ol_oT5an3)N0cn5Q$JykRkSBL7H8&CgU=`;o+#i9VihbZ5O`nR;?W z=BgZ<-hvVsrZz8j(s=ve~H1KgMa^lzO>tUu%%urT3RXx^O=5>PjP)t6124$p1WX(gMGHCD+z=reY`VK@U&Sv zANB|O4h?TM2bJ}wQ%neZ#sgyH@3S)Zpf}EnKzHPKRh3jrY|rfTEk@#G#tGtz+0!l*2Eg` zAg-KpS}&VuJ7&^yR$L-*IiRp?j?BmMA+n%WgopVSF(b(0!oX0~4q|kH z={doaRuVMr{B80iO1mFkv596OT*9#GQY%k9n>Yp@yP6ho64C%x-R5(blYYN3hy`h) zASc>DHc4%Fxwm@w(kYH1ZP4bZIf=t|BWKHPJ_k+t@yUDs{&^C@ev_=2k1)Qj^(Mwr z;5QE;5|QSv)EKL5JYx4ICZ<72aumoYaaAyOvApN2y!AZX5h<)Itmtns~y>7YC;Kor0W+h*?zVw{`g&vw<)QAv% zzws5F1#9~N?`po}LSGL4cyhHg_!@gf%GB3DQA6WkCr*IIRn7icQ`LZ9F?7$4<`ET(7KR%3JFwIx!VE&hG)%i#xI zFBz*=JM8=g)EEh64mrG6-Sn;^$-he@y_c(EHoRdIA1%2TDvk1T;kFy?t#+P!-jlJi zh2HTcVsG&mPpgPneW5mLTz7PFvv{N6#%i9B`w@ec*Q-`Deh?yjMZBaEIf!2FFi39^ z{f?p@r=c1F9vw?l3CS5%E6j!H2H1sJY{^Zb#A;zu*otmLWmH%^{txfccT zBHpOh9}Umz{{o}OIg<==1WI!tO0XbF8W{oN^ogMSx6rU;+zC$8jPWo3G7@S{IR8CTV$xUT0Rj-tF`9w27+pQO&j1kBKb!Xsn>l-}SbN zW&>nyDTxXwgU&y*o-p?F18}A(-04hDSn=$?nT?XSKk>rC*(LiqTpc2@hGllO$A;Dy zXm9t@^5R}cbyeRtqO{5Is{kvSOsYTZD7%bXd{#T>F@>EbQu@Fp+v}F6e<{dGgJ%o+ zu4LuaZI>P!)>e5#UzZ_s6Of!eyytUT^IK+;GAUFfV)0dib-v6OD5Ef9s=BL?Z;%+f z>>hqMgg)KJXyp&{*kp5yZ-d%Ds;zac_4^b?iF^;8ANe|L88(Ko-0`CwMN=^iH<)k^W0Be~5A-q01Qt5D22xAn;B59zL>;kJ 
z+^q;Z*fYo_J0=hB%$~DjNL)tbh2KGG<@}}&&Qz9f%Cijuta|sQ00C0W>W$C5evqLt z>e{zk%-LOzVdP@-t%6_}#J@6i3HXk=0Y!lYQE?oS((G+y#X(H$5E#f*qCOZ#kd8qQ z{(gYX}qL(!1C08v`K&i~o5-AC9c(eM1WwJ$(`{e&M}1B$;`sN)M3ONWnB@ju1!4N=CkHU;*GZ?008F zPW>)$0=*STNo*>jP+x2VEmVM>J5y%&q>b%^Y#&*Cr`N7rSsE=A?ar+M!}=ibY%j!$G`q&?*bu+cM8g^NDy(%Y@#T zr9-jkmI!UnmbWnwewI}Rwg$v!Jgk)gX8H7-@&P3{x@+lxB+XGwP|vtI%4LgqmpLKljIzzxRIRqemzxBLfjbw~;#p3k;gorx<#Up;``?x; zMn!56jz1kPUYqN6!|1lzjpPy96Q(~;(KA+lW6a6(Nx~@XZkW>$kJ!eP7$Uy)Myl2E zU5JG%sckWlMP$j_#E%s*we?U(lyKQTAoc+x;Str(ovf)zqzh-EfG%w{dxVy0VCu>* z$)CK0x@FH>XjA(thXHqsyAL26Q77Pe7N9mS_Yb@!xt}|($maXd0pu`44c{_w%1TnlIF&*Li>qxZrh zOQ76eLaTKacsBuBz-rP#gY!7=O2mKB|BgJ+AySa{wig4z0h9!_a$_t|g$fN@Wg!(u zhqw_|QQU+;hOjyX0HFXq+-F7zmQF5P4*C}CR1vfiPSFUfhd%A1HPO>1HAl1(ajrlG zl<#;XZMGlnB|QR;UfNIuesX-1o4+U)bQwy>-LRW;yN+u=8?q_e6>$9kpVj}HgY+xR z#8b{Xm+B7GI+K?OKvt-$nO48`tqa`BW&8su8f9gjj0m@a%+u=y075(%tKj8guWu#< z%OAGYD&-)4ZM3SAl;Q3u2h7Y~%K^DfrZPX?V}g={*DN`414!p`_v|)aVO_|6kBz=n z#MD{Ke0)DaclRNja=8`p=C+Twz$1o42*;fc0s0Q~Vjk(v-D8Gq$7_xe1mwh z(8AE+8eWPkH-MrH4fk7ZjZ{za-}aHMY=F!?+hi;HP7`jv5d;ao4+t-Kdt1meU~Y^Y z%&dfc1`RUihq72tG943EIwlYVZ+PNTRJ+_FjM}=RN$`Pn$drX?k`uceeyh^GriRODi%;5$^qEG4a^UShGgk z0n5f?V(Jy&E|LExXyeTF1Hp(4)o1rWcB%ms=>UFRGPb=J^B|L~*OP}L)!xs6QUX%z zqI%B+UMzzZPSMXZirx!9bABKBQNF1&?W)l!Qv5tlE>fPK46g%C)Y3DUAtZ;cBI3xkPs=grs-s zXT;BTVW9SdAWEF$IPZ%B&P|g`^SNo3IZ< zmt3m(i{jT-w9hdyS6dG%zA4*yoEvyyDxhjeUP??bJtb<)p>?bt{A-@SF=Al!_Cam8 zB1(dXjxMIVC&fs6{B@LK!Gh>qo-Wx&Ide{uM~M|yyJV=G5)&mdR4sBb;@6ZMmZ~;I zqc@Y#q6z34iJ#<7ti1s*{+$&?M`*NAxDTZu_IWg7o@%6f3~nan{s=& zSNwqY764&}a&Fpit9-~a;!@P$b9x+OE?@>W72j^dJtu6L#C*K=IOj`Lmy6lRt4Q^q z5Qz$_ipRIqQd2Z5E(v{O)Hu;|GiMu;*I1m9{9HNa8F zJoV5y;7Dk2pdXlNpfWHLe;0Fs_$V1wu{@yPT7qLW!}lar9QHwTLz`vFXDz1}t&dAu z5rXDXQi-xuVP00N31SsRVdkTL+PO?w6u1;K;9D_UoM|zv4~7p<*BaX_q$}_LF7hgR zWM7W+y4oaoUIpk4kR!hMQa-a>+}#CDVmHOi>T{v`Np*JLb-`|0Qatz7Eo;H(39*F} zj*`c5-)^z|=1zHb6P=Q8@vR#l61=X%+>=Eg{Y2m{TLahFe+Q~E(^P9=Bos;w8|`;= zYRg!54m{gZU>cfF)G9A$Ce~4Tx3&kUg6>AHSRuJzZJ)Zr%cZT5WWP;B6~!ub#6-^$ 
zO6o`014;Ic>A5{rNbu9gt7C-s-7m|~ zBYLzRsQ7S^HBTZRK~j}6t>_4^uP$>alKWje)mKty>Ei~7#fBm+yEWA5jF>6+;TORz z_76i2`sNARbf~SdVt1u$+=Bv(vSVlGD$s=0=%(3STIxd07hh?UqJCC(ei(YA*>7`KN8TTx)DqsV zKuW*})(OlAU`3@zc0Vg&yy&7%(gE0aV zKMO@UyIj~BNv16aX^JaLH}HY08@ZswktUp-Ux<2Bc8qq({rk|9oTma#Sf@#I0n2Q6 zLRXI&=aX|OR1paCj5fP$BdNrVVAb8pP~?LF7T%2Ag&!0D6a+vkn}v%*DIQlhDYfe# zSqPP3NY3YcrJs_hu=)P^yyT0H;|{Uv5|cnlVBp##?H^lXmrT}@&kfuUNhAE^So$-T zw(+rIb&KP^_A`kNP1@dej@p#q2qi6y`{g5khTlOLD4-ixQgu`0owkULRM17SztuOR!Z!(y6;-TxQ>Uxjbi#%| z$v_kaP@mDk{;Kd0R_8<8+1}0{%Wz`KCMe!)|F^3W7Y-?Hmc&0O=bg6IKArhlXV*L& zI>%~{>l%x}4vA#*&Lf)Zd<2Y&x*fivLjjTD295YABh=8ltvfH5QVDck;Tq;`#X8Xq z9@I}CWiRC1^H5GPqNA%Dq41jBxVzNjD$GL23S;$ozy+i`g1PK2evVjWA3W}IS+^Ws ze8KiMgPr!f<-j$A3Zt$ltSlW9cc~5RCDhrqOEHk65s(t$J0vVbXlF$C7VP)|;5k26 zKVfKKBE#N01*u*m$S2*#h~D{RmzQw2tE)<1_@t#RfEHu=s=0{psjxi_#-$l6ps-om z=+SuXG0M848mp7i0x_GrMytF4!cb ziY)}7(REx?z;b^_OL-vZXe~rkLDZ+(3dD;+(jTp2Pq?fb)#4B)# zJZa?{Q}3Z5rToOC)*;2W(4f&9n5bm%PgcTeE9=(d2zXI1^3KSqd?0~ZyL|Fy+@u@_M&0UGi?`n(N2mN_fRURo z9<_Raz$&NFdp{gCJ>U20t3PDrr7t^06BF)-n{W`)CW^}-<<6IFdB*YC9ADnLn9D@D ztG)+63hcg*`na9{PxjN&u*|Vh#t(A#$PmL{=$XM+_A79n9EsjeHrt~^x-USmWNdA^EFT8CUf#Cych|7tXJpw7QNl zsD&DU{0W@03;@ZwJcFd=J~U5oP*gXhAlYY8%r#-_^v|3=bo!m!eE_saS4B+2DXWmv zR{OCBB$%obsVxo9f+_DGtLxeo0^~uSTz3HsM*=2juM7o6m1k!3c<$ESEuraigukSxFoG1bKgcyX&`_dUo6kQ)q?HAW0%z%1S_O+b!jlUr=-Y!!1UjMdc zYW-gMESu1t8rZ)}BT@3W94=*DM*nSfOMv$nDU9E|gt;aZW+@;mI1v9LETjM5NR;j5 zF(e?{Y-nv+x`I{&E%NGn|!8uupwl3Oh?^71+z z-~3>)}^y z1h0KFNe9hWZgrX$6 zT2snmtG|XKU5LW@4uK39g(dtNBNv?bt7X!G`9GKX+ZTY6KnrQ2sWw(vNNDl~)ohHx zvK6t+dp*IYQOjR36!uxi$J!tM*-m8Hv3COXH2i@!l8JS<6v`q*F^vp)jAS5vy)j@|GSg|#9`vrHs0ab&r^fJuJ3G|l$5W% zo%yMwra%Mb^RFHtDwNT3b1xTzaSmz=5@7Fh+O5}L;S+j`_a{umzRH2+PtK7Wx)sX_ zzSmDPLRh2smn`r>4UaM#ZzwSi-~nDhedrLfVNqkTuI`W(0JpLOr>-rOsD{36xCYL? zcB@;=xbyl^^2bVm4pd+uJol;=vpwe}y-OM4<6_2(kSu-7UAv>z%M$@#2og3SFfB=a z0`H7EToGxe!OQNexz2{o&`Mb*d4YPTM~2e=F@XkJStG3b+kd}^M3Bv^MIScL%KL|lkuK5oT49%+E<6gcSF0&16Qf! 
zt(ht1nPipsG9@a)N77)x7q)31oriZVhK4+S*R|OHpl#z1&XK$Nm{=9bQtIb!exw@Q zmfuBQSGT@1hhNLWBC^vG$3AV8htP)q1LYNnF0`yk?DZM^!GT|IdP0e19h3oLwcTMD zQX5bazPm%+jzfA$>!%J*`(GD#0~%FxSb6iBHV1w^&fWYpX)Pp;h!@Lm0nz2zkZkLf zuP=8vyK*rfUAdV*Fo@q>cd7<=fhkYyjpQ)t9GTXu0dK4pCaqi<_)LUj+vDn{sjnBk zGSOa|_Ixw2@m#?V_s~VQrV0G=#8VurtAA9L>a~tmj{c`2;LY~@48(bz&5F>2Bo?Lk zUsl7avl&OZGtU<8BOs)L@Ot?vnPo#15nunL2U?SiE|oj`U0;6Q_+d_T!{{18& z>`|LHpmq}d`OihG(`=w@KbWkPH6E#*p9Cy2k1A$66qo*n7ZNljlWq#_*7YbdU`cQJ zVSo0c1`psAH5)YDd^iiZ4Ce;ka0W((kJP=!CZn7<*?ClsXDk z4j-f$vQwSZx6L9b%V%uZrHcjGZr(AmTO073f>FO%%SNhU!7a`@M!lf8M{I)kbN?t! z7ynSWPgcm^dZ#q79L9%~c?tz1@@uXLHI;J>(Sbupl__2px z%Xu^`&BLf3Y^-v+Dt*{>Ni@BLqL>a}E%47US@Aj*?hLrvKRaX|vP|^xb=n{?{i4PB z#Uh0HcI;HCPWf4J%Ol7VlZ4*q-<79JQ#yM{8@Wn>)M=(MGn2bL3n=YfC(k^WYRlsmT$kym$Qf8hB!~JBo?iJVC?jDICMD4T*nbS7r zXQ}P;NEuW8POff2-s&@2W628Nx5FJ;#Rm;!zlc_rZ;K%p8~Brb?ltb~6wm0_sd=BziCecj{11wDO4=t>}cHR^?wLhiV>}COHisz>5{l!qTHZ*x$WbpZY*qW#;w&ZuRSSyqU?PoP?1Y`!>HAMo_fK-L&DxW?6 zRDG<~tJL*^)wi1L|1#?2@=v_VxPCwEljOJk=~o#uyxv7X^WP0BLE_J;?WQ0L1uZ&i z8oD|bJUmHn&Mecn;Dh!XZ*swV!EI_FKjwGfzFQo3!Z^*va$s9>)w>q)oU(br0G-wh5ca9OOg#szA?Mke@SDUzhm`NFDdV=(2tmoPz7k{B zU{|3>aK&&3ODzg-b2y-O8G^HlGT+ZI{|-&i0E4AiDur!Whx+Mgv9wa8~LYK#%zNJ5z=7u-!4t~OQ#F+R1EWc4Hu~v-J|}zc_Nj2 z_G|bW_=2B$?pa7uw9paN+L7iJ_L^8if(j`^Oik4M8PNUK@P4wEy5Nf9 zso>w=M8vk(O7gh1tS_mh@7h3bq@XHi6CO2j#wvlU9c}LfkBGLA5YG2OG8N8Oo{0%V zs2X~Vd$y?xTCAFz6}>brR}+o;lg1=gpBeT70hLBjdF|WXLw)w0SI_=&*ctaO87b3* zJs8CQ&>&IN5rY6*k1m1gfd)z9R$1ax^e@5;e^Cy27Jv)SRvND=V)_mof7V;tGi^t58aumOH}v4gY+*a|wrXFh@efVUTv2 z&d)Ma%N)-^2+~0X7YC*#Wum7HZwQn6#=Oi&7BKTHoc7G1;f*E-QDxD@@df0xyM3= z6SMC%l57Nm;at|y|JLt>ho>=zWXsP({u62eg=5s)R(r1v>Eh@JJndFV@g7P9mb2~u z){=fU)Rg}DruWk}-JQH0sD4-O79&(Ogqz-jP9((iZ(J`aJW>PPGT9DF?oZ6o89L+= z|86aSR0H;<4)tg7vGi)hSKWtQqpnCUNHV=Kuo*a>Zm4nNE}9#b&>Nx42JmD~7Y-8U zpRoFTWn+z7q{q-vRGa@nV@`mQ{!Xwi%TUNm1Xp_Z=$N!O#UBm}jxH;Sub6@0!oGm?fl{+@3mO@R!&lSI| zJhUhix!tzf?fg9L(^$sn17)!iS%pS?Q2Jegw_;*=&iK^Rs=A3k!_=cczViLHh_5g0 
za}%6!+CThNKd$##v^v?mYyZiVsK~RKNq^`CJ{$L{<_^Y6an&AvRwFlCXnABCYrBVr z9zk_@HaW!Z)orm}%+Rp?uytSAQB9zT6>Oz;e6{`8_1Nv!?45^xW-^$(GdAzjm%c7; z8|$|pd+u4h*y6Ps3T)7ccqCzV{cD}e;;-$WsU2qG@8cKadsg}t8r!y7=N81D2TWzF zT`uuZCdF%gcKwY%(6RR$88Pd!3c67~B%zTs@vS0dK|CwH@5$6F-}}r>X9E8?{OO&k z+}^j5c&D2ke()FE|4@Y7nVgjr9)DZH^7oDG-|AVc{-f+fJQHQ)e*R0R{3{`D_w^pi zZaNn<+rB(X^0#!#nvy%ycqvu(_ww4s?eUarj8+*p3ESpP$@;Ez+h(quoM;xDPik6! zk=x@>tedq9ZLWZb(#H(0^ zJgk2L`Vm}fLBRxmKdIOR#^UD}s&5u~Ems4XKNy)Hhw3H8L&};@r^4goLiX71+oq@A zYqTYMg0y=yX*WiwRv}xSLIHo1W|zd((GHB)t9CiYeY zPYyc0Ab~G1Ch&|*seumYrgA>U6Jz6QuME)m&4{`YOWt-KFU{ea1bvxp?tF;W0SZvdW>FFsK{rZIE7dS{M>N&6kV%S4~97c+RKpAcCWuzvxd)0B~ z+njR_ShR1Y(@9f)v6G~%AABZ4g^MCb*4vjm!1(9LRK0^T>4yfwF##m>H+}5czPB-{ zCv5`)916jD2Uvha#B*E4>gR*`v^2@nsUOzv!tQx05eEZcnf@cPF_Hq~w+=4=o3jZ{ z574!_eZY7av>Bjtqxme;*wBpZ)>&RyrivHK3*h5}9bZ6OHn>NX`&PxSQ;-#hbwLRQ zanc(y<;V!S;qMJEbzgUT74{LNzS#_rFs!WNIRU4;S6-$C??j#dSm&v&kAoWoFOPT& zK`)v9>|T5MNddohaM795LnwUwTV-A@}rInV0peKB}Zv>r53} zh^=6s;@*=;X%8n<1V5?tx>>LoxC{QAh|{P^pAOv2vAxV%zrK}Vsl(CND*N;vR0-?i zJP|k!4(2S+zO<5rT<-4%Vq-iK&s(Z)JchE{GE#1rTVr-fHkoHxwp_I5n!QA?_@k5& zbT+T9a&Ea4&&{NCot=}Ea$A{vsH|@jDl%iAr93Lar{f7p49jyL>!LwYN+N3lVtnoH zkB)h&HQAWw#xL}g9(sq$nkf<+uMw=^YSkgYW|)(J}L3NVD?~Vq*`f|1CQX&mi>r<6Uf@O^d+m>dmVJqcjT<6i$G#H;7P2hQ_k)7dcv*5be z9xC%&IfJU!Knlw2`V-3+efi%=?z@q{lh-L7Au5a;(BMkEH~T9XO7v zff1V#{{cS_WB*Zskw!Vq} zgtT6gyp8KXuNdT40|�f-Znh%p652!uC)HJBDHCu-6O^yU3Wm%0JLnt&i2X(M;+} zm8xy>J}ZFVXU6SDF0sk!+HIBggP(()FDHQp3p*o0Lue?Ga`bz|h?yLz)^U*c(|hCZ zNYE}rc$y*H{o28~;)@F{c3Xi%*;O=dNDWH9JHl$AMA(JwtA+5hhK2#2? 
z09G|@K*^Z-ce4$VpEf!mWno;&vw!QCdrs_}+??e!RNdv#HW^8A)^6G#w0JlGc3zC<Jtm0?I`u}u~?i7Oc_mpXHAF$a{o#j^C|dKnxku0cR9c2E|2lcK}+ z%0$!O^QZ}rG7NVf5S#*&s5~WXuKU5YEUg8%znw9g^qIa{w2RD0xYzht*_g1P5&6Oe z55MBTQiwkS*1SnNn>6++kX&yBwc zlrJG52dr5M_NTjV6DW800zC+J54W2r_ZRsr1gO*B&{ph2K^TJd-+SG{uIFJFd#R!z z17uI?FnOoD_yy89tGhf<6#9vH>M$APqFV$mJ1FFv1#{=}6yUK;jL0f0(~*mxn_I%`pB%jbG&Q#fANdR= zcFA$sa^#)69!@z|pAsM=Wg(1DP$BL_8A+|E%tG@y=_2}F7G-xVW|7m<;x6Q9R>ld2qQ5ZjRI9lx0aDUU~JwcI5H<{IV@uj_rSCs3G^ z;K2a#+sk^JU0Iina<^E9PdGr)X7E12`oN(c91#(1hyA+N39BbreQC?FEqL1sFTd2(WoZN&_5}iG?WNMZUzF)aI z^{m4J3oeA!HVp7*{**B3#|pAhgNo%6))mYVTTP-PxAzm)7+K}+pZ-*Id)LcX9xBm* z?y}3}E;@heLn82uTVlLrKmpJzBEH@c|^AP!?XZbdxe`#-hP_ zb>J6w!m4H}jOVZ-GPO!o`&#==H}B%mM7F~7(<^(BU|5MdRd&$9p*H&Ggl`;I*s(Zp zQtR!Y$-^ChCP;rn*cNKh-Sicw4`-bK)@}(*BLHqS{MN>uJFm4r3|~>ed)+ASAM_Ez zewd6bS-*EujzEXcDJ$cMrut&tZ9qdUBuY`Sdffx?W?_eLD-d@FU;R+R&d5e1l@V@1 zwdL0t4Eqo6z$WxMK!dJTC&N24&twHa*DoW0`?i5cDt~}aT_V2ZW?u&>d$P+~wh+cH>qN3eDzZc<`<}?YWhXmXvJA5C*(E)f-~V?%&+ESL zC$DB+=gc{0&Rpj@=en-%_w#vQzomv4q@A+JmLC+Y*TF!YF@8qjPX*hOk?NPtG*k~E zKhdZRt$7~#kF%PL31V+xSs^q0ZoTp7HbnnQ<~}(aSrm~OH{SB3Ox%$xi;Fowc8&m} z^n|k#>X-sGdxz$XA#rGKvdwHm*%_)RAQBfs67ky+g|C|XRH~J-DH8&DxjS*TXGUmG z4@;9m(}Jcv>U);QrI9Ufna55Dl1r-9H^|M8j6GB!e_{bmcRn-ihgisjCiO~U@*oxS zifUG>s8}#zw^So@WBza9ud!zOwwRP(gJ{RcylXrUW@bxm_U<%QX}CaEA3VDi43$s> zV^F>rwFm92U-fQUWjQSCxzNk@hS=P~wk&2xj5vWd`L+F{ z77j6chr69)%@F<;5>*uOx1jIB^VhL5r!L#whC!2e@ZB;|$C(jkm#*`@a(b)HkYi~bUF4ik9T=#8ss^Gm|@zo1A zHII2u?cPK!TB@xj6P-yaR`Qd4zQCd`ck zK?fBwT>f3coRFrm)Qn_o-@@jrF*$X$coQBbdn{C_b{qRyk_DxYJ))#u(0YAfhw>1T z8>kP_a}62Ju8u@Bng%7|CbCnrE;GnA8@$?~`eT;xx+YQ%*19*O+ZY`5m=66g!{Q*6 zB*y0*Pk@%Pi#+*JDa2K2lMciDpoBT8+EWIS<8G#bM0BtML;u6NOJdFPzqB@#L10f6 z9oE2ZHxzK8VLvJI3eX8|?g|pL5R86_j5GEIu@qcMoiijf!0(YO%fg^LB|FuXUA+j9P?94SBo|0FDs4CJVTgOV%MLdkW~W=sxjJI?IGI5I~HbX$R?wyQX$h zpdv)*EY@ey{pwPtU$cFi;CxX{S-X!by3%3d1cM|Ae`dq`c0kX;m zD!eZ;hX>!O8$v!*E&5~uRZHTh)blG8kZ%aGhUQ{*Wu8kJE^&n?z)&9tGQ4iV+V?S?u1ahSP+uosQ6^L51LOWBpn$3 
z{%fN1Oan?%3qo4C7dMIr(t};vSXZn>vZw>g8Ito`EK`9?5Qxso!|ykx05}5hpGSrs zjVy}ZyIyJH4`PT#KBv0#<5boUs;^uZcdS8oTph9hpc*Tu*X={s)H=}tPE)j%%^U^H z?$U-$ z$cLzp3#&e{^6@%kU4e%+l*t)3={?y%<8?nmf>p5+*;YjH`F(m98$A}K^9!38pbA zI+KE?41e=5VxB!0V!19}5CGs3KfG@rUu5C5APOA`M2;<1EkPZxjDQUgNPOyHSjfud zHrC+TnO4+*V8T_Cg|xX*orOhP{eDjjpAAq77CjHDUy?0tnXfjK{`r za?7qU`C{iMzd_2zj$LZB^gCvfLZdl<8E})h+2d~>6^~?i;68aVC&FS4*PspKGz%TG z4r6(8bVpK49vz$|(#?P~wZr(;SJ<=W@hLK7t{Tq;Sq;-YH1@`(>&%61B*$zVNh$Y>%sTYK{jENCVL%R~bK!ryV)>+ss3E)ChCKWL`lP`#- zxyv#*#0_ZuJx$$-d(VXxp+Ol@;bspSYy%%*l&zNJIb-<}m%27^?tHOjwH5L1I^qG` z2T0CPr!Ys`Q(hL1uX+)Ri(QJe!J#+LE~Yi-$14`0Q(SV}x<|&&2c7q#5Vb%Fv#|O0 z0VNRRm-CT9Nta!X%!@Kn=c#am#&@{E@A1Dc2_PtlU?wB3&`H~X*M)Hh$V8Y8Se#B* zH}d@8EDd`pHA}@S3gksbIa7Z~Lu|*Iy^QtYW?$qPG?a5e-y?9`lYYCD!);>-XqE&H zwsqNAFwdw&4ez2+Oh<;=7N?(32a! z0%KTypLJZulp|wN4`<`!8aU1&$zp=vVP_jCM&Os;bn`GxqDlYg9`0wpYr-Obl)CP} z`KnXs$bTroL8c&bt?^bH+Iu17vN)`yK_lEjQ9>A?Twwk2`Auh6YjbO8j8?Oc&$QfYR*OW zoMDdNah;Dy8tT{x3hG|2k4^Iz0rmqV6zZ>q&26Xct;DB8z%GF$!dp&0H4$1036?3> z(`;lkBdV;A3B>bAaHd_qLWLi$fr7M$aaXMRwG%^IL=q^69^sfU1%%K<& zYCjuKnq(hfVzKC@_%m=nAWHO!cQwL=Y;dlB9GT_#E<>`*eE-wOwQi3e54Ok34#YS8z^*t>ClCIXIiZ96pBn?%HG(~0k)g%{>UU!Th7-G-Z7d;rDBJh#vAa1Q#o5*&&c z>zCh0IyI)@lk&}1^BDjZCkbM-E5%p+N_+o?)?PE=b-r*N0^^%}U97>~`7k7jihKQa zy(UKbwxb74^k|0zF`h>Q4hSCV+bGa5I;-@T z2n9Jk7rtHsj_Z4eia+_Tk7V(TZG%b9NSh-08uHAP+ zJ-NwKeDsu5yPwngulP}%4AwR9u1ZvAhSCVOYxI*?ki9lGZ(tQ&$3axVo!57{A*xAZ zz#%_baF~+%u_lHwSl69Efz^P@5gMihf{i@|IXR$FQ$QP_(2Z5J)a$c)qXj3=ez~T6 zyDVLu(ciSskH#=3JyuZVeLf#+9o&N0YwBw&tc!eD(adw4a5wzdD`J<4ysYU&dl%8E z@EA`kQL&+R%Y=*GSc7VFM+$U_l$H)eOU#+Un>`c9*salWZYsobx^#KlvB{&6j; z=WS*Dt?$$ff4p`-ElWu6GIG_GjF0c{QlzbXd^2?+D>es_MI$;+64r+8~!4PL?(OnAMk9oTO zY*V*~6xU<;eH_kp;?~`0&|}Gl;kXD;6;@f4%6t^wY`%RNR|=u>jLy2C@K_BmhYoOZB*fdtnx z?NsiK>OIf*Z(&!&s6f;}JwFpQKJTyE?l5^a`BZX=1GtKeDpy=WT5{JEaFX}l=@=iK(XS6@R@ zZyuf0S*jkJ`9yj1$B+2`b|<)xtqDhjhz`JsnGSfbBfvaXnO{~+$zlDaZ_ndWs~|2l z``BowAz||2hjT$*HMasWK5|t%oduqKb0R4`AsCY~fk}w&MdxjX=c?}Z!-rnN4G$5{a 
zhK@7&KX&nG5D_iC$hYck?|nWuT*GF`5X)-!=1XLh>VZ~O;*q)GY#!bIpYI8S(cJuEgv?-?`!) z*zAC6GYq-bfsay72l`HgE-HcC_QK@OZJi&DErT6A2pUYHVMVOlQFgN zmzE~U0vwmQQiE;{tzmWfyor^Z8#is}C<8m0v{m#aFu^52pat?s`;k%Ug__ur1)7)K z%EIBU^-!IK)gg6tO zR^q;vaywj7p{W^Kmu83~#hhm9yj}St!E;E0HNK=vu9N31qpk8?FYm4!5$|{~H-b41 zPE;l> z9(1{YXED$I%EYQSrXP-im+WHQg`u5zO{2wp(mTPju=Hi%i~r1|H6Cmr+o}e~N5lK? z379%Q05}#9fvC>-{OI6$fwb-$RZMGe=ao=}uu#Af6i$wt^32qx&Pf!%xvzbR$P|1x~o9?-oSu`r0f_A&hCyR&mkhLV~mJXO9DE<+xlEo zWOcpjj7!6VXmMEkFj#3JAIDHo_fnM?ZWE?95x1NsiI>3$$ zHN!cUJ&XXR!QY5$&%s=uoQab~%ra{jRQTy1NnGn>pmjAp{`&aW7#dT)G!T0!>t%{R z!GX0u{JhgTqH&=gu`d1WPEYF#?pE_o4dUpv@?W^}W5RV&e|={4QOA^oNm&kZS0ki_ z*Z@h+d#cg~Fi5A}8T+(y)D5hnE9oVwZTtgvj3bX4dz+2?`PN*lKDj!nPLYP9xQS@b zJguvt9kY!OooA@V2`wyojB(sZ9a{a`V&0wP0Hj&cX}b}?1njQNnEHnVI0?W570Gh; z+CHH-`Ex=lP9V?2D1YEunyXdTEcaWj-b&cwTo*ly7b@p@RR;~uM7`kR{eA$06i)Lg zS!ePR<>^XK^z=2MSzr@>EK$k*z!41G`NV1p(2pF;yYFv2@!J}o_heG2w@r;MELp&N z=Kx4vi@B2y1MY<+m$(?m<0ZpP;KB&{@zjdqK4Y56W(;SsY(As zCvy=^mw%qIK$17USf@ZL6VzqFVBB9OlEsd5Y`6?olwC%IxW@n!>~Uo`Jw_^Kg^Cz+ zdlD>zk8yii{TxFNFH=Xiw(^iz{NSg<-3Fm=r>tn}>4kBKz^mlzJL`he9v%7(fD|sw*1S3tD!;T^Ve7$U+M_a|5lO-r`=nI@Bon9 zf1L=oB8xUKa#|dOws`14W0>lQMlIOzxyI4HiSdDF{AHoAc^S;3gUvB_jr%Gj>YIT3 z+kBF%ui_*S-hGbRph@G z@A?S$Mwo$}JL8jUT=L=b1~v+sGmNOzP$^RBChh1jshp2Kce*)o(UiJU9M*0vc38vZ z-tJ_phmOUzx=Qyd9mI*L{>g)(|#?(WwnC>NpKRWyug<8L+M1EJomkxQQPcZ>=K_uH)$CBVfw7bgL<#Et>c z+RTmsEQ^mWKf@5tXssJ%Sfw{uR{*S+Q>;jja?Hu~5dA2aKzegOhsYgFuoo)V6`)VB z_2p>gDW#c-JxF{ek32UrY3b^n<)17Kz?6X8?;%4KDe}DbDOY{u|G{2dLbteiV-Ol; zrAcA?5HlGQ^h0ad*6-dOi4O~UgXkQ7gFDJrynGusblEf&HvnW52Tb~3bG~!au+$bz z)`_qf!ph6fF}!I{utC8zF+osK)BGT`e`caR;FV<@|FgrSnQHnrl@ceFZnMaQRxVKR zzVKl_*2dNq=wpw^kc}cO+V#nRM>D$ZE?>F{$J4cNVg_E&n21a~13l>6HYnqh0=Yic z&>zH%vgzP4s^BRd&a|WevUjG|%GE@iVwOY(QJtI|LEPiFlwq2x&!olgWZm{>H|%h~ zc1Ish2eqmdDXwZWY6GtGB=1*}M^xotI2Fne;|?1Vb^CBhaZ#&MwFa*+$jzsf6Inl7 zml8hbN(1q@nVH;?_y^jpD+SYbezR0#sw>oxV-?^cV+c=sn-{tAt?D1gc-$xg^0?W- z>foOLOAof?c>!-fL=A{`IfuYfC_oH2Y9`a5UXcIpY#!lA1mb%l06d&|3(4MQ0%PWg 
z7vGSL){KOerXJpP)%h)#*(u-_z)m80+WGi5--_<4a2gQKai&fFQA*s8T;uzC?$maR zv%gcF){k%$$u1~CDjfp!Qw?mj>Fc<^DM$H|DII>|YXS+*T%m#X30B?Uo^0j4Wjrc3 z9+8%TLM{5K8SJ3f&mRW0gqXAb&x26B80%iq~VZI zd5|x!c8==OHgFX_Z>@U2PqKVE7~q}KB_^sw+Hp9DkzYZiTil`Haf@ zq4|K%3oSQu8Mw8)OyM<1T}^q@TzYQeHrXcOg}xd5P`EjSVgaC^-%EMZXsvGE$5upT zGQuo{Sfa9M=SB{uc+6LVVpdgcFCp5t-18R4BZ7Vy^FINR+p)Grd zC(c~^JJY>A?w0bV_2HI}x63CK;b|4RvTT(Oha`Y2Vl9Ge2g9vU115qJRewCz^kuyL z!xy`Xt#snDh3aBy6dGBXJa4roBq5tP1ctLUKhn6JFJg+k%hs}fm4H0U8Zb|nJbSHa z8aIc!9LVn2Xq3Hd7jJtV8q>iSs_#t1>4~JHW?vb`#|2##;sFQ-LIauVSvet5cw@QB zFHE?#QZqZu>7@?aq7oFi4c?MpyR|V|q?1TyyL+HNU6%GW$!%k9e=1GLKJ;Q&zG=V5 zWZ$udL;%FA#V->QJs*9Sk*NBSVfERZ;)>Hl-S-rtr8+TGI6CdY+sBfu9#7*l8IpgM zXUt+N3v7_po^T6z@;f`I_QLRwl(A+MPj&X|7$j$<3qFQHJ%33$ec8>|oH^;?{+m!a zT}2tgU}SM2$4X+FNKkX`_E>YG_O|txsaI$v+nLlnL73nLRPV-zn-{LW`$BSm-@+q+ zWVLi6IJOvuwYzSimD4fCJ@ezq{cd|gTHJ(IhwR%CAsKD65BL+CB74@VJk$MEy1r{^ zqOtjF9*ry>Ds|}wqoiUmZ8UK1<(VOC55-}h)73He5pPHQCFb(XLi6uC6>r{&8s1%gf=A$y+J)pchbir*ZW)+ zfvtJO`Fq6M(}UToiX1~9BXQp>j;-0(4pVd&@FyWmQx%>tSNKLg)PdXT&f9gj{jL$P zlAxg#)-X|VAFir)232}yX7*<9aIQVf)ycAbMd>=a2Az!h{hi@ z`KduQcqx_6y;d4lPE1qv1rv};d!+!YtuX+;`^~ddwOGsTvEUoMpf8>mdG)b*%BLbL z6MFElJ+HzBy!mqEELotNqbogqJL{hJ^Dh(gAi(!3NE+<8ONqhNR@6LgPdNKeP%?t@ z3Dyiw#0{Qr)xoA%*P9z6ljX0kN7!dzC)LH1{Z$z<7?Kp2#NxT@V9`_{e)5lo=t)$; zt##@_REMI})09ww0kcfRvv0I0SpOLy7mbX}BQOi)%hx@q%i2X6_u1zh+nK1(0J}Qh zN3l5Xt*PVMS1{OJ$v@CjmHH?nY!UB%6oF7C?|n5u|1`*CVPw-~EJqe5SQQtFc!aF3 z*1t;@Amm7tJ>AcZtO3ewR`Kj<9)#kZE3d9l)tO3}4tLq!;3MSpePnx@$$)5NOhOQa z$mx?~V{~PoY&pa1d;uGqD=?%19^%<%PQY?O3+vi=h>2#)=Os-Q8ZXnEks!HniADBF zY5pOUBOKEAX`T9KJYi+EPqOT$YqO={_rRhh!eFavFu{bb6qHvRaM+Vkk67W2!~=;S}@_;^-)6UJ6q8KZ~7+04_0q+NheNm7p1z zi%$PNN=UCzcZ+{s9|RQ+r=AbuD&IETUCGm;0~U-=GEPBj8lPY;UUO2IQi^wH0Q&3; z3;RtLtzg8!Bt>11(xjsd!cuqn&VN#DTGnn?BWP*(XHa6OT_QN+V!l@xVUx@j94}Q= z3Q(W;dW`&xb9wZO%{74ehYpo+o-9ip%je+@5bJ=d6}u!#uh$N6o@0PogFUSFZi9G0 zvm$)X_*OfvfbHXNRTPzlcLEr)NeD^O0!HR6tL&nO$wK*chN4++S-Zo(5P0I@PU4H4 zXj}2)N(=FmQ%^~>j^<0rclPmf@?OZID%*nka+v*%u+(V}oQePHYi}&?GGMV%Q)s<* 
zR2^nHKWI8^XCyblr26;K>OwCZAqW(m_kYJmtQ@Ud-UBQTF&=vdI2pz8JpqvM%U?sc zvc!8p$u$NySM&t@F-&0s2r zepo1Xt}{ZlH2*np1hXOYOtiyh#IBpVHk_%%{U*E&#JX=s+SJH=Kw#*3=yhH4{EPtE zMrT6q6qZJsR_dk@%%fvaW0vbo*Lxtz zCzm05RmD4S%BM~zE;sV^>KW9VL|D#HcT0KKJfN;Aa~&0ZNI2{~5}IJ$3RFU_Pwgmf zV@Fp*=6$Is1IM{VhITG<15?R$;V@}1-V(4KAX#RspHHv8dpzA79C?gjfv1~f%Ej{A zYBG_tJWBLW;<1kXBxY^GNX>{*R#}4GS;D6iZexEa)9nG zml_A81!XkZ7ROLgrajbe0+a#38bY5L7-NBYD(xMaA6VWz7_iJbm6L zwWQ!=Z5Y7`2gqz`>}1VD(`v=DLThl`ePr$PeC(vw66uUjjYy;acYA0rF7KTh*~1tx zbI)kBFa+8SiXz1bIUEEaAgBp=n&(K){$+z)I$x`8dqtO5&@>{;Xib@Eo%~ju8dJ`5 zYLlavBfe`vc5rOg5kgCP(&WO$VEj|txa5nyW7moHv|^AFu*2B``iY)tScgO*H3AcT zLyFY^Q=#J7Zc#KThWj;ru)9Cmo9-7O@-cZe4Zo0s@tuNu&BiHF$7BKbj1A0e(s4?PQ8{@TWty54#y=dZQ6qft z;uu9a)%82}T3XQlqnIDC%NY!N{!wH1>Iv}g2?*nP3)XmY8)&vbp02vq35{u><*K~@ zy--aDjpb#Br+ttPAh%hNw+=krBYlDmy>?bf=1$UjVHn$8n^K@s9gLR`%l(C-URD! zM>c3*u@!JvEmo1_?7FiPCBF-A`dmOzS?^mL1?^A$NOf$*bF%nm>NlXFvnz?0(XE1oMR!htpIYCRA|js4jVtvxxjus{1qGFb-bW}_ z0^KoQ_OKp%FSF276&_P|mW87xe@j#Gt)uMS>!KP#Q8psZP{eX6EWprbMBgKJ{!T`x zqGzl>Oly9;XVC^w{)f=mE2gzk6A`++316zNSXp7xp}<>K_QQZoPnM%&w)`?1Z<3Z@ zW38?M!%r;F>zZ84TO$ig)|XhWUoM1+Hm;(HqEoAXD%hEB&3;#4O8z1Wr4$iX8Y-p= zfdQ~F%pi+CEn5C3iGE{;6RiD3w#8v6hAFzQz zY_=ad=lvVG+UG@W0=_S}jov+f@Gve$=GsuN0~y9u5v3WA{l!WITfd^LQP(_Y(!C)* zSQdWY7Y9fvY<)|9`J3b~TGDD>%V%B-(Y;vo`UmxeP=Dw#Zsa$7?`@FVyQ`j}UGh3Y z@vk0}$Qh2F_{X>%<7f7!l~u8{2>GWC7H_*FQm_5s62>)HAS5cCYThL+ z(c!=v?u_WHUputll$%=BRu7$Uf6_nyfLd(S?krZ{cUq2|i$z%kEPbZ;`Fk*UiM6Mv3>=BYc+i-D_)3w)hbNUHAzMEMU{XV`mOBkE zMw6Lh{s!|+0RsBX!OVB{q{ujP1C;{-Sj{LgQx}ud3Hxxos0@tnqL$c!M!YOuMU-g* zSmcZ{&&IzaCO672mS3I|yi6Hj>Y1tyu#uC0%3mXN*>UC_Q$RzTtLDw0FVTTh_me!ZyDYmqM(e6Y~XlEoIg_X zS7wIs@x!NHsH&4x$Z{x_KU-JS-%Fj-=H_CpK7gpu zpS$Oj@E9PXk~aZ8B@iHDIS=nNfeGaRq8$jHi1{S*tcLc%Z1-{wL!1ZEY!37>R3;+( z^)P~J)oMuaRh9)1Io*{Y256?O6N%6o@K#iO@m%>bDk=?-@A_Eaw!|Tz$}0v34T>g& zW#OAaA9aBgMm@&6g@$^r7^TSi$`lMtws1!Rg>Uv?w?wq5&lOmm?(n&qiRWLT4kkcg z|D$j~$!y^Y+C_vt8Qzf|gocXu5!p>`UaA#2Qx zzlIQq?w-=Dmg#1s>NO*0`Jx>< 
z`V}7sw(x+>JKCz!yz=KnWw3hU#&AuHnnK5ijL!xD?A`pprsd9cE20h)coUh2%xz7 z9Elk2Oy?;EWF_JUMftiR!-$r?cY#EB)|>Goj4v$(TH_Gz#R2sVQ27RmlJwKvmD+e} z0Q7?~NbA@)*29VL-dkyUHOo_zn%;0-cC1)X&E6s5<{*ckg5?DhpeJvQ)gDM!TTIe} z&vl!$XEncC?>ZFC1an80tFNfD1B2*b;Hd=*RG5{&D$JNp8elOH*`5cTg(_$PH`#9f;YScO7sFvIQCV$cQV99cab_{Z6Wivlc z`+-~kVbqLjzbNQbc8_tl{2(7H6*o9NQ7Y@w1I9Ru?AVtVV#?C|#yu-FbA2uzPZNSn ziBbOjOK)AsxPIcl+~7`yX04}U3Gvn(>fDUIUz-z&c{jzM9)TVr!#6z?m1FUz{6qS* zG8$%ziEMXw?(=$4?ra1+)|rKsI(0hY)h&nGgNt2e0AJ$Lwj)ZL8}Dr$rJ8;>QP&p7Vf$+`B#uZr#DQrqX(O6#{@Y6iPdG>d ze6x%L$+eq{_(W~EXC&#GBp{EI0}7~)qGgoR;p)Me;8U@-X=Vz1@3x=Wkk>|-m&vte zzt#H6SC{!vwK5E(U)CnQPnX@L>#u8(EhZlaaL4TI0EpVw^ zw*jMs;Jor!8vNrYypW3`<@LeSG&=H;Dd z8+$?q?CjmygXDqV60caMzO1QWu1j>LAiDqkSN6&8#raRlYxCB#Qz8}*)0xYMEx}D@ zdYl=_?K6?Fy}!qpcf7mSX-@a`VveP#*&y)^L(o-G}wX!eg5q1BLkwr)$XsoB>Rr2p0WhtHMUxt*)^?*NFoAfhLJxm)`Rq*@$6==mSr8WpDm5E>& zW#s<^ydJEfe*AhP{0{KmX)0<@V>~_`3#Aw6i$pK%ezKIQ$*@%VRriy%JYuy@C+rY~ zbKs3__L8P%F?+a?{-k4rlv>^I!b(RO!8UG^^2wOS2|C8*Cik=PqSYx;ndHWJ+(at_@u%LM zz=U-!F46ClfSEe9THdLHSF4Bb%1{q7YQ@)QfagZSyv-gqqd1Kr32BL;ih27KOcZm$ z;O848)#tYTTwMv&ra;dl3)4&j_kK#hjqa`{&?FsoE7LivVK5xXZbjz^@f-KBHDB6t z_r{vPnOF0$+{7DlOpPd@d`vSXVeJIWVpM?}@A;$u{3_(Bsv7o$GSuKD4i21`zV0}v zCVrlN5_8iD*F(hukJig`)$5;N93R$yL_<`urVR}~<_h(Ui$+dmeE9I!5#vFQa>=|! zjHTY|i2|9{zVSn!S^Z(;N(@#^ zi2Gg!yYik_!53tfX*HP?o8#$A6wEcUnZ=$=%uFNn(k2*b%}DkOLM^L2?V{S-BK?JxQXfec=VaCp2@kwhVJFV6V4C%DkO2KfA)Q(HBvs$ z^zfLa(!GrwHyihN-ihuW-hE;a5HT8WI$OkU2p2;vQ

cfK5CLL#sYc=>$Qr4L3;c)AOkcDfIG3ry~8z| z63_%bj?(@dH^7)O6N3T&8H({Q0;|r-wO-u-7nXO~!oP9gtr`$7uMD z#Dps>#=7Tp7pbAeulY2gV`#KSkq;fg$+{whq4rzBK{p4gof{9|>z!O=dH2OB3!E9z zxcf<7ik-yMJOu<8N?w0JLk(h+%nBZ$am-Uf5M|Rn8peIO1T-ILYbZJ#teB3Qo%03Z z!$@*4am?n2xQnC!T@onq&-^na0YUPKkN)WDXqBjofoXA)RDy!kg=c*AJ5fd8eBXR0 zMF|a5SJO)y0vpuhiH!ZLHSZ)&n?ZlbdVG^CH&SX6PT8LpMK(k% zXsQX+UwPd5jL2e`8_!a-Pa>f0Pd79Djrt}a+`j6_kqBT}SV|(XgPy z9XdbfUzC9xtA7yU@A=Ovq+sMspq>Cu$_YX_(K7DEC~+g$W^0!9tis;mPlrdr=s+gr zgXInKm>PbdSW($^ymLGlTT-wY{NRx1(RLlOq?^^yG3%xCg2D~y+CtDEOWcyHOm^|W zj5E2AzXlo>*b{S(=|&cd$*wUXT1X=o8VGa%mXVx4vo`h3m#BZ*#FU5pN?czfWcfsg zJDTAWKi{jRuX?NN5WzLG8jy4iYUkgn6K2`fXb{63>mNE0!;~|h=imAgpKrx6g*%PR zxAM!cAG!jXfzxu|`ULIXWF1NH<80&gM5FevQh<6V9({u=0nCnk?r`J{Ma|R05o_3Vjb`&xyex5tK(NikweTSq6hDW! z=IA=PJ7u7lk=W75hlp3o?NRo;$pJzD3~iOlX3E= z)hMjR?u~2+`G-ejFCQS?=B6QVgnAn2p73IPtVNs?*wD3m^&_mf0x{kVVqLjp0v7HFRR$$CHcTcJw2Xw!m5mob zJ9A~1ejnN(N5B{K93Y^Um5y7__!IvcFT?i#Uc&ZO-TsZuBiOdi2j|G5S;JrGUO=~& znc1@_SyoZPLS{g5p++!dO%8~Tilpz?$Tlk91Jf5lMb`#VW-&Gt>9RTj;6DUY?Z zl8f%BDZUjk0jIS|BS&u6?W6&*gTiqeJ2JdSM_K07?Su5w zo}`Wl5?oTw09=STEpgXL>A`K;O}>wrJ&n=fPY_RG0$cj;w=Z}eT~vFPX*|y!V^s0Q zQh^Tty^W*rO7SOIG|5p+{}?OkqYA-|d^LuBh)Ad+C^OK~rf#4x|B1}BCvMEb#2p)G z+pDmu1?#r|a+?C@j-I?(PaVR|3X8!bV#UR>2%mZvaG^=uMlBwwc3dFVa^G$N(`kKf zo~(m$4corCSW>X4^OQ+TZoE;3_PDH`dVg?m(GB^%?OI%WX}rl^5$9(50!ibDG@rZz z$**+xKUVO=8);;0aOmRWH*^?yL^l>+{DB>OokdMKOgOFbhQ{yTi#cK&7N#G2kG~uK zKs$RRUi*^*GGw(V$VB2W`t)Lr-h8lH;?~s#5s0PfUKuD;zWIvCtrIT!WYIOhA53W~ zAHc`4Lgs9D8_iskH=(&QwZz>pv-69wufLqeLiv%AcO&`kmF;o@%J$iqp~_*{>kl!j z#2K+qhSki4+|b`|M|>|=opei%*7ZBDqsSlZF4fUPF&1%`l&*s_uMowFZ)h)`ntQCq zK+4$UXc=lkcBP%CL2k0vjUT7Wg6G|ll-Z_TCVxw!-*Q3*(L-II+AT9x`kM$)Z*qbA zFlpE`WR~UWLpNpoSxLBW1@99yF29N&jUzj(6ibyX5pc&}n+v0B6OnAB6U`>yH6y^( z>4Hl%=?k%EyEt#s-N;}p8?X_fW`ugk-3}3$r#HrW%t}2U;cP-BlvvZZZ}@RvrRMOX zjGff|N<1)B5d)Sysb&xI-WHoSbllbn6 zQD4d;beT|R1VP-Pv0%xUemB%fl6>E-^g$gDXb%~(*%B4i9Fj$K#KC1A@xNKQByKvR z6kkq&T_8KZbw78I?X=dXD}J*Grm*3Wf&N*8D;Hbr9LtP)@)yqU&Z{?}R@O&$E)%s(6p+ir 
z4Iy|MZM6428-61K5uCW=rNkcx*fE1-(Qaz^KL>+iea6+|hI~H{55ndIee#XI!P4Aq zUa>$$4B1lu?7*7tD!Iz5F;{EzRs1wvhsU3J0P1tznj&xJy1}!2yzu>T&coRwzhpWo zyS|Gd&AUf`FB;AVrY>13Jvhzm^4qf`I5OOek@|cIM<#fF^#%|aI`rY$~x`Rb_!_3IMVNl%#C|U}5VOPRj`sn!B5NkbVNyvxa-(D1= z>uXx`N^vW&O+=!72MfHoX)WG$7puqzK)SYef=8uqxW z`M1a(Z@Woag*~Xt4c_>U$Z_#l6Md}ofQ}Dl{1}lW8Xoc&R+dd6wpDzWBJl6AU%>B; zrPQLLB66{rKIfUa6YQ?*>DuX$Spb8RS)Vh%oEh`+%JJSxs(F3DpZP;`uxHJW zp=Rsx^?HWGcYWJ5JcByrs;+ixKtzdLgkmkANV6M4jYx0LwOASuQ3qcSu_zzxn z`G)@f`;Q8uhKv=Ro!qUR+|6~LV6ELu`Mez;!yrmXm*AhRq0RriK`Gijsx1PPve%Eu zSq|%+VY8tLO+!1LN)J3w{(9?`Fy~tz~8$2M6_FE@Z{Sk;bF;jIWFe=Sh->M7rk^E zwIm)bU-oj3!tPC%o6Qf~DGqhKB^ZB#^Mc0&xHlG>&v+(Lwv>U;!v0f6 zpZ_f?>z6&KuL6|M`cUMI5L1CBcymxr~pT;-^mK?LHD9udsa&a+v*&J9dVtvN$ zO>$>7NfBAI%E#QSzhaVY(Y4`&%$~P@QaPA&(^gyV8T(Nu2a$j8gO>^FclEQsB3;+Z zWbdaOzhFc-I#shxDp$20^*~vC0VN*-5GTPCO}A40-jUAp>PL+@*_U_f{EYa;}l;zQk)#0_|NkBtCkuO;(Q$({GD%qUV0Gpe*kKK00#g7 literal 0 HcmV?d00001 From 928c0a95ba85ebdeb57630a8c61030a9d94bcc5c Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Mon, 26 Jan 2026 03:25:52 -0900 Subject: [PATCH 13/46] camera preview and logging --- UI/main_window.py | 146 +++++ UI/settings/pages/automation_settings.py | 18 + UI/settings/pages/camera_settings.py | 18 + UI/settings/pages/machine_vision_settings.py | 18 + UI/settings/pages/navigation_settings.py | 18 + UI/settings/settings_main.py | 77 +++ UI/state.py | 32 + UI/style.py | 176 ++++++ UI/tabs/base_tab.py | 15 + UI/tabs/calibration_tab.py | 16 + UI/tabs/logs_tab.py | 135 +++++ UI/tabs/navigate_tab.py | 51 ++ UI/tabs/project_tab.py | 16 + UI/widgets/camera_controls_widget.py | 11 + UI/widgets/camera_preview.py | 241 ++++++++ UI/widgets/collapsible_section.py | 114 ++++ app_context.py | 73 +++ camera/amscope.py | 250 -------- camera/amscope_camera.py | 533 ++++++++++++++++ camera/base_camera.py | 607 ++++++++++--------- camera/sdk_loaders/amscope_sdk_loader.py | 271 +++++++++ logger.py | 206 +++++++ main.py | 192 +----- 23 files changed, 2552 insertions(+), 682 
deletions(-) create mode 100644 UI/main_window.py create mode 100644 UI/settings/pages/automation_settings.py create mode 100644 UI/settings/pages/camera_settings.py create mode 100644 UI/settings/pages/machine_vision_settings.py create mode 100644 UI/settings/pages/navigation_settings.py create mode 100644 UI/settings/settings_main.py create mode 100644 UI/state.py create mode 100644 UI/style.py create mode 100644 UI/tabs/base_tab.py create mode 100644 UI/tabs/calibration_tab.py create mode 100644 UI/tabs/logs_tab.py create mode 100644 UI/tabs/navigate_tab.py create mode 100644 UI/tabs/project_tab.py create mode 100644 UI/widgets/camera_controls_widget.py create mode 100644 UI/widgets/camera_preview.py create mode 100644 UI/widgets/collapsible_section.py create mode 100644 app_context.py delete mode 100644 camera/amscope.py create mode 100644 camera/amscope_camera.py create mode 100644 camera/sdk_loaders/amscope_sdk_loader.py create mode 100644 logger.py diff --git a/UI/main_window.py b/UI/main_window.py new file mode 100644 index 0000000..e4f8ef9 --- /dev/null +++ b/UI/main_window.py @@ -0,0 +1,146 @@ +from __future__ import annotations + +from PySide6.QtCore import Qt +from PySide6.QtWidgets import ( + QFrame, + QHBoxLayout, + QLabel, + QMainWindow, + QProgressBar, + QSizePolicy, + QTabWidget, + QWidget, +) + +from .tabs.navigate_tab import NavigateTab +from .tabs.project_tab import ProjectTab +from .tabs.calibration_tab import CalibrationTab +from .tabs.logs_tab import LogsTab + +from .state import State +from .settings.settings_main import SettingsButton, SettingsDialog + +from app_context import get_app_context + + +class MainWindow(QMainWindow): + def __init__(self) -> None: + super().__init__() + self.setWindowTitle("Forge") + self.resize(1920, 1080) + + self._state = State() + self.settings_dialog = SettingsDialog(self) + + # Header Bar + self.tabs = QTabWidget() + self.tabs.setDocumentMode(True) + + # Create tabs + self.navigate_tab = NavigateTab() + 
self.tabs.addTab(self.navigate_tab, "Navigate") + self.tabs.addTab(ProjectTab(), "Project") + self.tabs.addTab(CalibrationTab(), "Calibration") + self.tabs.addTab(LogsTab(), "Logs") + + self._setup_header_right() + self.setCentralWidget(self.tabs) + + def resizeEvent(self, event) -> None: + super().resizeEvent(event) + + def _setup_header_right(self) -> None: + header_edge = QWidget() + header_edge.setObjectName("TabCorner") + + layout = QHBoxLayout(header_edge) + layout.setContentsMargins(0, 0, 0, 0) + layout.setSpacing(6) + + # Status + self.status_bar = self._build_status_bar() + + # Settings Button + self.settingsButton = SettingsButton("Settings") + self.settingsButton.clicked.connect(lambda: self._open_settings("Camera")) + + layout.addWidget(self.status_bar) + layout.addWidget(self.settingsButton) + + self.tabs.setCornerWidget(header_edge, Qt.Corner.TopRightCorner) + + # Get the width and height of the settings button match the height of the header bar + h = self.tabs.tabBar().sizeHint().height() + + self.settingsButton.setFixedHeight(h) + self.settingsButton.setFixedWidth(max(34, int(h * 0.95))) + + self._apply_status() + + def _build_status_bar(self) -> QWidget: + status_bar = QFrame() + status_bar.setObjectName("StatusBar") + status_bar.setSizePolicy(QSizePolicy.Policy.Expanding, QSizePolicy.Policy.Fixed) + + row = QHBoxLayout(status_bar) + row.setContentsMargins(10, 0, 10, 0) + row.setSpacing(10) + + # Status Text + self.status_line = QLabel("-") + self.status_line.setObjectName("StatusLine") + self.status_line.setWordWrap(False) + self.status_line.setSizePolicy( + QSizePolicy.Policy.MinimumExpanding, + QSizePolicy.Policy.Fixed, + ) + + # Progress Bar | Optional + self.progress = QProgressBar() + self.progress.setObjectName("StatusProgress") + self.progress.setRange(0, 100) + self.progress.setFixedWidth(120) + self.progress.setTextVisible(True) + self.progress.setFormat("%p%") + self.progress.setAlignment(Qt.AlignmentFlag.AlignCenter) + 
self.progress.setSizePolicy( + QSizePolicy.Policy.Fixed, + QSizePolicy.Policy.Fixed + ) + + row.addWidget(self.status_line, stretch=1) + row.addWidget(self.progress, stretch=0) + + return status_bar + + def _open_settings(self, category: str) -> None: + self.settings_dialog.open_to(category) + self.settings_dialog.show() + self.settings_dialog.raise_() + self.settings_dialog.activateWindow() + + def _apply_status(self) -> None: + self.status_line.setText(self._state.format_status_text()) + + show_progress = self._state.progress_total > 0 + self.progress.setVisible(show_progress) + + if show_progress: + percent = int(round(100.0 * self._state.progress_current / max(1, self._state.progress_total))) + self.progress.setValue(max(0, min(100, percent))) + + self.status_bar.setProperty("kind", self._state.status_type()) + self.status_bar.style().unpolish(self.status_bar) + self.status_bar.style().polish(self.status_bar) + + def closeEvent(self, event): + """Handle application close - cleanup resources""" + # Cleanup camera preview + if hasattr(self.navigate_tab, 'camera_preview'): + self.navigate_tab.camera_preview.cleanup() + + # Cleanup app context + ctx = get_app_context() + ctx.cleanup() + + super().closeEvent(event) diff --git a/UI/settings/pages/automation_settings.py b/UI/settings/pages/automation_settings.py new file mode 100644 index 0000000..34fac61 --- /dev/null +++ b/UI/settings/pages/automation_settings.py @@ -0,0 +1,18 @@ +from __future__ import annotations + +from PySide6.QtWidgets import ( + QVBoxLayout, + QWidget, + QFormLayout, + QGroupBox, +) + +def automation_page() ->QWidget: + w = QWidget() + layout = QVBoxLayout(w) + + top = QGroupBox("Automation Settings") + form = QFormLayout(top) + layout.addWidget(top) + + return w \ No newline at end of file diff --git a/UI/settings/pages/camera_settings.py b/UI/settings/pages/camera_settings.py new file mode 100644 index 0000000..f207ace --- /dev/null +++ b/UI/settings/pages/camera_settings.py @@ -0,0 +1,18 @@ 
+from __future__ import annotations + +from PySide6.QtWidgets import ( + QVBoxLayout, + QWidget, + QFormLayout, + QGroupBox, +) + +def camera_page() ->QWidget: + w = QWidget() + layout = QVBoxLayout(w) + + top = QGroupBox("Camera Device") + form = QFormLayout(top) + layout.addWidget(top) + + return w \ No newline at end of file diff --git a/UI/settings/pages/machine_vision_settings.py b/UI/settings/pages/machine_vision_settings.py new file mode 100644 index 0000000..33bc5e5 --- /dev/null +++ b/UI/settings/pages/machine_vision_settings.py @@ -0,0 +1,18 @@ +from __future__ import annotations + +from PySide6.QtWidgets import ( + QVBoxLayout, + QWidget, + QFormLayout, + QGroupBox, +) + +def machine_vision_page() ->QWidget: + w = QWidget() + layout = QVBoxLayout(w) + + top = QGroupBox("Machine Vision Settings") + form = QFormLayout(top) + layout.addWidget(top) + + return w \ No newline at end of file diff --git a/UI/settings/pages/navigation_settings.py b/UI/settings/pages/navigation_settings.py new file mode 100644 index 0000000..b91682f --- /dev/null +++ b/UI/settings/pages/navigation_settings.py @@ -0,0 +1,18 @@ +from __future__ import annotations + +from PySide6.QtWidgets import ( + QVBoxLayout, + QWidget, + QFormLayout, + QGroupBox, +) + +def navigation_page() ->QWidget: + w = QWidget() + layout = QVBoxLayout(w) + + top = QGroupBox("Navigation Settings") + form = QFormLayout(top) + layout.addWidget(top) + + return w \ No newline at end of file diff --git a/UI/settings/settings_main.py b/UI/settings/settings_main.py new file mode 100644 index 0000000..e31f061 --- /dev/null +++ b/UI/settings/settings_main.py @@ -0,0 +1,77 @@ +from __future__ import annotations + +from PySide6.QtCore import Qt +from PySide6.QtWidgets import ( + QHBoxLayout, + QLabel, + QToolButton, + QVBoxLayout, + QWidget, + QDialog, + QDialogButtonBox, + QListWidget, + QListWidgetItem, + QStackedWidget, +) + +from .pages.camera_settings import camera_page +from .pages.automation_settings import 
automation_page +from .pages.machine_vision_settings import machine_vision_page +from .pages.navigation_settings import navigation_page + +class SettingsButton(QToolButton): + def __init__(self, tooltip: str = "Settings", parent: QWidget | None = None)-> None: + super().__init__(parent) + self.setToolTip(tooltip) + self.setText("⚙") + + self.setAutoRaise(True) + self.setCursor(Qt.CursorShape.PointingHandCursor) + self.setFixedWidth(34) + self.setFixedHeight(26) + +class SettingsDialog(QDialog): + def __init__(self, parent: QWidget | None = None) -> None: + super().__init__(parent) + self.setWindowTitle("Settings") + self.resize(860, 580) + + root = QHBoxLayout(self) + + self.sidebar = QListWidget() + self.sidebar.setFixedWidth(220) + + self.pages = QStackedWidget() + + buttons = QDialogButtonBox(QDialogButtonBox.StandardButton.Close) + buttons.rejected.connect(self.reject) + + left = QVBoxLayout() + left.addWidget(QLabel("Categories")) + left.addWidget(self.sidebar) + + right = QVBoxLayout() + right.addWidget(self.pages) + right.addWidget(buttons) + + root.addLayout(left) + root.addLayout(right) + + self._add_page("Camera", camera_page()) + self._add_page("Navigation", navigation_page()) + self._add_page("Automation", automation_page()) + self._add_page("Machine Vision", machine_vision_page()) + + self.sidebar.currentRowChanged.connect(self.pages.setCurrentIndex) + self.sidebar.setCurrentRow(0) + + def open_to(self, category: str) -> None: + for i in range(self.sidebar.count()): + item = self.sidebar.item(i) + if item and item.text() == category: + self.sidebar.setCurrentRow(i) + return + + def _add_page(self, name: str, page: QWidget) -> None: + self.pages.addWidget(page) + self.sidebar.addItem(QListWidgetItem(name)) \ No newline at end of file diff --git a/UI/state.py b/UI/state.py new file mode 100644 index 0000000..3b2e7f6 --- /dev/null +++ b/UI/state.py @@ -0,0 +1,32 @@ +from __future__ import annotations + +from dataclasses import dataclass + 
+@dataclass(frozen=True) +class State: + machine_state: str = "Disconnected" + automation_state: str = "Idle" + + activity: str = "-" + job_name: str = "-" + + progress_current: int = 0 + progress_total: int = 0 + + def format_status_text(self) -> str: + parts: list[str] = [f"{self.machine_state} • {self.automation_state}"] + if self.job_name != "-" and self.activity != "-": + parts.append(f"{self.job_name}: {self.activity}") + elif self.activity != "-": + parts.append(self.activity) + if self.progress_total > 0: + parts.append(f"{self.progress_current}/{self.progress_total}") + return " | ".join(parts) + + def status_type(self) -> str: + a = self.automation_state.strip().lower() + if a in ("finished", "done", "complete", "completed"): + return "done" + if a in ("running", "busy", "capturing", "moving", "scanning", "paused"): + return "active" + return "idle" \ No newline at end of file diff --git a/UI/style.py b/UI/style.py new file mode 100644 index 0000000..928f22f --- /dev/null +++ b/UI/style.py @@ -0,0 +1,176 @@ +from __future__ import annotations + +from PySide6.QtGui import QPalette, QColor +from PySide6.QtWidgets import QApplication + +RIGHT_SIDEBAR_WIDTH = 380 +OUTER_MARGIN = 10 +CAL_LEFT_WIDTH = 260 + +def apply_style(app: QApplication) -> None: + palette = app.palette() + + window_bg = QColor(215, 218, 222) + panel_bg = QColor(245, 246, 248) + text = QColor(35, 35, 35) + + palette.setColor(QPalette.ColorRole.Window, window_bg) + palette.setColor(QPalette.ColorRole.Base, panel_bg) + palette.setColor(QPalette.ColorRole.AlternateBase, QColor(235, 237, 240)) + + palette.setColor(QPalette.ColorRole.Text, text) + palette.setColor(QPalette.ColorRole.WindowText, text) + palette.setColor(QPalette.ColorRole.Button, QColor(238, 240, 243)) + palette.setColor(QPalette.ColorRole.ButtonText, text) + + app.setPalette(palette) + + header_bar_color = "#5f6368" # Dark Gray + header_bar_text_color = "#ffffff" + header_bar_selected_color = "#f28c28" # Orange + 
header_bar_selected_text_color = "#ffffff" + tab_corner_button = "#ffffff" + + header_bar_idle = "#5f6368" # Dark Gray + header_bar_active = "#f28c28" # Orange + header_bar_finished = "#2e9b51" # Green + + corner_status_line_color = "#ffffff" + + app.setStyleSheet( + f""" + QTabWidget::pane {{ border: none; }} + + /* Header Bar */ + QTabBar {{ + background : {header_bar_color}; + color: {header_bar_text_color}; + }} + QTabBar::Tab {{ + padding: 8px 12px; + margin: 0px; + border-radius: 0px; + background: transparent; + }} + QTabBar::tab:selected {{ + background: {header_bar_selected_color}; + color: {header_bar_selected_text_color}; + }} + + + /* Corner Widget */ + QWidget#TabCorner {{ + background : {header_bar_color}; + padding: 0px; + margin: 0px; + }} + QWidget#TabCorner QToolButton {{ + color: {tab_corner_button}; + background : transparent; + }} + + + /* Status panel in tab corner */ + QFrame#StatusBar {{ + padding: 0px 10px; + border-radius: 0px; + margin: 0px; + }} + QLabel#StatusLine {{ + color: {corner_status_line_color}; + font-weight: 800; + }} + + /* Status State */ + QFrame#StatusBar[kind="idle"] {{ + background: {header_bar_idle}; + }} + QFrame#StatusBar[kind="active"] {{ + background: {header_bar_active}; + }} + QFrame#StatusBar[kind="done"] {{ + background: {header_bar_finished}; + }} + + /* Status Progress Bar */ + QProgressBar#CornerStatusProgress {{ + border: none; + background: rgba(255,255,255,0.22); + border-radius: 4px; + height: 8px; + + color: white; + font-weight: 800; + }} + QProgressBar#CornerStatusProgress::chunk {{ + background: rgba(255,255,255,0.95); + border-radius: 4px; + }} + + + + + /* Collapsible section box */ + QFrame#CollapsibleSection {{ + background: rgba(255,255,255,0.85); + border: 1px solid rgba(0,0,0,0.10); + border-radius: 10px; + }} + + /* Full-width header strip: dark grey */ + QFrame#SectionHeader {{ + background: #5f6368; + border-top-left-radius: 10px; + border-top-right-radius: 10px; + border-bottom: 1px solid 
rgba(0,0,0,0.10); + }} + QLabel#SectionHeaderTitle, QFrame#SectionHeader QLabel {{ + color: white; + font-weight: 800; + }} + + /* When collapsed: header rounds bottom corners too (prevents “sticking out” corners) */ + QFrame#SectionHeader[collapsed="true"] {{ + border-bottom-left-radius: 10px; + border-bottom-right-radius: 10px; + border-bottom: none; + }} + + QListWidget#SampleList {{ + background: rgba(255,255,255,0.95); + border: 1px solid rgba(0,0,0,0.10); + border-radius: 10px; + }} + + QFrame#StepCard {{ + background: rgba(0,0,0,0.03); + border: 1px solid rgba(0,0,0,0.06); + border-radius: 10px; + }} + + /* Calibration selection panels: flat */ + QFrame#CalLeft, QFrame#CalMid {{ + background: rgba(255,255,255,0.85); + border: 1px solid rgba(0,0,0,0.10); + border-radius: 12px; + }} + + /* Selected calibration title bar */ + QFrame#CalTitleBar {{ + background: rgba(0,0,0,0.10); + border-top-left-radius: 12px; + border-top-right-radius: 12px; + border-bottom: 1px solid rgba(0,0,0,0.08); + }} + QLabel#CalTitleText {{ + font-size: 18px; + font-weight: 900; + color: rgba(0,0,0,0.80); + }} + QLabel#CalNotesText {{ + color: rgba(0,0,0,0.62); + }} + + + """ + ) \ No newline at end of file diff --git a/UI/tabs/base_tab.py b/UI/tabs/base_tab.py new file mode 100644 index 0000000..62351f2 --- /dev/null +++ b/UI/tabs/base_tab.py @@ -0,0 +1,15 @@ +from __future__ import annotations + +from PySide6.QtWidgets import QHBoxLayout, QWidget + +from ..style import OUTER_MARGIN + +class CameraWithSidebarPage(QWidget): + def __init__(self, camera_widget: QWidget, sidebar_widget: QWidget, parent: QWidget | None = None): + super().__init__(parent) + + root = QHBoxLayout(self) + root.setContentsMargins(OUTER_MARGIN, OUTER_MARGIN, OUTER_MARGIN, OUTER_MARGIN) + root.setSpacing(OUTER_MARGIN) + root.addWidget(camera_widget, 1) + root.addWidget(sidebar_widget, 0) \ No newline at end of file diff --git a/UI/tabs/calibration_tab.py b/UI/tabs/calibration_tab.py new file mode 100644 index 
0000000..5749a96 --- /dev/null +++ b/UI/tabs/calibration_tab.py @@ -0,0 +1,16 @@ +from __future__ import annotations + +from PySide6.QtWidgets import ( + QCheckBox, + QFormLayout, + QGroupBox, + QHBoxLayout, + QLineEdit, + QPushButton, + QVBoxLayout, + QWidget, +) + +class CalibrationTab(QWidget): + def __init__(self) -> None: + super().__init__() \ No newline at end of file diff --git a/UI/tabs/logs_tab.py b/UI/tabs/logs_tab.py new file mode 100644 index 0000000..fd28834 --- /dev/null +++ b/UI/tabs/logs_tab.py @@ -0,0 +1,135 @@ +from __future__ import annotations + +import subprocess +import sys +from pathlib import Path + +from PySide6.QtCore import Qt, QTimer +from PySide6.QtWidgets import ( + QHBoxLayout, + QPushButton, + QTextEdit, + QVBoxLayout, + QWidget, +) + +from logger import get_logger + + +class LogsTab(QWidget): + """Logs tab showing application logs with controls""" + + def __init__(self) -> None: + super().__init__() + + # Log display + self._log_display = QTextEdit() + self._log_display.setReadOnly(True) + self._log_display.setStyleSheet(""" + QTextEdit { + background-color: #ffffff; + color: #000000; + font-family: 'Consolas', 'Courier New', monospace; + font-size: 10pt; + border: 1px solid #cccccc; + } + """) + + # Buttons + self._clear_btn = QPushButton("Clear Display") + self._clear_btn.clicked.connect(self._clear_display) + + self._open_folder_btn = QPushButton("Open Log Folder") + self._open_folder_btn.clicked.connect(self._open_log_folder) + + # Auto-scroll checkbox + from PySide6.QtWidgets import QCheckBox + self._auto_scroll_check = QCheckBox("Auto-scroll") + self._auto_scroll_check.setChecked(True) + + # Button layout + button_layout = QHBoxLayout() + button_layout.addWidget(self._clear_btn) + button_layout.addWidget(self._open_folder_btn) + button_layout.addStretch() + button_layout.addWidget(self._auto_scroll_check) + + # Main layout + layout = QVBoxLayout(self) + layout.addWidget(self._log_display, 1) + layout.addLayout(button_layout) 
+ +        # Register with logger +        self._logger = get_logger() +        self._logger.register_callback(self._on_log_message) + +        # Add initial message +        self._log_display.append(f"Logs directory: {self._logger.get_log_directory()}") +        self._log_display.append("=" * 80) + +    def _on_log_message(self, level: str, message: str): +        """ +        Handle incoming log message. +        This is called from the logger for each message. +        """ +        # Format with color based on level +        color = self._get_level_color(level) +        formatted = f'<span style="color: {color}">[{level}] {self._escape_html(message)}</span>' + +        self._log_display.append(formatted) + +        # Auto-scroll to bottom if enabled +        if self._auto_scroll_check.isChecked(): +            scrollbar = self._log_display.verticalScrollBar() +            scrollbar.setValue(scrollbar.maximum()) + +    def _get_level_color(self, level: str) -> str: +        """Get color for log level""" +        colors = { +            'DEBUG': '#666666', +            'INFO': '#0066cc', +            'WARNING': '#cc6600', +            'ERROR': '#cc0000', +            'CRITICAL': '#990000', +        } +        return colors.get(level, '#000000') + +    def _escape_html(self, text: str) -> str: +        """Escape HTML special characters""" +        return (text +                .replace('&', '&amp;') +                .replace('<', '&lt;') +                .replace('>', '&gt;') +                .replace('"', '&quot;') +                .replace("'", '&#39;')) + +    def _clear_display(self): +        """Clear the log display (doesn't delete log files)""" +        self._log_display.clear() +        self._log_display.append(f"Logs directory: {self._logger.get_log_directory()}") +        self._log_display.append("=" * 80) +        self._log_display.append("Display cleared") + +    def _open_log_folder(self): +        """Open the log folder in file explorer""" +        log_dir = self._logger.get_log_directory() + +        try: +            if sys.platform == 'win32': +                # Windows +                subprocess.Popen(['explorer', str(log_dir)]) +            elif sys.platform == 'darwin': +                # macOS +                subprocess.Popen(['open', str(log_dir)]) +            else: +                # Linux +                subprocess.Popen(['xdg-open', str(log_dir)]) + +            self._logger.info(f"Opened log folder: {log_dir}") +        except Exception as e: +            self._logger.error(f"Failed to open log folder: {e}") 
+ + def closeEvent(self, event): + """Unregister from logger when widget closes""" + self._logger.unregister_callback(self._on_log_message) + super().closeEvent(event) diff --git a/UI/tabs/navigate_tab.py b/UI/tabs/navigate_tab.py new file mode 100644 index 0000000..f2f118e --- /dev/null +++ b/UI/tabs/navigate_tab.py @@ -0,0 +1,51 @@ +from __future__ import annotations + +from PySide6.QtWidgets import ( + QCheckBox, + QFormLayout, + QGroupBox, + QHBoxLayout, + QLineEdit, + QPushButton, + QVBoxLayout, + QWidget, + QScrollArea, + QFrame +) +from UI.style import RIGHT_SIDEBAR_WIDTH +from UI.tabs.base_tab import CameraWithSidebarPage + +from UI.widgets.camera_preview import CameraPreview + +class NavigateTab(CameraWithSidebarPage): + def __init__(self, parent: QWidget | None = None) -> None: + super().__init__(CameraPreview(), self._make_sidebar(), parent) + + def _make_sidebar(self) -> QWidget: + sidebar_container = QWidget() + sidebar_container.setFixedWidth(RIGHT_SIDEBAR_WIDTH) + + sidebar_layout = QVBoxLayout(sidebar_container) + sidebar_layout.setContentsMargins(0, 0, 0, 0) + sidebar_layout.setSpacing(10) + + content = QWidget() + content_layout = QVBoxLayout(content) + content_layout.setContentsMargins(0, 0, 0, 0) + content_layout.setSpacing(10) + + # Start Widgets + + + # End Widgets + + content_layout.addStretch(1) + sidebar_layout.addWidget(self._wrap_scroll(content), 1) + return sidebar_container + + def _wrap_scroll(self, widget: QWidget) -> QScrollArea: + scroll = QScrollArea() + scroll.setWidgetResizable(True) + scroll.setFrameShape(QFrame.Shape.NoFrame) + scroll.setWidget(widget) + return scroll \ No newline at end of file diff --git a/UI/tabs/project_tab.py b/UI/tabs/project_tab.py new file mode 100644 index 0000000..bc25439 --- /dev/null +++ b/UI/tabs/project_tab.py @@ -0,0 +1,16 @@ +from __future__ import annotations + +from PySide6.QtWidgets import ( + QCheckBox, + QFormLayout, + QGroupBox, + QHBoxLayout, + QLineEdit, + QPushButton, + QVBoxLayout, + 
QWidget, +) + +class ProjectTab(QWidget): + def __init__(self) -> None: + super().__init__() \ No newline at end of file diff --git a/UI/widgets/camera_controls_widget.py b/UI/widgets/camera_controls_widget.py new file mode 100644 index 0000000..1880d87 --- /dev/null +++ b/UI/widgets/camera_controls_widget.py @@ -0,0 +1,11 @@ +from __future__ import annotations + +from PySide6.QtWidgets import QWidget + +class CameraControlsWidget(QWidget): + def __init__(self, parent: QWidget | None = None): + super().__init__(parent) + + + + diff --git a/UI/widgets/camera_preview.py b/UI/widgets/camera_preview.py new file mode 100644 index 0000000..f5e47c2 --- /dev/null +++ b/UI/widgets/camera_preview.py @@ -0,0 +1,241 @@ +from __future__ import annotations + +from typing import Optional, Any +from PySide6.QtCore import Qt, Signal, QTimer +from PySide6.QtGui import QImage, QPixmap +from PySide6.QtWidgets import QFrame, QLabel, QVBoxLayout, QWidget + +from app_context import get_app_context +from camera.amscope_camera import AmscopeCamera +from camera.base_camera import BaseCamera, CameraInfo +from logger import get_logger + + +class CameraPreview(QFrame): + """Camera Preview Area with live streaming""" + + # Signal for camera events (thread-safe) + camera_event = Signal(int) + + def __init__(self, parent: QWidget | None = None) -> None: + super().__init__(parent) + self.setFrameShape(QFrame.Shape.NoFrame) + + # Camera state + self._camera: Optional[BaseCamera] = None + self._camera_info: Optional[CameraInfo] = None + self._img_width = 0 + self._img_height = 0 + self._img_buffer: Optional[bytes] = None + self._is_streaming = False + self._no_camera_logged = False # Track if we've already logged no camera message + + # UI elements + self._video_label = QLabel() + self._video_label.setAlignment(Qt.AlignmentFlag.AlignCenter) + self._video_label.setScaledContents(False) + self._video_label.setMinimumSize(1, 1) # Allow shrinking + from PySide6.QtWidgets import QSizePolicy + 
self._video_label.setSizePolicy(QSizePolicy.Policy.Ignored, QSizePolicy.Policy.Ignored) + self._video_label.setStyleSheet("color: #888; font-size: 16px;") + self._video_label.setText("Initializing camera...") + + layout = QVBoxLayout(self) + layout.setContentsMargins(0, 0, 0, 0) + layout.setSpacing(0) + layout.addWidget(self._video_label, 1) + + self.setStyleSheet("QFrame { background: #000000; }") + + # Timer for checking camera availability + self._init_timer = QTimer(self) + self._init_timer.timeout.connect(self._try_initialize_camera) + + # Connect camera event signal + self.camera_event.connect(self._on_camera_event) + + # Start initialization + self._init_timer.start(500) + + def _try_initialize_camera(self): + """Try to initialize and connect to camera""" + self._init_timer.stop() + + logger = get_logger() + + # Get camera from app context + ctx = get_app_context() + self._camera = ctx.camera + + if self._camera is None: + self._video_label.setText("No camera available - SDK not loaded") + logger.error("No camera available - SDK not loaded") + return + + # Try to enumerate and connect to first camera + try: + cameras = AmscopeCamera.enumerate_cameras() + + if len(cameras) == 0: + self._video_label.setText("No camera detected") + if not self._no_camera_logged: + logger.warning("No camera connected") + self._no_camera_logged = True + # Retry in a few seconds + self._init_timer.start(3000) + return + + # Camera found, reset flag + self._no_camera_logged = False + + # Use first camera + self._camera_info = cameras[0] + self._open_camera() + + except Exception as e: + self._video_label.setText(f"Camera error: {str(e)}") + logger.error(f"Camera initialization error: {e}") + + def _open_camera(self): + """Open and start streaming from camera""" + if not self._camera or not self._camera_info: + return + + # Don't re-open if already streaming + if self._is_streaming and self._camera.is_open: + return + + logger = get_logger() + + try: + # Set camera info for Amscope 
camera + if isinstance(self._camera, AmscopeCamera): + self._camera.set_camera_info(self._camera_info) + + # Open camera + if not self._camera.open(self._camera_info.id): + self._video_label.setText("Failed to open camera") + logger.error("Failed to open camera") + return + + # Get current resolution + res_index, width, height = self._camera.get_current_resolution() + self._img_width = width + self._img_height = height + + # Allocate image buffer + if isinstance(self._camera, AmscopeCamera): + buffer_size = AmscopeCamera.calculate_buffer_size(width, height, 24) + self._img_buffer = bytes(buffer_size) + + # Enable auto exposure by default + self._camera.set_auto_exposure(True) + + # Start capture + if not self._camera.start_capture(self._camera_callback, self): + self._camera.close() + self._video_label.setText("Failed to start camera stream") + logger.error("Failed to start camera stream") + return + + self._is_streaming = True + # Clear text when streaming starts - video will show instead + self._video_label.setText("") + logger.info(f"Streaming: {self._camera_info.displayname} ({width}x{height})") + + except Exception as e: + self._video_label.setText(f"Error: {str(e)}") + logger.error(f"Camera open error: {e}") + + @staticmethod + def _camera_callback(event: int, context: Any): + """ + Camera event callback (called from camera thread). + Forward to UI thread via signal. 
+ """ + if isinstance(context, CameraPreview): + context.camera_event.emit(event) + + def _on_camera_event(self, event: int): + """Handle camera events in UI thread""" + if not self._camera or not self._camera.is_open: + return + + # Get event constants + if isinstance(self._camera, AmscopeCamera): + events = AmscopeCamera.get_event_constants() + + if event == events.IMAGE: + self._handle_image_event() + elif event == events.ERROR: + self._handle_error() + elif event == events.DISCONNECTED: + self._handle_disconnected() + + def _handle_image_event(self): + """Handle new image from camera""" + if not self._camera or not self._img_buffer: + return + + try: + # Pull image into buffer + if self._camera.pull_image(self._img_buffer, 24): + # Create QImage from buffer + if isinstance(self._camera, AmscopeCamera): + stride = AmscopeCamera.calculate_stride(self._img_width, 24) + image = QImage( + self._img_buffer, + self._img_width, + self._img_height, + stride, + QImage.Format.Format_RGB888 + ) + + # Make a deep copy to avoid keeping reference to buffer + image = image.copy() + + # Scale to fit label while maintaining aspect ratio + if self._video_label.width() > 0 and self._video_label.height() > 0: + scaled_image = image.scaled( + self._video_label.width(), + self._video_label.height(), + Qt.AspectRatioMode.KeepAspectRatio, + Qt.TransformationMode.FastTransformation # Use fast transformation to reduce memory + ) + self._video_label.setPixmap(QPixmap.fromImage(scaled_image)) + except Exception as e: + get_logger().error(f"Error handling image: {e}") + + def _handle_error(self): + """Handle camera error""" + self._video_label.setText("Camera error occurred") + get_logger().error("Camera error occurred") + self._close_camera() + # Try to reconnect + self._init_timer.start(3000) + + def _handle_disconnected(self): + """Handle camera disconnection""" + self._video_label.setText("Camera disconnected") + get_logger().warning("Camera disconnected") + self._close_camera() + # Try 
to reconnect + self._init_timer.start(3000) + + def _close_camera(self): + """Close camera and cleanup""" + self._is_streaming = False + if self._camera: + self._camera.close() + self._img_buffer = None + # Don't clear the label here - let error messages show + + def closeEvent(self, event): + """Handle widget close event""" + self._close_camera() + super().closeEvent(event) + + def cleanup(self): + """Cleanup resources when widget is being destroyed""" + self._init_timer.stop() + self._close_camera() diff --git a/UI/widgets/collapsible_section.py b/UI/widgets/collapsible_section.py new file mode 100644 index 0000000..994cd0a --- /dev/null +++ b/UI/widgets/collapsible_section.py @@ -0,0 +1,114 @@ +from __future__ import annotations + +from PySide6.QtCore import Qt +from PySide6.QtWidgets import ( + QFrame, + QHBoxLayout, + QLabel, + QVBoxLayout, + QWidget, + QSizePolicy, +) + +from ..settings.settings_main import SettingsButton + +class CollapsibleSection(QFrame): + """ + Collapsible boxed section: + - full-width header strip + - collapses entire widget height when collapsed + - callback on collapse/expand (useful to adjust parent stretch) + """ + + def __init__( + self, + title: str, + *, + on_settings=None, + start_collapsed: bool = False, + on_collapsed_changed=None, + parent: QWidget | None = None, + ) -> None: + super().__init__(parent) + self.setFrameShape(QFrame.Shape.StyledPanel) + self.setObjectName("CollapsibleSection") + self._on_collapsed_changed = on_collapsed_changed + + root = QVBoxLayout(self) + root.setContentsMargins(0, 0, 0, 0) + root.setSpacing(0) + + self.header = QFrame() + self.header.setObjectName("SectionHeader") + self.header.setCursor(Qt.CursorShape.PointingHandCursor) + self.header.setProperty("collapsed", False) + + header_layout = QHBoxLayout(self.header) + header_layout.setContentsMargins(10, 7, 8, 7) + header_layout.setSpacing(8) + + self.caret = QLabel("▾") + self.caret.setFixedWidth(16) + 
self.caret.setAlignment(Qt.AlignmentFlag.AlignCenter) + + self.title_lbl = QLabel(title) + self.title_lbl.setObjectName("SectionHeaderTitle") + + header_layout.addWidget(self.caret) + header_layout.addWidget(self.title_lbl) + header_layout.addStretch(1) + + if on_settings is not None: + gear = SettingsButton("Section settings") + gear.clicked.connect(on_settings) + header_layout.addWidget(gear) + + root.addWidget(self.header) + + self.content = QWidget() + self.content.setObjectName("SectionContent") + self.content_layout = QVBoxLayout(self.content) + self.content_layout.setContentsMargins(10, 10, 10, 12) + self.content_layout.setSpacing(10) + root.addWidget(self.content) + + # natural height unless parent gives stretch + self.setSizePolicy(QSizePolicy.Policy.Preferred, QSizePolicy.Policy.Maximum) + + self._collapsed = False + self.header.mousePressEvent = self._on_header_click # type: ignore + self.set_collapsed(start_collapsed) + + def _on_header_click(self, event) -> None: + self.set_collapsed(not self._collapsed) + + def set_collapsed(self, collapsed: bool) -> None: + if self._collapsed == collapsed: + return + self._collapsed = collapsed + + self.content.setVisible(not collapsed) + self.caret.setText("▸" if collapsed else "▾") + + # inform stylesheet for corner rounding on collapse + self.header.setProperty("collapsed", collapsed) + self.header.style().unpolish(self.header) + self.header.style().polish(self.header) + + # collapse entire widget height + header_h = self.header.sizeHint().height() + if collapsed: + self.setMaximumHeight(header_h + 2) + self.setMinimumHeight(header_h + 2) + else: + self.setMinimumHeight(0) + self.setMaximumHeight(16777215) + + if self._on_collapsed_changed is not None: + self._on_collapsed_changed(collapsed) + + def is_collapsed(self) -> bool: + return self._collapsed + + def layout_for_content(self) -> QVBoxLayout: + return self.content_layout diff --git a/app_context.py b/app_context.py new file mode 100644 index 
0000000..fa0b5f5 --- /dev/null +++ b/app_context.py @@ -0,0 +1,73 @@ +""" +Application context for managing shared resources and state. +Provides a singleton pattern for accessing camera and other shared resources. +""" + +from typing import Optional +from camera.base_camera import BaseCamera +from camera.amscope_camera import AmscopeCamera +from logger import get_logger + + +class AppContext: + """ + Singleton application context managing shared resources. + """ + _instance: Optional['AppContext'] = None + + def __new__(cls): + if cls._instance is None: + cls._instance = super().__new__(cls) + cls._instance._initialized = False + return cls._instance + + def __init__(self): + if self._initialized: + return + + self._camera: Optional[BaseCamera] = None + self._camera_initialized = False + self._initialized = True + + @property + def camera(self) -> Optional[BaseCamera]: + """Get the camera instance, initializing if needed""" + if not self._camera_initialized: + self._initialize_camera() + return self._camera + + def _initialize_camera(self): + """Initialize the camera subsystem""" + if self._camera_initialized: + return + + logger = get_logger() + try: + # Load SDK + AmscopeCamera.ensure_sdk_loaded() + + # Enable GigE support + AmscopeCamera.enable_gige(None, None) + + # Create camera instance + self._camera = AmscopeCamera() + self._camera_initialized = True + + logger.info("Camera subsystem initialized") + except Exception as e: + logger.error(f"Failed to initialize camera subsystem: {e}") + self._camera = None + self._camera_initialized = True + + def cleanup(self): + """Cleanup resources""" + if self._camera and self._camera.is_open: + self._camera.close() + self._camera = None + self._camera_initialized = False + + +# Global instance accessor +def get_app_context() -> AppContext: + """Get the global application context""" + return AppContext() diff --git a/camera/amscope.py b/camera/amscope.py deleted file mode 100644 index f96390d..0000000 --- 
a/camera/amscope.py +++ /dev/null @@ -1,250 +0,0 @@ -import time -from pathlib import Path -import numpy as np -from PIL import Image - -import os -import sys -import platform -import importlib.util -import ctypes -import zipfile - -from .base_camera import BaseCamera -from .camera_settings import CameraSettings, CameraSettingsManager -from image_processing.analyzers import ImageAnalyzer - -class AmscopeCamera(BaseCamera): - # Optional explicit subdir override; otherwise BaseCamera will derive 'amscope' - CONFIG_SUBDIR = "amscope" - - def __init__(self): - # Minimal vendor state; BaseCamera handles common fields - self.amcam = None - self._callback_ref = None # must keep a reference to avoid garbage collection - self.buffer = None - self.camera = None - super().__init__() - - # Load vendor SDK before initialize() - def pre_initialize(self): - self._load_amcam() - - def _ensure_sdk(self, project_root: Path) -> tuple[Path, Path]: - """ - Ensure the AmScope SDK is available under: - project_root / "3rd_party_imports" / "official_amscope" - If not, extract the first amcamsdk*.zip in 3rd_party_imports. - Returns (sdk_root_dir, sdk_py_path). - """ - sdk_dir = project_root / "3rd_party_imports" - official_dir = sdk_dir / "official_amscope" - sdk_py = official_dir / "python" / "amcam.py" - - # Already extracted? 
- if sdk_py.is_file(): - return official_dir, sdk_py - - # Look for a zip starting with "amcamsdk" - for f in sdk_dir.iterdir(): - if f.is_file() and f.name.lower().startswith("amcamsdk") and f.suffix.lower() == ".zip": - with zipfile.ZipFile(f, "r") as zf: - zf.extractall(official_dir) - break - else: - raise RuntimeError(f"No AmScope SDK found in {sdk_dir}") - - # Handle case with nested folder - if not sdk_py.is_file(): - subdirs = [d for d in official_dir.iterdir() if d.is_dir()] - if len(subdirs) == 1 and (subdirs[0] / "python" / "amcam.py").is_file(): - tmp = subdirs[0] - for item in tmp.iterdir(): - shutil.move(str(item), official_dir) - tmp.rmdir() - - if not sdk_py.is_file(): - raise RuntimeError("Extracted SDK does not contain python/amcam.py") - - return official_dir, sdk_py - - def _load_amcam(self): - project_root = Path(__file__).resolve().parent.parent - - sdk_root, sdk_py = self._ensure_sdk(project_root) - - # Determine platform and architecture - system = platform.system().lower() - machine = platform.machine().lower() - - if system == 'windows': - dll_dir = os.path.join(sdk_root, 'win', 'x64') - elif system == 'linux': - arch_map = { - 'x86_64': 'x64', - 'amd64': 'x64', - 'i386': 'x86', - 'i686': 'x86', - 'arm64': 'arm64', - 'aarch64': 'arm64', - 'armv7l': 'armhf', - 'armv6l': 'armel' - } - subarch = arch_map.get(machine) - if not subarch: - raise RuntimeError(f"Unsupported Linux architecture: {machine}") - dll_dir = os.path.join(sdk_root, 'linux', subarch) - elif system == 'darwin': - dll_dir = os.path.join(sdk_root, 'mac') - else: - raise RuntimeError(f"Unsupported operating system: {system}") - - # Update PATH or add_dll_directory for shared library resolution - if system == 'windows': - if hasattr(os, 'add_dll_directory'): - os.add_dll_directory(dll_dir) - else: - os.environ['PATH'] = dll_dir + os.pathsep + os.environ.get('PATH', '') - else: - os.environ['LD_LIBRARY_PATH'] = dll_dir + os.pathsep + os.environ.get('LD_LIBRARY_PATH', '') - - # 
Dynamically import amcam.py and override __file__ so its LoadLibrary logic works - spec = importlib.util.spec_from_file_location("amcam", sdk_py) - amcam_module = importlib.util.module_from_spec(spec) - amcam_module.__file__ = os.path.join(dll_dir, 'amcam.py') # Trick __file__ logic - sys.modules["amcam"] = amcam_module - spec.loader.exec_module(amcam_module) - - self.amcam = amcam_module - - def initialize(self): - """Initialize the Amscope camera.""" - try: - available_cameras = self.amcam.Amcam.EnumV2() - if not available_cameras: - raise Exception("Failed to Find Amscope Camera") - - self.name = available_cameras[0].displayname - self.camera = self.amcam.Amcam.Open(available_cameras[0].id) - - if not self.camera: - raise Exception("Failed to open Amscope Camera") - - self.width, self.height = self.camera.get_Size() - self.buffer = bytes((self.width * 24 + 31) // 32 * 4 * self.height) - - if sys.platform == 'win32': - self.camera.put_Option(self.amcam.AMCAM_OPTION_BYTEORDER, 0) - - # Start the stream immediately after initialization - self.start_stream() - return True - - except self.amcam.HRESULTException as e: - print(f"Error initializing camera: {e}") - self.camera = None - return False - except Exception as e: - print(f"Unexpected error initializing camera: {e}") - self.camera = None - return False - - def start_stream(self): - """Start the camera stream with configured settings.""" - if self.camera is None: - print("Cannot start stream - camera not initialized") - return - - try: - # Load and apply settings from config/amscope/settings.yaml (via BaseCamera helpers) - self.load_and_apply_settings(filename="settings.yaml") - - # Start the pull mode BEFORE trying to stream - self.camera.StartPullModeWithCallback(self._camera_callback, self) - - except self.amcam.HRESULTException as e: - print(f"Error starting stream: {e}") - except Exception as e: - print(f"Unexpected error starting stream: {e}") - - def _apply_settings(self, settings: CameraSettings): - 
"""Apply camera settings to the hardware.""" - - # if camera is not initialized, don't apply settings - if not self.initialized: - return - - try: - self.camera.put_AutoExpoEnable(settings.auto_expo) - self.camera.put_AutoExpoTarget(settings.exposure) - self.camera.put_TempTint(settings.temp, settings.tint) - self.camera.put_LevelRange(settings.levelrange_low, settings.levelrange_high) - self.camera.put_Contrast(settings.contrast) - self.camera.put_Hue(settings.hue) - self.camera.put_Saturation(settings.saturation) - self.camera.put_Brightness(settings.brightness) - self.camera.put_Gamma(settings.gamma) - self.camera.put_Option(self.amcam.AMCAM_OPTION_SHARPENING, settings.sharpening) - self.camera.put_Option(self.amcam.AMCAM_OPTION_LINEAR, settings.linear) - - curve_options = {'Off': 0, 'Polynomial': 1, 'Logarithmic': 2} - self.camera.put_Option(self.amcam.AMCAM_OPTION_CURVE, curve_options.get(settings.curve, 1)) - - except self.amcam.HRESULTException as e: - print(f"Error applying settings: {e}") - - @staticmethod - def _camera_callback(event, _self): - """Handle camera events.""" - if event == _self.amcam.AMCAM_EVENT_IMAGE: - try: - _self.camera.PullImageV2(_self.buffer, 24, None) - - arr = np.frombuffer(_self.buffer, np.uint8).reshape((_self.height, _self.width, 3)) - _self.last_stream_array = arr # (H, W, 3), RGB - _self.last_stream_ts = time.time() - - except _self.amcam.HRESULTException as e: - print(f"Error in callback stream: {e}") - - elif event == _self.amcam.AMCAM_EVENT_STILLIMAGE: - _self._process_frame() # will set last_image, see below - - elif event == _self.amcam.AMCAM_EVENT_EXPO_START: - print("Exposure start event detected") - - def stream(self): - """This method is now mainly used for error handling and initialization""" - if self.camera is None: - print("Camera not initialized. 
Attempting to initialize...") - if not self.initialize(): - return - - # Ensure buffer is initialized - if self.buffer is None: - self.width, self.height = self.camera.get_Size() - self.buffer = bytes((self.width * 24 + 31) // 32 * 4 * self.height) - - def capture_image(self): - """Capture a still image.""" - self.is_taking_image = True - self.camera.Snap(0) - - def _process_frame(self): - self.is_taking_image = True - try: - w, h = self.camera.get_StillResolution(0) - buf = bytes(w * h * 3) - self.camera.PullStillImageV2(buf, 24, None) - arr = np.frombuffer(buf, np.uint8).reshape((h, w, 3)) - self.last_image = arr - self.last_image_ts = time.time() - except self.amcam.HRESULTException as e: - print(f"Error processing frame: {e}") - finally: - self.is_taking_image = False - - def update(self): - """Update camera frame.""" - if not self.is_taking_image and self.camera is not None: - self.stream() diff --git a/camera/amscope_camera.py b/camera/amscope_camera.py new file mode 100644 index 0000000..c4feb77 --- /dev/null +++ b/camera/amscope_camera.py @@ -0,0 +1,533 @@ +""" +Amscope camera implementation using the amcam SDK. +""" + +from typing import Tuple, Callable, Any, Optional, TYPE_CHECKING +from pathlib import Path +from camera.base_camera import BaseCamera, CameraResolution, CameraInfo +from logger import get_logger + +# Module-level reference to the loaded SDK +_amcam = None + +# Type hints for IDE support (won't execute at runtime when checking types) +if TYPE_CHECKING: + import amcam # This is just for type hints, won't actually import + + +class AmscopeCamera(BaseCamera): + """ + Amscope camera implementation using the amcam SDK. + Wraps the amcam library to conform to the BaseCamera interface. + + The SDK must be loaded before using this class: + AmscopeCamera.ensure_sdk_loaded() + + Or it will be loaded automatically on first use. 
+ """ + + # Class-level flag to track SDK loading + _sdk_loaded = False + + def __init__(self): + super().__init__() + + # Ensure SDK is loaded before instantiating + if not AmscopeCamera._sdk_loaded: + AmscopeCamera.ensure_sdk_loaded() + + self._hcam: Optional[Any] = None # Will be amcam.Amcam after SDK loads + self._camera_info: Optional[CameraInfo] = None + + @classmethod + def ensure_sdk_loaded(cls, sdk_path: Optional[Path] = None) -> bool: + """ + Ensure the Amscope SDK is loaded and ready to use. + + Args: + sdk_path: Optional path to SDK base directory. + If None, auto-detects from project structure. + + Returns: + True if SDK loaded successfully, False otherwise + """ + global _amcam + + if cls._sdk_loaded and _amcam is not None: + return True + + logger = get_logger() + + try: + from camera.sdk_loaders.amscope_sdk_loader import AmscopeSdkLoader + + loader = AmscopeSdkLoader(sdk_path) + _amcam = loader.load() + + cls._sdk_loaded = True + logger.info("Amscope SDK loaded successfully") + return True + + except Exception as e: + logger.warning(f"Failed to load Amscope SDK: {e}") + logger.info("Attempting fallback to direct import...") + + try: + # Fallback to direct import if loader fails + import amcam as amcam_module + _amcam = amcam_module + cls._sdk_loaded = True + logger.info("Amscope SDK loaded via direct import") + return True + except ImportError as ie: + logger.error(f"Direct import also failed: {ie}") + return False + + @staticmethod + def _get_sdk(): + """Get the loaded SDK module""" + global _amcam + if _amcam is None: + raise RuntimeError( + "Amscope SDK not loaded. Call AmscopeCamera.ensure_sdk_loaded() first." + ) + return _amcam + + # Class-level event constant accessors + @classmethod + def get_event_constants(cls): + """ + Get event constants as a namespace object. + Useful for accessing events without a camera instance. 
+ + Returns: + SimpleNamespace with event constants + """ + from types import SimpleNamespace + amcam = cls._get_sdk_static() + return SimpleNamespace( + IMAGE=amcam.AMCAM_EVENT_IMAGE, + EXPOSURE=amcam.AMCAM_EVENT_EXPOSURE, + TEMPTINT=amcam.AMCAM_EVENT_TEMPTINT, + STILLIMAGE=amcam.AMCAM_EVENT_STILLIMAGE, + ERROR=amcam.AMCAM_EVENT_ERROR, + DISCONNECTED=amcam.AMCAM_EVENT_DISCONNECTED + ) + + # Event type constants - these are properties since SDK loads dynamically + @property + def EVENT_IMAGE(self): + return self._get_sdk().AMCAM_EVENT_IMAGE + + @property + def EVENT_EXPOSURE(self): + return self._get_sdk().AMCAM_EVENT_EXPOSURE + + @property + def EVENT_TEMPTINT(self): + return self._get_sdk().AMCAM_EVENT_TEMPTINT + + @property + def EVENT_STILLIMAGE(self): + return self._get_sdk().AMCAM_EVENT_STILLIMAGE + + @property + def EVENT_ERROR(self): + return self._get_sdk().AMCAM_EVENT_ERROR + + @property + def EVENT_DISCONNECTED(self): + return self._get_sdk().AMCAM_EVENT_DISCONNECTED + + @property + def handle(self) -> Optional[Any]: + """Get the underlying amcam handle""" + return self._hcam + + def open(self, camera_id: str) -> bool: + """Open connection to Amscope camera""" + amcam = self._get_sdk() + try: + self._hcam = amcam.Amcam.Open(camera_id) + if self._hcam: + self._is_open = True + # Set RGB byte order for Qt compatibility + self._hcam.put_Option(amcam.AMCAM_OPTION_BYTEORDER, 0) + return True + return False + except self._get_sdk().HRESULTException: + return False + + def close(self): + """Close camera connection""" + if self._hcam: + self._hcam.Close() + self._hcam = None + self._is_open = False + self._callback = None + self._callback_context = None + self._camera_info = None + + def start_capture(self, callback: Callable, context: Any) -> bool: + """Start capturing frames with callback""" + if not self._hcam: + return False + + amcam = self._get_sdk() + try: + self._callback = callback + self._callback_context = context + 
self._hcam.StartPullModeWithCallback(self._event_callback_wrapper, self) + return True + except self._get_sdk().HRESULTException: + return False + + def stop_capture(self): + """Stop capturing frames""" + if self._hcam: + amcam = self._get_sdk() + try: + self._hcam.Stop() + except self._get_sdk().HRESULTException: + pass + + def pull_image(self, buffer: bytes, bits_per_pixel: int = 24) -> bool: + """Pull the latest image into buffer""" + if not self._hcam: + return False + + amcam = self._get_sdk() + try: + self._hcam.PullImageV4(buffer, 0, bits_per_pixel, 0, None) + return True + except self._get_sdk().HRESULTException: + return False + + def snap_image(self, resolution_index: int = 0) -> bool: + """Capture a still image""" + if not self._hcam: + return False + + amcam = self._get_sdk() + try: + self._hcam.Snap(resolution_index) + return True + except self._get_sdk().HRESULTException: + return False + + def pull_still_image(self, buffer: bytes, bits_per_pixel: int = 24) -> Tuple[bool, int, int]: + """ + Pull a still image into buffer + + Args: + buffer: Buffer to receive image data (should be large enough) + bits_per_pixel: Bits per pixel (typically 24) + + Returns: + Tuple of (success, width, height) + """ + if not self._hcam: + return False, 0, 0 + + amcam = self._get_sdk() + info = amcam.AmcamFrameInfoV3() + try: + # First peek to get dimensions + self._hcam.PullImageV3(None, 1, bits_per_pixel, 0, info) + if info.width > 0 and info.height > 0: + # Then pull the actual image + self._hcam.PullImageV3(buffer, 1, bits_per_pixel, 0, info) + return True, info.width, info.height + return False, 0, 0 + except self._get_sdk().HRESULTException: + return False, 0, 0 + + def get_resolutions(self) -> list[CameraResolution]: + """Get available preview resolutions""" + if not self._camera_info or not self._camera_info.model: + return [] + + resolutions = [] + for i in range(self._camera_info.model.preview): + res = self._camera_info.model.res[i] + 
resolutions.append(CameraResolution(res.width, res.height)) + return resolutions + + def get_current_resolution(self) -> Tuple[int, int, int]: + """Get current resolution index, width, and height""" + if not self._hcam or not self._camera_info: + return 0, 0, 0 + + res_index = self._hcam.get_eSize() + res = self._camera_info.model.res[res_index] + return res_index, res.width, res.height + + def set_resolution(self, resolution_index: int) -> bool: + """Set camera resolution""" + if not self._hcam: + return False + + amcam = self._get_sdk() + try: + self._hcam.put_eSize(resolution_index) + return True + except self._get_sdk().HRESULTException: + return False + + def get_exposure_range(self) -> Tuple[int, int, int]: + """Get exposure time range (min, max, default) in microseconds""" + if not self._hcam: + return 0, 0, 0 + + amcam = self._get_sdk() + try: + return self._hcam.get_ExpTimeRange() + except self._get_sdk().HRESULTException: + return 0, 0, 0 + + def get_exposure_time(self) -> int: + """Get current exposure time in microseconds""" + amcam = self._get_sdk() + if not self._hcam: + return 0 + + try: + return self._hcam.get_ExpoTime() + except self._get_sdk().HRESULTException: + return 0 + + def set_exposure_time(self, time_us: int) -> bool: + """Set exposure time in microseconds""" + amcam = self._get_sdk() + if not self._hcam: + return False + + try: + self._hcam.put_ExpoTime(time_us) + return True + except self._get_sdk().HRESULTException: + return False + + def get_gain_range(self) -> Tuple[int, int, int]: + """Get gain range (min, max, default) in percent""" + amcam = self._get_sdk() + if not self._hcam: + return 0, 0, 0 + + try: + return self._hcam.get_ExpoAGainRange() + except self._get_sdk().HRESULTException: + return 0, 0, 0 + + def get_gain(self) -> int: + """Get current gain in percent""" + amcam = self._get_sdk() + if not self._hcam: + return 0 + + try: + return self._hcam.get_ExpoAGain() + except self._get_sdk().HRESULTException: + return 0 + + def 
set_gain(self, gain_percent: int) -> bool: + """Set gain in percent""" + amcam = self._get_sdk() + if not self._hcam: + return False + + try: + self._hcam.put_ExpoAGain(gain_percent) + return True + except self._get_sdk().HRESULTException: + return False + + def get_auto_exposure(self) -> bool: + """Get auto exposure state""" + amcam = self._get_sdk() + if not self._hcam: + return False + + try: + return self._hcam.get_AutoExpoEnable() == 1 + except self._get_sdk().HRESULTException: + return False + + def set_auto_exposure(self, enabled: bool) -> bool: + """Set auto exposure state""" + amcam = self._get_sdk() + if not self._hcam: + return False + + try: + self._hcam.put_AutoExpoEnable(1 if enabled else 0) + return True + except self._get_sdk().HRESULTException: + return False + + def supports_white_balance(self) -> bool: + """Check if camera supports white balance (not monochrome)""" + if not self._camera_info or not self._camera_info.model: + return False + + amcam = self._get_sdk() + return (self._camera_info.model.flag & amcam.AMCAM_FLAG_MONO) == 0 + + def get_white_balance_range(self) -> Tuple[Tuple[int, int], Tuple[int, int]]: + """Get white balance range ((temp_min, temp_max), (tint_min, tint_max))""" + amcam = self._get_sdk() + return ((amcam.AMCAM_TEMP_MIN, amcam.AMCAM_TEMP_MAX), + (amcam.AMCAM_TINT_MIN, amcam.AMCAM_TINT_MAX)) + + def get_white_balance(self) -> Tuple[int, int]: + """Get current white balance (temperature, tint)""" + amcam = self._get_sdk() + if not self._hcam: + return amcam.AMCAM_TEMP_DEF, amcam.AMCAM_TINT_DEF + + try: + return self._hcam.get_TempTint() + except self._get_sdk().HRESULTException: + return amcam.AMCAM_TEMP_DEF, amcam.AMCAM_TINT_DEF + + def set_white_balance(self, temperature: int, tint: int) -> bool: + """Set white balance""" + amcam = self._get_sdk() + if not self._hcam: + return False + + try: + self._hcam.put_TempTint(temperature, tint) + return True + except self._get_sdk().HRESULTException: + return False + + def 
auto_white_balance(self) -> bool: + """Perform one-time auto white balance""" + amcam = self._get_sdk() + if not self._hcam: + return False + + try: + self._hcam.AwbOnce() + return True + except self._get_sdk().HRESULTException: + return False + + def get_frame_rate(self) -> Tuple[int, int, int]: + """Get frame rate info (frames_in_period, time_period_ms, total_frames)""" + amcam = self._get_sdk() + if not self._hcam: + return 0, 0, 0 + + try: + return self._hcam.get_FrameRate() + except self._get_sdk().HRESULTException: + return 0, 0, 0 + + @classmethod + def enumerate_cameras(cls) -> list[CameraInfo]: + """Enumerate available Amscope cameras""" + # Ensure SDK is loaded + if not cls._sdk_loaded: + cls.ensure_sdk_loaded() + + amcam = cls._get_sdk_static() + cameras = [] + arr = amcam.Amcam.EnumV2() + for cam in arr: + info = CameraInfo( + id=cam.id, + displayname=cam.displayname, + model=cam.model + ) + cameras.append(info) + return cameras + + @staticmethod + def _get_sdk_static(): + """Static method to get SDK (for use in classmethods)""" + global _amcam + if _amcam is None: + raise RuntimeError( + "Amscope SDK not loaded. Call AmscopeCamera.ensure_sdk_loaded() first." 
+ ) + return _amcam + + def set_camera_info(self, info: CameraInfo): + """Set camera information (needed before opening)""" + self._camera_info = info + + def supports_still_capture(self) -> bool: + """Check if camera supports separate still image capture""" + if not self._camera_info or not self._camera_info.model: + return False + + return self._camera_info.model.still > 0 + + def get_still_resolutions(self) -> list[CameraResolution]: + """Get available still image resolutions""" + if not self._camera_info or not self._camera_info.model: + return [] + + resolutions = [] + for i in range(self._camera_info.model.still): + res = self._camera_info.model.res[i] + resolutions.append(CameraResolution(res.width, res.height)) + return resolutions + + @staticmethod + def calculate_buffer_size(width: int, height: int, bits_per_pixel: int = 24) -> int: + """ + Calculate required buffer size for image data + + Args: + width: Image width in pixels + height: Image height in pixels + bits_per_pixel: Bits per pixel (typically 24 for RGB) + + Returns: + Buffer size in bytes + """ + amcam = AmscopeCamera._get_sdk_static() + return amcam.TDIBWIDTHBYTES(width * bits_per_pixel) * height + + @staticmethod + def calculate_stride(width: int, bits_per_pixel: int = 24) -> int: + """ + Calculate image stride (bytes per row) + + Args: + width: Image width in pixels + bits_per_pixel: Bits per pixel (typically 24 for RGB) + + Returns: + Stride in bytes + """ + amcam = AmscopeCamera._get_sdk_static() + return amcam.TDIBWIDTHBYTES(width * bits_per_pixel) + + @classmethod + def enable_gige(cls, callback: Optional[Callable] = None, context: Any = None): + """ + Enable GigE camera support + + Args: + callback: Optional callback for GigE events + context: Optional context for callback + """ + # Ensure SDK is loaded + if not cls._sdk_loaded: + cls.ensure_sdk_loaded() + + amcam = cls._get_sdk_static() + amcam.Amcam.GigeEnable(callback, context) + + def _event_callback_wrapper(self, event: int, 
context: Any): + """ + Internal wrapper for camera events. + Translates amcam events to the callback registered with start_capture. + """ + if self._callback and self._callback_context: + # Call the registered callback with the event + self._callback(event, self._callback_context) diff --git a/camera/base_camera.py b/camera/base_camera.py index d39319c..df89410 100644 --- a/camera/base_camera.py +++ b/camera/base_camera.py @@ -1,304 +1,367 @@ +""" +Base camera class that defines the interface for camera operations. +All specific camera implementations should inherit from this class. +""" + from abc import ABC, abstractmethod -import time +from typing import Optional, Tuple, Callable, Any +from dataclasses import dataclass from pathlib import Path -from PIL import Image -import tkinter as tk -import numpy as np -from tkinter import filedialog -from .camera_settings import ( - CameraSettings, - CameraSettingsManager, - ACTIVE_FILENAME, - DEFAULT_FILENAME -) -from .image_name_formatter import ImageNameFormatter +@dataclass +class CameraResolution: + """Represents a camera resolution""" + width: int + height: int + + def __str__(self): + return f"{self.width}*{self.height}" -class BaseCamera(ABC): - """Abstract base class defining the camera interface.""" +@dataclass +class CameraInfo: + """Basic camera information""" + id: str + displayname: str + model: Any # Model-specific information - # Subclasses may override to control config subfolder name - CONFIG_SUBDIR: str | None = None +class BaseCamera(ABC): + """ + Abstract base class for camera operations. + Defines the interface that all camera implementations must follow. + + Each camera implementation should handle its own SDK loading in the + ensure_sdk_loaded() method. This is typically called once before any + camera operations. 
+ """ + + # Class-level flag to track if SDK has been loaded + _sdk_loaded = False + def __init__(self): - # Public-ish, common state - self.name = "" - self.is_taking_image = False - self.last_image: np.ndarray | None = None # (H, W, 3) RGB uint8 - self.last_stream_array: np.ndarray | None = None # (H, W, 3) RGB uint8 - - self.last_image_ts: float = 0.0 - self.last_stream_ts: float = 0.0 - - self.initialized = False - # Safe default for save_image() until a subclass loads real settings - self.settings = CameraSettings() - self._scope = self.get_impl_key() - CameraSettingsManager.scope_dir(self._scope) - - # Camera-native dimensions (subclasses may set real values during initialize()) - self.width = 1280 - self.height = 720 - - # Capture path - self.capture_path = "./output/" - self.image_name_formatter = ImageNameFormatter(template="{d:%Y%m%d_%H%M%S}") - - # Config roots - self._scope = self.get_impl_key() - CameraSettingsManager.scope_dir(self._scope) - self.impl_config_dir = self.get_config_dir() # e.g., config/amscope - self.impl_config_dir.mkdir(parents=True, exist_ok=True) - - # Allow subclasses to do pre-initialize work (e.g., load SDKs) before initialize() - self.pre_initialize() - self.initialized = self.initialize() - - # ----- Lifecycle hooks ----- - def pre_initialize(self): - """Optional hook to run before initialize(); subclasses may override.""" + self._is_open = False + self._callback = None + self._callback_context = None + + @property + def is_open(self) -> bool: + """Check if camera is currently open""" + return self._is_open + + @classmethod + @abstractmethod + def ensure_sdk_loaded(cls, sdk_path: Optional[Path] = None) -> bool: + """ + Ensure the camera SDK is loaded and ready to use. + + This method should be called before any camera operations. 
+ Implementations should handle: + - Loading vendor SDK libraries + - Platform-specific initialization + - Setting up library search paths + - Extracting SDK files if needed + + Args: + sdk_path: Optional path to SDK location. If None, use default location. + + Returns: + True if SDK is loaded successfully, False otherwise + + Note: + This is a class method so it can be called before instantiating cameras. + Most implementations should track SDK load state to avoid reloading. + """ pass - + + @classmethod + def is_sdk_loaded(cls) -> bool: + """ + Check if SDK has been loaded. + + Returns: + True if SDK is loaded, False otherwise + """ + return cls._sdk_loaded + @abstractmethod - def initialize(self) -> bool: - """Initialize camera hardware and settings.""" + def open(self, camera_id: str) -> bool: + """ + Open camera connection + + Args: + camera_id: Identifier for the camera to open + + Returns: + True if successful, False otherwise + """ pass - + @abstractmethod - def update(self): - """Update camera frame.""" + def close(self): + """Close camera connection and cleanup resources""" pass - + @abstractmethod - def capture_image(self): - """Capture a still image (subclass must implement).""" + def start_capture(self, callback: Callable, context: Any) -> bool: + """ + Start capturing frames + + Args: + callback: Function to call when events occur + context: Context object to pass to callback + + Returns: + True if successful, False otherwise + """ pass - - # ------------------------------- - # Config & settings convenience - # ------------------------------- - def get_impl_key(self) -> str: - """ - Returns the implementation key used for config subfolder naming. - Default: lowercased class name with trailing 'camera' removed (e.g., AmscopeCamera -> 'amscope'). - Subclasses can override by setting CONFIG_SUBDIR. 
- """ - if isinstance(self.CONFIG_SUBDIR, str) and self.CONFIG_SUBDIR.strip(): - return self.CONFIG_SUBDIR.strip() - cls = self.__class__.__name__ - return (cls[:-6] if cls.lower().endswith("camera") else cls).lower() - - def get_config_dir(self) -> Path: - return CameraSettingsManager.scope_dir(self._scope) - - def load_and_apply_settings(self, filename: str = ACTIVE_FILENAME): + + @abstractmethod + def stop_capture(self): + """Stop capturing frames""" + pass + + @abstractmethod + def pull_image(self, buffer: bytes, bits_per_pixel: int = 24) -> bool: """ - Load settings from YAML and apply to the live camera. - If the active file is missing, this falls back to default_settings.yaml, else built-in defaults. + Pull the latest image into provided buffer + + Args: + buffer: Pre-allocated buffer to receive image data + bits_per_pixel: Bits per pixel (typically 24 for RGB) + + Returns: + True if successful, False otherwise """ - loaded = CameraSettingsManager.load(self._scope) - self.settings = loaded - self.apply_settings(self.settings) - - def apply_settings(self, settings): - """ - Apply settings to the hardware. By default this calls a subclass hook named _apply_settings - if present. Subclasses should implement _apply_settings(settings: CameraSettings). - """ - hook = getattr(self, "_apply_settings", None) - if callable(hook): - hook(settings) - else: - raise NotImplementedError( - f"{self.__class__.__name__} must implement _apply_settings(settings) or override apply_settings()." - ) - - def save_settings(self, filename: str = ACTIVE_FILENAME): + pass + + @abstractmethod + def snap_image(self, resolution_index: int = 0) -> bool: """ - Persist current settings to YAML in the per-implementation folder. - Automatically creates a timestamped backup of the previous version and keeps the 5 most recent. 
+ Capture a still image at specified resolution + + Args: + resolution_index: Index of resolution to use + + Returns: + True if successful, False otherwise """ - CameraSettingsManager.save(self._scope, self.settings) - - def set_settings(self, settings, persist: bool = False, filename: str = ACTIVE_FILENAME): + pass + + @abstractmethod + def get_resolutions(self) -> list[CameraResolution]: """ - Replace the entire settings object, apply immediately, optionally persist to disk. + Get available camera resolutions + + Returns: + List of available resolutions """ - self.settings = settings - self.apply_settings(self.settings) - if persist: - self.save_settings(filename=filename) - - def update_settings(self, persist: bool = False, filename: str = ACTIVE_FILENAME, **updates): + pass + + @abstractmethod + def get_current_resolution(self) -> Tuple[int, int, int]: """ - Update one or more attributes on the current settings, apply immediately, and - optionally persist to disk. Example: - - camera.update_settings(temp=6500, tint=900, linear=1, persist=True) + Get current resolution + + Returns: + Tuple of (resolution_index, width, height) """ - # If settings hasn't been loaded yet, attempt to load from disk first. 
- if not hasattr(self.settings, "__dict__"): - self.load_and_apply_settings(filename=filename) - - # Apply updates (only for existing attributes to avoid silent typos) - for k, v in updates.items(): - if hasattr(self.settings, k): - setattr(self.settings, k, v) - else: - raise AttributeError(f"Unknown camera setting '{k}'") - - # Push to hardware and optionally persist - self.apply_settings(self.settings) - if persist: - self.save_settings(filename=filename) - - # ----- Defaults helpers ----- - def get_default_config_path(self) -> Path: - return self.get_config_path(DEFAULT_FILENAME) - - def write_default_settings(self, settings: CameraSettings | None = None) -> Path: + pass + + @abstractmethod + def set_resolution(self, resolution_index: int) -> bool: """ - Write default_settings.yaml in this camera's config directory. - If 'settings' is None, writes built-in defaults. + Set camera resolution + + Args: + resolution_index: Index of resolution to use + + Returns: + True if successful, False otherwise """ - return CameraSettingsManager.write_defaults(self._scope, settings) - - def load_default_settings(self): + pass + + @abstractmethod + def get_exposure_range(self) -> Tuple[int, int, int]: """ - Load defaults from default_settings.yaml (or built-in defaults if file doesn't exist), - apply to hardware, but do NOT persist to the active file. + Get exposure time range + + Returns: + Tuple of (min, max, default) values """ - defaults = CameraSettingsManager.load_defaults(self._scope) - self.set_settings(defaults, persist=False) - return defaults - - def restore_default_settings(self, persist: bool = True): + pass + + @abstractmethod + def get_exposure_time(self) -> int: """ - Restore defaults into the active settings file (backup the current one), apply, and optionally persist. - Useful for a "Restore Defaults" button in the UI. 
+ Get current exposure time + + Returns: + Current exposure time in microseconds """ - restored = CameraSettingsManager.restore_defaults_into_active(self._scope) - self.set_settings(restored, persist=False) - if persist: - self.save_settings() - return restored - - # ------------------------------- - # Image helpers - # ------------------------------- - def get_last_image(self): - """Get the last captured image, waiting if a capture is in progress.""" - while self.is_taking_image: - time.sleep(0.01) - return self.last_image - - def get_last_stream_array(self) -> np.ndarray | None: - """Return latest live-stream RGB frame as (H, W, 3) uint8, or None.""" - return self.last_stream_array - - def get_last_frame(self, prefer: str = "latest", wait_for_still: bool = True): + pass + + @abstractmethod + def set_exposure_time(self, time_us: int) -> bool: """ - Return the latest RGB frame (H, W, 3) uint8 from either a still or the stream. - - prefer: - - "latest" (default): whichever arrived most recently (compares timestamps) - - "still" : still if present, else stream - - "stream" : stream if present, else still - - wait_for_still: - - If True, block briefly if a still capture is currently in progress. 
+ Set exposure time + + Args: + time_us: Exposure time in microseconds + + Returns: + True if successful, False otherwise """ - if wait_for_still and self.is_taking_image: - while self.is_taking_image: - time.sleep(0.01) - - # Fast paths for legacy behavior - if prefer == "still": - return self.last_image if self.last_image is not None else self.last_stream_array - if prefer == "stream": - return self.last_stream_array if self.last_stream_array is not None else self.last_image - - # "latest" behavior: pick the freshest we’ve seen - li, ls = self.last_image, self.last_stream_array - ti, ts = self.last_image_ts, self.last_stream_ts - - if li is None and ls is None: - return None - if li is None: - return ls - if ls is None: - return li - return li if ti >= ts else ls - - def capture_and_save(self, filename: str = "", folder: str = ""): - self.capture_image() - self.save_image(filename, folder) - - def save_image(self, folder: str = "", filename: str = ""): - while self.is_taking_image: - time.sleep(0.01) - - arr = self.last_image - if arr is None: - print("No image to save (last_image is None).") - return - - try: - arr = np.asarray(arr) - if arr.ndim == 2: - arr = np.stack([arr] * 3, axis=-1) - if arr.ndim != 3 or arr.shape[2] not in (3, 4): - raise ValueError(f"Unsupported image shape: {arr.shape}") - if arr.dtype != np.uint8: - arr = np.clip(arr, 0, 255).astype(np.uint8) - - mode = "RGBA" if arr.shape[2] == 4 else "RGB" - - save_path = Path(self.capture_path) / folder - save_path.mkdir(parents=True, exist_ok=True) - - if filename: - final_filename = filename - else: - final_filename = self.image_name_formatter.get_formatted_string( - auto_increment_index=True - ) - - fformat = self.settings.fformat - full_path = save_path / f"{final_filename}.{fformat}" - print(f"Saving Image: {full_path}") - Image.fromarray(arr, mode=mode).save(str(full_path)) - except Exception as e: - print(f"Error saving image: {e}") - - def set_capture_path(self, path: str): - """Set path for 
saving captured images.""" - self.capture_path = path - - def select_capture_path(self): - """Open a folder selection dialog to set the capture path.""" - root = tk.Tk() - root.withdraw() # Hide the main Tk window - selected_folder = filedialog.askdirectory(title="Select Capture Folder") - root.destroy() - - if selected_folder: # User didn't cancel - self.set_capture_path(selected_folder) - print(f"Capture path set to: {self.capture_path}") - return self.capture_path - - # Provide a default close() that gracefully shuts down common SDKs - def close(self): - """Clean up camera resources if possible.""" - cam = getattr(self, "camera", None) - if cam is not None: - try: - close_fn = getattr(cam, "Close", None) - if callable(close_fn): - close_fn() - except Exception: - pass - finally: - self.camera = None + pass + + @abstractmethod + def get_gain_range(self) -> Tuple[int, int, int]: + """ + Get gain range + + Returns: + Tuple of (min, max, default) values in percent + """ + pass + + @abstractmethod + def get_gain(self) -> int: + """ + Get current gain + + Returns: + Current gain in percent + """ + pass + + @abstractmethod + def set_gain(self, gain_percent: int) -> bool: + """ + Set gain + + Args: + gain_percent: Gain in percent + + Returns: + True if successful, False otherwise + """ + pass + + @abstractmethod + def get_auto_exposure(self) -> bool: + """ + Get auto exposure state + + Returns: + True if auto exposure is enabled, False otherwise + """ + pass + + @abstractmethod + def set_auto_exposure(self, enabled: bool) -> bool: + """ + Set auto exposure state + + Args: + enabled: True to enable, False to disable + + Returns: + True if successful, False otherwise + """ + pass + + @abstractmethod + def supports_white_balance(self) -> bool: + """ + Check if camera supports white balance + + Returns: + True if white balance is supported, False otherwise + """ + pass + + @abstractmethod + def get_white_balance_range(self) -> Tuple[Tuple[int, int], Tuple[int, int]]: + """ 
+ Get white balance range + + Returns: + Tuple of ((temp_min, temp_max), (tint_min, tint_max)) + """ + pass + + @abstractmethod + def get_white_balance(self) -> Tuple[int, int]: + """ + Get current white balance + + Returns: + Tuple of (temperature, tint) + """ + pass + + @abstractmethod + def set_white_balance(self, temperature: int, tint: int) -> bool: + """ + Set white balance + + Args: + temperature: Color temperature value + tint: Tint value + + Returns: + True if successful, False otherwise + """ + pass + + @abstractmethod + def auto_white_balance(self) -> bool: + """ + Perform one-time auto white balance + + Returns: + True if successful, False otherwise + """ + pass + + @abstractmethod + def get_frame_rate(self) -> Tuple[int, int, int]: + """ + Get current frame rate information + + Returns: + Tuple of (frames_in_period, time_period_ms, total_frames) + """ + pass + + @staticmethod + @abstractmethod + def enumerate_cameras() -> list[CameraInfo]: + """ + Enumerate available cameras + + Returns: + List of available camera information + """ + pass + + @abstractmethod + def supports_still_capture(self) -> bool: + """ + Check if camera supports separate still image capture + + Returns: + True if supported, False otherwise + """ + pass + + @abstractmethod + def get_still_resolutions(self) -> list[CameraResolution]: + """ + Get available still image resolutions + + Returns: + List of available still resolutions + """ + pass diff --git a/camera/sdk_loaders/amscope_sdk_loader.py b/camera/sdk_loaders/amscope_sdk_loader.py new file mode 100644 index 0000000..2477196 --- /dev/null +++ b/camera/sdk_loaders/amscope_sdk_loader.py @@ -0,0 +1,271 @@ +""" +Utility for loading the Amscope SDK dynamically. 
+ +This module handles: +- Extracting the SDK from zip if needed +- Platform-specific DLL/SO path configuration +- Dynamic module import with correct __file__ override +""" + +import os +import sys +import platform +import zipfile +import shutil +import importlib.util +from pathlib import Path +from typing import Optional + +from logger import get_logger + +class AmscopeSdkLoader: + """ + Loader for the Amscope camera SDK. + + Handles automatic extraction from zip, platform detection, + and dynamic module loading. + """ + + def __init__(self, sdk_base_dir: Optional[Path] = None): + """ + Initialize the SDK loader. + + Args: + sdk_base_dir: Optional base directory for SDK files. + If None, uses project_root/3rd_party_imports + """ + if sdk_base_dir is None: + # Auto-detect project root (2 levels up from this file) + project_root = Path(__file__).resolve().parent.parent.parent + sdk_base_dir = project_root / "3rd_party_imports" + + self.sdk_base_dir = Path(sdk_base_dir) + self.official_dir = self.sdk_base_dir / "official_amscope" + self.amcam_module = None + + def load(self): + """ + Load the Amscope SDK. + + Returns: + The loaded amcam module + + Raises: + RuntimeError: If SDK cannot be found or loaded + """ + # Ensure SDK is extracted + sdk_root, sdk_py = self._ensure_sdk() + + # Get platform-specific DLL directory + dll_dir = self._get_dll_directory(sdk_root) + + # Configure library search path + self._configure_library_path(dll_dir) + + # Load the module + self.amcam_module = self._load_module(sdk_py, dll_dir) + + return self.amcam_module + + def _ensure_sdk(self) -> tuple[Path, Path]: + """ + Ensure the AmScope SDK is available under: + sdk_base_dir / "official_amscope" + If not, extract the first amcamsdk*.zip in sdk_base_dir. + + Returns: + Tuple of (sdk_root_dir, sdk_py_path) + + Raises: + RuntimeError: If SDK cannot be found or extracted + """ + sdk_py = self.official_dir / "python" / "amcam.py" + + # Already extracted? 
+ if sdk_py.is_file(): + return self.official_dir, sdk_py + + # Ensure base directory exists + self.sdk_base_dir.mkdir(parents=True, exist_ok=True) + + # Look for a zip starting with "amcamsdk" + for f in self.sdk_base_dir.iterdir(): + if (f.is_file() and + f.name.lower().startswith("amcamsdk") and + f.suffix.lower() == ".zip"): + + get_logger().info(f"Extracting AmScope SDK from {f.name}...") + with zipfile.ZipFile(f, "r") as zf: + zf.extractall(self.official_dir) + break + else: + raise RuntimeError( + f"No AmScope SDK zip found in {self.sdk_base_dir}\n" + f"Expected a file named amcamsdk*.zip" + ) + + # Handle case where zip contains a single subdirectory + if not sdk_py.is_file(): + subdirs = [d for d in self.official_dir.iterdir() if d.is_dir()] + if len(subdirs) == 1: + nested_sdk_py = subdirs[0] / "python" / "amcam.py" + if nested_sdk_py.is_file(): + # Move contents up one level + tmp = subdirs[0] + for item in tmp.iterdir(): + shutil.move(str(item), self.official_dir) + tmp.rmdir() + + # Verify extraction succeeded + if not sdk_py.is_file(): + raise RuntimeError( + f"Extracted SDK does not contain python/amcam.py\n" + f"Expected at: {sdk_py}" + ) + + get_logger().info(f"AmScope SDK ready at {self.official_dir}") + return self.official_dir, sdk_py + + def _get_dll_directory(self, sdk_root: Path) -> Path: + """ + Determine platform-specific DLL/SO directory. 
+ + Args: + sdk_root: Root directory of the SDK + + Returns: + Path to the directory containing platform libraries + + Raises: + RuntimeError: If platform is not supported + """ + system = platform.system().lower() + machine = platform.machine().lower() + + if system == 'windows': + dll_dir = sdk_root / 'win' / 'x64' + + elif system == 'linux': + arch_map = { + 'x86_64': 'x64', + 'amd64': 'x64', + 'i386': 'x86', + 'i686': 'x86', + 'arm64': 'arm64', + 'aarch64': 'arm64', + 'armv7l': 'armhf', + 'armv6l': 'armel' + } + subarch = arch_map.get(machine) + if not subarch: + raise RuntimeError( + f"Unsupported Linux architecture: {machine}\n" + f"Supported: {', '.join(arch_map.keys())}" + ) + dll_dir = sdk_root / 'linux' / subarch + + elif system == 'darwin': + dll_dir = sdk_root / 'mac' + + else: + raise RuntimeError(f"Unsupported operating system: {system}") + + if not dll_dir.exists(): + raise RuntimeError( + f"Platform library directory not found: {dll_dir}\n" + f"System: {system}, Architecture: {machine}" + ) + + return dll_dir + + def _configure_library_path(self, dll_dir: Path): + """ + Configure library search paths for the current platform. + + Args: + dll_dir: Directory containing platform libraries + """ + system = platform.system().lower() + dll_dir_str = str(dll_dir) + + if system == 'windows': + # Windows: Use add_dll_directory if available (Python 3.8+) + if hasattr(os, 'add_dll_directory'): + os.add_dll_directory(dll_dir_str) + else: + # Fallback for older Python versions + os.environ['PATH'] = dll_dir_str + os.pathsep + os.environ.get('PATH', '') + + else: + # Linux/macOS: Set LD_LIBRARY_PATH or DYLD_LIBRARY_PATH + if system == 'darwin': + env_var = 'DYLD_LIBRARY_PATH' + else: + env_var = 'LD_LIBRARY_PATH' + + current_path = os.environ.get(env_var, '') + os.environ[env_var] = dll_dir_str + os.pathsep + current_path + + def _load_module(self, sdk_py: Path, dll_dir: Path): + """ + Dynamically load the amcam module. 
+ + Args: + sdk_py: Path to amcam.py + dll_dir: Directory containing platform libraries + + Returns: + The loaded amcam module + """ + # Create module spec + spec = importlib.util.spec_from_file_location("amcam", sdk_py) + amcam_module = importlib.util.module_from_spec(spec) + + # Override __file__ to trick the SDK's LoadLibrary logic + # The SDK uses __file__ to find the DLL, so we point it to the DLL directory + amcam_module.__file__ = str(dll_dir / 'amcam.py') + + # Register in sys.modules before execution + sys.modules["amcam"] = amcam_module + + # Execute the module + spec.loader.exec_module(amcam_module) + + return amcam_module + + +def load_amscope_sdk(sdk_base_dir: Optional[Path] = None): + """ + Convenience function to load the Amscope SDK. + + Args: + sdk_base_dir: Optional base directory for SDK files. + If None, auto-detects from project structure. + + Returns: + The loaded amcam module + + Example: + >>> amcam = load_amscope_sdk() + >>> cameras = amcam.Amcam.EnumV2() + """ + loader = AmscopeSdkLoader(sdk_base_dir) + return loader.load() + + +if __name__ == "__main__": + # Test the loader + try: + amcam = load_amscope_sdk() + print(f"Successfully loaded amcam SDK") + print(f"Module location: {amcam.__file__}") + + # Try to enumerate cameras + cameras = amcam.Amcam.EnumV2() + print(f"Found {len(cameras)} camera(s)") + for i, cam in enumerate(cameras): + print(f" {i+1}. {cam.displayname}") + + except Exception as e: + print(f"Error loading SDK: {e}") + sys.exit(1) diff --git a/logger.py b/logger.py new file mode 100644 index 0000000..a6c87c7 --- /dev/null +++ b/logger.py @@ -0,0 +1,206 @@ +""" +Centralized logging system for the application. +Provides logging to both file and UI components. +""" + +import logging +import sys +from pathlib import Path +from datetime import datetime +from typing import Optional, Callable +from logging.handlers import RotatingFileHandler + + +class AppLogger: + """ + Singleton application logger with file and UI output. 
+ """ + _instance: Optional['AppLogger'] = None + _initialized = False + + def __new__(cls): + if cls._instance is None: + cls._instance = super().__new__(cls) + return cls._instance + + def __init__(self): + if self._initialized: + return + + self._log_callbacks: list[Callable[[str, str], None]] = [] + self._logger = logging.getLogger('ForgeApp') + self._logger.setLevel(logging.DEBUG) + + # Default log directory + self._log_dir = Path.cwd() / "logs" + self._log_dir.mkdir(exist_ok=True) + + # Setup file handler + self._setup_file_handler() + + # Setup console handler for development + self._setup_console_handler() + + self._initialized = True + + def _setup_file_handler(self): + """Setup rotating file handler""" + timestamp = datetime.now().strftime('%Y%m%d_%H%M%S') + log_file = self._log_dir / f"Forge_{timestamp}.log" + + # Rotating file handler - 10MB max, keep 5 backups + file_handler = RotatingFileHandler( + log_file, + maxBytes=10 * 1024 * 1024, + backupCount=5, + encoding='utf-8' + ) + file_handler.setLevel(logging.DEBUG) + + # Format: [2025-01-26 14:30:45] INFO: Message + formatter = logging.Formatter( + '[%(asctime)s] %(levelname)s: %(message)s', + datefmt='%Y-%m-%d %H:%M:%S' + ) + file_handler.setFormatter(formatter) + + self._logger.addHandler(file_handler) + self._file_handler = file_handler + + def _setup_console_handler(self): + """Setup console handler for stdout""" + console_handler = logging.StreamHandler(sys.stdout) + console_handler.setLevel(logging.INFO) + + formatter = logging.Formatter( + '[%(levelname)s] %(message)s' + ) + console_handler.setFormatter(formatter) + + self._logger.addHandler(console_handler) + + def set_log_directory(self, directory: Path): + """ + Change the log directory. 
+ + Args: + directory: New directory for log files + """ + self._log_dir = Path(directory) + self._log_dir.mkdir(exist_ok=True) + + # Remove old file handler + self._logger.removeHandler(self._file_handler) + + # Setup new file handler + self._setup_file_handler() + + self.info(f"Log directory changed to: {self._log_dir}") + + def get_log_directory(self) -> Path: + """Get current log directory""" + return self._log_dir + + def register_callback(self, callback: Callable[[str, str], None]): + """ + Register a callback for log messages. + + Args: + callback: Function(level, message) to call on each log message + """ + # Remove if already registered to avoid duplicates + if callback in self._log_callbacks: + self._log_callbacks.remove(callback) + self._log_callbacks.append(callback) + + def unregister_callback(self, callback: Callable[[str, str], None]): + """ + Unregister a log callback. + + Args: + callback: Callback to remove + """ + if callback in self._log_callbacks: + self._log_callbacks.remove(callback) + + def _notify_callbacks(self, level: str, message: str): + """Notify all registered callbacks""" + for callback in self._log_callbacks: + try: + callback(level, message) + except Exception as e: + # Don't let callback errors break logging + print(f"Error in log callback: {e}") + + def debug(self, message: str): + """Log debug message""" + self._logger.debug(message) + self._notify_callbacks('DEBUG', message) + + def info(self, message: str): + """Log info message""" + self._logger.info(message) + self._notify_callbacks('INFO', message) + + def warning(self, message: str): + """Log warning message""" + self._logger.warning(message) + self._notify_callbacks('WARNING', message) + + def error(self, message: str): + """Log error message""" + self._logger.error(message) + self._notify_callbacks('ERROR', message) + + def critical(self, message: str): + """Log critical message""" + self._logger.critical(message) + self._notify_callbacks('CRITICAL', message) + + def 
exception(self, message: str): + """Log exception with traceback""" + self._logger.exception(message) + self._notify_callbacks('ERROR', message) + + +# Global logger instance +_app_logger: Optional[AppLogger] = None + + +def get_logger() -> AppLogger: + """Get the global application logger""" + global _app_logger + if _app_logger is None: + _app_logger = AppLogger() + return _app_logger + + +# Convenience functions for easy access +def debug(message: str): + """Log debug message""" + get_logger().debug(message) + + +def info(message: str): + """Log info message""" + get_logger().info(message) + + +def warning(message: str): + """Log warning message""" + get_logger().warning(message) + + +def error(message: str): + """Log error message""" + get_logger().error(message) + + +def critical(message: str): + """Log critical message""" + get_logger().critical(message) + + +def exception(message: str): + """Log exception with traceback""" + get_logger().exception(message) diff --git a/main.py b/main.py index c222a1b..5b5ad53 100644 --- a/main.py +++ b/main.py @@ -1,160 +1,32 @@ -import pygame -import time -from typing import List -import multiprocessing as mp - -from camera.amscope import AmscopeCamera -from printer.automated_controller import AutomatedPrinter - -from forgeConfig import ( - ForgeSettings, - ForgeSettingsManager -) - -from UI.frame import Frame -from UI.ui_layout import create_control_panel, RIGHT_PANEL_WIDTH - -if __name__ == "__main__": - mp.freeze_support() - mp.set_start_method("spawn", force=True) - - config = ForgeSettings() - config = ForgeSettingsManager.load("") - - pygame.init() - pygame.display.set_caption("FORGE") - width, height = (config.windowWidth, config.windowHeight) - screen = pygame.display.set_mode((width, height), pygame.RESIZABLE) - - # Frame in which everything is based on - root_frame = Frame(x=0, y=0, width=width, height=height) - - # A clock to limit the frame rate. 
- clock = pygame.time.Clock() - - right_panel_width = RIGHT_PANEL_WIDTH - # Initialize camera with the refactored class - camera = AmscopeCamera() - - - # Initialize the automated printer with configurations - movementSystem = AutomatedPrinter(config, camera) - - time.sleep(1.5) - - current_sample_index = 1 - - ( - sample_label, - inc_btn, - dec_btn, - go_btn, - speed_display, - position_display - ) = create_control_panel(root_frame, movementSystem, camera, current_sample_index) - - # Verify no duplicate nodes are present - def audit_tree(node): - seen = {} - for ch in node.children: - seen.setdefault(id(ch), []).append(ch) - for ids, lst in seen.items(): - if len(lst) > 1: - print(f"[DUP] {node.__class__.__name__} id={id(node)} has child repeated x{len(lst)} -> {lst[0].__class__.__name__} id={id(lst[0])}") - for ch in node.children: - audit_tree(ch) - - audit_tree(root_frame) - - - def go_to_sample(): - pos = movementSystem.get_sample_position(current_sample_index) - movementSystem.move_to_position(pos) - - def increment_sample(): - global current_sample_index - if current_sample_index < movementSystem.get_num_slots(): - current_sample_index += 1 - sample_label.set_text(f"Sample {current_sample_index}") - - def decrement_sample(): - global current_sample_index - if current_sample_index > 1: - current_sample_index -= 1 - sample_label.set_text(f"Sample {current_sample_index}") - - - inc_btn.function_to_call = increment_sample - dec_btn.function_to_call = decrement_sample - go_btn.function_to_call = go_to_sample - - - - - - running = True - while running: - clock.tick(60) - # Mouse Position - pos = pygame.mouse.get_pos() - - for event in pygame.event.get(): - if event.type == pygame.QUIT: - running = False - elif event.type == pygame.VIDEORESIZE: - new_width, new_height = event.w, event.h - - width, height = new_width, new_height - - root_frame.width = new_width - root_frame.height = new_height - - print(width, height) - - elif event.type == pygame.MOUSEWHEEL: - mx, 
my = pos - root_frame.process_mouse_wheel(mx, my, dx=event.x, dy=event.y) - elif event.type == pygame.MOUSEBUTTONUP: - if event.button in (1, 2, 3): - root_frame.process_mouse_release(*pos, button="left") - elif event.type == pygame.MOUSEBUTTONDOWN: - if event.button in (1, 2, 3): # left, middle, right only - root_frame.broadcast_mouse_press(*pos, button="left") - root_frame.process_mouse_press(*pos, button="left") - elif event.type == pygame.KEYDOWN: - root_frame.broadcast_key_event(event) - if event.key == pygame.K_ESCAPE: - running = False - elif event.type == pygame.KEYUP: - root_frame.broadcast_key_event(event) - - root_frame.process_mouse_move(*pos) - - # Rendering - screen.fill([60, 60, 60]) - - def draw_debug_outline(surface, frame): - cx, cy, cw, ch = frame.get_content_geometry() - pygame.draw.rect(surface, pygame.Color(0, 255, 0), (cx, cy, cw, ch), 2) - - x, y, w, h = frame.get_absolute_geometry() - color = frame.debug_outline_color - pygame.draw.rect(surface, color, pygame.Rect(x, y, w, h), 1) - - for child in frame.children: - draw_debug_outline(surface, child) - - # Draw GUI - root_frame.draw(screen) - - #draw_debug_outline(screen, root_frame) - - speed_display.set_text(f"Step Size: {movementSystem.speed / 100:.2f}mm") - position_display.set_text( f"X: {movementSystem.position.x/100:.2f} Y: {movementSystem.position.y/100:.2f} Z: {movementSystem.position.z/100:.2f}") - #position1_display.set_text(f"X: {movementSystem.automation_config.x_start/100:.2f} Y: {movementSystem.automation_config.y_start/100:.2f} Z: {movementSystem.automation_config.z_start/100:.2f}") - #position2_display.set_text(f"X: {movementSystem.automation_config.x_end/100:.2f} Y: {movementSystem.automation_config.y_end/100:.2f} Z: {movementSystem.automation_config.z_end/100:.2f}") - pygame.display.flip() - - # Ensure camera is properly closed - camera.close() - pygame.quit() \ No newline at end of file +import sys +import multiprocessing as mp + +from PySide6.QtWidgets import QApplication 
+ +# GUI +from UI.main_window import MainWindow +from UI.style import apply_style + +# Initialize app context early +from app_context import get_app_context + + +if __name__ == "__main__": + mp.freeze_support() + mp.set_start_method("spawn", force=True) + + app = QApplication(sys.argv) + apply_style(app) + + # Initialize app context (this will load camera SDK) + ctx = get_app_context() + + win = MainWindow() + win.show() + + exit_code = app.exec() + + # Cleanup + ctx.cleanup() + + sys.exit(exit_code) From 852f71f09fd46ccd535ea22ecf9f1055a5cab320 Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Mon, 26 Jan 2026 03:35:10 -0900 Subject: [PATCH 14/46] Added timestamps to logs --- UI/tabs/logs_tab.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/UI/tabs/logs_tab.py b/UI/tabs/logs_tab.py index fd28834..6506eeb 100644 --- a/UI/tabs/logs_tab.py +++ b/UI/tabs/logs_tab.py @@ -3,6 +3,7 @@ import subprocess import sys from pathlib import Path +from datetime import datetime from PySide6.QtCore import Qt, QTimer from PySide6.QtWidgets import ( @@ -72,9 +73,13 @@ def _on_log_message(self, level: str, message: str): Handle incoming log message. This is called from the logger for each message. 
""" + + # Get current timestamp + timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S') + # Format with color based on level color = self._get_level_color(level) - formatted = f'[{level}] {self._escape_html(message)}' + formatted = f'[{timestamp}] [{level}] {self._escape_html(message)}' self._log_display.append(formatted) From 157b50f57a29af91505145eb35d8a94e12250d0d Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Wed, 28 Jan 2026 02:20:15 -0900 Subject: [PATCH 15/46] Updated config handling and settings support --- UI/main_window.py | 11 +- UI/tabs/navigate_tab.py | 7 + app_context.py | 58 +++- config/forge/default_settings.yaml | 5 +- forgeConfig.py | 74 ++++-- generic_config.py | 414 ++++++++++++++++++++++++----- main.py | 5 +- 7 files changed, 470 insertions(+), 104 deletions(-) diff --git a/UI/main_window.py b/UI/main_window.py index e4f8ef9..3ff9314 100644 --- a/UI/main_window.py +++ b/UI/main_window.py @@ -30,7 +30,13 @@ def __init__(self) -> None: self.resize(1920, 1080) self._state = State() + + # Get app context + self.app_context = get_app_context() + + # Create and register settings dialog self.settings_dialog = SettingsDialog(self) + self.app_context.register_settings_dialog(self.settings_dialog) # Header Bar self.tabs = QTabWidget() @@ -114,10 +120,7 @@ def _build_status_bar(self) -> QWidget: return status_bar def _open_settings(self, category: str) -> None: - self.settings_dialog.open_to(category) - self.settings_dialog.show() - self.settings_dialog.raise_() - self.settings_dialog.activateWindow() + self.app_context.open_settings(category) def _apply_status(self) -> None: self.status_line.setText(self._state.format_status_text()) diff --git a/UI/tabs/navigate_tab.py b/UI/tabs/navigate_tab.py index f2f118e..09aa7b1 100644 --- a/UI/tabs/navigate_tab.py +++ b/UI/tabs/navigate_tab.py @@ -16,6 +16,10 @@ from UI.tabs.base_tab import CameraWithSidebarPage from UI.widgets.camera_preview import CameraPreview +from UI.widgets.collapsible_section import 
CollapsibleSection +from UI.widgets.camera_controls_widget import CameraControlsWidget + +from app_context import open_settings class NavigateTab(CameraWithSidebarPage): def __init__(self, parent: QWidget | None = None) -> None: @@ -36,6 +40,9 @@ def _make_sidebar(self) -> QWidget: # Start Widgets + camera_controls = CollapsibleSection("Camera Controls", on_settings=lambda: open_settings("Camera")) + camera_controls.layout_for_content().addWidget(CameraControlsWidget()) + content_layout.addWidget(camera_controls) # End Widgets diff --git a/app_context.py b/app_context.py index fa0b5f5..89cf0ab 100644 --- a/app_context.py +++ b/app_context.py @@ -3,10 +3,14 @@ Provides a singleton pattern for accessing camera and other shared resources. """ -from typing import Optional +from typing import Optional, TYPE_CHECKING from camera.base_camera import BaseCamera from camera.amscope_camera import AmscopeCamera from logger import get_logger +from forgeConfig import ForgeSettingsManager, ForgeSettings + +if TYPE_CHECKING: + from UI.settings.settings_main import SettingsDialog class AppContext: @@ -27,7 +31,13 @@ def __init__(self): self._camera: Optional[BaseCamera] = None self._camera_initialized = False + self._settings_dialog: Optional['SettingsDialog'] = None + self._settings_manager: Optional[ForgeSettingsManager] = None + self._settings: Optional[ForgeSettings] = None self._initialized = True + + # Load settings + self._load_settings() @property def camera(self) -> Optional[BaseCamera]: @@ -36,6 +46,46 @@ def camera(self) -> Optional[BaseCamera]: self._initialize_camera() return self._camera + @property + def settings(self) -> Optional[ForgeSettings]: + """Get the Forge settings""" + return self._settings + + @property + def settings_dialog(self) -> Optional['SettingsDialog']: + """Get the settings dialog instance""" + return self._settings_dialog + + def register_settings_dialog(self, dialog: 'SettingsDialog'): + """Register the settings dialog instance""" + 
self._settings_dialog = dialog + + def open_settings(self, category: str): + """ + Open settings dialog to a specific category. + + Args: + category: Name of the settings category to open to + """ + if self._settings_dialog: + self._settings_dialog.open_to(category) + self._settings_dialog.show() + self._settings_dialog.raise_() + self._settings_dialog.activateWindow() + + def _load_settings(self): + """Load Forge application settings""" + logger = get_logger() + try: + self._settings_manager = ForgeSettingsManager() + self._settings = self._settings_manager.load() + logger.info(f"Forge settings loaded - version: {self._settings.version}") + except Exception as e: + logger.error(f"Failed to load Forge settings: {e}") + # Create default settings if loading fails + self._settings = ForgeSettings() + logger.warning("Using default Forge settings") + def _initialize_camera(self): """Initialize the camera subsystem""" if self._camera_initialized: @@ -65,9 +115,15 @@ def cleanup(self): self._camera.close() self._camera = None self._camera_initialized = False + self._settings_dialog = None + self._settings_manager = None + self._settings = None # Global instance accessor def get_app_context() -> AppContext: """Get the global application context""" return AppContext() + +def open_settings(category: str): + AppContext().open_settings(category) \ No newline at end of file diff --git a/config/forge/default_settings.yaml b/config/forge/default_settings.yaml index 1879bac..daf26b3 100644 --- a/config/forge/default_settings.yaml +++ b/config/forge/default_settings.yaml @@ -1,4 +1 @@ -serial_port: "COM9" -windowWidth: 1440 -windowHeight: 810 -version: "1.1" \ No newline at end of file +version: "1.2" \ No newline at end of file diff --git a/forgeConfig.py b/forgeConfig.py index 88c79d6..c32af18 100644 --- a/forgeConfig.py +++ b/forgeConfig.py @@ -1,34 +1,58 @@ from __future__ import annotations from dataclasses import dataclass +from typing import Union +from pathlib import Path + 
+from generic_config import ConfigManager -from generic_config import ConfigManager, DEFAULT_FILENAME, ACTIVE_FILENAME @dataclass -class ForgeSettings(): - serial_port: str = "COM9" - windowWidth: int = 1440 - windowHeight: int = 810 +class ForgeSettings: + """Forge application settings.""" version: str = "1.1" + + def validate(self) -> None: + """ + Validate Forge settings. + + Raises: + ValueError: If any setting is invalid + """ + # Add validation logic as needed + if not isinstance(self.version, str) or not self.version: + raise ValueError("version must be a non-empty string") -def make_forge_settings_manager( - *, - root_dir: str = "./config/forge", - default_filename: str = "default_settings.yaml", - backup_dirname: str = "backups", - backup_keep: int = 5, -) -> ConfigManager[ForgeSettings]: - return ConfigManager[ForgeSettings]( - ForgeSettings, - root_dir=root_dir, - default_filename=default_filename, - backup_dirname=backup_dirname, - backup_keep=backup_keep, - ) -ForgeSettingsManager = make_forge_settings_manager( - root_dir="./config/forge", - default_filename=DEFAULT_FILENAME, - backup_dirname="backups", - backup_keep=5, -) \ No newline at end of file +class ForgeSettingsManager(ConfigManager[ForgeSettings]): + """ + Configuration manager for Forge application settings. 
+ + Directory structure: + config/forge/ + settings.yaml + default_settings.yaml + backups/ + + Example usage: + >>> forge_mgr = ForgeSettingsManager() + >>> settings = forge_mgr.load() + >>> settings.version = "1.2" + >>> forge_mgr.save(settings) + """ + + def __init__( + self, + *, + root_dir: Union[str, Path] = "./config/forge", + default_filename: str = "default_settings.yaml", + backup_dirname: str = "backups", + backup_keep: int = 5, + ) -> None: + super().__init__( + ForgeSettings, + root_dir=root_dir, + default_filename=default_filename, + backup_dirname=backup_dirname, + backup_keep=backup_keep, + ) \ No newline at end of file diff --git a/generic_config.py b/generic_config.py index 7a96b6a..c676c0a 100644 --- a/generic_config.py +++ b/generic_config.py @@ -1,12 +1,15 @@ # config_manager.py from __future__ import annotations +from contextlib import contextmanager from dataclasses import asdict, fields, is_dataclass from pathlib import Path -from typing import Any, Dict, Generic, List, Type, TypeVar, Callable +from typing import Any, Dict, Generic, Iterator, List, Type, TypeVar, Union import shutil import time +from logger import get_logger + # File/dir names are generic—usable for ANY config ACTIVE_FILENAME = "settings.yaml" DEFAULT_FILENAME = "default_settings.yaml" @@ -16,153 +19,426 @@ S = TypeVar("S") # Config schema type (must be a dataclass) +class ConfigValidationError(Exception): + """Raised when settings validation fails.""" + pass + + class ConfigManager(Generic[S]): """ Generic YAML-backed config manager for ANY dataclass-based settings. - Handles: load/save, defaults file, timestamped backups (+ pruning), restore. + Manages a single configuration directory with active settings, defaults, and backups. + + Directory structure: + root_dir/ + settings.yaml # Active settings + default_settings.yaml # Factory defaults + backups/ # Timestamped backups + settings.20250128-143052.yaml + settings.20250128-120301.yaml + + Example: + >>> @dataclass + ... 
class MySettings: + ... value: int = 10 + ... def validate(self): + ... if self.value < 0: + ... raise ValueError("value must be non-negative") + >>> + >>> manager = ConfigManager[MySettings]( + ... MySettings, + ... root_dir="./config/my_component" + ... ) + >>> settings = manager.load() + >>> settings.value = 20 + >>> manager.save(settings) """ def __init__( self, schema_cls: Type[S], *, - root_dir: str | Path = "./config", - scope_namer: Callable[[str], str] | None = None, + root_dir: Union[str, Path] = "./config", default_filename: str = DEFAULT_FILENAME, backup_dirname: str = BACKUP_DIRNAME, backup_keep: int = BACKUP_KEEP, ) -> None: + """ + Initialize the config manager. + + Args: + schema_cls: Dataclass type defining the settings schema + root_dir: Directory for config files (settings, defaults, backups) + default_filename: Name for the defaults file + backup_dirname: Name for the backups subdirectory + backup_keep: Number of backup files to retain (oldest are deleted) + + Raises: + TypeError: If schema_cls is not a dataclass + """ if not is_dataclass(schema_cls): - raise TypeError("schema_cls must be a dataclass type") + logger = get_logger() + logger.error(f"Attempted to create ConfigManager with non-dataclass type: {schema_cls}") + raise TypeError(f"schema_cls must be a dataclass type, got {type(schema_cls).__name__}") + self.schema_cls = schema_cls self.root_dir = Path(root_dir).resolve() self.root_dir.mkdir(parents=True, exist_ok=True) - self.scope_namer = scope_namer or (lambda s: s) self.default_filename = default_filename self.backup_dirname = backup_dirname self.backup_keep = backup_keep + self._logger = get_logger() + + self._logger.debug(f"Initialized ConfigManager for {schema_cls.__name__} at {self.root_dir}") # ------------------------- # YAML (de)serialization # ------------------------- def _to_dict(self, settings: S) -> Dict[str, Any]: + """Convert settings dataclass to dictionary.""" return asdict(settings) - def _from_dict(self, data: 
Dict[str, Any] | None) -> S: + def _from_dict(self, data: Union[Dict[str, Any], None]) -> S: + """ + Create settings instance from dictionary. + Only includes fields that are defined in the schema. + """ data = data or {} allowed = {f.name for f in fields(self.schema_cls)} - return self.schema_cls(**{k: v for k, v in data.items() if k in allowed}) # type: ignore + filtered = {k: v for k, v in data.items() if k in allowed} + return self.schema_cls(**filtered) # type: ignore - def scope_dir(self, scope: str) -> Path: - d = self.root_dir / self.scope_namer(scope) - d.mkdir(parents=True, exist_ok=True) - return d + def _validate(self, settings: S, context: str = "") -> None: + """ + Validate settings if a validate() method exists. + + Args: + settings: Settings instance to validate + context: Additional context for error messages + + Raises: + ConfigValidationError: If validation fails + """ + if hasattr(settings, 'validate') and callable(settings.validate): + try: + settings.validate() + self._logger.debug(f"Validation passed{' for ' + context if context else ''}") + except Exception as e: + error_msg = f"Settings validation failed{' for ' + context if context else ''}: {e}" + self._logger.error(error_msg) + raise ConfigValidationError(error_msg) from e - def active_path(self, scope: str) -> Path: - return self.scope_dir(scope) / ACTIVE_FILENAME + # ------------------------- + # Path helpers + # ------------------------- + def active_path(self) -> Path: + """Get the path to the active settings file.""" + return self.root_dir / ACTIVE_FILENAME - def default_path(self, scope: str) -> Path: - return self.scope_dir(scope) / self.default_filename + def default_path(self) -> Path: + """Get the path to the default settings file.""" + return self.root_dir / self.default_filename - def backup_dir(self, scope: str) -> Path: - bd = self.scope_dir(scope) / self.backup_dirname + def backup_dir(self) -> Path: + """Get the backup directory, creating it if needed.""" + bd = 
self.root_dir / self.backup_dirname bd.mkdir(parents=True, exist_ok=True) return bd - def _backup_if_exists(self, scope: str) -> None: - src = self.active_path(scope) + # ------------------------- + # Backup management + # ------------------------- + def _backup_if_exists(self) -> None: + """ + Create a timestamped backup of the active settings file if it exists. + Also prunes old backups to maintain backup_keep limit. + """ + src = self.active_path() if not src.exists(): return + ts = time.strftime("%Y%m%d-%H%M%S") - dst = self.backup_dir(scope) / f"{src.stem}.{ts}{src.suffix}" + dst = self.backup_dir() / f"{src.stem}.{ts}{src.suffix}" + try: shutil.copy2(src, dst) + self._logger.info(f"Created backup: {dst.name}") except Exception as e: - print(f"Warning: failed to create settings backup: {e}") - return + self._logger.error(f"Failed to create settings backup: {e}") + raise IOError("Failed to create backup") from e + + # Prune old backups try: backups: List[Path] = sorted( - self.backup_dir(scope).glob(f"{src.stem}.*{src.suffix}"), + self.backup_dir().glob(f"{src.stem}.*{src.suffix}"), key=lambda p: p.stat().st_mtime, reverse=True, ) - for old in backups[self.backup_keep:]: - try: - old.unlink(missing_ok=True) - except TypeError: - old.unlink() + + if len(backups) > self.backup_keep: + for old in backups[self.backup_keep:]: + try: + self._logger.debug(f"Pruning old backup: {old.name}") + old.unlink(missing_ok=True) + except Exception as e: + self._logger.warning(f"Failed to delete backup {old.name}: {e}") + + self._logger.info(f"Pruned {len(backups) - self.backup_keep} old backup(s)") except Exception as e: - print(f"Warning: failed to prune backups: {e}") + self._logger.warning(f"Failed to prune old backups: {e}") - # -------- public scope-first API - def load(self, scope: str) -> S: + # ------------------------- + # Public API + # ------------------------- + def load(self) -> S: + """ + Load settings from the active settings file. 
+ + Attempts to load in order: + 1. Active settings file + 2. Default settings file + 3. Fresh instance from schema + + Returns: + Settings instance (validated if validate() method exists) + + Raises: + ConfigValidationError: If loaded settings fail validation + """ import yaml - p = self.active_path(scope) + + p = self.active_path() + + # Try loading active settings if p.exists(): try: with open(p, "r") as f: - return self._from_dict(yaml.safe_load(f) or {}) + data = yaml.safe_load(f) or {} + settings = self._from_dict(data) + self._validate(settings, "active settings") + self._logger.info(f"Loaded active settings from {p.name}") + return settings + except ConfigValidationError: + raise except Exception as e: - print(f"Error loading settings: {e}") - # fallbacks - dp = self.default_path(scope) + self._logger.error(f"Failed to load settings from {p}: {e}") + raise IOError("Failed to load active settings") from e + + # Fallback to defaults + dp = self.default_path() if dp.exists(): try: with open(dp, "r") as f: - return self._from_dict(yaml.safe_load(f) or {}) + data = yaml.safe_load(f) or {} + settings = self._from_dict(data) + self._validate(settings, "default settings") + self._logger.info(f"Loaded default settings from {dp.name}") + return settings + except ConfigValidationError: + raise except Exception as e: - print(f"Error loading default settings: {e}") - return self.schema_cls() + self._logger.error(f"Failed to load default settings from {dp}: {e}") + raise IOError("Failed to load default settings") from e + + # Last resort: create fresh instance + self._logger.info("No existing settings found, using fresh instance") + settings = self.schema_cls() + self._validate(settings, "fresh instance") + return settings - def load_from_file(self, path: str | Path): + def load_from_file(self, path: Union[str, Path]) -> S: + """ + Load settings from an arbitrary file path. + + This is useful for loading user-provided or downloaded configuration files. 
+ + Args: + path: Path to the settings file + + Returns: + Settings instance (validated if validate() method exists) + + Raises: + ConfigValidationError: If loaded settings fail validation + IOError: If file cannot be read + """ import yaml + p = Path(path) - with open(p, "r") as f: - data = yaml.safe_load(f) or {} - return self._from_dict(data) + try: + with open(p, "r") as f: + data = yaml.safe_load(f) or {} + settings = self._from_dict(data) + self._validate(settings, f"file {p.name}") + self._logger.info(f"Loaded settings from file: {p}") + return settings + except ConfigValidationError: + raise + except Exception as e: + self._logger.error(f"Failed to load settings from {p}: {e}") + raise IOError(f"Failed to load settings from {path}") from e - def save(self, scope: str, settings: S) -> None: + def save(self, settings: S) -> None: + """ + Save settings to the active settings file. + + Creates a backup of existing settings before saving. + + Args: + settings: Settings instance to save + + Raises: + ConfigValidationError: If settings fail validation + IOError: If file cannot be written + """ import yaml - self._backup_if_exists(scope) + + # Validate before saving + self._validate(settings, "before save") + + # Backup existing file + self._backup_if_exists() + + # Save new settings + p = self.active_path() try: - with open(self.active_path(scope), "w") as f: + with open(p, "w") as f: yaml.safe_dump(self._to_dict(settings), f, sort_keys=False) + self._logger.info(f"Saved settings to {p.name}") except Exception as e: - print(f"Error saving settings: {e}") + self._logger.error(f"Failed to save settings to {p}: {e}") + raise IOError("Failed to save settings") from e - def write_defaults(self, scope: str, settings: S | None = None) -> Path: + def write_defaults(self, settings: Union[S, None] = None) -> Path: + """ + Write default settings file. + + Args: + settings: Settings to write as defaults. If None, uses fresh schema instance. 
+ + Returns: + Path to the written defaults file + + Raises: + ConfigValidationError: If settings fail validation + IOError: If file cannot be written + """ import yaml - payload = self._to_dict(settings or self.schema_cls()) - dp = self.default_path(scope) + + settings_to_save = settings or self.schema_cls() + self._validate(settings_to_save, "defaults") + + payload = self._to_dict(settings_to_save) + dp = self.default_path() + try: with open(dp, "w") as f: yaml.safe_dump(payload, f, sort_keys=False) + self._logger.info(f"Wrote default settings to {dp.name}") + return dp except Exception as e: - print(f"Error writing default settings: {e}") - return dp + self._logger.error(f"Failed to write default settings to {dp}: {e}") + raise IOError("Failed to write default settings") from e - def restore_defaults_into_active(self, scope: str) -> S: - defaults = self.load_defaults(scope) - self._backup_if_exists(scope) - self.save(scope, defaults) + def restore_defaults(self) -> S: + """ + Restore default settings as the active settings. + + Creates a backup of current active settings before restoring. + + Returns: + The restored default settings + + Raises: + ConfigValidationError: If default settings fail validation + IOError: If restore operation fails + """ + defaults = self.load_defaults() + self._backup_if_exists() + self.save(defaults) + self._logger.info("Restored defaults as active settings") return defaults - def load_defaults(self, scope: str) -> S: + def load_defaults(self) -> S: + """ + Load default settings. 
+ + Returns: + Default settings instance + + Raises: + ConfigValidationError: If default settings fail validation + IOError: If defaults file cannot be read + """ import yaml - dp = self.default_path(scope) + + dp = self.default_path() if not dp.exists(): - return self.schema_cls() + self._logger.debug("No defaults file, using fresh instance") + settings = self.schema_cls() + self._validate(settings, "fresh defaults") + return settings + try: with open(dp, "r") as f: - return self._from_dict(yaml.safe_load(f) or {}) + data = yaml.safe_load(f) or {} + settings = self._from_dict(data) + self._validate(settings, "defaults") + self._logger.info(f"Loaded default settings from {dp.name}") + return settings + except ConfigValidationError: + raise + except Exception as e: + self._logger.error(f"Failed to load default settings from {dp}: {e}") + raise IOError("Failed to load defaults") from e + + def list_backups(self) -> List[Path]: + """ + List all backup files, most recent first. + + Returns: + List of backup file paths, sorted by modification time (newest first) + """ + bd = self.backup_dir() + try: + backups = sorted( + bd.glob(f"{ACTIVE_FILENAME.split('.')[0]}.*.yaml"), + key=lambda p: p.stat().st_mtime, + reverse=True + ) + self._logger.debug(f"Found {len(backups)} backup(s)") + return backups except Exception as e: - print(f"Error loading default settings: {e}") - return self.schema_cls() + self._logger.warning(f"Failed to list backups: {e}") + return [] - def list_backups(self, scope: str) -> list[Path]: - bd = self.backup_dir(scope) + @contextmanager + def edit(self) -> Iterator[S]: + """ + Context manager for transactional settings editing. + + Loads settings, yields for editing, and automatically saves + on successful exit. If an exception occurs, changes are discarded. 
+ + Yields: + Settings instance for editing + + Raises: + ConfigValidationError: If edited settings fail validation + IOError: If load or save operations fail + + Example: + >>> with manager.edit() as settings: + ... settings.value = 150 + # Auto-saves on successful exit + """ + self._logger.debug("Starting edit transaction") + settings = self.load() try: - return sorted(bd.glob(f"{ACTIVE_FILENAME.split('.')[0]}.*.yaml"), - key=lambda p: p.stat().st_mtime, reverse=True) - except Exception: - return [] \ No newline at end of file + yield settings + except Exception as e: + self._logger.error(f"Edit transaction failed: {e}") + raise + else: + self.save(settings) + self._logger.info("Edit transaction completed") diff --git a/main.py b/main.py index 5b5ad53..636f892 100644 --- a/main.py +++ b/main.py @@ -9,6 +9,7 @@ # Initialize app context early from app_context import get_app_context +from logger import info if __name__ == "__main__": @@ -18,8 +19,9 @@ app = QApplication(sys.argv) apply_style(app) - # Initialize app context (this will load camera SDK) + # Initialize app context (this will load camera SDK and config) ctx = get_app_context() + info("Forge application starting") win = MainWindow() win.show() @@ -27,6 +29,7 @@ exit_code = app.exec() # Cleanup + info("Forge application shutting down") ctx.cleanup() sys.exit(exit_code) From b45d7c6949b243d86db3128ace4c3771962f5d64 Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Thu, 29 Jan 2026 03:34:38 -0900 Subject: [PATCH 16/46] Added Toasts and Camera Controls --- UI/main_window.py | 19 +- UI/widgets/camera_controls_widget.py | 264 +++++++- UI/widgets/toast_widget.py | 385 +++++++++++ app_context.py | 18 + camera/amscope_camera.py | 971 ++++++++++++++++++++++++++- camera/base_camera.py | 324 ++++++++- camera/camera_settings.py | 288 ++++++-- forgeConfig.py | 2 +- 8 files changed, 2165 insertions(+), 106 deletions(-) create mode 100644 UI/widgets/toast_widget.py diff --git a/UI/main_window.py b/UI/main_window.py index 
3ff9314..4bd789a 100644 --- a/UI/main_window.py +++ b/UI/main_window.py @@ -26,14 +26,22 @@ class MainWindow(QMainWindow): def __init__(self) -> None: super().__init__() - self.setWindowTitle("Forge") - self.resize(1920, 1080) - - self._state = State() # Get app context self.app_context = get_app_context() + # Register this main window with app context (initializes toast manager) + self.app_context.register_main_window(self) + + # Set window title with version from config + version = "Unknown" + if self.app_context.settings: + version = self.app_context.settings.version + self.setWindowTitle(f"Forge - v{version}") + self.resize(1920, 1080) + + self._state = State() + # Create and register settings dialog self.settings_dialog = SettingsDialog(self) self.app_context.register_settings_dialog(self.settings_dialog) @@ -54,6 +62,7 @@ def __init__(self) -> None: def resizeEvent(self, event) -> None: super().resizeEvent(event) + # Toast manager now tracks moves/resizes via event filter def _setup_header_right(self) -> None: header_edge = QWidget() @@ -146,4 +155,4 @@ def closeEvent(self, event): ctx = get_app_context() ctx.cleanup() - super().closeEvent(event) + super().closeEvent(event) \ No newline at end of file diff --git a/UI/widgets/camera_controls_widget.py b/UI/widgets/camera_controls_widget.py index 1880d87..1c0d568 100644 --- a/UI/widgets/camera_controls_widget.py +++ b/UI/widgets/camera_controls_widget.py @@ -1,11 +1,265 @@ from __future__ import annotations -from PySide6.QtWidgets import QWidget +from pathlib import Path +from datetime import datetime +from PySide6.QtWidgets import ( + QWidget, QVBoxLayout, QHBoxLayout, QGroupBox, + QPushButton, QLineEdit, QLabel, QFileDialog, QMessageBox, QComboBox +) +from PySide6.QtCore import Qt +from logger import info, error, warning +from app_context import get_app_context + class CameraControlsWidget(QWidget): + """ + Widget for camera controls including photo capture and file management. 
+ """ + def __init__(self, parent: QWidget | None = None): super().__init__(parent) - - - - + + # Default values + self._default_folder = Path("./output") + self._current_folder = self._default_folder + + # Supported image formats + self._image_formats = { + "TIFF": ".tiff", + "JPEG": ".jpg", + "PNG": ".png" + } + + # Ensure output folder exists + self._ensure_output_folder() + + # Setup UI + self._setup_ui() + + def _setup_ui(self): + """Setup the user interface""" + layout = QVBoxLayout(self) + layout.setContentsMargins(10, 10, 10, 10) + layout.setSpacing(10) + + # Photo capture group + capture_group = self._create_capture_group() + layout.addWidget(capture_group) + + layout.addStretch() + + def _create_capture_group(self) -> QGroupBox: + """Create the photo capture control group""" + group = QGroupBox("Photo Capture") + layout = QVBoxLayout(group) + + # Folder selection row + folder_layout = QHBoxLayout() + folder_label = QLabel("Output Folder:") + folder_label.setMinimumWidth(100) + + self._folder_edit = QLineEdit() + self._folder_edit.setText(str(self._current_folder)) + self._folder_edit.setPlaceholderText("Select output folder...") + + self._browse_button = QPushButton("Browse...") + self._browse_button.clicked.connect(self._browse_folder) + + folder_layout.addWidget(folder_label) + folder_layout.addWidget(self._folder_edit, 1) + folder_layout.addWidget(self._browse_button) + + # Filename row + filename_layout = QHBoxLayout() + filename_label = QLabel("Filename:") + filename_label.setMinimumWidth(100) + + self._filename_edit = QLineEdit() + self._filename_edit.setPlaceholderText("Leave empty for auto-generated name") + + filename_layout.addWidget(filename_label) + filename_layout.addWidget(self._filename_edit, 1) + + # Image format row + format_layout = QHBoxLayout() + format_label = QLabel("Image Format:") + format_label.setMinimumWidth(100) + + self._format_combo = QComboBox() + self._format_combo.addItems(self._image_formats.keys()) + 
self._format_combo.setCurrentText("TIFF") # Default to TIFF + + self._open_folder_button = QPushButton("Browse Output") + self._open_folder_button.clicked.connect(self._open_folder) + + format_layout.addWidget(format_label) + format_layout.addWidget(self._format_combo) + format_layout.addWidget(self._open_folder_button) + format_layout.addStretch() + + # Capture button row + buttons_layout = QHBoxLayout() + + self._capture_button = QPushButton("Take Photo") + self._capture_button.setMinimumHeight(40) + self._capture_button.clicked.connect(self._take_photo) + + buttons_layout.addWidget(self._capture_button) + + # Add all to group layout + layout.addLayout(folder_layout) + layout.addLayout(filename_layout) + layout.addLayout(format_layout) + layout.addLayout(buttons_layout) + + return group + + def _ensure_output_folder(self): + """Ensure the output folder exists""" + try: + self._current_folder.mkdir(parents=True, exist_ok=True) + info(f"Output folder ready: {self._current_folder}") + except Exception as e: + error(f"Failed to create output folder: {e}") + # Show toast for error + ctx = get_app_context() + if ctx.toast: + ctx.toast.error(f"{str(e)}", title="Folder Creation Failed") + + def _browse_folder(self): + """Open folder selection dialog""" + folder = QFileDialog.getExistingDirectory( + self, + "Select Output Folder", + str(self._current_folder), + QFileDialog.Option.ShowDirsOnly + ) + + if folder: + self._current_folder = Path(folder) + self._folder_edit.setText(str(self._current_folder)) + self._ensure_output_folder() + info(f"Output folder changed to: {self._current_folder}") + + # Show toast notification + ctx = get_app_context() + if ctx.toast: + ctx.toast.success(f"{self._current_folder.name}", title="Output Folder Changed") + + def _open_folder(self): + """Open the output folder in the system file manager""" + import subprocess + import sys + + ctx = get_app_context() + toast = ctx.toast + + try: + folder_path = str(self._current_folder.resolve()) + + 
if sys.platform == 'win32': + # Windows + subprocess.run(['explorer', folder_path]) + elif sys.platform == 'darwin': + # macOS + subprocess.run(['open', folder_path]) + else: + # Linux + subprocess.run(['xdg-open', folder_path]) + + info(f"Opened folder: {folder_path}") + if toast: + toast.info("Opening in file explorer...", title="Opening Folder", duration=10000) + except Exception as e: + error(f"Failed to open folder: {e}") + if toast: + toast.error(f"{str(e)}", title="Failed to Open Folder") + QMessageBox.warning( + self, + "Error", + f"Could not open folder: {e}" + ) + + def _generate_filename(self) -> str: + """Generate a filename based on current timestamp and selected format""" + timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") + format_name = self._format_combo.currentText() + extension = self._image_formats[format_name] + return f"image_{timestamp}{extension}" + + def _get_filepath(self) -> Path: + """Get the complete filepath for saving""" + # Get selected format + format_name = self._format_combo.currentText() + extension = self._image_formats[format_name] + + # Use custom filename if provided, otherwise generate one + filename = self._filename_edit.text().strip() + if not filename: + filename = self._generate_filename() + else: + # Remove any existing extension + filename_path = Path(filename) + filename_base = filename_path.stem + + # Add the selected extension + filename = f"{filename_base}{extension}" + + return self._current_folder / filename + + def _take_photo(self): + """Capture a still photo from the camera""" + ctx = get_app_context() + toast = ctx.toast + + try: + # Get camera from app context + camera = ctx.camera + + if camera is None: + warning("Attempted to capture photo but camera is not available") + if toast: + toast.warning("Camera not available", title="Camera Error") + return + + if not camera.is_open: + warning("Attempted to capture photo but camera is not open") + if toast: + toast.warning("Please open the camera first", 
title="Camera Not Open") + return + + # Get filepath + filepath = self._get_filepath() + + # Ensure folder exists + self._ensure_output_folder() + + info(f"Capturing photo to: {filepath}") + if toast: + toast.info("Please wait while the image is captured...", title="Capturing Image") + + # Capture still image (highest resolution) + success = camera.capture_and_save_still( + filepath=filepath, + resolution_index=0, # Highest resolution + additional_metadata={"timestamp": datetime.now().isoformat()}, + timeout_ms=5000 + ) + + if success: + info(f"Photo saved successfully: {filepath}") + if toast: + toast.success(f"Saved to: {filepath.name}", title="Image Captured", duration=10000) + # Clear custom filename after successful capture + self._filename_edit.clear() + else: + error(f"Failed to capture photo to: {filepath}") + if toast: + toast.error("Unable to capture image from camera", title="Capture Failed") + + except Exception as e: + error(f"Error capturing photo: {e}") + if toast: + toast.error(f"{str(e)}", title="Capture Error") + import traceback + error(traceback.format_exc()) \ No newline at end of file diff --git a/UI/widgets/toast_widget.py b/UI/widgets/toast_widget.py new file mode 100644 index 0000000..1ef03a8 --- /dev/null +++ b/UI/widgets/toast_widget.py @@ -0,0 +1,385 @@ +""" +Toast notification widget for Forge microscope application. + +Provides temporary, color-coded notifications that stack and auto-dismiss. +Integrates with the logging system for consistent message handling. 
+""" + +from PySide6.QtWidgets import QWidget, QLabel, QVBoxLayout, QHBoxLayout, QFrame, QPushButton, QProgressBar +from PySide6.QtCore import Qt, QTimer, QPropertyAnimation, QEasingCurve, Property, QElapsedTimer +from PySide6.QtGui import QFont +from enum import Enum +from typing import Optional + + +class ToastType(Enum): + """Toast notification types with associated colors.""" + INFO = "info" + SUCCESS = "success" + WARNING = "warning" + ERROR = "error" + + +class Toast(QFrame): + """Individual toast notification widget.""" + + # Color schemes for each toast type (background, border, progress bar) + COLORS = { + ToastType.INFO: ("#E3F2FD", "#1976D2", "#1976D2"), + ToastType.SUCCESS: ("#E8F5E9", "#388E3C", "#388E3C"), + ToastType.WARNING: ("#FFF3E0", "#F57C00", "#F57C00"), + ToastType.ERROR: ("#FFEBEE", "#D32F2F", "#D32F2F"), + } + + # Titles for each toast type + TITLES = { + ToastType.INFO: "Information", + ToastType.SUCCESS: "Success", + ToastType.WARNING: "Warning", + ToastType.ERROR: "Error", + } + + def __init__(self, message: str, toast_type: ToastType = ToastType.INFO, + duration: int = 3000, title: str = None, parent: Optional[QWidget] = None): + """ + Initialize a toast notification. 
+ + Args: + message: Description text to display + toast_type: Type of toast (INFO, SUCCESS, WARNING, ERROR) + duration: Duration in milliseconds before auto-dismiss (0 = no auto-dismiss) + title: Optional custom title (defaults to toast type name) + parent: Parent widget + """ + super().__init__(parent) + self.message = message + self.toast_type = toast_type + self.duration = duration + self.title = title if title is not None else self.TITLES[toast_type] + self._opacity = 1.0 + self._progress_value = 100 + self._start_time = None + + self._setup_ui() + self._setup_animations() + self._setup_progress_timer() + + # Auto-dismiss timer + if duration > 0: + QTimer.singleShot(duration, self.dismiss) + + def _setup_ui(self): + """Setup the toast UI with appropriate styling.""" + self.setFrameShape(QFrame.Shape.StyledPanel) + self.setFrameShadow(QFrame.Shadow.Raised) + + # Get colors for this toast type + bg_color, border_color, progress_color = self.COLORS[self.toast_type] + + # Apply styling - no padding so progress bar can span full width + self.setStyleSheet(f""" + Toast {{ + background-color: {bg_color}; + border: 1px solid {border_color}; + border-radius: 0px; + padding: 0px; + }} + """) + + # Main layout - no margins so progress bar spans full width + layout = QVBoxLayout(self) + layout.setContentsMargins(0, 0, 0, 0) + layout.setSpacing(0) + + # Content container (with padding for text) + content_widget = QWidget() + content_widget.setStyleSheet("background: transparent; border: none;") + content_layout = QVBoxLayout(content_widget) + content_layout.setContentsMargins(12, 10, 12, 10) # Increased from 8,6,8,6 + content_layout.setSpacing(6) # Increased from 4 + + # Header row: Title and Close button + header_layout = QHBoxLayout() + header_layout.setSpacing(6) + + # Title label - no color styling + self.title_label = QLabel(self.title) + self.title_label.setStyleSheet("background: transparent; border: none; font-weight: bold;") + self.title_label.setFont(QFont("Segoe 
UI", 10, QFont.Weight.Bold)) + header_layout.addWidget(self.title_label, 1) + + # Close button - no hover effect + self.close_button = QPushButton("×") + self.close_button.setStyleSheet(f""" + QPushButton {{ + background: transparent; + border: none; + font-size: 18px; + font-weight: bold; + padding: 0px; + margin: 0px; + }} + """) + self.close_button.setFixedSize(18, 18) + self.close_button.setCursor(Qt.CursorShape.PointingHandCursor) + self.close_button.clicked.connect(self.dismiss) + header_layout.addWidget(self.close_button) + + content_layout.addLayout(header_layout) + + # Message/Description label - no color styling + self.message_label = QLabel(self.message) + self.message_label.setStyleSheet("background: transparent; border: none;") + self.message_label.setWordWrap(True) + self.message_label.setFont(QFont("Segoe UI", 9)) + content_layout.addWidget(self.message_label) + + # Add content widget to main layout + layout.addWidget(content_widget) + + # Progress bar at the very bottom edge - spans full width + self.progress_bar = QProgressBar() + self.progress_bar.setRange(0, 100) + self.progress_bar.setValue(100) + self.progress_bar.setTextVisible(False) + self.progress_bar.setFixedHeight(6) + self.progress_bar.setStyleSheet(f""" + QProgressBar {{ + background-color: rgba(0, 0, 0, 0.1); + border: none; + border-radius: 0px; + margin: 0px; + }} + QProgressBar::chunk {{ + background-color: {progress_color}; + border-radius: 0px; + }} + """) + layout.addWidget(self.progress_bar) + + # Set size constraints + self.setMinimumWidth(260) + self.setMaximumWidth(350) + self.adjustSize() + + def _setup_progress_timer(self): + """Setup timer to update progress bar.""" + if self.duration > 0: + # Use QElapsedTimer for precise timing + self.elapsed_timer = QElapsedTimer() + self.elapsed_timer.start() + + # Update progress every 16ms for smooth 60fps animation + self.progress_timer = QTimer(self) + self.progress_timer.timeout.connect(self._update_progress) + 
self.progress_timer.start(16) + else: + # No duration, hide progress bar + self.progress_bar.hide() + + def _update_progress(self): + """Update the progress bar based on elapsed time.""" + elapsed_ms = self.elapsed_timer.elapsed() + + if elapsed_ms >= self.duration: + # Ensure we end at exactly 0% + self.progress_bar.setValue(0) + self.progress_timer.stop() + else: + # Calculate remaining percentage + remaining_percent = int(((self.duration - elapsed_ms) / self.duration) * 100) + self.progress_bar.setValue(remaining_percent) + + def _setup_animations(self): + """Setup fade in/out animations.""" + # Fade in animation + self.fade_in_animation = QPropertyAnimation(self, b"opacity") + self.fade_in_animation.setDuration(200) + self.fade_in_animation.setStartValue(0.0) + self.fade_in_animation.setEndValue(1.0) + self.fade_in_animation.setEasingCurve(QEasingCurve.Type.OutCubic) + + # Fade out animation + self.fade_out_animation = QPropertyAnimation(self, b"opacity") + self.fade_out_animation.setDuration(200) + self.fade_out_animation.setStartValue(1.0) + self.fade_out_animation.setEndValue(0.0) + self.fade_out_animation.setEasingCurve(QEasingCurve.Type.InCubic) + self.fade_out_animation.finished.connect(self._on_fade_out_finished) + + def show_animated(self): + """Show the toast with fade-in animation.""" + self.show() + self.fade_in_animation.start() + + def dismiss(self): + """Dismiss the toast with fade-out animation.""" + # Stop progress timer if it exists + if hasattr(self, 'progress_timer') and self.progress_timer.isActive(): + self.progress_timer.stop() + self.fade_out_animation.start() + + def _on_fade_out_finished(self): + """Called when fade-out animation completes.""" + self.hide() + self.deleteLater() + + def _get_opacity(self): + """Get current opacity value.""" + return self._opacity + + def _set_opacity(self, value): + """Set opacity value and update window opacity.""" + self._opacity = value + self.setWindowOpacity(value) + + opacity = Property(float, 
_get_opacity, _set_opacity) + + +class ToastManager(QWidget): + """ + Manages multiple toast notifications in a stack. + + Toasts appear in the bottom-right corner and stack vertically upward. + """ + + def __init__(self, parent: Optional[QWidget] = None): + """ + Initialize the toast manager. + + Args: + parent: Parent widget (typically the main window) + """ + super().__init__(parent) + self.parent_widget = parent + self.toasts = [] + + # Setup container + self.setWindowFlags(Qt.WindowType.FramelessWindowHint | + Qt.WindowType.Tool) + self.setAttribute(Qt.WidgetAttribute.WA_TranslucentBackground) + self.setAttribute(Qt.WidgetAttribute.WA_TransparentForMouseEvents, False) # Allow mouse events + + # Layout for stacking toasts (bottom to top) - reduced spacing + self.layout = QVBoxLayout(self) + self.layout.setContentsMargins(0, 0, 0, 0) + self.layout.setSpacing(6) # Reduced from 10 to 6 + self.layout.setAlignment(Qt.AlignmentFlag.AlignBottom) # Align to bottom + + # Install event filter on parent to track moves + if self.parent_widget: + self.parent_widget.installEventFilter(self) + + # Position and show + self._update_position() + self.show() + + def eventFilter(self, obj, event): + """Track parent window moves to reposition toasts.""" + if obj == self.parent_widget: + # Update position on move or resize + if event.type() in (event.Type.Move, event.Type.Resize): + self._update_position() + return super().eventFilter(obj, event) + + def _update_position(self): + """Update position to bottom-right corner of parent.""" + if self.parent_widget: + # Get the parent widget's geometry in global screen coordinates + parent_global_rect = self.parent_widget.geometry() + parent_pos = self.parent_widget.pos() + + # For QMainWindow, we need to get the actual screen position + if hasattr(self.parent_widget, 'frameGeometry'): + parent_global_rect = self.parent_widget.frameGeometry() + parent_pos = parent_global_rect.topLeft() + else: + # Map parent position to global coordinates 
+ parent_pos = self.parent_widget.mapToGlobal(parent_pos) + + margin = 10 # Reduced from 20 to 10 + toast_width = 350 # Reduced from 420 to 350 + toast_container_height = 600 # Max height for toast container + + # Calculate position for bottom-right corner + x = parent_pos.x() + parent_global_rect.width() - toast_width - margin + y = parent_pos.y() + parent_global_rect.height() - toast_container_height - margin + + self.setGeometry( + x, + y, + toast_width, + toast_container_height + ) + + def show_toast(self, message: str, toast_type: ToastType = ToastType.INFO, + duration: int = 3000, title: str = None): + """ + Show a new toast notification. + + Args: + message: Description text to display + toast_type: Type of toast (INFO, SUCCESS, WARNING, ERROR) + duration: Duration in milliseconds before auto-dismiss + title: Optional custom title (defaults to toast type name) + """ + # Create new toast + toast = Toast(message, toast_type, duration, title, self) + + # Add to layout and list + self.layout.addWidget(toast) + self.toasts.append(toast) + + # Show with animation + toast.show_animated() + + # Connect deletion signal to cleanup + toast.destroyed.connect(lambda: self._remove_toast(toast)) + + # Update position + self._update_position() + + def _remove_toast(self, toast: Toast): + """Remove toast from tracking list.""" + if toast in self.toasts: + self.toasts.remove(toast) + + def info(self, message: str, duration: int = 3000, title: str = None): + """Show an info toast.""" + self.show_toast(message, ToastType.INFO, duration, title) + + def success(self, message: str, duration: int = 3000, title: str = None): + """Show a success toast.""" + self.show_toast(message, ToastType.SUCCESS, duration, title) + + def warning(self, message: str, duration: int = 4000, title: str = None): + """Show a warning toast (slightly longer duration).""" + self.show_toast(message, ToastType.WARNING, duration, title) + + def error(self, message: str, duration: int = 5000, title: str = 
None): + """Show an error toast (longer duration).""" + self.show_toast(message, ToastType.ERROR, duration, title) + + def clear_all(self): + """Dismiss all active toasts.""" + for toast in self.toasts[:]: # Copy list to avoid modification during iteration + toast.dismiss() + + +# Convenience function for standalone usage +def show_toast(parent: QWidget, message: str, toast_type: ToastType = ToastType.INFO, + duration: int = 3000, title: str = None): + """ + Show a toast notification (convenience function). + + Args: + parent: Parent widget + message: Description text to display + toast_type: Type of toast + duration: Duration in milliseconds + title: Optional custom title + """ + if not hasattr(parent, '_toast_manager'): + parent._toast_manager = ToastManager(parent) + + parent._toast_manager.show_toast(message, toast_type, duration, title) \ No newline at end of file diff --git a/app_context.py b/app_context.py index 89cf0ab..50ecc6c 100644 --- a/app_context.py +++ b/app_context.py @@ -11,6 +11,7 @@ if TYPE_CHECKING: from UI.settings.settings_main import SettingsDialog + from UI.widgets.toast_widget import ToastManager class AppContext: @@ -34,6 +35,8 @@ def __init__(self): self._settings_dialog: Optional['SettingsDialog'] = None self._settings_manager: Optional[ForgeSettingsManager] = None self._settings: Optional[ForgeSettings] = None + self._toast_manager: Optional['ToastManager'] = None + self._main_window = None self._initialized = True # Load settings @@ -56,6 +59,19 @@ def settings_dialog(self) -> Optional['SettingsDialog']: """Get the settings dialog instance""" return self._settings_dialog + @property + def toast(self) -> Optional['ToastManager']: + """Get the toast manager instance""" + return self._toast_manager + + def register_main_window(self, window): + """Register the main window instance""" + self._main_window = window + # Initialize toast manager when main window is registered + if self._toast_manager is None: + from UI.widgets.toast_widget 
import ToastManager + self._toast_manager = ToastManager(window) + def register_settings_dialog(self, dialog: 'SettingsDialog'): """Register the settings dialog instance""" self._settings_dialog = dialog @@ -118,6 +134,8 @@ def cleanup(self): self._settings_dialog = None self._settings_manager = None self._settings = None + self._toast_manager = None + self._main_window = None # Global instance accessor diff --git a/camera/amscope_camera.py b/camera/amscope_camera.py index c4feb77..f8b7aa8 100644 --- a/camera/amscope_camera.py +++ b/camera/amscope_camera.py @@ -2,8 +2,13 @@ Amscope camera implementation using the amcam SDK. """ -from typing import Tuple, Callable, Any, Optional, TYPE_CHECKING +from typing import Tuple, Callable, Any, Optional, Dict, TYPE_CHECKING +from types import SimpleNamespace from pathlib import Path +import ctypes +import numpy as np +import threading +import gc from camera.base_camera import BaseCamera, CameraResolution, CameraInfo from logger import get_logger @@ -103,7 +108,6 @@ def get_event_constants(cls): Returns: SimpleNamespace with event constants """ - from types import SimpleNamespace amcam = cls._get_sdk_static() return SimpleNamespace( IMAGE=amcam.AMCAM_EVENT_IMAGE, @@ -175,6 +179,14 @@ def start_capture(self, callback: Callable, context: Any) -> bool: amcam = self._get_sdk() try: + # Get current resolution to allocate frame buffer + res_index, width, height = self.get_current_resolution() + + # Create persistent frame buffer (like manufacturer's self.pData) + # This will be continuously updated by the event callback + buffer_size = amcam.TDIBWIDTHBYTES(width * 24) * height + self._frame_buffer = bytearray(buffer_size) # Use bytearray so it's mutable + self._callback = callback self._callback_context = context self._hcam.StartPullModeWithCallback(self._event_callback_wrapper, self) @@ -191,16 +203,33 @@ def stop_capture(self): except self._get_sdk().HRESULTException: pass - def pull_image(self, buffer: bytes, bits_per_pixel: int 
= 24) -> bool: - """Pull the latest image into buffer""" + def pull_image(self, buffer: ctypes.Array, bits_per_pixel: int = 24, timeout_ms: int = 1000) -> bool: + """ + Pull the latest image into buffer (expects ctypes.create_string_buffer) + + Args: + buffer: ctypes buffer to receive image data + bits_per_pixel: Bits per pixel (typically 24) + timeout_ms: Timeout in milliseconds to wait for frame (default 1000ms) + + Returns: + True if successful, False otherwise + """ if not self._hcam: + logger = get_logger() + logger.error("Cannot pull image: camera handle is None") return False amcam = self._get_sdk() try: - self._hcam.PullImageV4(buffer, 0, bits_per_pixel, 0, None) + # Use WaitImageV4 to wait for a frame (bStill=0 for video stream) + # This is more reliable than PullImageV2 which may fail if no frame is ready + self._hcam.WaitImageV4(timeout_ms, buffer, 0, bits_per_pixel, 0, None) return True - except self._get_sdk().HRESULTException: + except self._get_sdk().HRESULTException as e: + # If timeout or no frame available, log the error + logger = get_logger() + logger.error(f"Failed to pull image: {e}") return False def snap_image(self, resolution_index: int = 0) -> bool: @@ -215,12 +244,12 @@ def snap_image(self, resolution_index: int = 0) -> bool: except self._get_sdk().HRESULTException: return False - def pull_still_image(self, buffer: bytes, bits_per_pixel: int = 24) -> Tuple[bool, int, int]: + def pull_still_image(self, buffer: ctypes.Array, bits_per_pixel: int = 24) -> Tuple[bool, int, int]: """ Pull a still image into buffer Args: - buffer: Buffer to receive image data (should be large enough) + buffer: Buffer to receive image data (ctypes.create_string_buffer, should be large enough) bits_per_pixel: Bits per pixel (typically 24) Returns: @@ -230,15 +259,12 @@ def pull_still_image(self, buffer: bytes, bits_per_pixel: int = 24) -> Tuple[boo return False, 0, 0 amcam = self._get_sdk() - info = amcam.AmcamFrameInfoV3() - try: - # First peek to get dimensions 
- self._hcam.PullImageV3(None, 1, bits_per_pixel, 0, info) - if info.width > 0 and info.height > 0: - # Then pull the actual image - self._hcam.PullImageV3(buffer, 1, bits_per_pixel, 0, info) - return True, info.width, info.height - return False, 0, 0 + try: + # Get still resolution to return dimensions + w, h = self._hcam.get_StillResolution(0) + # Use PullStillImageV2 which works with ctypes.create_string_buffer + self._hcam.PullStillImageV2(buffer, bits_per_pixel, None) + return True, w, h except self._get_sdk().HRESULTException: return False, 0, 0 @@ -414,6 +440,515 @@ def auto_white_balance(self) -> bool: except self._get_sdk().HRESULTException: return False + # ======================================================================== + # Image Processing Parameters + # ======================================================================== + + def get_hue(self) -> int: + """ + Get hue value. + + Returns: + Hue value in range [-180, 180] + + Raises: + RuntimeError: If camera is not initialized + """ + if not self._hcam: + raise RuntimeError("Camera not initialized") + + try: + return self._hcam.get_Hue() + except self._get_sdk().HRESULTException as e: + raise RuntimeError(f"Failed to get hue: {e}") from e + + def set_hue(self, hue: int) -> bool: + """ + Set hue value. + + Args: + hue: Hue value in range [-180, 180] + + Returns: + True if successful, False otherwise + """ + if not self._hcam: + return False + + try: + self._hcam.put_Hue(hue) + return True + except self._get_sdk().HRESULTException: + return False + + def get_saturation(self) -> int: + """ + Get saturation value. 
+ + Returns: + Saturation value in range [0, 255] + + Raises: + RuntimeError: If camera is not initialized + """ + if not self._hcam: + raise RuntimeError("Camera not initialized") + + try: + return self._hcam.get_Saturation() + except self._get_sdk().HRESULTException as e: + raise RuntimeError(f"Failed to get saturation: {e}") from e + + def set_saturation(self, saturation: int) -> bool: + """ + Set saturation value. + + Args: + saturation: Saturation value in range [0, 255] + + Returns: + True if successful, False otherwise + """ + if not self._hcam: + return False + + try: + self._hcam.put_Saturation(saturation) + return True + except self._get_sdk().HRESULTException: + return False + + def get_brightness(self) -> int: + """ + Get brightness value. + + Returns: + Brightness value in range [-64, 64] + + Raises: + RuntimeError: If camera is not initialized + """ + if not self._hcam: + raise RuntimeError("Camera not initialized") + + try: + return self._hcam.get_Brightness() + except self._get_sdk().HRESULTException as e: + raise RuntimeError(f"Failed to get brightness: {e}") from e + + def set_brightness(self, brightness: int) -> bool: + """ + Set brightness value. + + Args: + brightness: Brightness value in range [-64, 64] + + Returns: + True if successful, False otherwise + """ + if not self._hcam: + return False + + try: + self._hcam.put_Brightness(brightness) + return True + except self._get_sdk().HRESULTException: + return False + + def get_contrast(self) -> int: + """ + Get contrast value. + + Returns: + Contrast value in range [-100, 100] + + Raises: + RuntimeError: If camera is not initialized + """ + if not self._hcam: + raise RuntimeError("Camera not initialized") + + try: + return self._hcam.get_Contrast() + except self._get_sdk().HRESULTException as e: + raise RuntimeError(f"Failed to get contrast: {e}") from e + + def set_contrast(self, contrast: int) -> bool: + """ + Set contrast value. 
+ + Args: + contrast: Contrast value in range [-100, 100] + + Returns: + True if successful, False otherwise + """ + if not self._hcam: + return False + + try: + self._hcam.put_Contrast(contrast) + return True + except self._get_sdk().HRESULTException: + return False + + def get_gamma(self) -> int: + """ + Get gamma value. + + Returns: + Gamma value in range [20, 180] + + Raises: + RuntimeError: If camera is not initialized + """ + if not self._hcam: + raise RuntimeError("Camera not initialized") + + try: + return self._hcam.get_Gamma() + except self._get_sdk().HRESULTException as e: + raise RuntimeError(f"Failed to get gamma: {e}") from e + + def set_gamma(self, gamma: int) -> bool: + """ + Set gamma value. + + Args: + gamma: Gamma value in range [20, 180] + + Returns: + True if successful, False otherwise + """ + if not self._hcam: + return False + + try: + self._hcam.put_Gamma(gamma) + return True + except self._get_sdk().HRESULTException: + return False + + def get_auto_exposure_target(self) -> int: + """ + Get auto exposure target brightness. + + Returns: + Auto exposure target in range [16, 235] + + Raises: + RuntimeError: If camera is not initialized + """ + if not self._hcam: + raise RuntimeError("Camera not initialized") + + try: + return self._hcam.get_AutoExpoTarget() + except self._get_sdk().HRESULTException as e: + raise RuntimeError(f"Failed to get auto exposure target: {e}") from e + + def set_auto_exposure_target(self, target: int) -> bool: + """ + Set auto exposure target brightness. + + Args: + target: Auto exposure target in range [16, 235] + + Returns: + True if successful, False otherwise + """ + if not self._hcam: + return False + + try: + self._hcam.put_AutoExpoTarget(target) + return True + except self._get_sdk().HRESULTException: + return False + + def get_white_balance_gain(self) -> Tuple[int, int, int]: + """ + Get RGB white balance gain values. 
+ + Returns: + Tuple of (R, G, B) gain values in range [-127, 127] + + Raises: + RuntimeError: If camera is not initialized or not supported + + Note: + Only works in RGB Gain mode. + """ + if not self._hcam: + raise RuntimeError("Camera not initialized") + + try: + return self._hcam.get_WhiteBalanceGain() + except self._get_sdk().HRESULTException as e: + raise RuntimeError(f"Failed to get white balance gain (may not be supported in Temp/Tint mode): {e}") from e + + def set_white_balance_gain(self, r: int, g: int, b: int) -> bool: + """ + Set RGB white balance gain values. + + Args: + r: Red gain in range [-127, 127] + g: Green gain in range [-127, 127] + b: Blue gain in range [-127, 127] + + Returns: + True if successful, False otherwise + + Note: + Only works in RGB Gain mode. + """ + if not self._hcam: + return False + + try: + self._hcam.put_WhiteBalanceGain([r, g, b]) + return True + except self._get_sdk().HRESULTException: + return False + + def get_level_range(self) -> Tuple[Tuple[int, int, int, int], Tuple[int, int, int, int]]: + """ + Get level range (low and high) for RGBA channels. + + Returns: + Tuple of ((R_low, G_low, B_low, A_low), (R_high, G_high, B_high, A_high)) + Each value in range [0, 255] + + Raises: + RuntimeError: If camera is not initialized + """ + if not self._hcam: + raise RuntimeError("Camera not initialized") + + try: + low, high = self._hcam.get_LevelRange() + return (tuple(low), tuple(high)) + except self._get_sdk().HRESULTException as e: + raise RuntimeError(f"Failed to get level range: {e}") from e + + def set_level_range( + self, + low: Tuple[int, int, int, int], + high: Tuple[int, int, int, int] + ) -> bool: + """ + Set level range (low and high) for RGBA channels. 
+ + Args: + low: Tuple of (R_low, G_low, B_low, A_low), each in range [0, 255] + high: Tuple of (R_high, G_high, B_high, A_high), each in range [0, 255] + + Returns: + True if successful, False otherwise + """ + if not self._hcam: + return False + + try: + self._hcam.put_LevelRange(list(low), list(high)) + return True + except self._get_sdk().HRESULTException: + return False + + def auto_level_range(self) -> bool: + """ + Perform automatic level range adjustment. + + Returns: + True if successful, False otherwise + """ + if not self._hcam: + return False + + try: + self._hcam.LevelRangeAuto() + return True + except self._get_sdk().HRESULTException: + return False + + def get_option(self, option: int) -> int: + """ + Get a camera option value. + + Args: + option: Option ID (use AMCAM_OPTION_* constants) + + Returns: + Option value + + Raises: + RuntimeError: If camera is not initialized + """ + if not self._hcam: + raise RuntimeError("Camera not initialized") + + try: + return self._hcam.get_Option(option) + except self._get_sdk().HRESULTException as e: + raise RuntimeError(f"Failed to get option {option}: {e}") from e + + def set_option(self, option: int, value: int) -> bool: + """ + Set a camera option value. + + Args: + option: Option ID (use AMCAM_OPTION_* constants) + value: Value to set + + Returns: + True if successful, False otherwise + """ + if not self._hcam: + return False + + try: + self._hcam.put_Option(option, value) + return True + except self._get_sdk().HRESULTException: + return False + + def get_sharpening(self) -> Tuple[int, int, int]: + """ + Get sharpening parameters. 
+ + Returns: + Tuple of (strength, radius, threshold): + - strength: [0, 500], 0 = disabled + - radius: [1, 10] + - threshold: [0, 255] + + Raises: + RuntimeError: If camera is not initialized + """ + amcam = self._get_sdk() + if not self._hcam: + raise RuntimeError("Camera not initialized") + + try: + # Get sharpening option value + val = self._hcam.get_Option(amcam.AMCAM_OPTION_SHARPENING) + + # Extract components: (threshold << 24) | (radius << 16) | strength + strength = val & 0xFFFF + radius = (val >> 16) & 0xFF + threshold = (val >> 24) & 0xFF + + return (strength, radius, threshold) + except self._get_sdk().HRESULTException as e: + raise RuntimeError(f"Failed to get sharpening: {e}") from e + + def set_sharpening(self, strength: int, radius: int = 2, threshold: int = 0) -> bool: + """ + Set sharpening parameters. + + Args: + strength: Sharpening strength [0, 500], 0 = disabled + radius: Sharpening radius [1, 10], default 2 + threshold: Sharpening threshold [0, 255], default 0 + + Returns: + True if successful, False otherwise + """ + amcam = self._get_sdk() + if not self._hcam: + return False + + try: + # Combine into single value: (threshold << 24) | (radius << 16) | strength + val = (threshold << 24) | (radius << 16) | strength + self._hcam.put_Option(amcam.AMCAM_OPTION_SHARPENING, val) + return True + except self._get_sdk().HRESULTException: + return False + + def get_linear_tone_mapping(self) -> bool: + """ + Get linear tone mapping state. + + Returns: + True if enabled, False if disabled + + Raises: + RuntimeError: If camera is not initialized + """ + amcam = self._get_sdk() + if not self._hcam: + raise RuntimeError("Camera not initialized") + + try: + val = self._hcam.get_Option(amcam.AMCAM_OPTION_LINEAR) + return val == 1 + except self._get_sdk().HRESULTException as e: + raise RuntimeError(f"Failed to get linear tone mapping: {e}") from e + + def set_linear_tone_mapping(self, enabled: bool) -> bool: + """ + Set linear tone mapping on/off. 
+ + Args: + enabled: True to enable, False to disable + + Returns: + True if successful, False otherwise + """ + amcam = self._get_sdk() + if not self._hcam: + return False + + try: + self._hcam.put_Option(amcam.AMCAM_OPTION_LINEAR, 1 if enabled else 0) + return True + except self._get_sdk().HRESULTException: + return False + + def get_curve_tone_mapping(self) -> int: + """ + Get curve tone mapping setting. + + Returns: + 0 = off, 1 = polynomial, 2 = logarithmic + + Raises: + RuntimeError: If camera is not initialized + """ + amcam = self._get_sdk() + if not self._hcam: + raise RuntimeError("Camera not initialized") + + try: + return self._hcam.get_Option(amcam.AMCAM_OPTION_CURVE) + except self._get_sdk().HRESULTException as e: + raise RuntimeError(f"Failed to get curve tone mapping: {e}") from e + + def set_curve_tone_mapping(self, curve_type: int) -> bool: + """ + Set curve tone mapping. + + Args: + curve_type: 0 = off, 1 = polynomial, 2 = logarithmic + + Returns: + True if successful, False otherwise + """ + amcam = self._get_sdk() + if not self._hcam: + return False + + try: + self._hcam.put_Option(amcam.AMCAM_OPTION_CURVE, curve_type) + return True + except self._get_sdk().HRESULTException: + return False + + # ======================================================================== + # End of Image Processing Parameters + # ======================================================================== + def get_frame_rate(self) -> Tuple[int, int, int]: """Get frame rate info (frames_in_period, time_period_ms, total_frames)""" amcam = self._get_sdk() @@ -476,6 +1011,391 @@ def get_still_resolutions(self) -> list[CameraResolution]: resolutions.append(CameraResolution(res.width, res.height)) return resolutions + def get_camera_metadata(self) -> Dict[str, Any]: + """ + Get current camera settings as metadata + + Returns: + Dictionary containing current camera settings including: + - Camera identification (name, model, id) + - Resolution settings + - Exposure settings 
(time, gain, auto-exposure state) + - White balance settings (if supported) + - Image processing parameters (hue, saturation, brightness, etc.) + - Frame rate information + + Note: + If camera is not initialized or a parameter cannot be read, + that parameter will be omitted from the metadata dictionary. + """ + metadata = {} + + # Camera identification + if self._camera_info: + metadata["camera_name"] = self._camera_info.displayname + metadata["camera_id"] = self._camera_info.id + if self._camera_info.model: + metadata["model_name"] = getattr(self._camera_info.model, 'name', 'Unknown') + + # Helper function to safely get values + def safe_get(getter_func, key, default=None): + try: + return getter_func() + except (RuntimeError, Exception): + return default + + # Resolution + res_index, width, height = self.get_current_resolution() + metadata["resolution_index"] = res_index + metadata["width"] = width + metadata["height"] = height + metadata["resolution"] = f"{width}x{height}" + + # Exposure settings + metadata["exposure_time_us"] = self.get_exposure_time() + metadata["gain_percent"] = self.get_gain() + metadata["auto_exposure_enabled"] = self.get_auto_exposure() + + target = safe_get(self.get_auto_exposure_target, "auto_exposure_target") + if target is not None: + metadata["auto_exposure_target"] = target + + # Exposure range info + exp_min, exp_max, exp_def = self.get_exposure_range() + metadata["exposure_range_us"] = { + "min": exp_min, + "max": exp_max, + "default": exp_def + } + + # Gain range info + gain_min, gain_max, gain_def = self.get_gain_range() + metadata["gain_range_percent"] = { + "min": gain_min, + "max": gain_max, + "default": gain_def + } + + # White balance (if supported) + if self.supports_white_balance(): + temp, tint = self.get_white_balance() + metadata["white_balance_temperature"] = temp + metadata["white_balance_tint"] = tint + + (temp_min, temp_max), (tint_min, tint_max) = self.get_white_balance_range() + metadata["white_balance_range"] = 
{ + "temperature": {"min": temp_min, "max": temp_max}, + "tint": {"min": tint_min, "max": tint_max} + } + + # RGB gain mode (may not work in Temp/Tint mode) + rgb_gain = safe_get(self.get_white_balance_gain, "white_balance_rgb_gain") + if rgb_gain is not None: + r_gain, g_gain, b_gain = rgb_gain + metadata["white_balance_rgb_gain"] = { + "red": r_gain, + "green": g_gain, + "blue": b_gain + } + else: + metadata["monochrome"] = True + + # Image processing parameters + hue = safe_get(self.get_hue, "hue") + if hue is not None: + metadata["hue"] = hue + + saturation = safe_get(self.get_saturation, "saturation") + if saturation is not None: + metadata["saturation"] = saturation + + brightness = safe_get(self.get_brightness, "brightness") + if brightness is not None: + metadata["brightness"] = brightness + + contrast = safe_get(self.get_contrast, "contrast") + if contrast is not None: + metadata["contrast"] = contrast + + gamma = safe_get(self.get_gamma, "gamma") + if gamma is not None: + metadata["gamma"] = gamma + + # Level range + level_range = safe_get(self.get_level_range, "level_range") + if level_range is not None: + low, high = level_range + metadata["level_range_low"] = { + "red": low[0], + "green": low[1], + "blue": low[2], + "alpha": low[3] + } + metadata["level_range_high"] = { + "red": high[0], + "green": high[1], + "blue": high[2], + "alpha": high[3] + } + + # Sharpening + sharpening = safe_get(self.get_sharpening, "sharpening") + if sharpening is not None: + strength, radius, threshold = sharpening + metadata["sharpening"] = { + "strength": strength, + "radius": radius, + "threshold": threshold + } + + # Tone mapping + linear = safe_get(self.get_linear_tone_mapping, "linear_tone_mapping") + if linear is not None: + metadata["linear_tone_mapping"] = linear + + curve = safe_get(self.get_curve_tone_mapping, "curve_tone_mapping") + if curve is not None: + curve_names = {0: "off", 1: "polynomial", 2: "logarithmic"} + metadata["curve_tone_mapping"] = 
curve_names.get(curve, "unknown") + metadata["curve_tone_mapping_value"] = curve + + # Frame rate + frames, period_ms, total = self.get_frame_rate() + if period_ms > 0: + metadata["frame_rate_fps"] = round(frames * 1000 / period_ms, 2) + metadata["frame_rate_info"] = { + "frames_in_period": frames, + "period_ms": period_ms, + "total_frames": total + } + + # SDK version if available + amcam = self._get_sdk() + try: + metadata["sdk_version"] = amcam.Amcam.Version() + except Exception: + metadata["sdk_version"] = "unknown" + + return metadata + + def capture_and_save_still( + self, + filepath: Path, + resolution_index: int = 0, + additional_metadata: Optional[Dict[str, Any]] = None, + timeout_ms: int = 5000 + ) -> bool: + """ + Capture a still image and save it to disk with metadata. + + This method handles the complete workflow: + 1. Triggers still image capture at specified resolution + 2. Waits for image to be ready (with timeout) + 3. Pulls image data and converts to numpy array + 4. Saves with full metadata + + Args: + filepath: Path where image should be saved + resolution_index: Resolution index for still capture (0 = highest) + additional_metadata: Optional dictionary of additional metadata to save + timeout_ms: Timeout in milliseconds to wait for capture (default 5000) + + Returns: + True if successful, False otherwise + """ + logger = get_logger() + + if not self._hcam: + logger.error("Camera not open") + return False + + if not self.supports_still_capture(): + logger.error("Camera does not support still capture") + logger.info(f"Camera model: {self._camera_info.model.name if self._camera_info else 'Unknown'}") + logger.info(f"Still resolution count: {self._camera_info.model.still if self._camera_info else 0}") + return False + + try: + # Get resolution for this still index + still_resolutions = self.get_still_resolutions() + if resolution_index >= len(still_resolutions): + logger.error(f"Invalid resolution index: {resolution_index}") + return False + + res 
= still_resolutions[resolution_index] + width, height = res.width, res.height + logger.debug(f"Still capture target resolution: {width}x{height}") + + # Use Python bytes instead of ctypes buffer + amcam = self._get_sdk() + buffer_size = amcam.TDIBWIDTHBYTES(width * 24) * height + pData = bytes(buffer_size) + + # Event to signal still image is ready + still_ready = threading.Event() + capture_success = {'success': False, 'width': 0, 'height': 0} + + # Store original callback + original_callback = self._callback + original_context = self._callback_context + + logger.debug(f"Original callback: {original_callback is not None}, context: {original_context is not None}") + + def still_callback(event, ctx): + logger.debug(f"Still callback received event: {event}, STILLIMAGE={self.EVENT_STILLIMAGE}, IMAGE={self.EVENT_IMAGE}") + if event == self.EVENT_STILLIMAGE: + # Pull the still image using PullImageV3 + info = amcam.AmcamFrameInfoV3() + try: + logger.debug("Attempting to pull still image...") + self._hcam.PullImageV3(pData, 1, 24, 0, info) + capture_success['success'] = True + capture_success['width'] = info.width + capture_success['height'] = info.height + logger.debug(f"Still image pulled successfully: {info.width}x{info.height}") + except Exception as e: + logger.error(f"Failed to pull still image: {e}") + capture_success['success'] = False + still_ready.set() + + # Also call original callback if it exists + if original_callback: + original_callback(event, original_context) + + # Temporarily replace callback + self._callback = still_callback + self._callback_context = None + + logger.debug("Triggering still capture...") + # Trigger still capture + if not self.snap_image(resolution_index): + logger.error("Failed to trigger still capture") + self._callback = original_callback + self._callback_context = original_context + return False + + logger.debug(f"Waiting for still image (timeout: {timeout_ms}ms)...") + # Wait for still image with timeout + if not 
still_ready.wait(timeout_ms / 1000.0): + logger.error(f"Still capture timed out after {timeout_ms}ms") + logger.error("STILLIMAGE event never received") + self._callback = original_callback + self._callback_context = original_context + return False + + logger.debug("Still image event received!") + + # Restore original callback + self._callback = original_callback + self._callback_context = original_context + + if not capture_success['success']: + logger.error("Failed to pull still image") + return False + + # Convert to numpy array - creates a copy + w = capture_success['width'] + h = capture_success['height'] + stride = amcam.TDIBWIDTHBYTES(w * 24) + image_data = np.frombuffer(pData, dtype=np.uint8).reshape((h, stride))[:, :w*3].reshape((h, w, 3)).copy() + + # Convert BGR to RGB + image_data = image_data[:, :, ::-1].copy() + + # Delete pData immediately + del pData + + # Save with metadata + success = self.save_image(image_data, filepath, additional_metadata) + + # Explicitly delete and force GC + del image_data + gc.collect() + + if success: + logger.info(f"Still image captured and saved: {filepath}") + else: + logger.error(f"Failed to save still image: {filepath}") + + return success + + except Exception as e: + logger.exception(f"Failed to capture and save still image: {filepath}") + return False + + def capture_and_save_stream( + self, + filepath: Path, + additional_metadata: Optional[Dict[str, Any]] = None + ) -> bool: + """ + Capture current frame from live stream and save it to disk with metadata. + + Uses the manufacturer's approach: directly saves the most recent frame + from the continuously-updated buffer (no waiting or pausing needed). 
+ + Args: + filepath: Path where image should be saved + additional_metadata: Optional dictionary of additional metadata to save + + Returns: + True if successful, False otherwise + + Note: + Camera must be in capture mode (start_capture() must have been called) + """ + logger = get_logger() + + if not self._hcam: + logger.error("Camera not open") + return False + + if not self._is_open: + logger.error("Camera not in capture mode") + return False + + # Check if we have a frame buffer (set during start_capture) + if not hasattr(self, '_frame_buffer') or self._frame_buffer is None: + logger.error("No frame buffer available - camera may not be streaming") + return False + + try: + # Get current resolution + res_index, width, height = self.get_current_resolution() + + # Simply copy from the current frame buffer (updated continuously by callback) + # This is the manufacturer's approach - no waiting or pausing needed! + amcam = self._get_sdk() + stride = amcam.TDIBWIDTHBYTES(width * 24) + + # Create numpy array from the persistent buffer + image_data = np.frombuffer(self._frame_buffer, dtype=np.uint8).reshape((height, stride))[:, :width*3].reshape((height, width, 3)).copy() + + # Convert BGR to RGB + image_data = image_data[:, :, ::-1].copy() + + # Save with metadata + success = self.save_image(image_data, filepath, additional_metadata) + + # Explicitly delete image_data and force GC + del image_data + gc.collect() + + if success: + logger.info(f"Stream frame captured and saved: {filepath}") + else: + logger.error(f"Failed to save stream frame: {filepath}") + + return success + + except Exception as e: + logger.exception(f"Failed to capture and save stream frame: {filepath}") + return False + + except Exception as e: + logger.exception(f"Failed to capture and save stream frame: {filepath}") + return False + @staticmethod def calculate_buffer_size(width: int, height: int, bits_per_pixel: int = 24) -> int: """ @@ -527,7 +1447,18 @@ def _event_callback_wrapper(self, event: 
int, context: Any): """ Internal wrapper for camera events. Translates amcam events to the callback registered with start_capture. + Also updates the persistent frame buffer on IMAGE events (manufacturer's approach). """ - if self._callback and self._callback_context: - # Call the registered callback with the event - self._callback(event, self._callback_context) + # Update persistent frame buffer on IMAGE events + # This is how the manufacturer's example works - continuous buffer updates + if event == self.EVENT_IMAGE and hasattr(self, '_frame_buffer') and self._frame_buffer is not None: + try: + # Pull the latest frame into our persistent buffer + self._hcam.PullImageV4(self._frame_buffer, 0, 24, 0, None) + except: + pass # Silently ignore pull errors in callback + + # IMPORTANT: Always call the registered callback if it exists + # Don't check _callback_context because it might be None during still capture + if self._callback: + self._callback(event, self._callback_context) \ No newline at end of file diff --git a/camera/base_camera.py b/camera/base_camera.py index df89410..395ec99 100644 --- a/camera/base_camera.py +++ b/camera/base_camera.py @@ -4,9 +4,15 @@ """ from abc import ABC, abstractmethod -from typing import Optional, Tuple, Callable, Any +from typing import Optional, Tuple, Callable, Any, Dict from dataclasses import dataclass from pathlib import Path +from datetime import datetime +import numpy as np +from PIL import Image, ExifTags +from PIL.Image import Exif +from PIL.TiffImagePlugin import ImageFileDirectory_v2 +import json @dataclass @@ -346,6 +352,16 @@ def enumerate_cameras() -> list[CameraInfo]: """ pass + @abstractmethod + def get_camera_metadata(self) -> Dict[str, Any]: + """ + Get current camera settings as metadata + + Returns: + Dictionary of camera settings (exposure, gain, white balance, etc.) 
+ """ + pass + @abstractmethod def supports_still_capture(self) -> bool: """ @@ -365,3 +381,309 @@ def get_still_resolutions(self) -> list[CameraResolution]: List of available still resolutions """ pass + + @abstractmethod + def capture_and_save_still( + self, + filepath: Path, + resolution_index: int = 0, + additional_metadata: Optional[Dict[str, Any]] = None, + timeout_ms: int = 5000 + ) -> bool: + """ + Capture a still image and save it to disk with metadata. + + This is a convenience method that handles the complete workflow: + 1. Triggers still image capture + 2. Waits for image to be ready + 3. Pulls image data + 4. Saves with metadata + + Args: + filepath: Path where image should be saved + resolution_index: Resolution index for still capture (0 = highest) + additional_metadata: Optional dictionary of additional metadata to save + timeout_ms: Timeout in milliseconds to wait for capture + + Returns: + True if successful, False otherwise + + Note: + Only works if supports_still_capture() returns True + """ + pass + + @abstractmethod + def capture_and_save_stream( + self, + filepath: Path, + additional_metadata: Optional[Dict[str, Any]] = None + ) -> bool: + """ + Capture current frame from live stream and save it to disk with metadata. + + This is a convenience method that handles the complete workflow: + 1. Pulls current frame from live stream + 2. Converts to numpy array + 3. Saves with metadata + + Args: + filepath: Path where image should be saved + additional_metadata: Optional dictionary of additional metadata to save + + Returns: + True if successful, False otherwise + + Note: + Camera must be in capture mode (start_capture() must have been called) + """ + pass + + def save_image( + self, + image_data: np.ndarray, + filepath: Path, + additional_metadata: Optional[Dict[str, Any]] = None + ) -> bool: + """ + Save image to disk with camera and optional additional metadata. 
+ + Supports formats: TIFF, TIF, JPG, JPEG, PNG + + Args: + image_data: Image as numpy array (height, width, channels) or (height, width) + filepath: Path where image should be saved + additional_metadata: Optional dictionary of additional metadata to save + + Returns: + True if successful, False otherwise + + Note: + - TIFF/TIF: Metadata saved in TIFF tags and as JSON in ImageDescription + - JPG/JPEG: Metadata saved in EXIF UserComment as JSON + - PNG: Metadata saved in PNG text chunks + """ + pil_image = None + try: + from logger import get_logger + logger = get_logger() + + # Ensure filepath is a Path object + filepath = Path(filepath) + + # Get camera metadata + camera_metadata = self.get_camera_metadata() + + # Combine with additional metadata + full_metadata = { + "timestamp": datetime.now().isoformat(), + "camera": camera_metadata + } + + if additional_metadata: + full_metadata["additional"] = additional_metadata + + # Convert to PIL Image + if image_data.dtype != np.uint8: + # Normalize to uint8 if needed + if image_data.max() > 255: + image_data = (image_data / image_data.max() * 255).astype(np.uint8) + else: + image_data = image_data.astype(np.uint8) + + # Handle grayscale vs RGB + if len(image_data.shape) == 2: + pil_image = Image.fromarray(image_data, mode='L') + elif image_data.shape[2] == 3: + pil_image = Image.fromarray(image_data, mode='RGB') + elif image_data.shape[2] == 4: + pil_image = Image.fromarray(image_data, mode='RGBA') + else: + logger.error(f"Unsupported image shape: {image_data.shape}") + return False + + # Get file extension + ext = filepath.suffix.lower() + + # Save with format-specific metadata + if ext in ['.tif', '.tiff']: + self._save_tiff_with_metadata(pil_image, filepath, full_metadata, logger) + elif ext in ['.jpg', '.jpeg']: + self._save_jpeg_with_metadata(pil_image, filepath, full_metadata, logger) + elif ext == '.png': + self._save_png_with_metadata(pil_image, filepath, full_metadata, logger) + else: + 
logger.error(f"Unsupported file format: {ext}") + return False + + logger.info(f"Image saved successfully: {filepath}") + return True + + except Exception as e: + try: + from logger import get_logger + logger = get_logger() + logger.exception(f"Failed to save image to {filepath}") + except: + print(f"Failed to save image to {filepath}: {e}") + return False + finally: + # Explicitly close and delete PIL image to free memory + if pil_image is not None: + pil_image.close() + del pil_image + + def _save_tiff_with_metadata( + self, + pil_image: Image.Image, + filepath: Path, + metadata: Dict[str, Any], + logger + ): + """Save TIFF with metadata in EXIF tags and ImageDescription""" + # Get tag mappings from Base enum + base_tags = {tag.name: tag.value for tag in ExifTags.Base} + + # Create Exif object + exif = Exif() + + # Add software information - placeholder for version + exif[base_tags['Software']] = "Forge - v{VERSION_PLACEHOLDER}" + + # Add timestamp + timestamp = metadata.get("timestamp", datetime.now().isoformat()) + exif[base_tags['DateTime']] = datetime.fromisoformat(timestamp).strftime("%Y:%m:%d %H:%M:%S") + + # Add camera metadata if available + camera_meta = metadata.get("camera", {}) + + # Camera Make and Model + if "model" in camera_meta: + exif[base_tags['Model']] = str(camera_meta["model"]) + + # Get the EXIF IFD to add camera-specific tags + exif_ifd = exif.get_ifd(ExifTags.IFD.Exif) + + # Exposure time (tag ExposureTime) + if "exposure_time_us" in camera_meta: + exposure_sec = camera_meta["exposure_time_us"] / 1_000_000 + # Store as rational (numerator, denominator) + exif_ifd[base_tags['ExposureTime']] = (int(exposure_sec * 1_000_000), 1_000_000) + + # ISO Speed (tag ISOSpeedRatings) + if "gain_percent" in camera_meta: + iso_value = camera_meta["gain_percent"] + exif_ifd[base_tags['ISOSpeedRatings']] = iso_value + + # Add timestamp to EXIF IFD as well + exif_ifd[base_tags['DateTimeOriginal']] = datetime.fromisoformat(timestamp).strftime("%Y:%m:%d 
%H:%M:%S") + exif_ifd[base_tags['DateTimeDigitized']] = datetime.fromisoformat(timestamp).strftime("%Y:%m:%d %H:%M:%S") + + # Image description from user-provided metadata only + additional_meta = metadata.get("additional", {}) + description_parts = [] + + if "description" in additional_meta: + description_parts.append(str(additional_meta["description"])) + if "sample_id" in additional_meta: + description_parts.append(f"Sample: {additional_meta['sample_id']}") + + # Only set ImageDescription if user provided a description + if description_parts: + exif[base_tags['ImageDescription']] = " | ".join(description_parts) + + # Store complete metadata as JSON in UserComment instead + metadata_json = json.dumps(metadata, indent=2) + exif_ifd[base_tags['UserComment']] = metadata_json.encode('utf-16') + + # Save with EXIF + pil_image.save(filepath, format='TIFF', exif=exif, compression='tiff_deflate') + logger.debug(f"TIFF with EXIF metadata saved to {filepath}") + + def _save_jpeg_with_metadata( + self, + pil_image: Image.Image, + filepath: Path, + metadata: Dict[str, Any], + logger + ): + """Save JPEG with metadata in proper EXIF tags""" + # Get tag mappings from Base enum + base_tags = {tag.name: tag.value for tag in ExifTags.Base} + + # Create Exif object + exif = Exif() + + # Add software information - placeholder for version + exif[base_tags['Software']] = "Forge - v{VERSION_PLACEHOLDER}" + + # Add timestamp + timestamp = metadata.get("timestamp", datetime.now().isoformat()) + exif[base_tags['DateTime']] = datetime.fromisoformat(timestamp).strftime("%Y:%m:%d %H:%M:%S") + + # Add camera metadata if available + camera_meta = metadata.get("camera", {}) + + # Camera Make and Model + if "model" in camera_meta: + exif[base_tags['Model']] = str(camera_meta["model"]) + + # Image description from additional metadata + additional_meta = metadata.get("additional", {}) + if "description" in additional_meta: + exif[base_tags['ImageDescription']] = str(additional_meta["description"]) 
+ elif "sample_id" in additional_meta: + exif[base_tags['ImageDescription']] = f"Sample: {additional_meta['sample_id']}" + + # Get the EXIF IFD to add camera-specific tags + exif_ifd = exif.get_ifd(ExifTags.IFD.Exif) + + # Exposure time + if "exposure_time_us" in camera_meta: + exposure_sec = camera_meta["exposure_time_us"] / 1_000_000 + # Store as rational (numerator, denominator) + exif_ifd[base_tags['ExposureTime']] = (int(exposure_sec * 1_000_000), 1_000_000) + + # ISO Speed + if "gain_percent" in camera_meta: + # Map gain percent to ISO-like value + iso_value = camera_meta["gain_percent"] + exif_ifd[base_tags['ISOSpeedRatings']] = iso_value + + # Add timestamp to EXIF IFD + exif_ifd[base_tags['DateTimeOriginal']] = datetime.fromisoformat(timestamp).strftime("%Y:%m:%d %H:%M:%S") + exif_ifd[base_tags['DateTimeDigitized']] = datetime.fromisoformat(timestamp).strftime("%Y:%m:%d %H:%M:%S") + + # Store complete metadata as JSON in UserComment for full data preservation + metadata_json = json.dumps(metadata, indent=2) + exif_ifd[base_tags['UserComment']] = metadata_json.encode('utf-16') + + # Save with EXIF + pil_image.save(filepath, format='JPEG', exif=exif, quality=95) + logger.debug(f"JPEG with EXIF metadata saved to {filepath}") + + def _save_png_with_metadata( + self, + pil_image: Image.Image, + filepath: Path, + metadata: Dict[str, Any], + logger + ): + """Save PNG with metadata in text chunks""" + from PIL import PngImagePlugin + + # Create PNG info + pnginfo = PngImagePlugin.PngInfo() + + # Add metadata as text chunks + pnginfo.add_text("Software", "Forge - v{VERSION_PLACEHOLDER}") + pnginfo.add_text("Metadata", json.dumps(metadata, indent=2)) + + # Add individual camera settings as separate chunks for easier access + camera_meta = metadata.get("camera", {}) + for key, value in camera_meta.items(): + pnginfo.add_text(f"Camera.{key}", str(value)) + + # Save with metadata + pil_image.save(filepath, format='PNG', pnginfo=pnginfo) + logger.debug(f"PNG metadata 
class CurveType(str, Enum):
    """Tone mapping curve types."""
    LOGARITHMIC = 'Logarithmic'
    POLYNOMIAL = 'Polynomial'
    OFF = 'Off'


class FileFormat(str, Enum):
    """Supported image file formats."""
    PNG = 'png'
    TIFF = 'tiff'
    JPEG = 'jpeg'
    BMP = 'bmp'


class RGBALevel(NamedTuple):
    """RGBA level range values (0-255 each)."""
    r: int
    g: int
    b: int
    a: int

    def validate(self) -> None:
        """Raise ValueError unless every channel lies in [0, 255]."""
        # NamedTuple gives us the field names in declaration order, so the
        # channels can be checked by walking the tuple itself.
        for name, value in zip(self._fields, self):
            if value < 0 or value > 255:
                raise ValueError(f"RGBALevel.{name} must be in range [0, 255], got {value}")


class RGBGain(NamedTuple):
    """RGB white balance gain values (-127 to 127 each)."""
    r: int
    g: int
    b: int

    def validate(self) -> None:
        """Raise ValueError unless every gain lies in [-127, 127]."""
        for name, value in zip(self._fields, self):
            if value < -127 or value > 127:
                raise ValueError(f"RGBGain.{name} must be in range [-127, 127], got {value}")
# | Parameter | Range | Default | # |-----------------------------------------------------------------| @@ -26,7 +81,12 @@ @dataclass class CameraSettings: - # Values + """Camera image processing settings with validation.""" + + # Version tracking (defaults to None, will be set to Forge version if missing) + version: str = get_app_context().settings.version + + # Image processing parameters auto_expo: bool = False exposure: int = 120 # Auto Exposure Target temp: int = 11616 # White balance temperature @@ -37,72 +97,152 @@ class CameraSettings: brightness: int = -64 gamma: int = 100 sharpening: int = 500 - - levelrange_low: Tuple[int, int, int, int] = (0, 0, 0, 0) - levelrange_high: Tuple[int, int, int, int] = (255, 255, 255, 255) - wbgain: Tuple[int, int, int] = (0, 0, 0) # (R, G, B) + + # Complex parameters (now type-safe) + levelrange_low: RGBALevel = RGBALevel(0, 0, 0, 0) + levelrange_high: RGBALevel = RGBALevel(255, 255, 255, 255) + wbgain: RGBGain = RGBGain(0, 0, 0) + + # Tone mapping and format linear: int = 0 # 0/1 - curve: str = 'Polynomial' - fformat: str = 'png' - - # Ranges (API docs) - exposure_min: int = 16 - exposure_max: int = 220 - - temp_min: int = 2000 - temp_max: int = 15000 - - tint_min: int = 200 - tint_max: int = 2500 - - levelrange_min: int = 0 - levelrange_max: int = 255 - - contrast_min: int = -100 - contrast_max: int = 100 - - hue_min: int = -180 - hue_max: int = 180 - - saturation_min: int = 0 - saturation_max: int = 255 - - brightness_min: int = -64 - brightness_max: int = 64 - - gamma_min: int = 20 - gamma_max: int = 180 - - wbgain_min: int = -127 - wbgain_max: int = 127 - - sharpening_min: int = 0 - sharpening_max: int = 500 - - linear_min: int = 0 - linear_max: int = 1 - - -# A pre-bound manager that knows how to load/save CameraSettings. -# You can instantiate this wherever you need camera configs. 
-def make_camera_settings_manager( - *, - root_dir: str = "./config/cameras", - default_filename: str = "default_settings.yaml", - backup_dirname: str = "backups", - backup_keep: int = 5, -) -> ConfigManager[CameraSettings]: - return ConfigManager[CameraSettings]( - CameraSettings, - root_dir=root_dir, - default_filename=default_filename, - backup_dirname=backup_dirname, - backup_keep=backup_keep, - ) - -CameraSettingsManager = make_camera_settings_manager( - root_dir="./config/cameras", - default_filename=DEFAULT_FILENAME, - backup_dirname="backups", - backup_keep=5, -) \ No newline at end of file + curve: CurveType = CurveType.POLYNOMIAL + fformat: FileFormat = FileFormat.PNG + + @classmethod + def get_ranges(cls) -> dict: + """ + Return validation ranges for all numeric parameters. + + Returns: + Dictionary mapping parameter names to (min, max) tuples + """ + return { + 'exposure': (16, 220), + 'temp': (2000, 15000), + 'tint': (200, 2500), + 'levelrange': (0, 255), + 'contrast': (-100, 100), + 'hue': (-180, 180), + 'saturation': (0, 255), + 'brightness': (-64, 64), + 'gamma': (20, 180), + 'wbgain': (-127, 127), + 'sharpening': (0, 500), + 'linear': (0, 1), + } + + def validate(self) -> None: + """ + Validate all settings are within acceptable ranges. 
+ + Raises: + ValueError: If any parameter is outside its valid range + """ + ranges = self.get_ranges() + + # Validate simple numeric parameters + for param, (min_val, max_val) in ranges.items(): + if param in ('levelrange', 'wbgain'): + continue # Handled separately + + value = getattr(self, param) + if not isinstance(value, bool) and not (min_val <= value <= max_val): + raise ValueError( + f"{param} = {value} is outside valid range [{min_val}, {max_val}]" + ) + + # Validate complex types (they have their own validate methods) + try: + self.levelrange_low.validate() + except ValueError as e: + raise ValueError(f"levelrange_low invalid: {e}") from e + + try: + self.levelrange_high.validate() + except ValueError as e: + raise ValueError(f"levelrange_high invalid: {e}") from e + + try: + self.wbgain.validate() + except ValueError as e: + raise ValueError(f"wbgain invalid: {e}") from e + + # Validate enum types + if not isinstance(self.curve, CurveType): + raise ValueError(f"curve must be a CurveType enum, got {type(self.curve)}") + + if not isinstance(self.fformat, FileFormat): + raise ValueError(f"fformat must be a FileFormat enum, got {type(self.fformat)}") + + def __post_init__(self) -> None: + """ + Post-initialization hook to ensure enums are converted from strings. + + This allows YAML deserialization to work correctly by converting + string values back to enum instances. 
+ """ + # Convert string values to enums if needed + if isinstance(self.curve, str): + self.curve = CurveType(self.curve) + if isinstance(self.fformat, str): + self.fformat = FileFormat(self.fformat) + + # Convert tuples/lists to NamedTuples if needed + if isinstance(self.levelrange_low, (tuple, list)): + self.levelrange_low = RGBALevel(*self.levelrange_low) + if isinstance(self.levelrange_high, (tuple, list)): + self.levelrange_high = RGBALevel(*self.levelrange_high) + if isinstance(self.wbgain, (tuple, list)): + self.wbgain = RGBGain(*self.wbgain) + + +# ------------------------- +# Specialized manager +# ------------------------- +class CameraSettingsManager(ConfigManager[CameraSettings]): + """ + Specialized configuration manager for a single camera model. + + Each camera model should have its own manager instance. + This ensures settings don't bleed between incompatible models. + + Directory structure: + config/cameras/MU500/ + settings.yaml + default_settings.yaml + backups/ + + Example usage: + >>> # Create manager for MU500 + >>> mu500_mgr = CameraSettingsManager(model="MU500") + >>> settings = mu500_mgr.load() + >>> settings.exposure = 150 + >>> mu500_mgr.save(settings) + >>> + >>> # Create separate manager for MU3000 (different settings!) 
    def __init__(
        self,
        *,
        model: str,
        base_dir: Union[str, Path] = "./config/cameras",
        default_filename: str = "default_settings.yaml",
        backup_dirname: str = "backups",
        backup_keep: int = 5,
    ) -> None:
        """
        Create a settings manager scoped to one camera model.

        Args:
            model: Camera model name; becomes the subdirectory under
                ``base_dir`` so different models never share settings.
            base_dir: Root directory holding one subdirectory per model.
            default_filename: Name of the default-settings YAML file.
            backup_dirname: Subdirectory name for rotating backups.
            backup_keep: Number of backups to retain.
        """
        # Set root_dir to the model-specific directory
        model_dir = Path(base_dir) / model

        super().__init__(
            CameraSettings,
            root_dir=model_dir,
            default_filename=default_filename,
            backup_dirname=backup_dirname,
            backup_keep=backup_keep,
        )

        self.model = model
        # NOTE(review): assumes ConfigManager.__init__ sets self._logger —
        # confirm against generic_config.ConfigManager.
        self._logger.info(f"Initialized CameraSettingsManager for model '{model}' at {model_dir}")
'CRITICAL': True, + } + + # Store all log entries that have been received + self._log_entries = [] + + # Track if we've done initial load + self._initial_load_done = False + # Log display self._log_display = QTextEdit() self._log_display.setReadOnly(True) @@ -44,39 +60,131 @@ def __init__(self) -> None: self._open_folder_btn.clicked.connect(self._open_log_folder) # Auto-scroll checkbox - from PySide6.QtWidgets import QCheckBox self._auto_scroll_check = QCheckBox("Auto-scroll") self._auto_scroll_check.setChecked(True) - # Button layout - button_layout = QHBoxLayout() - button_layout.addWidget(self._clear_btn) - button_layout.addWidget(self._open_folder_btn) - button_layout.addStretch() - button_layout.addWidget(self._auto_scroll_check) + # Log level filter checkboxes + self._level_checkboxes = {} + + # Control layout - all on one line + control_layout = QHBoxLayout() + control_layout.addWidget(self._clear_btn) + control_layout.addWidget(self._open_folder_btn) + control_layout.addSpacing(20) + control_layout.addWidget(QLabel("Show levels:")) + + for level in ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']: + checkbox = QCheckBox(level) + checkbox.setChecked(self._level_filters[level]) + checkbox.stateChanged.connect(lambda state, lvl=level: self._on_filter_changed(lvl, state)) + self._level_checkboxes[level] = checkbox + control_layout.addWidget(checkbox) + + control_layout.addStretch() + control_layout.addWidget(self._auto_scroll_check) # Main layout layout = QVBoxLayout(self) layout.addWidget(self._log_display, 1) - layout.addLayout(button_layout) + layout.addLayout(control_layout) # Register with logger self._logger = get_logger() self._logger.register_callback(self._on_log_message) - # Add initial message - self._log_display.append(f"Logs directory: {self._logger.get_log_directory()}") - self._log_display.append("=" * 80) + # Load existing logs from current log file (one time only) + self._load_existing_logs() + self._initial_load_done = True + + def 
_load_existing_logs(self): + """Load existing logs from the current log file (one time only at startup)""" + try: + current_log_file = self._logger.get_current_log_file() + if not current_log_file or not current_log_file.exists(): + return + + # Read and parse existing log file + # Format: [2025-01-26 14:30:45] INFO: Message + log_pattern = re.compile(r'\[([^\]]+)\]\s+(\w+):\s+(.*)') + + with open(current_log_file, 'r', encoding='utf-8') as f: + for line in f: + line = line.rstrip() + if not line: + continue + + match = log_pattern.match(line) + if match: + timestamp, level, message = match.groups() + + # Store the log entry + self._log_entries.append({ + 'timestamp': timestamp, + 'level': level, + 'message': message + }) + + # Apply level filter and display + if self._level_filters.get(level, True): + color = self._get_level_color(level) + formatted = f'[{timestamp}] [{level}] {self._escape_html(message)}' + self._log_display.append(formatted) + else: + # Line doesn't match pattern, show as-is (might be multiline continuation) + self._log_display.append(self._escape_html(line)) + + # Auto-scroll to bottom + scrollbar = self._log_display.verticalScrollBar() + scrollbar.setValue(scrollbar.maximum()) + + except Exception as e: + self._log_display.append(f"Error loading existing logs: {e}") + + def _on_filter_changed(self, level: str, state: int): + """Handle log level filter checkbox change""" + self._level_filters[level] = bool(state) + # Redisplay logs from memory with new filter + self._redisplay_logs() + + def _redisplay_logs(self): + """Redisplay all logs from memory with current filters""" + self._log_display.clear() + + for entry in self._log_entries: + level = entry['level'] + + # Apply level filter + if not self._level_filters.get(level, True): + continue + + # Format with color + color = self._get_level_color(level) + formatted = f'[{entry["timestamp"]}] [{level}] {self._escape_html(entry["message"])}' + self._log_display.append(formatted) + + # Auto-scroll 
to bottom + scrollbar = self._log_display.verticalScrollBar() + scrollbar.setValue(scrollbar.maximum()) def _on_log_message(self, level: str, message: str): """ Handle incoming log message. This is called from the logger for each message. """ - # Get current timestamp timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S') + # Store the log entry in memory + self._log_entries.append({ + 'timestamp': timestamp, + 'level': level, + 'message': message + }) + + # Apply level filter + if not self._level_filters.get(level, True): + return + # Format with color based on level color = self._get_level_color(level) formatted = f'[{timestamp}] [{level}] {self._escape_html(message)}' @@ -109,11 +217,9 @@ def _escape_html(self, text: str) -> str: .replace("'", ''')) def _clear_display(self): - """Clear the log display (doesn't delete log files)""" + """Clear the log display and memory""" self._log_display.clear() - self._log_display.append(f"Logs directory: {self._logger.get_log_directory()}") - self._log_display.append("=" * 80) - self._log_display.append("Display cleared") + self._log_entries.clear() def _open_log_folder(self): """Open the log folder in file explorer""" diff --git a/logger.py b/logger.py index a6c87c7..87b3669 100644 --- a/logger.py +++ b/logger.py @@ -48,6 +48,9 @@ def _setup_file_handler(self): timestamp = datetime.now().strftime('%Y%m%d_%H%M%S') log_file = self._log_dir / f"Forge_{timestamp}.log" + # Store current log file path + self._current_log_file = log_file + # Rotating file handler - 10MB max, keep 5 backups file_handler = RotatingFileHandler( log_file, @@ -66,6 +69,51 @@ def _setup_file_handler(self): self._logger.addHandler(file_handler) self._file_handler = file_handler + + # Clean up old log files before creating new one + self._cleanup_old_logs() + + def _cleanup_old_logs(self): + """ + Keep only the last 10 log file sets. + Each set includes the main log and its rotated backups (.1, .2, etc). 
+ """ + try: + # Get all base log files (without .1, .2, etc extensions) + log_files = sorted( + [f for f in self._log_dir.glob("Forge_*.log") + if not f.stem.split('.')[-1].isdigit()], + key=lambda x: x.stat().st_mtime, + reverse=True + ) + + # Keep only the 10 most recent sets + max_log_sets = 10 + if len(log_files) >= max_log_sets: + deleted_files = [] + + # Delete older log sets (beyond the 10 most recent) + for old_log in log_files[max_log_sets:]: + # Delete the main log file + if old_log.exists(): + old_log.unlink(missing_ok=True) + deleted_files.append(old_log.name) + + # Delete all rotated versions (.1, .2, .3, etc) + for rotated in self._log_dir.glob(f"{old_log.name}.*"): + if rotated.suffix[1:].isdigit(): # Check if extension is a number + rotated.unlink(missing_ok=True) + deleted_files.append(rotated.name) + + # Log the cleanup action + if deleted_files: + files_list = ", ".join(deleted_files) + self._logger.info(f"Log cleanup: Deleted old log files: {files_list}") + + except Exception as e: + # Don't let cleanup errors break logging + print(f"Error cleaning up old logs: {e}") + self._logger.error(f"Error cleaning up old logs: {e}") def _setup_console_handler(self): """Setup console handler for stdout""" @@ -101,6 +149,10 @@ def get_log_directory(self) -> Path: """Get current log directory""" return self._log_dir + def get_current_log_file(self) -> Optional[Path]: + """Get current log file path""" + return getattr(self, '_current_log_file', None) + def register_callback(self, callback: Callable[[str, str], None]): """ Register a callback for log messages. 
From 69b662eb4e3a03ce7b90a189644c4c90ae82f37c Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Sat, 31 Jan 2026 02:10:47 -0900 Subject: [PATCH 18/46] Camera is now multithreaded --- .../ISSUE_TEMPLATE/camera-mount-request.md | 44 -- .github/ISSUE_TEMPLATE/mount-request.md | 67 --- UI/widgets/camera_controls_widget.py | 99 +++-- UI/widgets/camera_preview.py | 227 ++++++---- app_context.py | 66 +-- camera/threaded_camera.py | 394 ++++++++++++++++++ logger.py | 8 +- 7 files changed, 644 insertions(+), 261 deletions(-) delete mode 100644 .github/ISSUE_TEMPLATE/camera-mount-request.md delete mode 100644 .github/ISSUE_TEMPLATE/mount-request.md create mode 100644 camera/threaded_camera.py diff --git a/.github/ISSUE_TEMPLATE/camera-mount-request.md b/.github/ISSUE_TEMPLATE/camera-mount-request.md deleted file mode 100644 index 542166d..0000000 --- a/.github/ISSUE_TEMPLATE/camera-mount-request.md +++ /dev/null @@ -1,44 +0,0 @@ ---- -name: Camera Mount Request -about: Request a new camera mount design for a specific 3D printer. -title: "[Mount Request] Camera Model : Printer Model" -labels: enhancement, hardware -assignees: '' - ---- - -## Printer Information -Note : Non bedslinger printers are not supported. - -**Printer Make & Model:** -(e.g., Creality Ender 3 v1) - -**Build Volume (in mm):** -(e.g., 220x220x250) - -**Firmware Type:** -(e.g., Marlin, Klipper, unknown) - -### Where did you buy the printer? - -Provide a link to the store or product page if possible. This helps confirm the exact hardware revision. - -## Camera Information -Camera Make and Model - -**Camera Mount Type:** -(e.g., 1/4-inch-20 UNC screw thread) - -- [ ] I added the camera and 3D printer name to the title -- [ ] This camera is already supported - -## Reference Images, Technical Drawings or CAD Files - -Clear photos with the print head removed taken from straight on. Include ruler or measurements if possible. Links to STLs or mechanical drawings are greatly preferred. 
This will increase the chances of getting a model made. - -*(drag-and-drop your photos below.)* - - -## Additional Notes - -Include any additional details about mounting constraints, screw hole locations, or modifications you've already made. diff --git a/.github/ISSUE_TEMPLATE/mount-request.md b/.github/ISSUE_TEMPLATE/mount-request.md deleted file mode 100644 index 2bb3c87..0000000 --- a/.github/ISSUE_TEMPLATE/mount-request.md +++ /dev/null @@ -1,67 +0,0 @@ -name: 🛠️ Camera Mount Request -description: Request a new camera mount design for a specific 3D printer. -title: "[Mount Request] Printer Model: " -labels: [enhancement, hardware] -assignees: '' - -body: - - type: input - id: printer-model - attributes: - label: Printer Make and Model - description: Specify the full model name of the printer (e.g., Creality Ender 3 v1). - placeholder: e.g., "Creality Ender 3 v1" - validations: - required: true - - - type: dropdown - id: firmware - attributes: - label: Firmware Type - description: Which firmware is the printer running? - options: - - Marlin - - Klipper - - Other / Unknown - validations: - required: true - - - type: input - id: build-volume - attributes: - label: Build Volume - placeholder: e.g., "250 × 210 × 210 mm" - validations: - required: false - - - type: textarea - id: dimensions - attributes: - label: Bed and Carriage Dimensions - description: Include bed size, carriage width, and any relevant measurements. - validations: - required: false - - - type: textarea - id: photos - attributes: - label: Photos / Drawings of the Print Head Carriage - description: Upload clear photos with the print head removed. Include ruler or measurements if possible. - validations: - required: false - - - type: input - id: technical-resources - attributes: - label: Technical Drawings or CAD Files (optional) - placeholder: Paste any links to STLs, DXFs, or mechanical drawings. This will increase the chances of getting a model made. 
- validations: - required: false - - - type: textarea - id: other-notes - attributes: - label: Additional Notes - description: Any other information we should know? - validations: - required: false \ No newline at end of file diff --git a/UI/widgets/camera_controls_widget.py b/UI/widgets/camera_controls_widget.py index 1c0d568..131eed7 100644 --- a/UI/widgets/camera_controls_widget.py +++ b/UI/widgets/camera_controls_widget.py @@ -6,16 +6,19 @@ QWidget, QVBoxLayout, QHBoxLayout, QGroupBox, QPushButton, QLineEdit, QLabel, QFileDialog, QMessageBox, QComboBox ) -from PySide6.QtCore import Qt +from PySide6.QtCore import Slot, Signal from logger import info, error, warning from app_context import get_app_context class CameraControlsWidget(QWidget): """ - Widget for camera controls including photo capture and file management. + Camera-agnostic widget for camera controls including photo capture and file management. """ + # Signal emitted when photo capture completes + photo_captured = Signal(bool, str) # success, filepath + def __init__(self, parent: QWidget | None = None): super().__init__(parent) @@ -36,6 +39,9 @@ def __init__(self, parent: QWidget | None = None): # Setup UI self._setup_ui() + # Connect signal to handler + self.photo_captured.connect(self._on_photo_captured) + def _setup_ui(self): """Setup the user interface""" layout = QVBoxLayout(self) @@ -168,12 +174,10 @@ def _open_folder(self): subprocess.run(['xdg-open', folder_path]) info(f"Opened folder: {folder_path}") - if toast: - toast.info("Opening in file explorer...", title="Opening Folder", duration=10000) + toast.info("Opening in file explorer...", title="Opening Folder", duration=10000) except Exception as e: error(f"Failed to open folder: {e}") - if toast: - toast.error(f"{str(e)}", title="Failed to Open Folder") + toast.error(f"{str(e)}", title="Failed to Open Folder") QMessageBox.warning( self, "Error", @@ -207,25 +211,26 @@ def _get_filepath(self) -> Path: return self._current_folder / filename + 
@Slot() def _take_photo(self): - """Capture a still photo from the camera""" + """ + Capture a still photo from the camera. + Works with any camera implementation that supports capture_and_save_still. + """ ctx = get_app_context() toast = ctx.toast try: - # Get camera from app context camera = ctx.camera if camera is None: warning("Attempted to capture photo but camera is not available") - if toast: - toast.warning("Camera not available", title="Camera Error") + toast.warning("Camera not available", title="Camera Error") return - if not camera.is_open: + if not camera.underlying_camera.is_open: warning("Attempted to capture photo but camera is not open") - if toast: - toast.warning("Please open the camera first", title="Camera Not Open") + toast.warning("Please open the camera first", title="Camera Not Open") return # Get filepath @@ -234,32 +239,58 @@ def _take_photo(self): # Ensure folder exists self._ensure_output_folder() - info(f"Capturing photo to: {filepath}") - if toast: - toast.info("Please wait while the image is captured...", title="Capturing Image") + info(f"Capturing still image to: {filepath}") + toast.info("Capturing high-resolution image...", title="Capturing Image") + + # Disable button while capturing + self._capture_button.setEnabled(False) - # Capture still image (highest resolution) - success = camera.capture_and_save_still( + # Define completion callback - runs on camera thread! + def on_capture_complete(success: bool, result): + """Called when capture completes (on camera thread)""" + # Emit signal to handle UI updates on main thread + self.photo_captured.emit(success, str(filepath)) + + # Capture and save still image asynchronously at highest resolution + # This returns immediately - UI stays responsive! 
+ camera.capture_and_save_still( filepath=filepath, resolution_index=0, # Highest resolution - additional_metadata={"timestamp": datetime.now().isoformat()}, - timeout_ms=5000 + additional_metadata={ + "timestamp": datetime.now().isoformat(), + "source": "still_capture" + }, + timeout_ms=5000, + on_complete=on_capture_complete ) - - if success: - info(f"Photo saved successfully: {filepath}") - if toast: - toast.success(f"Saved to: {filepath.name}", title="Image Captured", duration=10000) - # Clear custom filename after successful capture - self._filename_edit.clear() - else: - error(f"Failed to capture photo to: {filepath}") - if toast: - toast.error("Unable to capture image from camera", title="Capture Failed") except Exception as e: error(f"Error capturing photo: {e}") - if toast: - toast.error(f"{str(e)}", title="Capture Error") + toast.error(f"{str(e)}", title="Capture Error") import traceback - error(traceback.format_exc()) \ No newline at end of file + error(traceback.format_exc()) + # Re-enable button on error + self._capture_button.setEnabled(True) + + @Slot(bool, str) + def _on_photo_captured(self, success: bool, filepath: str): + """ + Handle photo capture completion on UI thread. + This slot is called via signal from the camera thread. 
+ """ + ctx = get_app_context() + toast = ctx.toast + + # Re-enable button + self._capture_button.setEnabled(True) + + if success: + info(f"Photo captured and saved successfully: {filepath}") + toast.success(f"Saved to: {Path(filepath).name}", + title="Image Captured", + duration=10000) + # Clear custom filename after successful capture + self._filename_edit.clear() + else: + error(f"Failed to capture photo to: {filepath}") + toast.error("Unable to capture image from camera", title="Capture Failed") diff --git a/UI/widgets/camera_preview.py b/UI/widgets/camera_preview.py index f5e47c2..eda1a76 100644 --- a/UI/widgets/camera_preview.py +++ b/UI/widgets/camera_preview.py @@ -1,41 +1,44 @@ from __future__ import annotations from typing import Optional, Any -from PySide6.QtCore import Qt, Signal, QTimer +import numpy as np +from PySide6.QtCore import Qt, Signal, QTimer, Slot from PySide6.QtGui import QImage, QPixmap -from PySide6.QtWidgets import QFrame, QLabel, QVBoxLayout, QWidget +from PySide6.QtWidgets import QFrame, QLabel, QVBoxLayout, QWidget, QSizePolicy from app_context import get_app_context -from camera.amscope_camera import AmscopeCamera -from camera.base_camera import BaseCamera, CameraInfo -from logger import get_logger - +from camera.base_camera import BaseCamera +from logger import info, error, warning class CameraPreview(QFrame): - """Camera Preview Area with live streaming""" + """ + Camera-agnostic Preview Area with live streaming. 
+ """ # Signal for camera events (thread-safe) camera_event = Signal(int) + + # Signal when new frame is available for capture + frame_ready = Signal(np.ndarray) def __init__(self, parent: QWidget | None = None) -> None: super().__init__(parent) self.setFrameShape(QFrame.Shape.NoFrame) # Camera state - self._camera: Optional[BaseCamera] = None - self._camera_info: Optional[CameraInfo] = None + self._camera: BaseCamera | None = None + self._camera_info = None self._img_width = 0 self._img_height = 0 self._img_buffer: Optional[bytes] = None self._is_streaming = False - self._no_camera_logged = False # Track if we've already logged no camera message + self._no_camera_logged = False # UI elements self._video_label = QLabel() self._video_label.setAlignment(Qt.AlignmentFlag.AlignCenter) self._video_label.setScaledContents(False) - self._video_label.setMinimumSize(1, 1) # Allow shrinking - from PySide6.QtWidgets import QSizePolicy + self._video_label.setMinimumSize(1, 1) self._video_label.setSizePolicy(QSizePolicy.Policy.Ignored, QSizePolicy.Policy.Ignored) self._video_label.setStyleSheet("color: #888; font-size: 16px;") self._video_label.setText("Initializing camera...") @@ -57,29 +60,31 @@ def __init__(self, parent: QWidget | None = None) -> None: # Start initialization self._init_timer.start(500) + @Slot() def _try_initialize_camera(self): """Try to initialize and connect to camera""" self._init_timer.stop() - logger = get_logger() - # Get camera from app context ctx = get_app_context() - self._camera = ctx.camera + self._camera: BaseCamera | None = ctx.camera if self._camera is None: self._video_label.setText("No camera available - SDK not loaded") - logger.error("No camera available - SDK not loaded") + error("No camera available - SDK not loaded") return + # Get the underlying camera to access class methods + base_camera_class = type(self._camera.underlying_camera) + # Try to enumerate and connect to first camera try: - cameras = AmscopeCamera.enumerate_cameras() + 
cameras = base_camera_class.enumerate_cameras() if len(cameras) == 0: self._video_label.setText("No camera detected") if not self._no_camera_logged: - logger.warning("No camera connected") + warning("No camera connected") self._no_camera_logged = True # Retry in a few seconds self._init_timer.start(3000) @@ -90,62 +95,84 @@ def _try_initialize_camera(self): # Use first camera self._camera_info = cameras[0] + info(f"Found camera: {self._camera_info.displayname}") self._open_camera() except Exception as e: self._video_label.setText(f"Camera error: {str(e)}") - logger.error(f"Camera initialization error: {e}") + error(f"Camera initialization error: {e}") + import traceback + error(traceback.format_exc()) def _open_camera(self): """Open and start streaming from camera""" if not self._camera or not self._camera_info: return - # Don't re-open if already streaming - if self._is_streaming and self._camera.is_open: + try: + # Set camera info on underlying camera + if hasattr(self._camera.underlying_camera, 'set_camera_info'): + self._camera.underlying_camera.set_camera_info(self._camera_info) + + # Open camera (async with callback) + def on_open_complete(success: bool, result): + if not success: + self._video_label.setText("Failed to open camera") + error("Failed to open camera") + return + + # Camera opened successfully + self._start_streaming() + + # Use async open + self._camera.open(self._camera_info.id, on_complete=on_open_complete) + + except Exception as e: + self._video_label.setText(f"Error: {str(e)}") + error(f"Camera open error: {e}") + import traceback + error(traceback.format_exc()) + + def _start_streaming(self): + """Start camera streaming after camera is opened""" + if not self._camera: return - logger = get_logger() - try: - # Set camera info for Amscope camera - if isinstance(self._camera, AmscopeCamera): - self._camera.set_camera_info(self._camera_info) - - # Open camera - if not self._camera.open(self._camera_info.id): - self._video_label.setText("Failed 
to open camera") - logger.error("Failed to open camera") - return + # Get current resolution from underlying camera + res_index, width, height = self._camera.underlying_camera.get_current_resolution() - # Get current resolution - res_index, width, height = self._camera.get_current_resolution() self._img_width = width self._img_height = height - # Allocate image buffer - if isinstance(self._camera, AmscopeCamera): - buffer_size = AmscopeCamera.calculate_buffer_size(width, height, 24) - self._img_buffer = bytes(buffer_size) + # Calculate buffer size using base camera class method + base_camera_class = type(self._camera.underlying_camera) + buffer_size = base_camera_class.calculate_buffer_size(width, height, 24) + self._img_buffer = bytes(buffer_size) - # Enable auto exposure by default - self._camera.set_auto_exposure(True) + info("Starting camera stream...") + # Start capture - use underlying camera directly + success = self._camera.underlying_camera.start_capture( + self._camera_callback, + self + ) - # Start capture - if not self._camera.start_capture(self._camera_callback, self): - self._camera.close() + if not success: + error("start_capture returned False") + self._camera.underlying_camera.close() self._video_label.setText("Failed to start camera stream") - logger.error("Failed to start camera stream") return self._is_streaming = True - # Clear text when streaming starts - video will show instead + # Clear text when streaming starts self._video_label.setText("") - logger.info(f"Streaming: {self._camera_info.displayname} ({width}x{height})") + info(f"Streaming started: {self._camera_info.displayname} ({width}x{height})") except Exception as e: self._video_label.setText(f"Error: {str(e)}") - logger.error(f"Camera open error: {e}") + error(f"Camera start streaming error: {e}") + import traceback + error(traceback.format_exc()) @staticmethod def _camera_callback(event: int, context: Any): @@ -156,21 +183,28 @@ def _camera_callback(event: int, context: Any): if 
isinstance(context, CameraPreview): context.camera_event.emit(event) + @Slot(int) def _on_camera_event(self, event: int): """Handle camera events in UI thread""" - if not self._camera or not self._camera.is_open: + if not self._camera: return - # Get event constants - if isinstance(self._camera, AmscopeCamera): - events = AmscopeCamera.get_event_constants() - - if event == events.IMAGE: - self._handle_image_event() - elif event == events.ERROR: - self._handle_error() - elif event == events.DISCONNECTED: - self._handle_disconnected() + # Get underlying camera + base_camera = self._camera.underlying_camera + + # Check if camera is open + if not base_camera.is_open: + return + + # Get event constants from camera + events = base_camera.get_event_constants() + + if event == events.IMAGE: + self._handle_image_event() + elif event == events.ERROR: + self._handle_error() + elif event == events.DISCONNECTED: + self._handle_disconnected() def _handle_image_event(self): """Handle new image from camera""" @@ -178,38 +212,40 @@ def _handle_image_event(self): return try: - # Pull image into buffer - if self._camera.pull_image(self._img_buffer, 24): + # Pull image into buffer from underlying camera + if self._camera.underlying_camera.pull_image(self._img_buffer, 24): + # Calculate stride using base camera class method + base_camera_class = type(self._camera.underlying_camera) + stride = base_camera_class.calculate_stride(self._img_width, 24) + # Create QImage from buffer - if isinstance(self._camera, AmscopeCamera): - stride = AmscopeCamera.calculate_stride(self._img_width, 24) - image = QImage( - self._img_buffer, - self._img_width, - self._img_height, - stride, - QImage.Format.Format_RGB888 + image = QImage( + self._img_buffer, + self._img_width, + self._img_height, + stride, + QImage.Format.Format_RGB888 + ) + + # Make a deep copy for display + image = image.copy() + + # Scale to fit label while maintaining aspect ratio + if self._video_label.width() > 0 and 
self._video_label.height() > 0: + scaled_image = image.scaled( + self._video_label.width(), + self._video_label.height(), + Qt.AspectRatioMode.KeepAspectRatio, + Qt.TransformationMode.FastTransformation ) - - # Make a deep copy to avoid keeping reference to buffer - image = image.copy() - - # Scale to fit label while maintaining aspect ratio - if self._video_label.width() > 0 and self._video_label.height() > 0: - scaled_image = image.scaled( - self._video_label.width(), - self._video_label.height(), - Qt.AspectRatioMode.KeepAspectRatio, - Qt.TransformationMode.FastTransformation # Use fast transformation to reduce memory - ) - self._video_label.setPixmap(QPixmap.fromImage(scaled_image)) + self._video_label.setPixmap(QPixmap.fromImage(scaled_image)) except Exception as e: - get_logger().error(f"Error handling image: {e}") + error(f"Error handling image: {e}") def _handle_error(self): """Handle camera error""" self._video_label.setText("Camera error occurred") - get_logger().error("Camera error occurred") + error("Camera error occurred") self._close_camera() # Try to reconnect self._init_timer.start(3000) @@ -217,7 +253,7 @@ def _handle_error(self): def _handle_disconnected(self): """Handle camera disconnection""" self._video_label.setText("Camera disconnected") - get_logger().warning("Camera disconnected") + warning("Camera disconnected") self._close_camera() # Try to reconnect self._init_timer.start(3000) @@ -225,10 +261,22 @@ def _handle_disconnected(self): def _close_camera(self): """Close camera and cleanup""" self._is_streaming = False + if self._camera: - self._camera.close() + try: + # Stop capture first (use underlying camera for immediate effect) + info("Stopping camera capture...") + if self._camera.underlying_camera.is_open: + self._camera.underlying_camera.stop_capture() + + # Close camera (async is fine, we're shutting down) + info("Closing camera...") + self._camera.close() + + except Exception as e: + error(f"Error closing camera: {e}") + 
self._img_buffer = None - # Don't clear the label here - let error messages show def closeEvent(self, event): """Handle widget close event""" @@ -237,5 +285,12 @@ def closeEvent(self, event): def cleanup(self): """Cleanup resources when widget is being destroyed""" + info("Preview cleanup starting...") + + # Stop the initialization timer first self._init_timer.stop() + + # Close camera self._close_camera() + + info("Preview cleanup complete") \ No newline at end of file diff --git a/app_context.py b/app_context.py index 50ecc6c..cc9109a 100644 --- a/app_context.py +++ b/app_context.py @@ -3,10 +3,10 @@ Provides a singleton pattern for accessing camera and other shared resources. """ -from typing import Optional, TYPE_CHECKING -from camera.base_camera import BaseCamera +from typing import TYPE_CHECKING from camera.amscope_camera import AmscopeCamera -from logger import get_logger +from camera.threaded_camera import ThreadedCamera +from logger import info, error, warning from forgeConfig import ForgeSettingsManager, ForgeSettings if TYPE_CHECKING: @@ -18,7 +18,7 @@ class AppContext: """ Singleton application context managing shared resources. 
""" - _instance: Optional['AppContext'] = None + _instance: 'AppContext' | None = None def __new__(cls): if cls._instance is None: @@ -30,12 +30,12 @@ def __init__(self): if self._initialized: return - self._camera: Optional[BaseCamera] = None + self._camera: AmscopeCamera | None = None self._camera_initialized = False - self._settings_dialog: Optional['SettingsDialog'] = None - self._settings_manager: Optional[ForgeSettingsManager] = None - self._settings: Optional[ForgeSettings] = None - self._toast_manager: Optional['ToastManager'] = None + self._settings_dialog: 'SettingsDialog' | None = None + self._settings_manager: ForgeSettingsManager | None = None + self._settings: ForgeSettings | None = None + self._toast_manager: 'ToastManager' | None = None self._main_window = None self._initialized = True @@ -43,24 +43,24 @@ def __init__(self): self._load_settings() @property - def camera(self) -> Optional[BaseCamera]: + def camera(self) -> AmscopeCamera | None: """Get the camera instance, initializing if needed""" if not self._camera_initialized: self._initialize_camera() return self._camera @property - def settings(self) -> Optional[ForgeSettings]: + def settings(self) -> ForgeSettings | None: """Get the Forge settings""" return self._settings @property - def settings_dialog(self) -> Optional['SettingsDialog']: + def settings_dialog(self) -> 'SettingsDialog' | None: """Get the settings dialog instance""" return self._settings_dialog @property - def toast(self) -> Optional['ToastManager']: + def toast(self) -> 'ToastManager' | None: """Get the toast manager instance""" return self._toast_manager @@ -91,44 +91,58 @@ def open_settings(self, category: str): def _load_settings(self): """Load Forge application settings""" - logger = get_logger() try: self._settings_manager = ForgeSettingsManager() self._settings = self._settings_manager.load() - logger.info(f"Forge settings loaded - version: {self._settings.version}") + info(f"Forge settings loaded - version: 
{self._settings.version}") except Exception as e: - logger.error(f"Failed to load Forge settings: {e}") + error(f"Failed to load Forge settings: {e}") # Create default settings if loading fails self._settings = ForgeSettings() - logger.warning("Using default Forge settings") + warning("Using default Forge settings") def _initialize_camera(self): """Initialize the camera subsystem""" if self._camera_initialized: return - logger = get_logger() try: # Load SDK AmscopeCamera.ensure_sdk_loaded() - # Enable GigE support - AmscopeCamera.enable_gige(None, None) + # Create camera instance wrapped in threaded wrapper + base_camera = AmscopeCamera() + self._camera: AmscopeCamera = ThreadedCamera(base_camera) + + # Start the camera thread + self._camera.start_thread() - # Create camera instance - self._camera = AmscopeCamera() self._camera_initialized = True - logger.info("Camera subsystem initialized") except Exception as e: - logger.error(f"Failed to initialize camera subsystem: {e}") + error(f"Failed to initialize camera subsystem: {e}") self._camera = None self._camera_initialized = True def cleanup(self): """Cleanup resources""" - if self._camera and self._camera.is_open: - self._camera.close() + if self._camera: + info("Closing camera") + result = self._camera.close(wait=True) + + if result is not None: + success, _ = result + if success: + info("Camera closed successfully") + else: + warning("Camera close returned failure") + else: + # None means close was called directly (not through thread) + # This is fine if camera was already closed + info("Camera close completed") + + self._camera.stop_thread(wait=True) + self._camera = None self._camera_initialized = False self._settings_dialog = None diff --git a/camera/threaded_camera.py b/camera/threaded_camera.py new file mode 100644 index 0000000..6b1da56 --- /dev/null +++ b/camera/threaded_camera.py @@ -0,0 +1,394 @@ +""" +Threaded camera wrapper using dynamic attribute access. 
+Provides full IDE type hinting by transparently proxying to the underlying camera. +""" + +from typing import Optional, Callable, Any, TypeVar, Generic +from queue import Queue, Empty +from threading import Thread, Event, Lock +from functools import wraps + +from PySide6.QtCore import QObject, Signal + +from camera.base_camera import BaseCamera +from logger import info, error, warning, debug, exception + +T = TypeVar('T', bound=BaseCamera) + + +class AsyncResult: + """ + Represents the result of an async operation. + Can be used with callbacks or awaited in the future. + """ + def __init__(self): + self._event = Event() + self._success = False + self._result = None + + def set_result(self, success: bool, result: Any): + self._success = success + self._result = result + self._event.set() + + def wait(self, timeout: Optional[float] = None) -> tuple[bool, Any]: + """Wait for result (blocking)""" + self._event.wait(timeout) + return self._success, self._result + + +class CameraCommand: + """Command to execute on camera thread""" + def __init__( + self, + method_name: str, + args: tuple, + kwargs: dict, + completion_callback: Optional[Callable] = None + ): + self.method_name = method_name + self.args = args + self.kwargs = kwargs + self.completion_callback = completion_callback + self.result = AsyncResult() + + +class ShutdownCommand: + """Signal to shutdown the thread""" + pass + + +class CameraThread(QObject): + """ + Qt-aware camera thread that runs camera operations in background. 
+ + Signals: + operation_completed: Emitted when any operation completes (method_name, success, result) + error_occurred: Emitted when an error occurs (error_msg) + """ + + operation_completed = Signal(str, bool, object) # method_name, success, result + error_occurred = Signal(str) # error_msg + + def __init__(self, camera: BaseCamera): + super().__init__() + self._camera = camera + self._command_queue = Queue() + self._thread = None + self._running = Event() + self._lock = Lock() + + def start(self): + """Start the camera thread""" + if self._thread is not None and self._thread.is_alive(): + warning("Camera thread already running") + return + + self._running.set() + self._thread = Thread(target=self._run, daemon=True, name="CameraThread") + self._thread.start() + info("Camera thread started") + + def stop(self, wait: bool = True): + """Stop the camera thread""" + if self._thread is None or not self._thread.is_alive(): + return + + info("Stopping camera thread") + self._running.clear() + + # Clear any pending commands + pending_count = 0 + try: + while True: + command = self._command_queue.get_nowait() + if not isinstance(command, ShutdownCommand): + # Signal that this command won't be executed + command.result.set_result(False, None) + if command.completion_callback: + try: + command.completion_callback(False, None) + except: + pass + pending_count += 1 + except Empty: + pass + + if pending_count > 0: + info(f"Cancelled {pending_count} pending commands") + + # Send shutdown command + self._command_queue.put(ShutdownCommand()) + + if wait and self._thread is not None: + # Wait longer for thread to finish processing + self._thread.join(timeout=3.0) # Reduced from 10s + if self._thread.is_alive(): + warning("Camera thread did not stop within 3 seconds") + else: + info("Camera thread stopped successfully") + + def execute(self, command: CameraCommand) -> AsyncResult: + """ + Execute a command and return AsyncResult + + Args: + command: The command to execute + + 
Returns: + AsyncResult that can be waited on or ignored + """ + self._command_queue.put(command) + return command.result + + def _run(self): + """Main thread loop""" + debug("Camera thread running") + + while self._running.is_set(): + try: + # Get command with timeout + try: + command = self._command_queue.get(timeout=0.1) + except Empty: + continue + + # Handle shutdown + if isinstance(command, ShutdownCommand): + debug("Received shutdown command") + break + + # Check if we should still process (thread might be stopping) + if not self._running.is_set(): + debug(f"Thread stopping, skipping command: {command.method_name}") + command.result.set_result(False, None) + if command.completion_callback: + try: + command.completion_callback(False, None) + except Exception as e: + exception(f"Error in completion callback: {e}") + continue + + # Execute command + try: + success, result = self._execute_command(command) + + # Set result + command.result.set_result(success, result) + + # Emit signal + self.operation_completed.emit(command.method_name, success, result) + + # Call completion callback if provided + if command.completion_callback is not None: + try: + command.completion_callback(success, result) + except Exception as e: + exception(f"Error in completion callback: {e}") + + except Exception as e: + exception(f"Error executing {command.method_name}") + error_msg = str(e) + + command.result.set_result(False, None) + self.operation_completed.emit(command.method_name, False, None) + self.error_occurred.emit(error_msg) + + if command.completion_callback is not None: + try: + command.completion_callback(False, None) + except Exception as cb_error: + exception(f"Error in completion callback: {cb_error}") + + except Exception as e: + exception("Unexpected error in camera thread") + self.error_occurred.emit(str(e)) + + debug("Camera thread exiting") + + def _execute_command(self, command: CameraCommand) -> tuple[bool, Any]: + """Execute a single command""" + with self._lock: 
+ method = getattr(self._camera, command.method_name, None) + if method is None: + error(f"Method not found: {command.method_name}") + return False, None + + try: + result = method(*command.args, **command.kwargs) + + # If method returns bool, use that as success indicator + # Otherwise assume success + if isinstance(result, bool): + return result, None + else: + return True, result + + except Exception as e: + exception(f"Error calling {command.method_name}") + raise + + +class ThreadedCamera(Generic[T]): + """ + Wrapper around BaseCamera that executes all operations in a background thread. + + This class uses Python's __getattr__ magic method to transparently proxy + all method calls to the underlying camera, providing full IDE type hinting. + + Usage: + # Create with type hint for full IDE support + base_camera = AmscopeCamera() + camera: AmscopeCamera = ThreadedCamera(base_camera) + camera.start_thread() + + # Now you get full type hinting and autocomplete! + camera.set_white_balance(5000, 1000) # IDE knows this method exists + camera.auto_white_balance() # IDE autocompletes this + + # All methods are async by default + camera.snap_image(0) # Returns immediately + + # Use callbacks for chaining + camera.snap_image(0, on_complete=lambda s, r: print("Done!")) + + # Or wait for result + success, result = camera.snap_image(0, wait=True) + """ + + def __init__(self, camera: T): + # Use object.__setattr__ to avoid triggering __setattr__ + object.__setattr__(self, '_camera', camera) + object.__setattr__(self, '_thread', CameraThread(camera)) + object.__setattr__(self, '_started', False) + + def start_thread(self): + """Start the background thread""" + self._thread.start() + object.__setattr__(self, '_started', True) + + def stop_thread(self, wait: bool = True): + """Stop the background thread""" + self._thread.stop(wait) + object.__setattr__(self, '_started', False) + + @property + def operation_completed(self): + """Access to operation_completed signal""" + return 
self._thread.operation_completed + + @property + def error_occurred(self): + """Access to error_occurred signal""" + return self._thread.error_occurred + + @property + def underlying_camera(self) -> T: + """Get the underlying camera instance""" + return self._camera + + def __getattr__(self, name: str): + """ + Magic method that intercepts all attribute access. + + This provides transparent proxying to the underlying camera while + running everything in the background thread. + """ + # Get the attribute from underlying camera + attr = getattr(self._camera, name) + + # If it's not callable, just return it + if not callable(attr): + return attr + + # If it's a method, wrap it + @wraps(attr) + def threaded_method( + *args, + wait: bool = False, + on_complete: Optional[Callable[[bool, Any], None]] = None, + **kwargs + ): + """ + Threaded wrapper for camera methods. + + Args: + *args: Positional arguments for the method + wait: If True, wait for operation to complete (blocking) + on_complete: Optional callback(success, result) when done + **kwargs: Keyword arguments for the method + + Returns: + If wait=True: (success, result) + If wait=False: None + """ + if not self._started: + debug(f"Camera thread not running, calling {name} on main thread") + # Call underlying method directly + result = attr(*args, **kwargs) + + # If wait=True, we need to return a tuple + # But underlying method might return bool, None, or tuple + if wait: + if isinstance(result, tuple): + return result + elif isinstance(result, bool): + return (result, None) + else: + # None or other - treat as success + return (True, result) + return result + + # Create command + command = CameraCommand(name, args, kwargs, on_complete) + + # Execute + result = self._thread.execute(command) + + # Wait if requested + if wait: + return result.wait() + + return None + + return threaded_method + + def __setattr__(self, name: str, value: Any): + """ + Intercept attribute setting to forward to underlying camera. 
+ """ + # Our own attributes (those set in __init__) + if name in ('_camera', '_thread', '_started'): + object.__setattr__(self, name, value) + else: + # Forward to underlying camera + setattr(self._camera, name, value) + + def __dir__(self): + """ + Return the combined attributes of this class and the underlying camera. + This helps IDE autocomplete work properly. + """ + return list(set( + dir(type(self)) + + list(self.__dict__.keys()) + + dir(self._camera) + )) + + +def create_threaded_camera(camera: T) -> T: + """ + Factory function to create a threaded camera with proper type hints. + + Args: + camera: The base camera instance + + Returns: + ThreadedCamera that appears as the same type as input + + Example: + base = AmscopeCamera() + camera = create_threaded_camera(base) # Type is AmscopeCamera + camera.set_white_balance(5000, 1000) # Full type hints! + """ + return ThreadedCamera(camera) # type: ignore \ No newline at end of file diff --git a/logger.py b/logger.py index 87b3669..980209d 100644 --- a/logger.py +++ b/logger.py @@ -7,7 +7,7 @@ import sys from pathlib import Path from datetime import datetime -from typing import Optional, Callable +from typing import Callable from logging.handlers import RotatingFileHandler @@ -15,7 +15,7 @@ class AppLogger: """ Singleton application logger with file and UI output. 
""" - _instance: Optional['AppLogger'] = None + _instance: 'AppLogger' | None = None _initialized = False def __new__(cls): @@ -149,7 +149,7 @@ def get_log_directory(self) -> Path: """Get current log directory""" return self._log_dir - def get_current_log_file(self) -> Optional[Path]: + def get_current_log_file(self) -> Path | None: """Get current log file path""" return getattr(self, '_current_log_file', None) @@ -216,7 +216,7 @@ def exception(self, message: str): # Global logger instance -_app_logger: Optional[AppLogger] = None +_app_logger: AppLogger | None = None def get_logger() -> AppLogger: From 48da1b854e1d026a2389571ab3bde3bb15074ca6 Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Sat, 31 Jan 2026 02:45:46 -0900 Subject: [PATCH 19/46] uses annotations to prevent errors --- app_context.py | 15 ++++++++------- logger.py | 4 +++- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/app_context.py b/app_context.py index cc9109a..aeaddb1 100644 --- a/app_context.py +++ b/app_context.py @@ -3,6 +3,8 @@ Provides a singleton pattern for accessing camera and other shared resources. """ +from __future__ import annotations + from typing import TYPE_CHECKING from camera.amscope_camera import AmscopeCamera from camera.threaded_camera import ThreadedCamera @@ -13,12 +15,11 @@ from UI.settings.settings_main import SettingsDialog from UI.widgets.toast_widget import ToastManager - class AppContext: """ Singleton application context managing shared resources. 
""" - _instance: 'AppContext' | None = None + _instance: AppContext | None = None def __new__(cls): if cls._instance is None: @@ -32,10 +33,10 @@ def __init__(self): self._camera: AmscopeCamera | None = None self._camera_initialized = False - self._settings_dialog: 'SettingsDialog' | None = None + self._settings_dialog: SettingsDialog | None = None self._settings_manager: ForgeSettingsManager | None = None self._settings: ForgeSettings | None = None - self._toast_manager: 'ToastManager' | None = None + self._toast_manager: ToastManager | None = None self._main_window = None self._initialized = True @@ -55,12 +56,12 @@ def settings(self) -> ForgeSettings | None: return self._settings @property - def settings_dialog(self) -> 'SettingsDialog' | None: + def settings_dialog(self) -> SettingsDialog | None: """Get the settings dialog instance""" return self._settings_dialog @property - def toast(self) -> 'ToastManager' | None: + def toast(self) -> ToastManager | None: """Get the toast manager instance""" return self._toast_manager @@ -72,7 +73,7 @@ def register_main_window(self, window): from UI.widgets.toast_widget import ToastManager self._toast_manager = ToastManager(window) - def register_settings_dialog(self, dialog: 'SettingsDialog'): + def register_settings_dialog(self, dialog: SettingsDialog): """Register the settings dialog instance""" self._settings_dialog = dialog diff --git a/logger.py b/logger.py index 980209d..c5830d0 100644 --- a/logger.py +++ b/logger.py @@ -3,6 +3,8 @@ Provides logging to both file and UI components. """ +from __future__ import annotations + import logging import sys from pathlib import Path @@ -15,7 +17,7 @@ class AppLogger: """ Singleton application logger with file and UI output. 
""" - _instance: 'AppLogger' | None = None + _instance: AppLogger | None = None _initialized = False def __new__(cls): From 2ea9985257b2b077641deb7947f4e37c72c7027d Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Sat, 31 Jan 2026 04:35:04 -0900 Subject: [PATCH 20/46] Added camera manager --- UI/widgets/camera_preview.py | 157 +++++++++-------- app_context.py | 105 +++++++----- camera/camera_enumerator.py | 222 ++++++++++++++++++++++++ camera/camera_manager.py | 321 +++++++++++++++++++++++++++++++++++ 4 files changed, 680 insertions(+), 125 deletions(-) create mode 100644 camera/camera_enumerator.py create mode 100644 camera/camera_manager.py diff --git a/UI/widgets/camera_preview.py b/UI/widgets/camera_preview.py index eda1a76..5822a8f 100644 --- a/UI/widgets/camera_preview.py +++ b/UI/widgets/camera_preview.py @@ -65,112 +65,112 @@ def _try_initialize_camera(self): """Try to initialize and connect to camera""" self._init_timer.stop() - # Get camera from app context + # Get camera manager from app context ctx = get_app_context() - self._camera: BaseCamera | None = ctx.camera + camera_manager = ctx.camera_manager - if self._camera is None: - self._video_label.setText("No camera available - SDK not loaded") - error("No camera available - SDK not loaded") + # Check if camera is already active + if camera_manager.has_active_camera: + info("Preview: Using active camera") + self._camera = ctx.camera + + if self._camera: + # Get camera info from manager + self._camera_info = camera_manager.active_camera_info + self._start_streaming() + else: + error("Preview: Camera manager says camera is active but ctx.camera is None") + self._init_timer.start(3000) return - # Get the underlying camera to access class methods - base_camera_class = type(self._camera.underlying_camera) + # No active camera, enumerate and open + info("Preview: No active camera, enumerating...") + cameras = camera_manager.enumerate_cameras() - # Try to enumerate and connect to first camera - try: - cameras = 
base_camera_class.enumerate_cameras() - - if len(cameras) == 0: - self._video_label.setText("No camera detected") - if not self._no_camera_logged: - warning("No camera connected") - self._no_camera_logged = True - # Retry in a few seconds - self._init_timer.start(3000) - return - - # Camera found, reset flag - self._no_camera_logged = False - - # Use first camera - self._camera_info = cameras[0] - info(f"Found camera: {self._camera_info.displayname}") - self._open_camera() - - except Exception as e: - self._video_label.setText(f"Camera error: {str(e)}") - error(f"Camera initialization error: {e}") - import traceback - error(traceback.format_exc()) - - def _open_camera(self): - """Open and start streaming from camera""" - if not self._camera or not self._camera_info: + if not cameras: + self._video_label.setText("No camera detected") + if not self._no_camera_logged: + warning("Preview: No cameras found") + self._no_camera_logged = True + # Retry in a few seconds + self._init_timer.start(3000) return - try: - # Set camera info on underlying camera - if hasattr(self._camera.underlying_camera, 'set_camera_info'): - self._camera.underlying_camera.set_camera_info(self._camera_info) - - # Open camera (async with callback) - def on_open_complete(success: bool, result): - if not success: - self._video_label.setText("Failed to open camera") - error("Failed to open camera") - return - - # Camera opened successfully - self._start_streaming() - - # Use async open - self._camera.open(self._camera_info.id, on_complete=on_open_complete) - - except Exception as e: - self._video_label.setText(f"Error: {str(e)}") - error(f"Camera open error: {e}") - import traceback - error(traceback.format_exc()) + # Camera found, reset flag + self._no_camera_logged = False + + # Open first camera + if camera_manager.switch_camera(cameras[0]): + info(f"Preview: Opened camera: {cameras[0]}") + self._camera = ctx.camera + self._camera_info = cameras[0] + self._start_streaming() + else: + 
self._video_label.setText("Failed to open camera") + error("Preview: Failed to open camera") + # Retry + self._init_timer.start(3000) def _start_streaming(self): """Start camera streaming after camera is opened""" if not self._camera: + error("Preview: Cannot start streaming - no camera") return try: + # Get underlying camera + base_camera = self._camera.underlying_camera + # Get current resolution from underlying camera - res_index, width, height = self._camera.underlying_camera.get_current_resolution() + res_index, width, height = base_camera.get_current_resolution() + + # If no resolution set (0x0), set to first resolution + if width == 0 or height == 0: + info("Preview: Setting default resolution...") + + # Get available resolutions + resolutions = base_camera.get_resolutions() + if not resolutions: + error("Preview: No resolutions available") + self._video_label.setText("Camera has no resolutions available") + return + + # Use first resolution (typically highest quality) + if not base_camera.set_resolution(0): + error("Preview: Failed to set resolution") + self._video_label.setText("Failed to set camera resolution") + return + + # Get resolution again after setting + res_index, width, height = base_camera.get_current_resolution() self._img_width = width self._img_height = height # Calculate buffer size using base camera class method - base_camera_class = type(self._camera.underlying_camera) + base_camera_class = type(base_camera) buffer_size = base_camera_class.calculate_buffer_size(width, height, 24) self._img_buffer = bytes(buffer_size) - info("Starting camera stream...") # Start capture - use underlying camera directly - success = self._camera.underlying_camera.start_capture( + success = base_camera.start_capture( self._camera_callback, self ) if not success: - error("start_capture returned False") - self._camera.underlying_camera.close() + error("Preview: start_capture returned False") self._video_label.setText("Failed to start camera stream") return 
self._is_streaming = True # Clear text when streaming starts self._video_label.setText("") - info(f"Streaming started: {self._camera_info.displayname} ({width}x{height})") + info(f"Preview: Streaming started ({width}x{height})") except Exception as e: self._video_label.setText(f"Error: {str(e)}") - error(f"Camera start streaming error: {e}") + error(f"Preview: Camera start streaming error: {e}") import traceback error(traceback.format_exc()) @@ -240,12 +240,12 @@ def _handle_image_event(self): ) self._video_label.setPixmap(QPixmap.fromImage(scaled_image)) except Exception as e: - error(f"Error handling image: {e}") + error(f"Preview: Error handling image: {e}") def _handle_error(self): """Handle camera error""" self._video_label.setText("Camera error occurred") - error("Camera error occurred") + error("Preview: Camera error occurred") self._close_camera() # Try to reconnect self._init_timer.start(3000) @@ -253,7 +253,7 @@ def _handle_error(self): def _handle_disconnected(self): """Handle camera disconnection""" self._video_label.setText("Camera disconnected") - warning("Camera disconnected") + warning("Preview: Camera disconnected") self._close_camera() # Try to reconnect self._init_timer.start(3000) @@ -265,18 +265,17 @@ def _close_camera(self): if self._camera: try: # Stop capture first (use underlying camera for immediate effect) - info("Stopping camera capture...") + info("Preview: Stopping camera capture...") if self._camera.underlying_camera.is_open: self._camera.underlying_camera.stop_capture() - # Close camera (async is fine, we're shutting down) - info("Closing camera...") - self._camera.close() + info("Preview: Stopped using camera") except Exception as e: - error(f"Error closing camera: {e}") + error(f"Preview: Error stopping camera: {e}") self._img_buffer = None + self._camera = None def closeEvent(self, event): """Handle widget close event""" @@ -285,12 +284,12 @@ def closeEvent(self, event): def cleanup(self): """Cleanup resources when widget is being 
destroyed""" - info("Preview cleanup starting...") + info("Preview: cleanup starting...") # Stop the initialization timer first self._init_timer.stop() - # Close camera + # Stop using camera self._close_camera() - info("Preview cleanup complete") \ No newline at end of file + info("Preview cleanup complete") diff --git a/app_context.py b/app_context.py index aeaddb1..29dd412 100644 --- a/app_context.py +++ b/app_context.py @@ -6,15 +6,16 @@ from __future__ import annotations from typing import TYPE_CHECKING -from camera.amscope_camera import AmscopeCamera -from camera.threaded_camera import ThreadedCamera -from logger import info, error, warning +from camera.camera_manager import CameraManager +from camera.base_camera import BaseCamera +from logger import info, error, warning, debug from forgeConfig import ForgeSettingsManager, ForgeSettings if TYPE_CHECKING: from UI.settings.settings_main import SettingsDialog from UI.widgets.toast_widget import ToastManager + class AppContext: """ Singleton application context managing shared resources. @@ -31,8 +32,7 @@ def __init__(self): if self._initialized: return - self._camera: AmscopeCamera | None = None - self._camera_initialized = False + self._camera_manager: CameraManager | None = None self._settings_dialog: SettingsDialog | None = None self._settings_manager: ForgeSettingsManager | None = None self._settings: ForgeSettings | None = None @@ -42,13 +42,36 @@ def __init__(self): # Load settings self._load_settings() + + # Initialize camera manager + self._initialize_camera_manager() + + @property + def camera_manager(self) -> CameraManager: + """ + Get the camera manager instance. + Use this to enumerate cameras, switch cameras, etc. + """ + if self._camera_manager is None: + self._initialize_camera_manager() + return self._camera_manager + + @property + def camera(self) -> BaseCamera | None: + """ + Get the currently active camera instance. + Returns None if no camera is active. 
+ + This is a convenience property that delegates to camera_manager. + """ + if self._camera_manager is None: + return None + return self._camera_manager.active_camera @property - def camera(self) -> AmscopeCamera | None: - """Get the camera instance, initializing if needed""" - if not self._camera_initialized: - self._initialize_camera() - return self._camera + def has_camera(self) -> bool: + """Check if there is an active camera""" + return self.camera is not None @property def settings(self) -> ForgeSettings | None: @@ -102,50 +125,40 @@ def _load_settings(self): self._settings = ForgeSettings() warning("Using default Forge settings") - def _initialize_camera(self): - """Initialize the camera subsystem""" - if self._camera_initialized: + def _initialize_camera_manager(self): + """Initialize the camera manager and open first available camera""" + if self._camera_manager is not None: return - + try: - # Load SDK - AmscopeCamera.ensure_sdk_loaded() - - # Create camera instance wrapped in threaded wrapper - base_camera = AmscopeCamera() - self._camera: AmscopeCamera = ThreadedCamera(base_camera) + info("Initializing camera manager...") + self._camera_manager = CameraManager() - # Start the camera thread - self._camera.start_thread() - - self._camera_initialized = True + # Enumerate cameras + info("Enumerating cameras...") + cameras = self._camera_manager.enumerate_cameras() + if cameras: + # Auto-open the first camera + info("Auto-opening first available camera...") + if self._camera_manager.open_first_available(): + debug("Camera opened successfully during initialization") + else: + warning("Failed to auto-open first camera") + else: + warning("No cameras found during enumeration") + except Exception as e: - error(f"Failed to initialize camera subsystem: {e}") - self._camera = None - self._camera_initialized = True + error(f"Failed to initialize camera manager: {e}") + self._camera_manager = None def cleanup(self): """Cleanup resources""" - if self._camera: - 
info("Closing camera") - result = self._camera.close(wait=True) - - if result is not None: - success, _ = result - if success: - info("Camera closed successfully") - else: - warning("Camera close returned failure") - else: - # None means close was called directly (not through thread) - # This is fine if camera was already closed - info("Camera close completed") - - self._camera.stop_thread(wait=True) + if self._camera_manager: + info("Cleaning up camera manager") + self._camera_manager.cleanup() - self._camera = None - self._camera_initialized = False + self._camera_manager = None self._settings_dialog = None self._settings_manager = None self._settings = None @@ -159,4 +172,4 @@ def get_app_context() -> AppContext: return AppContext() def open_settings(category: str): - AppContext().open_settings(category) \ No newline at end of file + AppContext().open_settings(category) diff --git a/camera/camera_enumerator.py b/camera/camera_enumerator.py new file mode 100644 index 0000000..a794939 --- /dev/null +++ b/camera/camera_enumerator.py @@ -0,0 +1,222 @@ +""" +Camera enumeration system with plugin architecture. +Supports multiple camera types through enumerator plugins. +""" + +from __future__ import annotations + +from abc import ABC, abstractmethod +from dataclasses import dataclass +from enum import Enum +from typing import List, Optional, Dict, Any +from logger import error, exception, debug + + +class CameraType(Enum): + """Supported camera types""" + AMSCOPE = "amscope" + GENERIC_USB = "generic_usb" + + +@dataclass +class CameraInfo: + """ + Information about an available camera. + Lightweight object returned by enumeration before camera instantiation. 
+ """ + camera_type: CameraType + device_id: str + display_name: str + model: Optional[str] = None + manufacturer: Optional[str] = None + serial_number: Optional[str] = None + max_resolution: Optional[tuple[int, int]] = None + metadata: Optional[Dict[str, Any]] = None + + def __str__(self) -> str: + parts = [self.display_name] + if self.model: + parts.append(f"({self.model})") + if self.serial_number: + parts.append(f"SN:{self.serial_number}") + return " ".join(parts) + + def __repr__(self) -> str: + return f"CameraInfo({self.camera_type.value}, {self.display_name})" + + +class CameraEnumerator(ABC): + """ + Base class for camera enumerators. + Each camera type implements this to provide enumeration capability. + """ + + @abstractmethod + def enumerate(self) -> List[CameraInfo]: + """ + Enumerate all cameras of this type. + + Returns: + List of CameraInfo objects for available cameras + """ + pass + + @abstractmethod + def get_camera_type(self) -> CameraType: + """ + Get the camera type this enumerator handles. + + Returns: + CameraType enum value + """ + pass + + @abstractmethod + def is_available(self) -> bool: + """ + Check if this camera type is available (SDK loaded, etc). 
+ + Returns: + True if this camera type can be enumerated + """ + pass + + +class AmscopeEnumerator(CameraEnumerator): + """Enumerator for Amscope cameras""" + + def __init__(self): + self._sdk_loaded = False + self._sdk = None + + def get_camera_type(self) -> CameraType: + return CameraType.AMSCOPE + + def is_available(self) -> bool: + """Check if Amscope SDK is available""" + if self._sdk_loaded: + return self._sdk is not None + + try: + from camera.amscope_camera import AmscopeCamera + + # Ensure SDK is loaded + debug("Loading Amscope SDK...") + load_result = AmscopeCamera.ensure_sdk_loaded() + + if not load_result: + error("AmscopeCamera.ensure_sdk_loaded() returned False") + self._sdk_loaded = True + self._sdk = None + return False + + # Get SDK instance using the private method + self._sdk = AmscopeCamera._get_sdk() + self._sdk_loaded = True + + if self._sdk is None: + error("Amscope SDK loaded but _get_sdk() returned None") + return False + + debug("Amscope SDK loaded successfully") + return True + + except ImportError as ie: + exception(f"Failed to import AmscopeCamera: {ie}") + self._sdk_loaded = True + self._sdk = None + return False + except RuntimeError as re: + exception(f"Runtime error loading Amscope SDK: {re}") + self._sdk_loaded = True + self._sdk = None + return False + except Exception as e: + exception(f"Unexpected error loading Amscope SDK: {e}") + self._sdk_loaded = True + self._sdk = None + return False + + def enumerate(self) -> List[CameraInfo]: + """Enumerate Amscope cameras""" + # Ensure SDK is available before enumerating + if not self.is_available(): + error("Amscope SDK not available, cannot enumerate cameras") + return [] + + cameras = [] + + try: + # Get SDK (should be loaded now) + from camera.amscope_camera import AmscopeCamera + sdk = AmscopeCamera._get_sdk() + + if sdk is None: + error("SDK is None during enumeration") + return [] + + # Enumerate devices + device_list = sdk.Amcam.EnumV2() + debug(f"Amscope enumerator found 
{len(device_list)} camera(s)") + + for idx, device in enumerate(device_list): + try: + # Get model info + model_name = device.model.name if device.model else "Unknown" + + # Get max resolution + max_res = None + if device.model and device.model.res and len(device.model.res) > 0: + # First resolution is typically the highest + max_res = (device.model.res[0].width, device.model.res[0].height) + + # Create camera info + camera_info = CameraInfo( + camera_type=CameraType.AMSCOPE, + device_id=device.id, + display_name=device.displayname or f"Amscope Camera {idx}", + model=model_name, + manufacturer="Amscope", + serial_number=None, # Could extract from device.id if needed + max_resolution=max_res, + metadata={ + 'device_index': idx, + 'model_info': device.model, + } + ) + + cameras.append(camera_info) + + except Exception as e: + exception(f"Error processing Amscope device {idx}: {e}") + continue + + except Exception as e: + exception(f"Error enumerating Amscope cameras: {e}") + + return cameras + + +class GenericUSBEnumerator(CameraEnumerator): + """ + Enumerator for generic USB cameras (future implementation). + Placeholder for now. + """ + + def get_camera_type(self) -> CameraType: + return CameraType.GENERIC_USB + + def is_available(self) -> bool: + """Check if OpenCV or other generic USB support is available""" + try: + import cv2 + return True + except ImportError: + return False + + def enumerate(self) -> List[CameraInfo]: + """Enumerate generic USB cameras (placeholder)""" + # For now, return empty list + # Future: Implement using OpenCV or platform-specific APIs + debug("Generic USB camera enumeration not yet implemented") + return [] diff --git a/camera/camera_manager.py b/camera/camera_manager.py new file mode 100644 index 0000000..8d2d5ed --- /dev/null +++ b/camera/camera_manager.py @@ -0,0 +1,321 @@ +""" +Camera manager for handling camera enumeration, selection, and lifecycle. +Provides plugin architecture for multiple camera types. 
+""" + +from __future__ import annotations + +from typing import Optional, List, Callable +from PySide6.QtCore import QObject, Signal + +from camera.base_camera import BaseCamera +from camera.amscope_camera import AmscopeCamera +from camera.threaded_camera import ThreadedCamera +from camera.camera_enumerator import ( + CameraEnumerator, + CameraInfo, + CameraType, + AmscopeEnumerator, + GenericUSBEnumerator +) +from logger import info, error, warning, exception, debug + + +class CameraManager(QObject): + """ + Manages camera enumeration, selection, and lifecycle. + + Signals: + camera_list_changed: Emitted when available cameras change + active_camera_changed: Emitted when active camera changes (camera_info or None) + enumeration_complete: Emitted when camera enumeration completes (camera_count) + """ + + camera_list_changed = Signal() + active_camera_changed = Signal(object) # CameraInfo or None + enumeration_complete = Signal(int) # count + + def __init__(self): + super().__init__() + + # Available camera enumerators (plugin architecture) + self._enumerators: List[CameraEnumerator] = [ + AmscopeEnumerator(), + GenericUSBEnumerator(), + # Future: Add more enumerators here + ] + + # Available cameras (from last enumeration) + self._available_cameras: List[CameraInfo] = [] + + # Active camera + self._active_camera: Optional[BaseCamera] = None + self._active_camera_info: Optional[CameraInfo] = None + self._camera_thread_started = False + + info("Camera manager initialized") + + @property + def available_cameras(self) -> List[CameraInfo]: + """Get list of available cameras from last enumeration""" + return self._available_cameras.copy() + + @property + def active_camera(self) -> Optional[BaseCamera]: + """Get the currently active camera (may be None)""" + return self._active_camera + + @property + def active_camera_info(self) -> Optional[CameraInfo]: + """Get info about the currently active camera""" + return self._active_camera_info + + @property + def 
has_active_camera(self) -> bool: + """Check if there is an active camera""" + return self._active_camera is not None + + def enumerate_cameras(self) -> List[CameraInfo]: + """ + Enumerate all available cameras across all enumerators. + + Returns: + List of CameraInfo objects for all available cameras + """ + cameras = [] + + for enumerator in self._enumerators: + enumerator_type = enumerator.get_camera_type().value + + try: + if enumerator.is_available(): + enum_cameras = enumerator.enumerate() + cameras.extend(enum_cameras) + else: + debug(f"{enumerator_type} enumerator not available") + except Exception as e: + exception(f"Error in {enumerator_type} enumerator: {e}") + continue + + self._available_cameras = cameras + + # Single clean summary log + if cameras: + info(f"Found {len(cameras)} camera(s):") + for idx, cam in enumerate(cameras): + info(f" [{idx}] {cam.display_name} ({cam.model})") + else: + info("No cameras found") + + self.camera_list_changed.emit() + self.enumeration_complete.emit(len(cameras)) + + return cameras + + def get_camera_by_id(self, device_id: str) -> Optional[CameraInfo]: + """ + Find a camera by its device ID. + + Args: + device_id: The device ID to search for + + Returns: + CameraInfo if found, None otherwise + """ + for camera_info in self._available_cameras: + if camera_info.device_id == device_id: + return camera_info + return None + + def get_cameras_by_type(self, camera_type: CameraType) -> List[CameraInfo]: + """ + Get all cameras of a specific type. + + Args: + camera_type: The camera type to filter by + + Returns: + List of CameraInfo objects matching the type + """ + return [cam for cam in self._available_cameras if cam.camera_type == camera_type] + + def switch_camera(self, camera_info: CameraInfo) -> bool: + """ + Switch to a different camera. + Closes the current camera if any, then opens the new one. 
+ + Args: + camera_info: Information about the camera to switch to + + Returns: + True if switch was successful, False otherwise + """ + info(f"Switching to camera: {camera_info}") + + # Close current camera if any + if self._active_camera is not None: + info("Closing current camera before switching") + self.close_camera() + + # Create new camera + camera = self._create_camera_instance(camera_info) + if camera is None: + error(f"Failed to create camera instance for {camera_info}") + return False + + # Set camera info if the camera supports it + if hasattr(camera, 'set_camera_info'): + # Create the old-style CameraInfo from our new CameraInfo + from camera.base_camera import CameraInfo as OldCameraInfo + old_camera_info = OldCameraInfo( + id=camera_info.device_id, + displayname=camera_info.display_name, + model=camera_info.metadata.get('model_info') if camera_info.metadata else None + ) + camera.set_camera_info(old_camera_info) + + # Wrap in threaded camera + threaded_camera = ThreadedCamera(camera) + threaded_camera.start_thread() + self._camera_thread_started = True + + # Open the camera with the device_id + try: + info(f"Opening camera: {camera_info.display_name}") + + # Call open with device_id and wait=True to ensure it completes + success, _ = threaded_camera.open(camera_info.device_id, wait=True) + + if not success: + error(f"Failed to open camera: {camera_info}") + threaded_camera.stop_thread(wait=True) + return False + + # Set as active camera + self._active_camera = threaded_camera + self._active_camera_info = camera_info + + debug(f"Successfully switched to camera: {camera_info}") + self.active_camera_changed.emit(camera_info) + return True + + except Exception as e: + exception(f"Error opening camera: {e}") + try: + threaded_camera.stop_thread(wait=True) + except Exception as stop_error: + exception(f"Error stopping thread: {stop_error}") + return False + + def open_first_available(self) -> bool: + """ + Convenience method to enumerate and open the first 
available camera. + + Returns: + True if a camera was opened, False otherwise + """ + cameras = self.enumerate_cameras() + + if not cameras: + warning("No cameras available to open") + return False + + # Try to open the first camera + return self.switch_camera(cameras[0]) + + def close_camera(self) -> bool: + """ + Close the currently active camera. + + Returns: + True if successful, False otherwise + """ + if self._active_camera is None: + info("No active camera to close") + return True + + info(f"Closing camera: {self._active_camera_info}") + + try: + # Close the camera + result = self._active_camera.close(wait=True) + + if result is not None: + success, _ = result + if not success: + warning("Camera close returned failure") + + # Stop the thread + if self._camera_thread_started: + self._active_camera.stop_thread(wait=True) + self._camera_thread_started = False + + # Clear active camera + self._active_camera = None + prev_info = self._active_camera_info + self._active_camera_info = None + + info(f"Camera closed: {prev_info}") + self.active_camera_changed.emit(None) + return True + + except Exception as e: + exception(f"Error closing camera: {e}") + + # Try to stop thread anyway + try: + if self._camera_thread_started and self._active_camera: + self._active_camera.stop_thread(wait=True) + except: + pass + + # Clear state + self._active_camera = None + self._active_camera_info = None + self._camera_thread_started = False + + self.active_camera_changed.emit(None) + return False + + def _create_camera_instance(self, camera_info: CameraInfo) -> Optional[BaseCamera]: + """ + Factory method to create camera instance based on camera info. + + Note: This only creates the camera instance. The camera must be + opened separately using camera.open(device_id). 
+ + Args: + camera_info: Information about the camera to create + + Returns: + Camera instance or None if creation failed + """ + try: + if camera_info.camera_type == CameraType.AMSCOPE: + # Create camera instance (does not open it yet) + camera = AmscopeCamera() + return camera + + elif camera_info.camera_type == CameraType.GENERIC_USB: + # Future: Create generic USB camera + error("Generic USB camera not yet implemented") + return None + + else: + error(f"Unsupported camera type: {camera_info.camera_type}") + return None + + except Exception as e: + exception(f"Error creating camera instance: {e}") + return None + + def cleanup(self): + """Cleanup camera manager resources""" + info("Cleaning up camera manager") + + # Close active camera + self.close_camera() + + # Clear available cameras + self._available_cameras.clear() + self.camera_list_changed.emit() From e8ba94c652ab12f09b70a655c0389b8c782302c4 Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Sat, 31 Jan 2026 05:31:24 -0900 Subject: [PATCH 21/46] updated camera imports and made camera class consistent --- app_context.py | 2 +- camera/camera_enumerator.py | 4 +--- camera/camera_manager.py | 6 +++--- camera/{ => cameras}/amscope_camera.py | 21 +-------------------- camera/{ => cameras}/base_camera.py | 23 ++++++++++------------- camera/threaded_camera.py | 2 +- 6 files changed, 17 insertions(+), 41 deletions(-) rename camera/{ => cameras}/amscope_camera.py (98%) rename camera/{ => cameras}/base_camera.py (98%) diff --git a/app_context.py b/app_context.py index 29dd412..bf3b2c5 100644 --- a/app_context.py +++ b/app_context.py @@ -7,7 +7,7 @@ from typing import TYPE_CHECKING from camera.camera_manager import CameraManager -from camera.base_camera import BaseCamera +from camera.cameras.base_camera import BaseCamera from logger import info, error, warning, debug from forgeConfig import ForgeSettingsManager, ForgeSettings diff --git a/camera/camera_enumerator.py b/camera/camera_enumerator.py index a794939..5b41624 
100644 --- a/camera/camera_enumerator.py +++ b/camera/camera_enumerator.py @@ -11,6 +11,7 @@ from typing import List, Optional, Dict, Any from logger import error, exception, debug +from camera.cameras.amscope_camera import AmscopeCamera class CameraType(Enum): """Supported camera types""" @@ -98,7 +99,6 @@ def is_available(self) -> bool: return self._sdk is not None try: - from camera.amscope_camera import AmscopeCamera # Ensure SDK is loaded debug("Loading Amscope SDK...") @@ -147,8 +147,6 @@ def enumerate(self) -> List[CameraInfo]: cameras = [] try: - # Get SDK (should be loaded now) - from camera.amscope_camera import AmscopeCamera sdk = AmscopeCamera._get_sdk() if sdk is None: diff --git a/camera/camera_manager.py b/camera/camera_manager.py index 8d2d5ed..ec6f3ef 100644 --- a/camera/camera_manager.py +++ b/camera/camera_manager.py @@ -8,8 +8,8 @@ from typing import Optional, List, Callable from PySide6.QtCore import QObject, Signal -from camera.base_camera import BaseCamera -from camera.amscope_camera import AmscopeCamera +from camera.cameras.base_camera import BaseCamera +from camera.cameras.amscope_camera import AmscopeCamera from camera.threaded_camera import ThreadedCamera from camera.camera_enumerator import ( CameraEnumerator, @@ -166,7 +166,7 @@ def switch_camera(self, camera_info: CameraInfo) -> bool: # Set camera info if the camera supports it if hasattr(camera, 'set_camera_info'): # Create the old-style CameraInfo from our new CameraInfo - from camera.base_camera import CameraInfo as OldCameraInfo + from camera.cameras.base_camera import CameraInfo as OldCameraInfo old_camera_info = OldCameraInfo( id=camera_info.device_id, displayname=camera_info.display_name, diff --git a/camera/amscope_camera.py b/camera/cameras/amscope_camera.py similarity index 98% rename from camera/amscope_camera.py rename to camera/cameras/amscope_camera.py index f8b7aa8..cc49d13 100644 --- a/camera/amscope_camera.py +++ b/camera/cameras/amscope_camera.py @@ -9,7 +9,7 @@ 
import numpy as np import threading import gc -from camera.base_camera import BaseCamera, CameraResolution, CameraInfo +from camera.cameras.base_camera import BaseCamera, CameraResolution, CameraInfo from logger import get_logger # Module-level reference to the loaded SDK @@ -960,25 +960,6 @@ def get_frame_rate(self) -> Tuple[int, int, int]: except self._get_sdk().HRESULTException: return 0, 0, 0 - @classmethod - def enumerate_cameras(cls) -> list[CameraInfo]: - """Enumerate available Amscope cameras""" - # Ensure SDK is loaded - if not cls._sdk_loaded: - cls.ensure_sdk_loaded() - - amcam = cls._get_sdk_static() - cameras = [] - arr = amcam.Amcam.EnumV2() - for cam in arr: - info = CameraInfo( - id=cam.id, - displayname=cam.displayname, - model=cam.model - ) - cameras.append(info) - return cameras - @staticmethod def _get_sdk_static(): """Static method to get SDK (for use in classmethods)""" diff --git a/camera/base_camera.py b/camera/cameras/base_camera.py similarity index 98% rename from camera/base_camera.py rename to camera/cameras/base_camera.py index 395ec99..29a9db9 100644 --- a/camera/base_camera.py +++ b/camera/cameras/base_camera.py @@ -129,7 +129,7 @@ def stop_capture(self): pass @abstractmethod - def pull_image(self, buffer: bytes, bits_per_pixel: int = 24) -> bool: + def pull_image(self, buffer: bytes, bits_per_pixel: int = 24, timeout_ms: int = 1000) -> bool: """ Pull the latest image into provided buffer @@ -341,17 +341,6 @@ def get_frame_rate(self) -> Tuple[int, int, int]: """ pass - @staticmethod - @abstractmethod - def enumerate_cameras() -> list[CameraInfo]: - """ - Enumerate available cameras - - Returns: - List of available camera information - """ - pass - @abstractmethod def get_camera_metadata(self) -> Dict[str, Any]: """ @@ -439,6 +428,14 @@ def capture_and_save_stream( """ pass + @abstractmethod + def calculate_buffer_size(width: int, height: int, bits_per_pixel: int) -> int: + pass + + @abstractmethod + def calculate_stride(width: int, 
bits_per_pixel: int) -> int: + pass + def save_image( self, image_data: np.ndarray, @@ -686,4 +683,4 @@ def _save_png_with_metadata( # Save with metadata pil_image.save(filepath, format='PNG', pnginfo=pnginfo) - logger.debug(f"PNG metadata saved to {filepath}") \ No newline at end of file + logger.debug(f"PNG metadata saved to {filepath}") diff --git a/camera/threaded_camera.py b/camera/threaded_camera.py index 6b1da56..83485f7 100644 --- a/camera/threaded_camera.py +++ b/camera/threaded_camera.py @@ -10,7 +10,7 @@ from PySide6.QtCore import QObject, Signal -from camera.base_camera import BaseCamera +from camera.cameras.base_camera import BaseCamera from logger import info, error, warning, debug, exception T = TypeVar('T', bound=BaseCamera) From cbb69369fb81523de3ede33b08ec16601d0370f0 Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Sat, 31 Jan 2026 19:07:53 -0900 Subject: [PATCH 22/46] updated import --- UI/widgets/camera_preview.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/UI/widgets/camera_preview.py b/UI/widgets/camera_preview.py index 5822a8f..26a28a0 100644 --- a/UI/widgets/camera_preview.py +++ b/UI/widgets/camera_preview.py @@ -7,7 +7,7 @@ from PySide6.QtWidgets import QFrame, QLabel, QVBoxLayout, QWidget, QSizePolicy from app_context import get_app_context -from camera.base_camera import BaseCamera +from camera.cameras.base_camera import BaseCamera from logger import info, error, warning class CameraPreview(QFrame): From f97cdf0311f64e9dfc2b75d93ce14ac7776033f8 Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Sun, 1 Feb 2026 03:13:02 -0900 Subject: [PATCH 23/46] updated camera settings pattern --- UI/widgets/camera_controls_widget.py | 7 +- camera/camera_manager.py | 2 +- camera/camera_settings.py | 248 ----- camera/cameras/amscope_camera.py | 1412 +++++--------------------- camera/cameras/base_camera.py | 532 +++++----- camera/settings/amscope_settings.py | 737 ++++++++++++++ camera/settings/camera_settings.py | 741 ++++++++++++++ 7 
files changed, 2023 insertions(+), 1656 deletions(-) delete mode 100644 camera/camera_settings.py create mode 100644 camera/settings/amscope_settings.py create mode 100644 camera/settings/camera_settings.py diff --git a/UI/widgets/camera_controls_widget.py b/UI/widgets/camera_controls_widget.py index 131eed7..18b8986 100644 --- a/UI/widgets/camera_controls_widget.py +++ b/UI/widgets/camera_controls_widget.py @@ -7,7 +7,7 @@ QPushButton, QLineEdit, QLabel, QFileDialog, QMessageBox, QComboBox ) from PySide6.QtCore import Slot, Signal -from logger import info, error, warning +from logger import info, error, warning, debug from app_context import get_app_context @@ -124,7 +124,7 @@ def _ensure_output_folder(self): """Ensure the output folder exists""" try: self._current_folder.mkdir(parents=True, exist_ok=True) - info(f"Output folder ready: {self._current_folder}") + debug(f"Output folder ready: {self._current_folder}") except Exception as e: error(f"Failed to create output folder: {e}") # Show toast for error @@ -257,7 +257,6 @@ def on_capture_complete(success: bool, result): filepath=filepath, resolution_index=0, # Highest resolution additional_metadata={ - "timestamp": datetime.now().isoformat(), "source": "still_capture" }, timeout_ms=5000, @@ -285,12 +284,10 @@ def _on_photo_captured(self, success: bool, filepath: str): self._capture_button.setEnabled(True) if success: - info(f"Photo captured and saved successfully: {filepath}") toast.success(f"Saved to: {Path(filepath).name}", title="Image Captured", duration=10000) # Clear custom filename after successful capture self._filename_edit.clear() else: - error(f"Failed to capture photo to: {filepath}") toast.error("Unable to capture image from camera", title="Capture Failed") diff --git a/camera/camera_manager.py b/camera/camera_manager.py index ec6f3ef..158059b 100644 --- a/camera/camera_manager.py +++ b/camera/camera_manager.py @@ -293,7 +293,7 @@ def _create_camera_instance(self, camera_info: CameraInfo) -> 
Optional[BaseCamer try: if camera_info.camera_type == CameraType.AMSCOPE: # Create camera instance (does not open it yet) - camera = AmscopeCamera() + camera = AmscopeCamera(camera_info.model) return camera elif camera_info.camera_type == CameraType.GENERIC_USB: diff --git a/camera/camera_settings.py b/camera/camera_settings.py deleted file mode 100644 index 3db7032..0000000 --- a/camera/camera_settings.py +++ /dev/null @@ -1,248 +0,0 @@ -from __future__ import annotations - -from dataclasses import dataclass -from enum import Enum -from typing import NamedTuple, Union -from pathlib import Path - -from generic_config import ConfigManager, ConfigValidationError - -from app_context import get_app_context - -# ------------------------- -# Enums for type safety -# ------------------------- -class CurveType(str, Enum): - """Tone mapping curve types.""" - LOGARITHMIC = 'Logarithmic' - POLYNOMIAL = 'Polynomial' - OFF = 'Off' - - -class FileFormat(str, Enum): - """Supported image file formats.""" - PNG = 'png' - TIFF = 'tiff' - JPEG = 'jpeg' - BMP = 'bmp' - - -# ------------------------- -# Type-safe tuples -# ------------------------- -class RGBALevel(NamedTuple): - """RGBA level range values (0-255 each).""" - r: int - g: int - b: int - a: int - - def validate(self) -> None: - """Ensure all values are in valid range.""" - for name, value in [('r', self.r), ('g', self.g), ('b', self.b), ('a', self.a)]: - if not (0 <= value <= 255): - raise ValueError(f"RGBALevel.{name} must be in range [0, 255], got {value}") - - -class RGBGain(NamedTuple): - """RGB white balance gain values (-127 to 127 each).""" - r: int - g: int - b: int - - def validate(self) -> None: - """Ensure all values are in valid range.""" - for name, value in [('r', self.r), ('g', self.g), ('b', self.b)]: - if not (-127 <= value <= 127): - raise ValueError(f"RGBGain.{name} must be in range [-127, 127], got {value}") - - -# ------------------------- -# Settings dataclass -# ------------------------- -# From the 
API documentation: -# .-[ DEFAULT VALUES FOR THE IMAGE ]--------------------------------. -# | Parameter | Range | Default | -# |-----------------------------------------------------------------| -# | Auto Exposure Target | 16~235 | 120 | -# | Temp | 2000~15000 | 6503 | -# | Tint | 200~2500 | 1000 | -# | LevelRange | 0~255 | Low = 0, High = 255 | -# | Contrast | -100~100 | 0 | -# | Hue | -180~180 | 0 | -# | Saturation | 0~255 | 128 | -# | Brightness | -64~64 | 0 | -# | Gamma | 20~180 | 100 | -# | WBGain | -127~127 | 0 | -# | Sharpening | 0~500 | 0 | -# | Linear Tone Mapping | 1/0 | 1 | -# | Curved Tone Mapping | Log/Pol/Off | 2 (Logarithmic) | -# '-----------------------------------------------------------------' - -@dataclass -class CameraSettings: - """Camera image processing settings with validation.""" - - # Version tracking (defaults to None, will be set to Forge version if missing) - version: str = get_app_context().settings.version - - # Image processing parameters - auto_expo: bool = False - exposure: int = 120 # Auto Exposure Target - temp: int = 11616 # White balance temperature - tint: int = 925 # White balance tint - contrast: int = 0 - hue: int = 0 - saturation: int = 126 - brightness: int = -64 - gamma: int = 100 - sharpening: int = 500 - - # Complex parameters (now type-safe) - levelrange_low: RGBALevel = RGBALevel(0, 0, 0, 0) - levelrange_high: RGBALevel = RGBALevel(255, 255, 255, 255) - wbgain: RGBGain = RGBGain(0, 0, 0) - - # Tone mapping and format - linear: int = 0 # 0/1 - curve: CurveType = CurveType.POLYNOMIAL - fformat: FileFormat = FileFormat.PNG - - @classmethod - def get_ranges(cls) -> dict: - """ - Return validation ranges for all numeric parameters. 
- - Returns: - Dictionary mapping parameter names to (min, max) tuples - """ - return { - 'exposure': (16, 220), - 'temp': (2000, 15000), - 'tint': (200, 2500), - 'levelrange': (0, 255), - 'contrast': (-100, 100), - 'hue': (-180, 180), - 'saturation': (0, 255), - 'brightness': (-64, 64), - 'gamma': (20, 180), - 'wbgain': (-127, 127), - 'sharpening': (0, 500), - 'linear': (0, 1), - } - - def validate(self) -> None: - """ - Validate all settings are within acceptable ranges. - - Raises: - ValueError: If any parameter is outside its valid range - """ - ranges = self.get_ranges() - - # Validate simple numeric parameters - for param, (min_val, max_val) in ranges.items(): - if param in ('levelrange', 'wbgain'): - continue # Handled separately - - value = getattr(self, param) - if not isinstance(value, bool) and not (min_val <= value <= max_val): - raise ValueError( - f"{param} = {value} is outside valid range [{min_val}, {max_val}]" - ) - - # Validate complex types (they have their own validate methods) - try: - self.levelrange_low.validate() - except ValueError as e: - raise ValueError(f"levelrange_low invalid: {e}") from e - - try: - self.levelrange_high.validate() - except ValueError as e: - raise ValueError(f"levelrange_high invalid: {e}") from e - - try: - self.wbgain.validate() - except ValueError as e: - raise ValueError(f"wbgain invalid: {e}") from e - - # Validate enum types - if not isinstance(self.curve, CurveType): - raise ValueError(f"curve must be a CurveType enum, got {type(self.curve)}") - - if not isinstance(self.fformat, FileFormat): - raise ValueError(f"fformat must be a FileFormat enum, got {type(self.fformat)}") - - def __post_init__(self) -> None: - """ - Post-initialization hook to ensure enums are converted from strings. - - This allows YAML deserialization to work correctly by converting - string values back to enum instances. 
- """ - # Convert string values to enums if needed - if isinstance(self.curve, str): - self.curve = CurveType(self.curve) - if isinstance(self.fformat, str): - self.fformat = FileFormat(self.fformat) - - # Convert tuples/lists to NamedTuples if needed - if isinstance(self.levelrange_low, (tuple, list)): - self.levelrange_low = RGBALevel(*self.levelrange_low) - if isinstance(self.levelrange_high, (tuple, list)): - self.levelrange_high = RGBALevel(*self.levelrange_high) - if isinstance(self.wbgain, (tuple, list)): - self.wbgain = RGBGain(*self.wbgain) - - -# ------------------------- -# Specialized manager -# ------------------------- -class CameraSettingsManager(ConfigManager[CameraSettings]): - """ - Specialized configuration manager for a single camera model. - - Each camera model should have its own manager instance. - This ensures settings don't bleed between incompatible models. - - Directory structure: - config/cameras/MU500/ - settings.yaml - default_settings.yaml - backups/ - - Example usage: - >>> # Create manager for MU500 - >>> mu500_mgr = CameraSettingsManager(model="MU500") - >>> settings = mu500_mgr.load() - >>> settings.exposure = 150 - >>> mu500_mgr.save(settings) - >>> - >>> # Create separate manager for MU3000 (different settings!) 
- >>> mu3000_mgr = CameraSettingsManager(model="MU3000") - >>> settings = mu3000_mgr.load() # Won't interfere with MU500 - """ - - def __init__( - self, - *, - model: str, - base_dir: Union[str, Path] = "./config/cameras", - default_filename: str = "default_settings.yaml", - backup_dirname: str = "backups", - backup_keep: int = 5, - ) -> None: - # Set root_dir to the model-specific directory - model_dir = Path(base_dir) / model - - super().__init__( - CameraSettings, - root_dir=model_dir, - default_filename=default_filename, - backup_dirname=backup_dirname, - backup_keep=backup_keep, - ) - - self.model = model - self._logger.info(f"Initialized CameraSettingsManager for model '{model}' at {model_dir}") diff --git a/camera/cameras/amscope_camera.py b/camera/cameras/amscope_camera.py index cc49d13..4236fa1 100644 --- a/camera/cameras/amscope_camera.py +++ b/camera/cameras/amscope_camera.py @@ -1,29 +1,32 @@ """ Amscope camera implementation using the amcam SDK. +Now with integrated settings management. """ -from typing import Tuple, Callable, Any, Optional, Dict, TYPE_CHECKING +from __future__ import annotations + +from typing import Callable, Any from types import SimpleNamespace from pathlib import Path import ctypes import numpy as np import threading import gc + from camera.cameras.base_camera import BaseCamera, CameraResolution, CameraInfo -from logger import get_logger +from logger import info, debug, error, exception, get_logger +from camera.settings.amscope_settings import AmscopeSettings # Module-level reference to the loaded SDK _amcam = None -# Type hints for IDE support (won't execute at runtime when checking types) -if TYPE_CHECKING: - import amcam # This is just for type hints, won't actually import - class AmscopeCamera(BaseCamera): """ Amscope camera implementation using the amcam SDK. - Wraps the amcam library to conform to the BaseCamera interface. 
+ + Now includes integrated settings management with Amscope-specific + settings like fan control, TEC, low noise mode, etc. The SDK must be loaded before using this class: AmscopeCamera.ensure_sdk_loaded() @@ -34,18 +37,62 @@ class AmscopeCamera(BaseCamera): # Class-level flag to track SDK loading _sdk_loaded = False - def __init__(self): - super().__init__() + # Option constants (from amcam SDK documentation) + OPTION_FAN = 0x0a + OPTION_TEC = 0x08 + OPTION_TECTARGET = 0x0c + OPTION_LOW_NOISE = 0x53 + OPTION_HIGH_FULLWELL = 0x51 + OPTION_TESTPATTERN = 0x2c + OPTION_DEMOSAIC = 0x5a + OPTION_BYTEORDER = 0x01 + + def __init__(self, model: str): + """ + Initialize Amscope camera. + + Args: + model: Camera model name (default "Amscope") + """ + super().__init__(model=model) + + # Initialize logger + self._logger = get_logger() # Ensure SDK is loaded before instantiating if not AmscopeCamera._sdk_loaded: AmscopeCamera.ensure_sdk_loaded() - self._hcam: Optional[Any] = None # Will be amcam.Amcam after SDK loads - self._camera_info: Optional[CameraInfo] = None + self._hcam = None # Will be amcam.Amcam after SDK loads + self._camera_info = None # Must be set via set_camera_info() before opening + self._frame_buffer = None + + # ------------------------- + # Settings Integration + # ------------------------- + + # Settings are now managed by the base class + # The base class will automatically use AmscopeSettings + # through the CameraSettingsManager factory system + + @property + def settings(self) -> AmscopeSettings: + """ + Get settings with proper type hint for Amscope. + + Returns: + AmscopeSettings object + """ + if self._settings is None: + raise RuntimeError("Settings not initialized. 
Call initialize_settings() first.") + return self._settings + + # ------------------------- + # SDK Management + # ------------------------- @classmethod - def ensure_sdk_loaded(cls, sdk_path: Optional[Path] = None) -> bool: + def ensure_sdk_loaded(cls, sdk_path: Path | None = None) -> bool: """ Ensure the Amscope SDK is loaded and ready to use. @@ -61,8 +108,6 @@ def ensure_sdk_loaded(cls, sdk_path: Optional[Path] = None) -> bool: if cls._sdk_loaded and _amcam is not None: return True - logger = get_logger() - try: from camera.sdk_loaders.amscope_sdk_loader import AmscopeSdkLoader @@ -70,22 +115,22 @@ def ensure_sdk_loaded(cls, sdk_path: Optional[Path] = None) -> bool: _amcam = loader.load() cls._sdk_loaded = True - logger.info("Amscope SDK loaded successfully") + info("Amscope SDK loaded successfully") return True except Exception as e: - logger.warning(f"Failed to load Amscope SDK: {e}") - logger.info("Attempting fallback to direct import...") + error(f"Failed to load Amscope SDK: {e}") + info("Attempting fallback to direct import...") try: # Fallback to direct import if loader fails import amcam as amcam_module _amcam = amcam_module cls._sdk_loaded = True - logger.info("Amscope SDK loaded via direct import") + info("Amscope SDK loaded via direct import") return True except ImportError as ie: - logger.error(f"Direct import also failed: {ie}") + error(f"Direct import also failed: {ie}") return False @staticmethod @@ -98,16 +143,18 @@ def _get_sdk(): ) return _amcam - # Class-level event constant accessors + @classmethod + def _get_sdk_static(cls): + """Static version of _get_sdk for class methods""" + return cls._get_sdk() + + # ------------------------- + # Event Constants + # ------------------------- + @classmethod def get_event_constants(cls): - """ - Get event constants as a namespace object. - Useful for accessing events without a camera instance. 
- - Returns: - SimpleNamespace with event constants - """ + """Get event constants as a namespace object.""" amcam = cls._get_sdk_static() return SimpleNamespace( IMAGE=amcam.AMCAM_EVENT_IMAGE, @@ -118,7 +165,6 @@ def get_event_constants(cls): DISCONNECTED=amcam.AMCAM_EVENT_DISCONNECTED ) - # Event type constants - these are properties since SDK loads dynamically @property def EVENT_IMAGE(self): return self._get_sdk().AMCAM_EVENT_IMAGE @@ -142,12 +188,16 @@ def EVENT_ERROR(self): @property def EVENT_DISCONNECTED(self): return self._get_sdk().AMCAM_EVENT_DISCONNECTED - + @property - def handle(self) -> Optional[Any]: + def handle(self): """Get the underlying amcam handle""" return self._hcam + # ------------------------- + # Camera Control + # ------------------------- + def open(self, camera_id: str) -> bool: """Open connection to Amscope camera""" amcam = self._get_sdk() @@ -156,7 +206,7 @@ def open(self, camera_id: str) -> bool: if self._hcam: self._is_open = True # Set RGB byte order for Qt compatibility - self._hcam.put_Option(amcam.AMCAM_OPTION_BYTEORDER, 0) + self._hcam.put_Option(self.OPTION_BYTEORDER, 0) return True return False except self._get_sdk().HRESULTException: @@ -171,6 +221,7 @@ def close(self): self._callback = None self._callback_context = None self._camera_info = None + self._frame_buffer = None def start_capture(self, callback: Callable, context: Any) -> bool: """Start capturing frames with callback""" @@ -182,10 +233,9 @@ def start_capture(self, callback: Callable, context: Any) -> bool: # Get current resolution to allocate frame buffer res_index, width, height = self.get_current_resolution() - # Create persistent frame buffer (like manufacturer's self.pData) - # This will be continuously updated by the event callback + # Create persistent frame buffer buffer_size = amcam.TDIBWIDTHBYTES(width * 24) * height - self._frame_buffer = bytearray(buffer_size) # Use bytearray so it's mutable + self._frame_buffer = bytearray(buffer_size) 
self._callback = callback self._callback_context = context @@ -197,10 +247,9 @@ def start_capture(self, callback: Callable, context: Any) -> bool: def stop_capture(self): """Stop capturing frames""" if self._hcam: - amcam = self._get_sdk() try: self._hcam.Stop() - except self._get_sdk().HRESULTException: + except: pass def pull_image(self, buffer: ctypes.Array, bits_per_pixel: int = 24, timeout_ms: int = 1000) -> bool: @@ -210,7 +259,7 @@ def pull_image(self, buffer: ctypes.Array, bits_per_pixel: int = 24, timeout_ms: Args: buffer: ctypes buffer to receive image data bits_per_pixel: Bits per pixel (typically 24) - timeout_ms: Timeout in milliseconds to wait for frame (default 1000ms) + timeout_ms: Timeout in milliseconds to wait for frame Returns: True if successful, False otherwise @@ -223,50 +272,33 @@ def pull_image(self, buffer: ctypes.Array, bits_per_pixel: int = 24, timeout_ms: amcam = self._get_sdk() try: # Use WaitImageV4 to wait for a frame (bStill=0 for video stream) - # This is more reliable than PullImageV2 which may fail if no frame is ready + # This is more reliable than PullImageV4 which may fail if no frame is ready self._hcam.WaitImageV4(timeout_ms, buffer, 0, bits_per_pixel, 0, None) return True - except self._get_sdk().HRESULTException as e: + except amcam.HRESULTException as e: # If timeout or no frame available, log the error logger = get_logger() logger.error(f"Failed to pull image: {e}") return False def snap_image(self, resolution_index: int = 0) -> bool: - """Capture a still image""" + """Capture a still image at specified resolution""" if not self._hcam: return False - amcam = self._get_sdk() try: self._hcam.Snap(resolution_index) return True - except self._get_sdk().HRESULTException: + except: return False - def pull_still_image(self, buffer: ctypes.Array, bits_per_pixel: int = 24) -> Tuple[bool, int, int]: - """ - Pull a still image into buffer - - Args: - buffer: Buffer to receive image data (ctypes.create_string_buffer, should be large 
enough) - bits_per_pixel: Bits per pixel (typically 24) - - Returns: - Tuple of (success, width, height) - """ - if not self._hcam: - return False, 0, 0 - - amcam = self._get_sdk() - try: - # Get still resolution to return dimensions - w, h = self._hcam.get_StillResolution(0) - # Use PullStillImageV2 which works with ctypes.create_string_buffer - self._hcam.PullStillImageV2(buffer, bits_per_pixel, None) - return True, w, h - except self._get_sdk().HRESULTException: - return False, 0, 0 + # ------------------------- + # Resolution Management + # ------------------------- + + def set_camera_info(self, info: CameraInfo): + """Set camera information (needed before get_resolutions works)""" + self._camera_info = info def get_resolutions(self) -> list[CameraResolution]: """Get available preview resolutions""" @@ -276,7 +308,8 @@ def get_resolutions(self) -> list[CameraResolution]: resolutions = [] for i in range(self._camera_info.model.preview): res = self._camera_info.model.res[i] - resolutions.append(CameraResolution(res.width, res.height)) + resolutions.append(CameraResolution(width=res.width, height=res.height)) + return resolutions def get_current_resolution(self) -> Tuple[int, int, int]: @@ -293,1063 +326,208 @@ def set_resolution(self, resolution_index: int) -> bool: if not self._hcam: return False - amcam = self._get_sdk() try: self._hcam.put_eSize(resolution_index) return True - except self._get_sdk().HRESULTException: + except: return False - def get_exposure_range(self) -> Tuple[int, int, int]: - """Get exposure time range (min, max, default) in microseconds""" - if not self._hcam: - return 0, 0, 0 + def supports_still_capture(self) -> bool: + """Check if camera supports separate still image capture""" + if not self._camera_info or not self._camera_info.model: + return False - amcam = self._get_sdk() - try: - return self._hcam.get_ExpTimeRange() - except self._get_sdk().HRESULTException: - return 0, 0, 0 + return self._camera_info.model.still > 0 - def 
get_exposure_time(self) -> int: - """Get current exposure time in microseconds""" - amcam = self._get_sdk() - if not self._hcam: - return 0 + def get_still_resolutions(self) -> list[CameraResolution]: + """Get available still image resolutions""" + if not self._camera_info or not self._camera_info.model: + return [] - try: - return self._hcam.get_ExpoTime() - except self._get_sdk().HRESULTException: - return 0 - - def set_exposure_time(self, time_us: int) -> bool: - """Set exposure time in microseconds""" - amcam = self._get_sdk() - if not self._hcam: - return False + resolutions = [] + for i in range(self._camera_info.model.still): + res = self._camera_info.model.res[i] + resolutions.append(CameraResolution(width=res.width, height=res.height)) - try: - self._hcam.put_ExpoTime(time_us) - return True - except self._get_sdk().HRESULTException: - return False + return resolutions - def get_gain_range(self) -> Tuple[int, int, int]: - """Get gain range (min, max, default) in percent""" - amcam = self._get_sdk() - if not self._hcam: - return 0, 0, 0 + def pull_still_image(self, buffer: ctypes.Array, bits_per_pixel: int = 24) -> Tuple[bool, int, int]: + """ + Pull a still image into buffer - try: - return self._hcam.get_ExpoAGainRange() - except self._get_sdk().HRESULTException: - return 0, 0, 0 - - def get_gain(self) -> int: - """Get current gain in percent""" - amcam = self._get_sdk() + Args: + buffer: Buffer to receive image data (ctypes.create_string_buffer) + bits_per_pixel: Bits per pixel (typically 24) + + Returns: + Tuple of (success, width, height) + """ if not self._hcam: - return 0 + return False, 0, 0 - try: - return self._hcam.get_ExpoAGain() - except self._get_sdk().HRESULTException: - return 0 - - def set_gain(self, gain_percent: int) -> bool: - """Set gain in percent""" amcam = self._get_sdk() - if not self._hcam: - return False - try: - self._hcam.put_ExpoAGain(gain_percent) - return True - except self._get_sdk().HRESULTException: - return False + # Get 
still resolution to return dimensions + w, h = self._hcam.get_StillResolution(0) + # Use PullStillImageV2 which works with ctypes.create_string_buffer + self._hcam.PullStillImageV2(buffer, bits_per_pixel, None) + return True, w, h + except amcam.HRESULTException: + return False, 0, 0 - def get_auto_exposure(self) -> bool: - """Get auto exposure state""" - amcam = self._get_sdk() - if not self._hcam: - return False - - try: - return self._hcam.get_AutoExpoEnable() == 1 - except self._get_sdk().HRESULTException: - return False + # ------------------------- + # Metadata + # ------------------------- - def set_auto_exposure(self, enabled: bool) -> bool: - """Set auto exposure state""" - amcam = self._get_sdk() - if not self._hcam: - return False + def get_camera_metadata(self) -> dict[str, Any]: + """Get current camera metadata for image saving""" + metadata = { + 'model': self.model, + } + + # Get metadata from settings if available + if self._settings is not None: + metadata['exposure_time_us'] = self._settings.get_exposure_time() + metadata['gain_percent'] = self._settings.get_gain() + metadata['temperature'] = self._settings.temp + metadata['tint'] = self._settings.tint + # Add serial number if available try: - self._hcam.put_AutoExpoEnable(1 if enabled else 0) - return True - except self._get_sdk().HRESULTException: - return False - - def supports_white_balance(self) -> bool: - """Check if camera supports white balance (not monochrome)""" - if not self._camera_info or not self._camera_info.model: - return False + if self._hcam: + metadata['serial'] = self._hcam.get_SerialNumber() + except: + pass - amcam = self._get_sdk() - return (self._camera_info.model.flag & amcam.AMCAM_FLAG_MONO) == 0 + return metadata - def get_white_balance_range(self) -> Tuple[Tuple[int, int], Tuple[int, int]]: - """Get white balance range ((temp_min, temp_max), (tint_min, tint_max))""" - amcam = self._get_sdk() - return ((amcam.AMCAM_TEMP_MIN, amcam.AMCAM_TEMP_MAX), - 
(amcam.AMCAM_TINT_MIN, amcam.AMCAM_TINT_MAX)) + # ------------------------- + # Image Capture and Saving + # ------------------------- - def get_white_balance(self) -> Tuple[int, int]: - """Get current white balance (temperature, tint)""" - amcam = self._get_sdk() + def capture_and_save_still( + self, + filepath: Path, + resolution_index: int = 0, + additional_metadata: dict[str, Any] | None = None, + timeout_ms: int = 5000 + ) -> bool: + """Capture a still image and save it with metadata.""" if not self._hcam: - return amcam.AMCAM_TEMP_DEF, amcam.AMCAM_TINT_DEF + error("Camera not open") + return False - try: - return self._hcam.get_TempTint() - except self._get_sdk().HRESULTException: - return amcam.AMCAM_TEMP_DEF, amcam.AMCAM_TINT_DEF - - def set_white_balance(self, temperature: int, tint: int) -> bool: - """Set white balance""" amcam = self._get_sdk() - if not self._hcam: - return False try: - self._hcam.put_TempTint(temperature, tint) - return True - except self._get_sdk().HRESULTException: + # Allocate buffer for still image + width, height = self._hcam.get_StillResolution(resolution_index) + buffer_size = amcam.TDIBWIDTHBYTES(width * 24) * height + pData = bytes(buffer_size) + + # Setup threading for still capture + still_ready = threading.Event() + capture_success = {'success': False, 'width': 0, 'height': 0} + + # Save original callback + original_callback = self._callback + original_context = self._callback_context + + def still_callback(event, ctx): + if event == self.EVENT_STILLIMAGE: + # Pull the still image + info_struct = amcam.AmcamFrameInfoV3() + try: + self._hcam.PullImageV3(pData, 1, 24, 0, info_struct) + capture_success['success'] = True + capture_success['width'] = info_struct.width + capture_success['height'] = info_struct.height + except Exception as e: + error(f"Failed to pull still image: {e}") + capture_success['success'] = False + still_ready.set() + + # Call original callback if exists + if original_callback: + original_callback(event, 
original_context) + + # Temporarily replace callback + self._callback = still_callback + self._callback_context = None + + # Trigger still capture + if not self.snap_image(resolution_index): + error("Failed to trigger still capture") + self._callback = original_callback + self._callback_context = original_context + return False + + # Wait for still image + if not still_ready.wait(timeout_ms / 1000.0): + error(f"Still capture timed out after {timeout_ms}ms") + self._callback = original_callback + self._callback_context = original_context + return False + + # Restore original callback + self._callback = original_callback + self._callback_context = original_context + + if not capture_success['success']: + error("Failed to pull still image") + return False + + # Convert to numpy array + w = capture_success['width'] + h = capture_success['height'] + stride = amcam.TDIBWIDTHBYTES(w * 24) + image_data = np.frombuffer(pData, dtype=np.uint8).reshape((h, stride))[:, :w*3].reshape((h, w, 3)).copy() + + # Convert BGR to RGB + image_data = image_data[:, :, ::-1].copy() + + del pData + + # Save with metadata + success = self.save_image(image_data, filepath, additional_metadata) + + del image_data + gc.collect() + + if success: + info(f"Still image captured and saved: {filepath}") + else: + error(f"Failed to save still image: {filepath}") + + return success + + except Exception as e: + exception(f"Failed to capture and save still image: {filepath}") return False - def auto_white_balance(self) -> bool: - """Perform one-time auto white balance""" - amcam = self._get_sdk() - if not self._hcam: + def capture_and_save_stream( + self, + filepath: Path, + additional_metadata: dict[str, Any] | None = None + ) -> bool: + """Capture current frame from live stream and save it.""" + if not self._hcam or not self._is_open: + error("Camera not in capture mode") return False - try: - self._hcam.AwbOnce() - return True - except self._get_sdk().HRESULTException: + if not hasattr(self, 
'_frame_buffer') or self._frame_buffer is None: + error("No frame buffer available") return False - - # ======================================================================== - # Image Processing Parameters - # ======================================================================== - - def get_hue(self) -> int: - """ - Get hue value. - - Returns: - Hue value in range [-180, 180] - - Raises: - RuntimeError: If camera is not initialized - """ - if not self._hcam: - raise RuntimeError("Camera not initialized") try: - return self._hcam.get_Hue() - except self._get_sdk().HRESULTException as e: - raise RuntimeError(f"Failed to get hue: {e}") from e - - def set_hue(self, hue: int) -> bool: - """ - Set hue value. - - Args: - hue: Hue value in range [-180, 180] + # Get current resolution + res_index, width, height = self.get_current_resolution() - Returns: - True if successful, False otherwise - """ - if not self._hcam: - return False - - try: - self._hcam.put_Hue(hue) - return True - except self._get_sdk().HRESULTException: - return False - - def get_saturation(self) -> int: - """ - Get saturation value. - - Returns: - Saturation value in range [0, 255] - - Raises: - RuntimeError: If camera is not initialized - """ - if not self._hcam: - raise RuntimeError("Camera not initialized") - - try: - return self._hcam.get_Saturation() - except self._get_sdk().HRESULTException as e: - raise RuntimeError(f"Failed to get saturation: {e}") from e - - def set_saturation(self, saturation: int) -> bool: - """ - Set saturation value. - - Args: - saturation: Saturation value in range [0, 255] - - Returns: - True if successful, False otherwise - """ - if not self._hcam: - return False - - try: - self._hcam.put_Saturation(saturation) - return True - except self._get_sdk().HRESULTException: - return False - - def get_brightness(self) -> int: - """ - Get brightness value. 
- - Returns: - Brightness value in range [-64, 64] - - Raises: - RuntimeError: If camera is not initialized - """ - if not self._hcam: - raise RuntimeError("Camera not initialized") - - try: - return self._hcam.get_Brightness() - except self._get_sdk().HRESULTException as e: - raise RuntimeError(f"Failed to get brightness: {e}") from e - - def set_brightness(self, brightness: int) -> bool: - """ - Set brightness value. - - Args: - brightness: Brightness value in range [-64, 64] - - Returns: - True if successful, False otherwise - """ - if not self._hcam: - return False - - try: - self._hcam.put_Brightness(brightness) - return True - except self._get_sdk().HRESULTException: - return False - - def get_contrast(self) -> int: - """ - Get contrast value. - - Returns: - Contrast value in range [-100, 100] - - Raises: - RuntimeError: If camera is not initialized - """ - if not self._hcam: - raise RuntimeError("Camera not initialized") - - try: - return self._hcam.get_Contrast() - except self._get_sdk().HRESULTException as e: - raise RuntimeError(f"Failed to get contrast: {e}") from e - - def set_contrast(self, contrast: int) -> bool: - """ - Set contrast value. - - Args: - contrast: Contrast value in range [-100, 100] - - Returns: - True if successful, False otherwise - """ - if not self._hcam: - return False - - try: - self._hcam.put_Contrast(contrast) - return True - except self._get_sdk().HRESULTException: - return False - - def get_gamma(self) -> int: - """ - Get gamma value. - - Returns: - Gamma value in range [20, 180] - - Raises: - RuntimeError: If camera is not initialized - """ - if not self._hcam: - raise RuntimeError("Camera not initialized") - - try: - return self._hcam.get_Gamma() - except self._get_sdk().HRESULTException as e: - raise RuntimeError(f"Failed to get gamma: {e}") from e - - def set_gamma(self, gamma: int) -> bool: - """ - Set gamma value. 
- - Args: - gamma: Gamma value in range [20, 180] - - Returns: - True if successful, False otherwise - """ - if not self._hcam: - return False - - try: - self._hcam.put_Gamma(gamma) - return True - except self._get_sdk().HRESULTException: - return False - - def get_auto_exposure_target(self) -> int: - """ - Get auto exposure target brightness. - - Returns: - Auto exposure target in range [16, 235] - - Raises: - RuntimeError: If camera is not initialized - """ - if not self._hcam: - raise RuntimeError("Camera not initialized") - - try: - return self._hcam.get_AutoExpoTarget() - except self._get_sdk().HRESULTException as e: - raise RuntimeError(f"Failed to get auto exposure target: {e}") from e - - def set_auto_exposure_target(self, target: int) -> bool: - """ - Set auto exposure target brightness. - - Args: - target: Auto exposure target in range [16, 235] - - Returns: - True if successful, False otherwise - """ - if not self._hcam: - return False - - try: - self._hcam.put_AutoExpoTarget(target) - return True - except self._get_sdk().HRESULTException: - return False - - def get_white_balance_gain(self) -> Tuple[int, int, int]: - """ - Get RGB white balance gain values. - - Returns: - Tuple of (R, G, B) gain values in range [-127, 127] - - Raises: - RuntimeError: If camera is not initialized or not supported - - Note: - Only works in RGB Gain mode. - """ - if not self._hcam: - raise RuntimeError("Camera not initialized") - - try: - return self._hcam.get_WhiteBalanceGain() - except self._get_sdk().HRESULTException as e: - raise RuntimeError(f"Failed to get white balance gain (may not be supported in Temp/Tint mode): {e}") from e - - def set_white_balance_gain(self, r: int, g: int, b: int) -> bool: - """ - Set RGB white balance gain values. - - Args: - r: Red gain in range [-127, 127] - g: Green gain in range [-127, 127] - b: Blue gain in range [-127, 127] - - Returns: - True if successful, False otherwise - - Note: - Only works in RGB Gain mode. 
- """ - if not self._hcam: - return False - - try: - self._hcam.put_WhiteBalanceGain([r, g, b]) - return True - except self._get_sdk().HRESULTException: - return False - - def get_level_range(self) -> Tuple[Tuple[int, int, int, int], Tuple[int, int, int, int]]: - """ - Get level range (low and high) for RGBA channels. - - Returns: - Tuple of ((R_low, G_low, B_low, A_low), (R_high, G_high, B_high, A_high)) - Each value in range [0, 255] - - Raises: - RuntimeError: If camera is not initialized - """ - if not self._hcam: - raise RuntimeError("Camera not initialized") - - try: - low, high = self._hcam.get_LevelRange() - return (tuple(low), tuple(high)) - except self._get_sdk().HRESULTException as e: - raise RuntimeError(f"Failed to get level range: {e}") from e - - def set_level_range( - self, - low: Tuple[int, int, int, int], - high: Tuple[int, int, int, int] - ) -> bool: - """ - Set level range (low and high) for RGBA channels. - - Args: - low: Tuple of (R_low, G_low, B_low, A_low), each in range [0, 255] - high: Tuple of (R_high, G_high, B_high, A_high), each in range [0, 255] - - Returns: - True if successful, False otherwise - """ - if not self._hcam: - return False - - try: - self._hcam.put_LevelRange(list(low), list(high)) - return True - except self._get_sdk().HRESULTException: - return False - - def auto_level_range(self) -> bool: - """ - Perform automatic level range adjustment. - - Returns: - True if successful, False otherwise - """ - if not self._hcam: - return False - - try: - self._hcam.LevelRangeAuto() - return True - except self._get_sdk().HRESULTException: - return False - - def get_option(self, option: int) -> int: - """ - Get a camera option value. 
- - Args: - option: Option ID (use AMCAM_OPTION_* constants) - - Returns: - Option value - - Raises: - RuntimeError: If camera is not initialized - """ - if not self._hcam: - raise RuntimeError("Camera not initialized") - - try: - return self._hcam.get_Option(option) - except self._get_sdk().HRESULTException as e: - raise RuntimeError(f"Failed to get option {option}: {e}") from e - - def set_option(self, option: int, value: int) -> bool: - """ - Set a camera option value. - - Args: - option: Option ID (use AMCAM_OPTION_* constants) - value: Value to set - - Returns: - True if successful, False otherwise - """ - if not self._hcam: - return False - - try: - self._hcam.put_Option(option, value) - return True - except self._get_sdk().HRESULTException: - return False - - def get_sharpening(self) -> Tuple[int, int, int]: - """ - Get sharpening parameters. - - Returns: - Tuple of (strength, radius, threshold): - - strength: [0, 500], 0 = disabled - - radius: [1, 10] - - threshold: [0, 255] - - Raises: - RuntimeError: If camera is not initialized - """ - amcam = self._get_sdk() - if not self._hcam: - raise RuntimeError("Camera not initialized") - - try: - # Get sharpening option value - val = self._hcam.get_Option(amcam.AMCAM_OPTION_SHARPENING) - - # Extract components: (threshold << 24) | (radius << 16) | strength - strength = val & 0xFFFF - radius = (val >> 16) & 0xFF - threshold = (val >> 24) & 0xFF - - return (strength, radius, threshold) - except self._get_sdk().HRESULTException as e: - raise RuntimeError(f"Failed to get sharpening: {e}") from e - - def set_sharpening(self, strength: int, radius: int = 2, threshold: int = 0) -> bool: - """ - Set sharpening parameters. 
- - Args: - strength: Sharpening strength [0, 500], 0 = disabled - radius: Sharpening radius [1, 10], default 2 - threshold: Sharpening threshold [0, 255], default 0 - - Returns: - True if successful, False otherwise - """ - amcam = self._get_sdk() - if not self._hcam: - return False - - try: - # Combine into single value: (threshold << 24) | (radius << 16) | strength - val = (threshold << 24) | (radius << 16) | strength - self._hcam.put_Option(amcam.AMCAM_OPTION_SHARPENING, val) - return True - except self._get_sdk().HRESULTException: - return False - - def get_linear_tone_mapping(self) -> bool: - """ - Get linear tone mapping state. - - Returns: - True if enabled, False if disabled - - Raises: - RuntimeError: If camera is not initialized - """ - amcam = self._get_sdk() - if not self._hcam: - raise RuntimeError("Camera not initialized") - - try: - val = self._hcam.get_Option(amcam.AMCAM_OPTION_LINEAR) - return val == 1 - except self._get_sdk().HRESULTException as e: - raise RuntimeError(f"Failed to get linear tone mapping: {e}") from e - - def set_linear_tone_mapping(self, enabled: bool) -> bool: - """ - Set linear tone mapping on/off. - - Args: - enabled: True to enable, False to disable - - Returns: - True if successful, False otherwise - """ - amcam = self._get_sdk() - if not self._hcam: - return False - - try: - self._hcam.put_Option(amcam.AMCAM_OPTION_LINEAR, 1 if enabled else 0) - return True - except self._get_sdk().HRESULTException: - return False - - def get_curve_tone_mapping(self) -> int: - """ - Get curve tone mapping setting. 
- - Returns: - 0 = off, 1 = polynomial, 2 = logarithmic - - Raises: - RuntimeError: If camera is not initialized - """ - amcam = self._get_sdk() - if not self._hcam: - raise RuntimeError("Camera not initialized") - - try: - return self._hcam.get_Option(amcam.AMCAM_OPTION_CURVE) - except self._get_sdk().HRESULTException as e: - raise RuntimeError(f"Failed to get curve tone mapping: {e}") from e - - def set_curve_tone_mapping(self, curve_type: int) -> bool: - """ - Set curve tone mapping. - - Args: - curve_type: 0 = off, 1 = polynomial, 2 = logarithmic - - Returns: - True if successful, False otherwise - """ - amcam = self._get_sdk() - if not self._hcam: - return False - - try: - self._hcam.put_Option(amcam.AMCAM_OPTION_CURVE, curve_type) - return True - except self._get_sdk().HRESULTException: - return False - - # ======================================================================== - # End of Image Processing Parameters - # ======================================================================== - - def get_frame_rate(self) -> Tuple[int, int, int]: - """Get frame rate info (frames_in_period, time_period_ms, total_frames)""" - amcam = self._get_sdk() - if not self._hcam: - return 0, 0, 0 - - try: - return self._hcam.get_FrameRate() - except self._get_sdk().HRESULTException: - return 0, 0, 0 - - @staticmethod - def _get_sdk_static(): - """Static method to get SDK (for use in classmethods)""" - global _amcam - if _amcam is None: - raise RuntimeError( - "Amscope SDK not loaded. Call AmscopeCamera.ensure_sdk_loaded() first." 
- ) - return _amcam - - def set_camera_info(self, info: CameraInfo): - """Set camera information (needed before opening)""" - self._camera_info = info - - def supports_still_capture(self) -> bool: - """Check if camera supports separate still image capture""" - if not self._camera_info or not self._camera_info.model: - return False - - return self._camera_info.model.still > 0 - - def get_still_resolutions(self) -> list[CameraResolution]: - """Get available still image resolutions""" - if not self._camera_info or not self._camera_info.model: - return [] - - resolutions = [] - for i in range(self._camera_info.model.still): - res = self._camera_info.model.res[i] - resolutions.append(CameraResolution(res.width, res.height)) - return resolutions - - def get_camera_metadata(self) -> Dict[str, Any]: - """ - Get current camera settings as metadata - - Returns: - Dictionary containing current camera settings including: - - Camera identification (name, model, id) - - Resolution settings - - Exposure settings (time, gain, auto-exposure state) - - White balance settings (if supported) - - Image processing parameters (hue, saturation, brightness, etc.) - - Frame rate information - - Note: - If camera is not initialized or a parameter cannot be read, - that parameter will be omitted from the metadata dictionary. 
- """ - metadata = {} - - # Camera identification - if self._camera_info: - metadata["camera_name"] = self._camera_info.displayname - metadata["camera_id"] = self._camera_info.id - if self._camera_info.model: - metadata["model_name"] = getattr(self._camera_info.model, 'name', 'Unknown') - - # Helper function to safely get values - def safe_get(getter_func, key, default=None): - try: - return getter_func() - except (RuntimeError, Exception): - return default - - # Resolution - res_index, width, height = self.get_current_resolution() - metadata["resolution_index"] = res_index - metadata["width"] = width - metadata["height"] = height - metadata["resolution"] = f"{width}x{height}" - - # Exposure settings - metadata["exposure_time_us"] = self.get_exposure_time() - metadata["gain_percent"] = self.get_gain() - metadata["auto_exposure_enabled"] = self.get_auto_exposure() - - target = safe_get(self.get_auto_exposure_target, "auto_exposure_target") - if target is not None: - metadata["auto_exposure_target"] = target - - # Exposure range info - exp_min, exp_max, exp_def = self.get_exposure_range() - metadata["exposure_range_us"] = { - "min": exp_min, - "max": exp_max, - "default": exp_def - } - - # Gain range info - gain_min, gain_max, gain_def = self.get_gain_range() - metadata["gain_range_percent"] = { - "min": gain_min, - "max": gain_max, - "default": gain_def - } - - # White balance (if supported) - if self.supports_white_balance(): - temp, tint = self.get_white_balance() - metadata["white_balance_temperature"] = temp - metadata["white_balance_tint"] = tint - - (temp_min, temp_max), (tint_min, tint_max) = self.get_white_balance_range() - metadata["white_balance_range"] = { - "temperature": {"min": temp_min, "max": temp_max}, - "tint": {"min": tint_min, "max": tint_max} - } - - # RGB gain mode (may not work in Temp/Tint mode) - rgb_gain = safe_get(self.get_white_balance_gain, "white_balance_rgb_gain") - if rgb_gain is not None: - r_gain, g_gain, b_gain = rgb_gain - 
metadata["white_balance_rgb_gain"] = { - "red": r_gain, - "green": g_gain, - "blue": b_gain - } - else: - metadata["monochrome"] = True - - # Image processing parameters - hue = safe_get(self.get_hue, "hue") - if hue is not None: - metadata["hue"] = hue - - saturation = safe_get(self.get_saturation, "saturation") - if saturation is not None: - metadata["saturation"] = saturation - - brightness = safe_get(self.get_brightness, "brightness") - if brightness is not None: - metadata["brightness"] = brightness - - contrast = safe_get(self.get_contrast, "contrast") - if contrast is not None: - metadata["contrast"] = contrast - - gamma = safe_get(self.get_gamma, "gamma") - if gamma is not None: - metadata["gamma"] = gamma - - # Level range - level_range = safe_get(self.get_level_range, "level_range") - if level_range is not None: - low, high = level_range - metadata["level_range_low"] = { - "red": low[0], - "green": low[1], - "blue": low[2], - "alpha": low[3] - } - metadata["level_range_high"] = { - "red": high[0], - "green": high[1], - "blue": high[2], - "alpha": high[3] - } - - # Sharpening - sharpening = safe_get(self.get_sharpening, "sharpening") - if sharpening is not None: - strength, radius, threshold = sharpening - metadata["sharpening"] = { - "strength": strength, - "radius": radius, - "threshold": threshold - } - - # Tone mapping - linear = safe_get(self.get_linear_tone_mapping, "linear_tone_mapping") - if linear is not None: - metadata["linear_tone_mapping"] = linear - - curve = safe_get(self.get_curve_tone_mapping, "curve_tone_mapping") - if curve is not None: - curve_names = {0: "off", 1: "polynomial", 2: "logarithmic"} - metadata["curve_tone_mapping"] = curve_names.get(curve, "unknown") - metadata["curve_tone_mapping_value"] = curve - - # Frame rate - frames, period_ms, total = self.get_frame_rate() - if period_ms > 0: - metadata["frame_rate_fps"] = round(frames * 1000 / period_ms, 2) - metadata["frame_rate_info"] = { - "frames_in_period": frames, - 
"period_ms": period_ms, - "total_frames": total - } - - # SDK version if available - amcam = self._get_sdk() - try: - metadata["sdk_version"] = amcam.Amcam.Version() - except Exception: - metadata["sdk_version"] = "unknown" - - return metadata - - def capture_and_save_still( - self, - filepath: Path, - resolution_index: int = 0, - additional_metadata: Optional[Dict[str, Any]] = None, - timeout_ms: int = 5000 - ) -> bool: - """ - Capture a still image and save it to disk with metadata. - - This method handles the complete workflow: - 1. Triggers still image capture at specified resolution - 2. Waits for image to be ready (with timeout) - 3. Pulls image data and converts to numpy array - 4. Saves with full metadata - - Args: - filepath: Path where image should be saved - resolution_index: Resolution index for still capture (0 = highest) - additional_metadata: Optional dictionary of additional metadata to save - timeout_ms: Timeout in milliseconds to wait for capture (default 5000) - - Returns: - True if successful, False otherwise - """ - logger = get_logger() - - if not self._hcam: - logger.error("Camera not open") - return False - - if not self.supports_still_capture(): - logger.error("Camera does not support still capture") - logger.info(f"Camera model: {self._camera_info.model.name if self._camera_info else 'Unknown'}") - logger.info(f"Still resolution count: {self._camera_info.model.still if self._camera_info else 0}") - return False - - try: - # Get resolution for this still index - still_resolutions = self.get_still_resolutions() - if resolution_index >= len(still_resolutions): - logger.error(f"Invalid resolution index: {resolution_index}") - return False - - res = still_resolutions[resolution_index] - width, height = res.width, res.height - logger.debug(f"Still capture target resolution: {width}x{height}") - - # Use Python bytes instead of ctypes buffer - amcam = self._get_sdk() - buffer_size = amcam.TDIBWIDTHBYTES(width * 24) * height - pData = 
bytes(buffer_size) - - # Event to signal still image is ready - still_ready = threading.Event() - capture_success = {'success': False, 'width': 0, 'height': 0} - - # Store original callback - original_callback = self._callback - original_context = self._callback_context - - logger.debug(f"Original callback: {original_callback is not None}, context: {original_context is not None}") - - def still_callback(event, ctx): - logger.debug(f"Still callback received event: {event}, STILLIMAGE={self.EVENT_STILLIMAGE}, IMAGE={self.EVENT_IMAGE}") - if event == self.EVENT_STILLIMAGE: - # Pull the still image using PullImageV3 - info = amcam.AmcamFrameInfoV3() - try: - logger.debug("Attempting to pull still image...") - self._hcam.PullImageV3(pData, 1, 24, 0, info) - capture_success['success'] = True - capture_success['width'] = info.width - capture_success['height'] = info.height - logger.debug(f"Still image pulled successfully: {info.width}x{info.height}") - except Exception as e: - logger.error(f"Failed to pull still image: {e}") - capture_success['success'] = False - still_ready.set() - - # Also call original callback if it exists - if original_callback: - original_callback(event, original_context) - - # Temporarily replace callback - self._callback = still_callback - self._callback_context = None - - logger.debug("Triggering still capture...") - # Trigger still capture - if not self.snap_image(resolution_index): - logger.error("Failed to trigger still capture") - self._callback = original_callback - self._callback_context = original_context - return False - - logger.debug(f"Waiting for still image (timeout: {timeout_ms}ms)...") - # Wait for still image with timeout - if not still_ready.wait(timeout_ms / 1000.0): - logger.error(f"Still capture timed out after {timeout_ms}ms") - logger.error("STILLIMAGE event never received") - self._callback = original_callback - self._callback_context = original_context - return False - - logger.debug("Still image event received!") - - # 
Restore original callback - self._callback = original_callback - self._callback_context = original_context - - if not capture_success['success']: - logger.error("Failed to pull still image") - return False - - # Convert to numpy array - creates a copy - w = capture_success['width'] - h = capture_success['height'] - stride = amcam.TDIBWIDTHBYTES(w * 24) - image_data = np.frombuffer(pData, dtype=np.uint8).reshape((h, stride))[:, :w*3].reshape((h, w, 3)).copy() - - # Convert BGR to RGB - image_data = image_data[:, :, ::-1].copy() - - # Delete pData immediately - del pData - - # Save with metadata - success = self.save_image(image_data, filepath, additional_metadata) - - # Explicitly delete and force GC - del image_data - gc.collect() - - if success: - logger.info(f"Still image captured and saved: {filepath}") - else: - logger.error(f"Failed to save still image: {filepath}") - - return success - - except Exception as e: - logger.exception(f"Failed to capture and save still image: {filepath}") - return False - - def capture_and_save_stream( - self, - filepath: Path, - additional_metadata: Optional[Dict[str, Any]] = None - ) -> bool: - """ - Capture current frame from live stream and save it to disk with metadata. - - Uses the manufacturer's approach: directly saves the most recent frame - from the continuously-updated buffer (no waiting or pausing needed). 
- - Args: - filepath: Path where image should be saved - additional_metadata: Optional dictionary of additional metadata to save - - Returns: - True if successful, False otherwise - - Note: - Camera must be in capture mode (start_capture() must have been called) - """ - logger = get_logger() - - if not self._hcam: - logger.error("Camera not open") - return False - - if not self._is_open: - logger.error("Camera not in capture mode") - return False - - # Check if we have a frame buffer (set during start_capture) - if not hasattr(self, '_frame_buffer') or self._frame_buffer is None: - logger.error("No frame buffer available - camera may not be streaming") - return False - - try: - # Get current resolution - res_index, width, height = self.get_current_resolution() - - # Simply copy from the current frame buffer (updated continuously by callback) - # This is the manufacturer's approach - no waiting or pausing needed! + # Copy from frame buffer amcam = self._get_sdk() stride = amcam.TDIBWIDTHBYTES(width * 24) - # Create numpy array from the persistent buffer + # Create numpy array from buffer image_data = np.frombuffer(self._frame_buffer, dtype=np.uint8).reshape((height, stride))[:, :width*3].reshape((height, width, 3)).copy() # Convert BGR to RGB @@ -1358,66 +536,39 @@ def capture_and_save_stream( # Save with metadata success = self.save_image(image_data, filepath, additional_metadata) - # Explicitly delete image_data and force GC del image_data gc.collect() if success: - logger.info(f"Stream frame captured and saved: {filepath}") + info(f"Stream frame captured and saved: {filepath}") else: - logger.error(f"Failed to save stream frame: {filepath}") + error(f"Failed to save stream frame: {filepath}") return success except Exception as e: - logger.exception(f"Failed to capture and save stream frame: {filepath}") - return False - - except Exception as e: - logger.exception(f"Failed to capture and save stream frame: {filepath}") + exception(f"Failed to capture and save 
stream frame: {filepath}") return False + # ------------------------- + # Utility Methods + # ------------------------- + @staticmethod def calculate_buffer_size(width: int, height: int, bits_per_pixel: int = 24) -> int: - """ - Calculate required buffer size for image data - - Args: - width: Image width in pixels - height: Image height in pixels - bits_per_pixel: Bits per pixel (typically 24 for RGB) - - Returns: - Buffer size in bytes - """ + """Calculate required buffer size for image data""" amcam = AmscopeCamera._get_sdk_static() return amcam.TDIBWIDTHBYTES(width * bits_per_pixel) * height @staticmethod def calculate_stride(width: int, bits_per_pixel: int = 24) -> int: - """ - Calculate image stride (bytes per row) - - Args: - width: Image width in pixels - bits_per_pixel: Bits per pixel (typically 24 for RGB) - - Returns: - Stride in bytes - """ + """Calculate image stride (bytes per row)""" amcam = AmscopeCamera._get_sdk_static() return amcam.TDIBWIDTHBYTES(width * bits_per_pixel) @classmethod - def enable_gige(cls, callback: Optional[Callable] = None, context: Any = None): - """ - Enable GigE camera support - - Args: - callback: Optional callback for GigE events - context: Optional context for callback - """ - # Ensure SDK is loaded + def enable_gige(cls, callback: Callable | None = None, context: Any = None): + """Enable GigE camera support""" if not cls._sdk_loaded: cls.ensure_sdk_loaded() @@ -1425,21 +576,14 @@ def enable_gige(cls, callback: Optional[Callable] = None, context: Any = None): amcam.Amcam.GigeEnable(callback, context) def _event_callback_wrapper(self, event: int, context: Any): - """ - Internal wrapper for camera events. - Translates amcam events to the callback registered with start_capture. - Also updates the persistent frame buffer on IMAGE events (manufacturer's approach). 
- """ - # Update persistent frame buffer on IMAGE events - # This is how the manufacturer's example works - continuous buffer updates + """Internal wrapper for camera events.""" + # Update frame buffer on IMAGE events if event == self.EVENT_IMAGE and hasattr(self, '_frame_buffer') and self._frame_buffer is not None: try: - # Pull the latest frame into our persistent buffer self._hcam.PullImageV4(self._frame_buffer, 0, 24, 0, None) except: - pass # Silently ignore pull errors in callback + pass - # IMPORTANT: Always call the registered callback if it exists - # Don't check _callback_context because it might be None during still capture + # Call registered callback if self._callback: self._callback(event, self._callback_context) \ No newline at end of file diff --git a/camera/cameras/base_camera.py b/camera/cameras/base_camera.py index 29a9db9..5251dd0 100644 --- a/camera/cameras/base_camera.py +++ b/camera/cameras/base_camera.py @@ -4,16 +4,19 @@ """ from abc import ABC, abstractmethod -from typing import Optional, Tuple, Callable, Any, Dict -from dataclasses import dataclass +from typing import Callable, Any, TYPE_CHECKING +from dataclasses import dataclass, asdict from pathlib import Path from datetime import datetime import numpy as np from PIL import Image, ExifTags from PIL.Image import Exif -from PIL.TiffImagePlugin import ImageFileDirectory_v2 +from PIL import PngImagePlugin import json +from logger import info, debug, error, exception +from camera.settings.camera_settings import CameraSettings, CameraSettingsManager + @dataclass class CameraResolution: @@ -37,20 +40,27 @@ class BaseCamera(ABC): """ Abstract base class for camera operations. Defines the interface that all camera implementations must follow. - - Each camera implementation should handle its own SDK loading in the - ensure_sdk_loaded() method. This is typically called once before any - camera operations. 
""" # Class-level flag to track if SDK has been loaded _sdk_loaded = False - def __init__(self): + def __init__(self, model: str): + """ + Initialize camera base class. + + Args: + model: Camera model identifier (e.g., "MU500", "MU3000") + """ + self.model = model self._is_open = False self._callback = None self._callback_context = None + # Settings management (initialized after camera is opened) + self._settings_manager: CameraSettingsManager | None = None + self._settings: CameraSettings | None = None + @property def is_open(self) -> bool: """Check if camera is currently open""" @@ -58,7 +68,7 @@ def is_open(self) -> bool: @classmethod @abstractmethod - def ensure_sdk_loaded(cls, sdk_path: Optional[Path] = None) -> bool: + def ensure_sdk_loaded(cls, sdk_path: Path | None = None) -> bool: """ Ensure the camera SDK is loaded and ready to use. @@ -91,163 +101,202 @@ def is_sdk_loaded(cls) -> bool: """ return cls._sdk_loaded - @abstractmethod - def open(self, camera_id: str) -> bool: + def initialize_settings(self) -> None: """ - Open camera connection + Initialize the settings system for this camera. - Args: - camera_id: Identifier for the camera to open - - Returns: - True if successful, False otherwise - """ - pass - - @abstractmethod - def close(self): - """Close camera connection and cleanup resources""" - pass - - @abstractmethod - def start_capture(self, callback: Callable, context: Any) -> bool: - """ - Start capturing frames + This should be called after the camera is opened. + It creates a settings manager specific to this camera model, + loads the saved settings (or defaults if none exist), and + applies them to the camera hardware. 
- Args: - callback: Function to call when events occur - context: Context object to pass to callback - - Returns: - True if successful, False otherwise - """ - pass - - @abstractmethod - def stop_capture(self): - """Stop capturing frames""" - pass - - @abstractmethod - def pull_image(self, buffer: bytes, bits_per_pixel: int = 24, timeout_ms: int = 1000) -> bool: - """ - Pull the latest image into provided buffer + Note: + The settings manager expects a CameraSettings subclass specific + to this camera model. The subclass must implement all abstract + methods from CameraSettings and provide metadata via get_metadata(). - Args: - buffer: Pre-allocated buffer to receive image data - bits_per_pixel: Bits per pixel (typically 24 for RGB) - - Returns: - True if successful, False otherwise - """ - pass - - @abstractmethod - def snap_image(self, resolution_index: int = 0) -> bool: + Example: + >>> camera = MU500Camera() + >>> camera.open("camera_id") + >>> camera.initialize_settings() + >>> # Now camera.settings is available """ - Capture a still image at specified resolution - Args: - resolution_index: Index of resolution to use - - Returns: - True if successful, False otherwise - """ - pass + info(f"Initializing settings for {self.model}") + + # Create model-specific settings manager + self._settings_manager = CameraSettingsManager(model=self.model) + + # Load saved settings or create defaults + self._settings = self._settings_manager.load() + + # First refresh from camera to sync with current hardware state + self._settings.refresh_from_camera(self) + + # Then apply settings to camera hardware + self._settings.apply_to_camera(self) + + info("Settings initialized and applied to camera") - @abstractmethod - def get_resolutions(self) -> list[CameraResolution]: + @property + def settings(self) -> CameraSettings: """ - Get available camera resolutions + Get the current settings object. + + The GUI can use this to read and modify settings. 
Returns: - List of available resolutions + CameraSettings object for this camera + + Raises: + RuntimeError: If settings haven't been initialized yet + + Example: + >>> # GUI code + >>> settings = camera.settings + >>> settings.set_exposure(150) + >>> settings.set_contrast(10) + >>> # Changes are immediately applied to camera hardware """ - pass + if self._settings is None: + raise RuntimeError( + "Settings not initialized. Call initialize_settings() first." + ) + return self._settings - @abstractmethod - def get_current_resolution(self) -> Tuple[int, int, int]: + def save_settings(self) -> None: """ - Get current resolution + Save current settings to config file. - Returns: - Tuple of (resolution_index, width, height) + This creates a backup of the previous settings before saving. + Call this when the user clicks "Save" or "Apply" in the GUI. + + Example: + >>> # User adjusted settings via GUI + >>> camera.settings.set_exposure(150) + >>> camera.settings.set_contrast(10) + >>> # User clicks "Save" + >>> camera.save_settings() """ - pass + if self._settings is None or self._settings_manager is None: + raise RuntimeError("Settings not initialized") + + info(f"Saving settings for {self.model}") + self._settings_manager.save(self._settings) + info("Settings saved successfully") - @abstractmethod - def set_resolution(self, resolution_index: int) -> bool: + def load_settings(self, filepath: Path | str | None = None) -> None: """ - Set camera resolution + Load settings from file and apply to camera. Args: - resolution_index: Index of resolution to use + filepath: Optional path to load from. If None, loads from default location. 
- Returns: - True if successful, False otherwise + Example: + >>> # Load from default location + >>> camera.load_settings() + >>> + >>> # Load from specific file + >>> camera.load_settings("./saved_configs/night_mode.yaml") """ - pass - - @abstractmethod - def get_exposure_range(self) -> Tuple[int, int, int]: - """ - Get exposure time range + if self._settings_manager is None: + raise RuntimeError("Settings not initialized") - Returns: - Tuple of (min, max, default) values - """ - pass - - @abstractmethod - def get_exposure_time(self) -> int: - """ - Get current exposure time + info(f"Loading settings for {self.model}") - Returns: - Current exposure time in microseconds - """ - pass + if filepath is None: + # Load from default location + self._settings = self._settings_manager.load() + else: + # Load from specific file + self._settings = self._settings_manager.load_from_file(filepath) + + # Refresh to ensure we have camera reference + self._settings.refresh_from_camera(self) + + # Apply to camera hardware + self._settings.apply_to_camera(self) + + info("Settings loaded and applied to camera") - @abstractmethod - def set_exposure_time(self, time_us: int) -> bool: + def reset_settings(self) -> None: """ - Set exposure time + Reset settings to last saved state and apply to camera. - Args: - time_us: Exposure time in microseconds - - Returns: - True if successful, False otherwise + Call this when the user clicks "Cancel" or "Reset" in the GUI. 
+ + Example: + >>> # User made changes but wants to discard them + >>> camera.reset_settings() """ - pass + if self._settings_manager is None: + raise RuntimeError("Settings not initialized") + + info(f"Resetting settings for {self.model}") + + # Reload from disk + self._settings = self._settings_manager.load() + + # Refresh to ensure camera reference + self._settings.refresh_from_camera(self) + + # Re-apply to camera + self._settings.apply_to_camera(self) + + info("Settings reset to saved state") - @abstractmethod - def get_gain_range(self) -> Tuple[int, int, int]: + def reset_to_defaults(self) -> None: """ - Get gain range + Reset settings to factory defaults and apply to camera. - Returns: - Tuple of (min, max, default) values in percent + This also saves the defaults as the current settings. + + Example: + >>> # User wants factory defaults + >>> camera.reset_to_defaults() """ - pass + if self._settings_manager is None: + raise RuntimeError("Settings not initialized") + + info(f"Resetting to factory defaults for {self.model}") + + # Restore defaults (this also saves them) + self._settings = self._settings_manager.restore_defaults() + + # Refresh to ensure camera reference + self._settings.refresh_from_camera(self) + + # Apply to camera + self._settings.apply_to_camera(self) + + info("Factory defaults restored and applied") - @abstractmethod - def get_gain(self) -> int: + def refresh_settings_from_camera(self) -> None: """ - Get current gain + Read current camera state and update settings object. - Returns: - Current gain in percent + Useful if the camera was adjusted outside of the settings system + (e.g., via hardware buttons or external software). 
+ + Example: + >>> # Camera was adjusted externally + >>> camera.refresh_settings_from_camera() + >>> # Now settings object matches camera hardware """ - pass + if self._settings is None: + raise RuntimeError("Settings not initialized") + + info("Refreshing settings from camera hardware") + self._settings.refresh_from_camera(self) + info("Settings refreshed") @abstractmethod - def set_gain(self, gain_percent: int) -> bool: + def open(self, camera_id: str) -> bool: """ - Set gain + Open camera connection Args: - gain_percent: Gain in percent + camera_id: Identifier for the camera to open Returns: True if successful, False otherwise @@ -255,22 +304,18 @@ def set_gain(self, gain_percent: int) -> bool: pass @abstractmethod - def get_auto_exposure(self) -> bool: - """ - Get auto exposure state - - Returns: - True if auto exposure is enabled, False otherwise - """ + def close(self): + """Close camera connection and cleanup resources""" pass @abstractmethod - def set_auto_exposure(self, enabled: bool) -> bool: + def start_capture(self, callback: Callable, context: Any) -> bool: """ - Set auto exposure state + Start capturing frames Args: - enabled: True to enable, False to disable + callback: Function to call when events occur + context: Context object to pass to callback Returns: True if successful, False otherwise @@ -278,43 +323,19 @@ def set_auto_exposure(self, enabled: bool) -> bool: pass @abstractmethod - def supports_white_balance(self) -> bool: - """ - Check if camera supports white balance - - Returns: - True if white balance is supported, False otherwise - """ - pass - - @abstractmethod - def get_white_balance_range(self) -> Tuple[Tuple[int, int], Tuple[int, int]]: - """ - Get white balance range - - Returns: - Tuple of ((temp_min, temp_max), (tint_min, tint_max)) - """ - pass - - @abstractmethod - def get_white_balance(self) -> Tuple[int, int]: - """ - Get current white balance - - Returns: - Tuple of (temperature, tint) - """ + def stop_capture(self): + 
"""Stop capturing frames""" pass @abstractmethod - def set_white_balance(self, temperature: int, tint: int) -> bool: + def pull_image(self, buffer: bytes, bits_per_pixel: int = 24, timeout_ms: int = 1000) -> bool: """ - Set white balance + Pull the latest image into provided buffer Args: - temperature: Color temperature value - tint: Tint value + buffer: Pre-allocated buffer to receive image data + bits_per_pixel: Bits per pixel (typically 24 for RGB) + timeout_ms: Timeout in milliseconds Returns: True if successful, False otherwise @@ -322,34 +343,50 @@ def set_white_balance(self, temperature: int, tint: int) -> bool: pass @abstractmethod - def auto_white_balance(self) -> bool: + def snap_image(self, resolution_index: int = 0) -> bool: """ - Perform one-time auto white balance + Capture a still image at specified resolution + Args: + resolution_index: Index of resolution to use + Returns: True if successful, False otherwise """ pass @abstractmethod - def get_frame_rate(self) -> Tuple[int, int, int]: + def get_camera_metadata(self) -> dict[str, Any]: """ - Get current frame rate information + Get camera metadata for image saving. - Returns: - Tuple of (frames_in_period, time_period_ms, total_frames) - """ - pass - - @abstractmethod - def get_camera_metadata(self) -> Dict[str, Any]: - """ - Get current camera settings as metadata + This method retrieves current camera settings and information + to be embedded in saved images. Returns: - Dictionary of camera settings (exposure, gain, white balance, etc.) 
+ Dictionary containing camera metadata including: + - model: Camera model name + - All other camera settings from the settings object """ - pass + metadata = { + "model": self.model, + } + + # Get all dataclass fields as a dictionary + settings_dict = asdict(self._settings) + + # Remove internal fields and complex types that don't serialize well + settings_dict.pop("version", None) + + # Convert NamedTuples to dicts for better serialization + for key, value in settings_dict.items(): + if hasattr(value, "_asdict"): + settings_dict[key] = value._asdict() + + # Merge with metadata + metadata.update(settings_dict) + + return metadata @abstractmethod def supports_still_capture(self) -> bool: @@ -361,44 +398,26 @@ def supports_still_capture(self) -> bool: """ pass - @abstractmethod - def get_still_resolutions(self) -> list[CameraResolution]: - """ - Get available still image resolutions - - Returns: - List of available still resolutions - """ - pass - @abstractmethod def capture_and_save_still( self, filepath: Path, resolution_index: int = 0, - additional_metadata: Optional[Dict[str, Any]] = None, + additional_metadata: dict[str, Any] | None = None, timeout_ms: int = 5000 ) -> bool: """ - Capture a still image and save it to disk with metadata. - - This is a convenience method that handles the complete workflow: - 1. Triggers still image capture - 2. Waits for image to be ready - 3. Pulls image data - 4. Saves with metadata + Capture a still image and save it with metadata. 
Args: filepath: Path where image should be saved - resolution_index: Resolution index for still capture (0 = highest) - additional_metadata: Optional dictionary of additional metadata to save - timeout_ms: Timeout in milliseconds to wait for capture + resolution_index: Camera resolution to use (0 = highest) + additional_metadata: Optional dict of extra metadata to save + timeout_ms: Timeout for capture in milliseconds Returns: True if successful, False otherwise - Note: - Only works if supports_still_capture() returns True """ pass @@ -406,25 +425,18 @@ def capture_and_save_still( def capture_and_save_stream( self, filepath: Path, - additional_metadata: Optional[Dict[str, Any]] = None + additional_metadata: dict[str, Any] | None = None ) -> bool: """ - Capture current frame from live stream and save it to disk with metadata. - - This is a convenience method that handles the complete workflow: - 1. Pulls current frame from live stream - 2. Converts to numpy array - 3. Saves with metadata + Capture current stream frame and save it with metadata. Args: filepath: Path where image should be saved - additional_metadata: Optional dictionary of additional metadata to save + additional_metadata: Optional dict of extra metadata to save Returns: True if successful, False otherwise - Note: - Camera must be in capture mode (start_capture() must have been called) """ pass @@ -440,12 +452,10 @@ def save_image( self, image_data: np.ndarray, filepath: Path, - additional_metadata: Optional[Dict[str, Any]] = None + additional_metadata: dict[str, Any] | None = None ) -> bool: """ - Save image to disk with camera and optional additional metadata. - - Supports formats: TIFF, TIF, JPG, JPEG, PNG + Save image data with embedded metadata. 
Args: image_data: Image as numpy array (height, width, channels) or (height, width) @@ -456,15 +466,12 @@ def save_image( True if successful, False otherwise Note: - - TIFF/TIF: Metadata saved in TIFF tags and as JSON in ImageDescription + - TIFF/TIF: Metadata saved in TIFF tags and as JSON in UserComment - JPG/JPEG: Metadata saved in EXIF UserComment as JSON - PNG: Metadata saved in PNG text chunks """ pil_image = None try: - from logger import get_logger - logger = get_logger() - # Ensure filepath is a Path object filepath = Path(filepath) @@ -496,7 +503,7 @@ def save_image( elif image_data.shape[2] == 4: pil_image = Image.fromarray(image_data, mode='RGBA') else: - logger.error(f"Unsupported image shape: {image_data.shape}") + error(f"Unsupported image shape: {image_data.shape}") return False # Get file extension @@ -504,25 +511,20 @@ def save_image( # Save with format-specific metadata if ext in ['.tif', '.tiff']: - self._save_tiff_with_metadata(pil_image, filepath, full_metadata, logger) + self._save_tiff_with_metadata(pil_image, filepath, full_metadata) elif ext in ['.jpg', '.jpeg']: - self._save_jpeg_with_metadata(pil_image, filepath, full_metadata, logger) + self._save_jpeg_with_metadata(pil_image, filepath, full_metadata) elif ext == '.png': - self._save_png_with_metadata(pil_image, filepath, full_metadata, logger) + self._save_png_with_metadata(pil_image, filepath, full_metadata) else: - logger.error(f"Unsupported file format: {ext}") + error(f"Unsupported file format: {ext}") return False - logger.info(f"Image saved successfully: {filepath}") + debug(f"Image saved successfully: {filepath}") return True except Exception as e: - try: - from logger import get_logger - logger = get_logger() - logger.exception(f"Failed to save image to {filepath}") - except: - print(f"Failed to save image to {filepath}: {e}") + exception(f"Failed to save image to {filepath}") return False finally: # Explicitly close and delete PIL image to free memory @@ -534,18 +536,18 @@ def 
_save_tiff_with_metadata( self, pil_image: Image.Image, filepath: Path, - metadata: Dict[str, Any], - logger + metadata: dict[str, Any] ): - """Save TIFF with metadata in EXIF tags and ImageDescription""" + """Save TIFF with metadata in EXIF tags and UserComment""" # Get tag mappings from Base enum base_tags = {tag.name: tag.value for tag in ExifTags.Base} # Create Exif object exif = Exif() - # Add software information - placeholder for version - exif[base_tags['Software']] = "Forge - v{VERSION_PLACEHOLDER}" + # Add software information + from app_context import get_app_context + exif[base_tags['Software']] = f"Forge - v{get_app_context().settings.version}" # Add timestamp timestamp = metadata.get("timestamp", datetime.now().isoformat()) @@ -554,29 +556,28 @@ def _save_tiff_with_metadata( # Add camera metadata if available camera_meta = metadata.get("camera", {}) - # Camera Make and Model + # Camera Model if "model" in camera_meta: exif[base_tags['Model']] = str(camera_meta["model"]) # Get the EXIF IFD to add camera-specific tags exif_ifd = exif.get_ifd(ExifTags.IFD.Exif) - # Exposure time (tag ExposureTime) + # Exposure time if "exposure_time_us" in camera_meta: exposure_sec = camera_meta["exposure_time_us"] / 1_000_000 - # Store as rational (numerator, denominator) exif_ifd[base_tags['ExposureTime']] = (int(exposure_sec * 1_000_000), 1_000_000) - # ISO Speed (tag ISOSpeedRatings) + # ISO Speed (using gain as proxy) if "gain_percent" in camera_meta: iso_value = camera_meta["gain_percent"] exif_ifd[base_tags['ISOSpeedRatings']] = iso_value - # Add timestamp to EXIF IFD as well + # Add timestamp to EXIF IFD exif_ifd[base_tags['DateTimeOriginal']] = datetime.fromisoformat(timestamp).strftime("%Y:%m:%d %H:%M:%S") exif_ifd[base_tags['DateTimeDigitized']] = datetime.fromisoformat(timestamp).strftime("%Y:%m:%d %H:%M:%S") - # Image description from user-provided metadata only + # Image description from user metadata additional_meta = metadata.get("additional", {}) 
description_parts = [] @@ -585,43 +586,41 @@ def _save_tiff_with_metadata( if "sample_id" in additional_meta: description_parts.append(f"Sample: {additional_meta['sample_id']}") - # Only set ImageDescription if user provided a description if description_parts: exif[base_tags['ImageDescription']] = " | ".join(description_parts) - # Store complete metadata as JSON in UserComment instead + # Store complete metadata as JSON in UserComment metadata_json = json.dumps(metadata, indent=2) exif_ifd[base_tags['UserComment']] = metadata_json.encode('utf-16') # Save with EXIF pil_image.save(filepath, format='TIFF', exif=exif, compression='tiff_deflate') - logger.debug(f"TIFF with EXIF metadata saved to {filepath}") + debug(f"TIFF with EXIF metadata saved to {filepath}") def _save_jpeg_with_metadata( self, pil_image: Image.Image, filepath: Path, - metadata: Dict[str, Any], - logger + metadata: dict[str, Any] ): - """Save JPEG with metadata in proper EXIF tags""" + """Save JPEG with metadata in EXIF tags""" # Get tag mappings from Base enum base_tags = {tag.name: tag.value for tag in ExifTags.Base} # Create Exif object exif = Exif() - # Add software information - placeholder for version - exif[base_tags['Software']] = "Forge - v{VERSION_PLACEHOLDER}" + # Add software information + from app_context import get_app_context + exif[base_tags['Software']] = f"Forge - v{get_app_context().settings.version}" # Add timestamp timestamp = metadata.get("timestamp", datetime.now().isoformat()) exif[base_tags['DateTime']] = datetime.fromisoformat(timestamp).strftime("%Y:%m:%d %H:%M:%S") - # Add camera metadata if available + # Add camera metadata camera_meta = metadata.get("camera", {}) - # Camera Make and Model if "model" in camera_meta: exif[base_tags['Model']] = str(camera_meta["model"]) @@ -632,18 +631,16 @@ def _save_jpeg_with_metadata( elif "sample_id" in additional_meta: exif[base_tags['ImageDescription']] = f"Sample: {additional_meta['sample_id']}" - # Get the EXIF IFD to add 
camera-specific tags + # Get the EXIF IFD exif_ifd = exif.get_ifd(ExifTags.IFD.Exif) # Exposure time if "exposure_time_us" in camera_meta: exposure_sec = camera_meta["exposure_time_us"] / 1_000_000 - # Store as rational (numerator, denominator) exif_ifd[base_tags['ExposureTime']] = (int(exposure_sec * 1_000_000), 1_000_000) # ISO Speed if "gain_percent" in camera_meta: - # Map gain percent to ISO-like value iso_value = camera_meta["gain_percent"] exif_ifd[base_tags['ISOSpeedRatings']] = iso_value @@ -651,36 +648,35 @@ def _save_jpeg_with_metadata( exif_ifd[base_tags['DateTimeOriginal']] = datetime.fromisoformat(timestamp).strftime("%Y:%m:%d %H:%M:%S") exif_ifd[base_tags['DateTimeDigitized']] = datetime.fromisoformat(timestamp).strftime("%Y:%m:%d %H:%M:%S") - # Store complete metadata as JSON in UserComment for full data preservation + # Store complete metadata as JSON in UserComment metadata_json = json.dumps(metadata, indent=2) exif_ifd[base_tags['UserComment']] = metadata_json.encode('utf-16') # Save with EXIF pil_image.save(filepath, format='JPEG', exif=exif, quality=95) - logger.debug(f"JPEG with EXIF metadata saved to {filepath}") + debug(f"JPEG with EXIF metadata saved to {filepath}") def _save_png_with_metadata( self, pil_image: Image.Image, filepath: Path, - metadata: Dict[str, Any], - logger + metadata: dict[str, Any] ): """Save PNG with metadata in text chunks""" - from PIL import PngImagePlugin # Create PNG info pnginfo = PngImagePlugin.PngInfo() - # Add metadata as text chunks - pnginfo.add_text("Software", "Forge - v{VERSION_PLACEHOLDER}") + # Add software info + from app_context import get_app_context + pnginfo.add_text("Software", f"Forge - v{get_app_context().settings.version}") pnginfo.add_text("Metadata", json.dumps(metadata, indent=2)) - # Add individual camera settings as separate chunks for easier access + # Add individual camera settings as separate chunks camera_meta = metadata.get("camera", {}) for key, value in camera_meta.items(): 
pnginfo.add_text(f"Camera.{key}", str(value)) # Save with metadata pil_image.save(filepath, format='PNG', pnginfo=pnginfo) - logger.debug(f"PNG metadata saved to {filepath}") + debug(f"PNG metadata saved to {filepath}") \ No newline at end of file diff --git a/camera/settings/amscope_settings.py b/camera/settings/amscope_settings.py new file mode 100644 index 0000000..eafbd55 --- /dev/null +++ b/camera/settings/amscope_settings.py @@ -0,0 +1,737 @@ +""" +AmScope camera settings implementation. + +Provides settings management for AmScope cameras with hardware-specific +controls like fan, TEC, low noise mode, and demosaic settings. +""" + +from __future__ import annotations + +from dataclasses import dataclass, field +from typing import TYPE_CHECKING + +from camera.settings.camera_settings import ( + CameraSettings, + SettingMetadata, + SettingType, + RGBALevel, + RGBGain, + FileFormat, +) +from logger import info, debug, error, exception + +if TYPE_CHECKING: + from base_camera import BaseCamera, CameraResolution + + +@dataclass +class AmscopeSettings(CameraSettings): + """ + Settings for AmScope cameras. 
+ + Extends base CameraSettings with AmScope-specific hardware controls: + - Fan control for cooling + - TEC (Thermoelectric Cooler) control and target temperature + - Low noise mode and high full-well capacity + - Test pattern for diagnostics + - Demosaic algorithm selection + """ + + # AmScope-specific hardware controls + fan_enabled: bool = field(default=False) + tec_enabled: bool = field(default=False) + tec_target: int = field(default=-10) # Target temperature in Celsius + low_noise_mode: bool = field(default=False) + high_fullwell: bool = field(default=False) + test_pattern: bool = field(default=False) + demosaic_algorithm: int = field(default=0) # 0=RGGB, 1=BGGR, 2=GRBG, 3=GBRG + + # Internal camera reference (not serialized to YAML) + _camera: BaseCamera | None = field(default=None, init=False, repr=False, compare=False) + + @classmethod + def get_metadata(cls) -> list[SettingMetadata]: + """ + Get metadata for all AmScope settings. + + This is the SINGLE SOURCE OF TRUTH for: + - GUI generation (widget types, labels, groups) + - Validation (min/max ranges) + - Organization (grouping related settings) + + Returns: + List of SettingMetadata for all settings + """ + return [ + # Exposure settings + SettingMetadata( + name="auto_expo", + display_name="Auto Exposure", + setting_type=SettingType.BOOL, + description="Enable automatic exposure control", + group="Exposure", + runtime_changeable=True, + ), + SettingMetadata( + name="exposure", + display_name="Exposure Target", + setting_type=SettingType.RANGE, + description="Target brightness for auto exposure (16-235)", + min_value=16, + max_value=235, + group="Exposure", + runtime_changeable=True, + ), + SettingMetadata( + name="exposure_time_us", + display_name="Exposure Time (µs)", + setting_type=SettingType.RANGE, + description="Manual exposure time in microseconds", + min_value=1, + max_value=1000000, + group="Exposure", + runtime_changeable=True, + ), + SettingMetadata( + name="gain_percent", + 
display_name="Gain (%)", + setting_type=SettingType.RANGE, + description="Sensor gain in percent (100-1600)", + min_value=100, + max_value=1600, + group="Exposure", + runtime_changeable=True, + ), + + # White balance settings + SettingMetadata( + name="temp", + display_name="Color Temperature", + setting_type=SettingType.RANGE, + description="White balance temperature in Kelvin (2000-15000)", + min_value=2000, + max_value=15000, + group="White Balance", + runtime_changeable=True, + ), + SettingMetadata( + name="tint", + display_name="Tint", + setting_type=SettingType.RANGE, + description="White balance tint adjustment (200-2500)", + min_value=200, + max_value=2500, + group="White Balance", + runtime_changeable=True, + ), + SettingMetadata( + name="wbgain", + display_name="RGB Gain", + setting_type=SettingType.RGB_GAIN, + description="Fine-tune RGB white balance gains (-127 to 127)", + group="White Balance", + runtime_changeable=True, + ), + + # Color and image quality + SettingMetadata( + name="hue", + display_name="Hue", + setting_type=SettingType.RANGE, + description="Color hue adjustment (-180 to 180)", + min_value=-180, + max_value=180, + group="Color", + runtime_changeable=True, + ), + SettingMetadata( + name="saturation", + display_name="Saturation", + setting_type=SettingType.RANGE, + description="Color saturation (0-255)", + min_value=0, + max_value=255, + group="Color", + runtime_changeable=True, + ), + SettingMetadata( + name="brightness", + display_name="Brightness", + setting_type=SettingType.RANGE, + description="Image brightness adjustment (-64 to 64)", + min_value=-64, + max_value=64, + group="Color", + runtime_changeable=True, + ), + SettingMetadata( + name="contrast", + display_name="Contrast", + setting_type=SettingType.RANGE, + description="Image contrast adjustment (-100 to 100)", + min_value=-100, + max_value=100, + group="Color", + runtime_changeable=True, + ), + SettingMetadata( + name="gamma", + display_name="Gamma", + 
setting_type=SettingType.RANGE, + description="Gamma correction (0-180)", + min_value=0, + max_value=180, + group="Color", + runtime_changeable=True, + ), + + # Level range + SettingMetadata( + name="levelrange_low", + display_name="Black Point", + setting_type=SettingType.RGBA_LEVEL, + description="Output level for darkest input values (0-255)", + group="Levels", + runtime_changeable=True, + ), + SettingMetadata( + name="levelrange_high", + display_name="White Point", + setting_type=SettingType.RGBA_LEVEL, + description="Output level for brightest input values (0-255)", + group="Levels", + runtime_changeable=True, + ), + + # Resolution + SettingMetadata( + name="resolution_index", + display_name="Resolution", + setting_type=SettingType.RANGE, + description="Camera resolution index", + min_value=0, + max_value=10, # Will be validated against actual camera resolutions + group="Capture", + runtime_changeable=False, # Requires restart + ), + + # File format + SettingMetadata( + name="fformat", + display_name="File Format", + setting_type=SettingType.DROPDOWN, + description="Default file format for saved images", + choices=["png", "tiff", "jpeg", "bmp"], + group="Capture", + runtime_changeable=True, + ), + + # AmScope-specific hardware controls + SettingMetadata( + name="fan_enabled", + display_name="Cooling Fan", + setting_type=SettingType.BOOL, + description="Enable camera cooling fan", + group="Hardware", + runtime_changeable=True, + ), + SettingMetadata( + name="tec_enabled", + display_name="TEC Cooler", + setting_type=SettingType.BOOL, + description="Enable thermoelectric cooler", + group="Hardware", + runtime_changeable=True, + ), + SettingMetadata( + name="tec_target", + display_name="TEC Target (°C)", + setting_type=SettingType.RANGE, + description="Target temperature for TEC in Celsius (-40 to 20)", + min_value=-40, + max_value=20, + group="Hardware", + runtime_changeable=True, + ), + SettingMetadata( + name="low_noise_mode", + display_name="Low Noise Mode", + 
setting_type=SettingType.BOOL, + description="Enable low noise mode (reduces read noise)", + group="Hardware", + runtime_changeable=True, + ), + SettingMetadata( + name="high_fullwell", + display_name="High Full-Well", + setting_type=SettingType.BOOL, + description="Enable high full-well capacity mode", + group="Hardware", + runtime_changeable=True, + ), + SettingMetadata( + name="test_pattern", + display_name="Test Pattern", + setting_type=SettingType.BOOL, + description="Enable test pattern for diagnostics", + group="Advanced", + runtime_changeable=True, + ), + SettingMetadata( + name="demosaic_algorithm", + display_name="Demosaic Algorithm", + setting_type=SettingType.RANGE, + description="Bayer demosaic algorithm (0=RGGB, 1=BGGR, 2=GRBG, 3=GBRG)", + min_value=0, + max_value=3, + group="Advanced", + runtime_changeable=True, + ), + ] + + def _validate_and_set(self, param_name: str, value: int | bool) -> None: + """ + Validate a parameter value against metadata ranges. + + Args: + param_name: Name of the parameter + value: Value to validate + + Raises: + ValueError: If value is outside valid range + """ + metadata_dict = {m.name: m for m in self.get_metadata()} + + if param_name not in metadata_dict: + raise ValueError(f"Unknown parameter: {param_name}") + + meta = metadata_dict[param_name] + + # Validate based on type + if meta.setting_type == SettingType.RANGE: + if not isinstance(value, (int, float)): + raise ValueError(f"{param_name} must be numeric") + if not (meta.min_value <= value <= meta.max_value): + raise ValueError( + f"{param_name} = {value} is outside valid range " + f"[{meta.min_value}, {meta.max_value}]" + ) + elif meta.setting_type == SettingType.BOOL: + if not isinstance(value, bool): + raise ValueError(f"{param_name} must be boolean") + + def _apply_to_sdk(self, param_name: str, value) -> None: + """ + Apply a setting to the camera SDK. 
+ + Args: + param_name: Parameter name + value: Value to apply + """ + if self._camera is None or not hasattr(self._camera, '_hcam'): + debug(f"Camera not available, skipping SDK update for {param_name}") + return + + hcam = self._camera._hcam + if hcam is None: + debug(f"Camera handle not available, skipping SDK update for {param_name}") + return + + try: + # Map parameter names to SDK calls + if param_name == "auto_expo": + hcam.put_AutoExpoEnable(1 if value else 0) + elif param_name == "exposure": + hcam.put_AutoExpoTarget(value) + elif param_name == "exposure_time_us": + hcam.put_ExpoTime(value) + elif param_name == "gain_percent": + hcam.put_ExpoAGain(value) + elif param_name == "temp" and hasattr(self, 'tint'): + hcam.put_TempTint(value, self.tint) + elif param_name == "tint" and hasattr(self, 'temp'): + hcam.put_TempTint(self.temp, value) + elif param_name == "hue": + hcam.put_Hue(value) + elif param_name == "saturation": + hcam.put_Saturation(value) + elif param_name == "brightness": + hcam.put_Brightness(value) + elif param_name == "contrast": + hcam.put_Contrast(value) + elif param_name == "gamma": + hcam.put_Gamma(value) + elif param_name == "wbgain": + hcam.put_WhiteBalanceGain([value.r, value.g, value.b]) + elif param_name in ["levelrange_low", "levelrange_high"]: + low = self.levelrange_low + high = self.levelrange_high + hcam.put_LevelRange([low.r, low.g, low.b, low.a], + [high.r, high.g, high.b, high.a]) + elif param_name == "fan_enabled": + hcam.put_Option(0x0a, 1 if value else 0) # OPTION_FAN + elif param_name == "tec_enabled": + hcam.put_Option(0x08, 1 if value else 0) # OPTION_TEC + elif param_name == "tec_target": + hcam.put_Option(0x0c, value) # OPTION_TECTARGET + elif param_name == "low_noise_mode": + hcam.put_Option(0x53, 1 if value else 0) # OPTION_LOW_NOISE + elif param_name == "high_fullwell": + hcam.put_Option(0x51, 1 if value else 0) # OPTION_HIGH_FULLWELL + elif param_name == "test_pattern": + hcam.put_Option(0x2c, 1 if value else 0) # 
OPTION_TESTPATTERN + elif param_name == "demosaic_algorithm": + hcam.put_Option(0x5a, value) # OPTION_DEMOSAIC + + debug(f"Applied {param_name} = {value} to camera SDK") + + except Exception as e: + error(f"Failed to apply {param_name} to camera: {e}") + + # Required abstract method implementations + + def set_auto_exposure(self, enabled: bool) -> None: + """Enable or disable automatic exposure.""" + self._validate_and_set("auto_expo", enabled) + self.auto_expo = enabled + self._apply_to_sdk("auto_expo", enabled) + + def set_exposure(self, value: int) -> None: + """Set auto exposure target value.""" + self._validate_and_set("exposure", value) + self.exposure = value + self._apply_to_sdk("exposure", value) + + def set_temp(self, value: int) -> None: + """Set white balance temperature.""" + self._validate_and_set("temp", value) + self.temp = value + self._apply_to_sdk("temp", value) + + def set_tint(self, value: int) -> None: + """Set white balance tint.""" + self._validate_and_set("tint", value) + self.tint = value + self._apply_to_sdk("tint", value) + + def set_white_balance_gain(self, gain: RGBGain) -> None: + """Set RGB white balance gains.""" + gain.validate() + self.wbgain = gain + self._apply_to_sdk("wbgain", gain) + + def set_hue(self, value: int) -> None: + """Set hue adjustment.""" + self._validate_and_set("hue", value) + self.hue = value + self._apply_to_sdk("hue", value) + + def set_saturation(self, value: int) -> None: + """Set saturation.""" + self._validate_and_set("saturation", value) + self.saturation = value + self._apply_to_sdk("saturation", value) + + def set_brightness(self, value: int) -> None: + """Set brightness.""" + self._validate_and_set("brightness", value) + self.brightness = value + self._apply_to_sdk("brightness", value) + + def set_contrast(self, value: int) -> None: + """Set contrast.""" + self._validate_and_set("contrast", value) + self.contrast = value + self._apply_to_sdk("contrast", value) + + def set_gamma(self, value: int) -> 
None: + """Set gamma correction.""" + self._validate_and_set("gamma", value) + self.gamma = value + self._apply_to_sdk("gamma", value) + + def set_level_range(self, low: RGBALevel, high: RGBALevel) -> None: + """Set level range mapping.""" + low.validate() + high.validate() + self.levelrange_low = low + self.levelrange_high = high + self._apply_to_sdk("levelrange_low", low) + + # Resolution methods + + def get_resolutions(self) -> list[CameraResolution]: + """Get available camera resolutions.""" + if self._camera is None or not hasattr(self._camera, '_hcam'): + return [] + + from base_camera import CameraResolution + + try: + resolutions = [] + hcam = self._camera._hcam + + # AmScope cameras typically have multiple resolutions + count = hcam.ResolutionNumber + for i in range(count): + width, height = hcam.get_Resolution(i) + resolutions.append(CameraResolution(width=width, height=height)) + + return resolutions + except Exception as e: + error(f"Failed to get resolutions: {e}") + return [] + + def get_current_resolution(self) -> tuple[int, int, int]: + """Get current resolution as (index, width, height).""" + if self._camera is None or not hasattr(self._camera, '_hcam'): + return (0, 0, 0) + + try: + hcam = self._camera._hcam + index = hcam.get_eSize() + width, height = hcam.get_Size() + return (index, width, height) + except Exception as e: + error(f"Failed to get current resolution: {e}") + return (0, 0, 0) + + def set_resolution(self, resolution_index: int) -> bool: + """Set camera resolution.""" + if self._camera is None or not hasattr(self._camera, '_hcam'): + return False + + try: + hcam = self._camera._hcam + + # Validate index + if not (0 <= resolution_index < hcam.ResolutionNumber): + error(f"Invalid resolution index: {resolution_index}") + return False + + # Apply resolution + hcam.put_eSize(resolution_index) + self.resolution_index = resolution_index + info(f"Resolution set to index {resolution_index}") + return True + + except Exception as e: + 
error(f"Failed to set resolution: {e}") + return False + + def get_still_resolutions(self) -> list[CameraResolution]: + """Get available still image resolutions.""" + if self._camera is None or not hasattr(self._camera, '_hcam'): + return [] + + from base_camera import CameraResolution + + try: + resolutions = [] + hcam = self._camera._hcam + + count = hcam.StillResolutionNumber + for i in range(count): + width, height = hcam.get_StillResolution(i) + resolutions.append(CameraResolution(width=width, height=height)) + + return resolutions + except Exception as e: + error(f"Failed to get still resolutions: {e}") + return [] + + # Exposure time methods + + def get_exposure_time(self) -> int: + """Get current exposure time in microseconds.""" + if self._camera is None or not hasattr(self._camera, '_hcam'): + return self.exposure_time_us + + try: + return self._camera._hcam.get_ExpoTime() + except Exception as e: + error(f"Failed to get exposure time: {e}") + return self.exposure_time_us + + def set_exposure_time(self, time_us: int) -> bool: + """Set exposure time in microseconds.""" + self._validate_and_set("exposure_time_us", time_us) + self.exposure_time_us = time_us + self._apply_to_sdk("exposure_time_us", time_us) + return True + + # Gain methods + + def get_gain(self) -> int: + """Get current gain in percent.""" + if self._camera is None or not hasattr(self._camera, '_hcam'): + return self.gain_percent + + try: + return self._camera._hcam.get_ExpoAGain() + except Exception as e: + error(f"Failed to get gain: {e}") + return self.gain_percent + + def set_gain(self, gain_percent: int) -> bool: + """Set gain in percent.""" + self._validate_and_set("gain_percent", gain_percent) + self.gain_percent = gain_percent + self._apply_to_sdk("gain_percent", gain_percent) + return True + + # AmScope-specific hardware control methods + + def set_fan(self, enabled: bool) -> None: + """Enable or disable cooling fan.""" + self._validate_and_set("fan_enabled", enabled) + 
self.fan_enabled = enabled + self._apply_to_sdk("fan_enabled", enabled) + info(f"Cooling fan {'enabled' if enabled else 'disabled'}") + + def set_tec(self, enabled: bool) -> None: + """Enable or disable TEC cooler.""" + self._validate_and_set("tec_enabled", enabled) + self.tec_enabled = enabled + self._apply_to_sdk("tec_enabled", enabled) + info(f"TEC cooler {'enabled' if enabled else 'disabled'}") + + def set_tec_target(self, temperature: int) -> None: + """Set TEC target temperature in Celsius.""" + self._validate_and_set("tec_target", temperature) + self.tec_target = temperature + self._apply_to_sdk("tec_target", temperature) + info(f"TEC target temperature set to {temperature}°C") + + def set_low_noise_mode(self, enabled: bool) -> None: + """Enable or disable low noise mode.""" + self._validate_and_set("low_noise_mode", enabled) + self.low_noise_mode = enabled + self._apply_to_sdk("low_noise_mode", enabled) + info(f"Low noise mode {'enabled' if enabled else 'disabled'}") + + def set_high_fullwell(self, enabled: bool) -> None: + """Enable or disable high full-well capacity mode.""" + self._validate_and_set("high_fullwell", enabled) + self.high_fullwell = enabled + self._apply_to_sdk("high_fullwell", enabled) + info(f"High full-well mode {'enabled' if enabled else 'disabled'}") + + def set_test_pattern(self, enabled: bool) -> None: + """Enable or disable test pattern.""" + self._validate_and_set("test_pattern", enabled) + self.test_pattern = enabled + self._apply_to_sdk("test_pattern", enabled) + info(f"Test pattern {'enabled' if enabled else 'disabled'}") + + def set_demosaic_algorithm(self, algorithm: int) -> None: + """Set demosaic algorithm (0=RGGB, 1=BGGR, 2=GRBG, 3=GBRG).""" + self._validate_and_set("demosaic_algorithm", algorithm) + self.demosaic_algorithm = algorithm + self._apply_to_sdk("demosaic_algorithm", algorithm) + info(f"Demosaic algorithm set to {algorithm}") + + # Apply and refresh methods + + def apply_to_camera(self, camera: BaseCamera) -> 
None: + """ + Apply all settings to camera hardware. + + Args: + camera: Camera instance to apply settings to + """ + self._camera = camera + info(f"Applying all settings to camera {camera.model}") + + try: + # Apply all settings in logical order + self.set_auto_exposure(self.auto_expo) + self.set_exposure(self.exposure) + self.set_exposure_time(self.exposure_time_us) + self.set_gain(self.gain_percent) + + self.set_temp(self.temp) + self.set_tint(self.tint) + self.set_white_balance_gain(self.wbgain) + + self.set_hue(self.hue) + self.set_saturation(self.saturation) + self.set_brightness(self.brightness) + self.set_contrast(self.contrast) + self.set_gamma(self.gamma) + + self.set_level_range(self.levelrange_low, self.levelrange_high) + + # AmScope-specific hardware controls + self.set_fan(self.fan_enabled) + self.set_tec(self.tec_enabled) + self.set_tec_target(self.tec_target) + self.set_low_noise_mode(self.low_noise_mode) + self.set_high_fullwell(self.high_fullwell) + self.set_test_pattern(self.test_pattern) + self.set_demosaic_algorithm(self.demosaic_algorithm) + + info("All settings applied successfully") + + except Exception as e: + exception(f"Failed to apply settings to camera") + + def refresh_from_camera(self, camera: BaseCamera) -> None: + """ + Read all current settings from camera hardware. 
+ + Args: + camera: Camera instance to read from + """ + self._camera = camera + info(f"Refreshing settings from camera {camera.model}") + + if not hasattr(camera, '_hcam') or camera._hcam is None: + error("Camera not available for refresh") + return + + hcam = camera._hcam + + try: + # Read exposure settings + self.auto_expo = bool(hcam.get_AutoExpoEnable()) + self.exposure = hcam.get_AutoExpoTarget() + self.exposure_time_us = hcam.get_ExpoTime() + self.gain_percent = hcam.get_ExpoAGain() + + # Read white balance + temp, tint = hcam.get_TempTint() + self.temp = temp + self.tint = tint + + wb_gains = hcam.get_WhiteBalanceGain() + self.wbgain = RGBGain(r=wb_gains[0], g=wb_gains[1], b=wb_gains[2]) + + # Read color adjustments + self.hue = hcam.get_Hue() + self.saturation = hcam.get_Saturation() + self.brightness = hcam.get_Brightness() + self.contrast = hcam.get_Contrast() + self.gamma = hcam.get_Gamma() + + # Read level range + low, high = hcam.get_LevelRange() + self.levelrange_low = RGBALevel(r=low[0], g=low[1], b=low[2], a=low[3]) + self.levelrange_high = RGBALevel(r=high[0], g=high[1], b=high[2], a=high[3]) + + # Read resolution + self.resolution_index = hcam.get_eSize() + + # Read AmScope-specific hardware settings + self.fan_enabled = bool(hcam.get_Option(0x0a)) # OPTION_FAN + self.tec_enabled = bool(hcam.get_Option(0x08)) # OPTION_TEC + self.tec_target = hcam.get_Option(0x0c) # OPTION_TECTARGET + self.low_noise_mode = bool(hcam.get_Option(0x53)) # OPTION_LOW_NOISE + self.high_fullwell = bool(hcam.get_Option(0x51)) # OPTION_HIGH_FULLWELL + self.test_pattern = bool(hcam.get_Option(0x2c)) # OPTION_TESTPATTERN + self.demosaic_algorithm = hcam.get_Option(0x5a) # OPTION_DEMOSAIC + + info("Successfully refreshed all settings from camera") + + except Exception as e: + exception(f"Failed to refresh settings from camera") \ No newline at end of file diff --git a/camera/settings/camera_settings.py b/camera/settings/camera_settings.py new file mode 100644 index 
0000000..0275572 --- /dev/null +++ b/camera/settings/camera_settings.py @@ -0,0 +1,741 @@ +from __future__ import annotations + +from abc import ABC, abstractmethod + +from dataclasses import dataclass, field +from enum import Enum +from typing import NamedTuple, Union, TYPE_CHECKING +from pathlib import Path + +from generic_config import ConfigManager +from logger import info, debug, exception + +if TYPE_CHECKING: + from camera.cameras.base_camera import BaseCamera, CameraResolution + + +# ------------------------- +# Enums for type safety +# ------------------------- + +class FileFormat(str, Enum): + """Supported image file formats.""" + PNG = 'png' + TIFF = 'tiff' + JPEG = 'jpeg' + BMP = 'bmp' + + +# ------------------------- +# Type-safe tuples +# ------------------------- +class RGBALevel(NamedTuple): + """RGBA level range values (0-255 each).""" + r: int + g: int + b: int + a: int + + def validate(self) -> None: + """Ensure all values are in valid range.""" + for name, value in [('r', self.r), ('g', self.g), ('b', self.b), ('a', self.a)]: + if not (0 <= value <= 255): + raise ValueError(f"RGBALevel.{name} must be in range [0, 255], got {value}") + + +class RGBGain(NamedTuple): + """RGB white balance gain values (-127 to 127 each).""" + r: int + g: int + b: int + + def validate(self) -> None: + """Ensure all values are in valid range.""" + for name, value in [('r', self.r), ('g', self.g), ('b', self.b)]: + if not (-127 <= value <= 127): + raise ValueError(f"RGBGain.{name} must be in range [-127, 127], got {value}") + + +# ------------------------- +# GUI Metadata System +# ------------------------- +class SettingType(str, Enum): + """Types of settings for GUI rendering.""" + BOOL = "bool" # Checkbox + RANGE = "range" # Slider with min/max + DROPDOWN = "dropdown" # Combo box with choices + RGBA_LEVEL = "rgba_level" # Custom RGBA widget + RGB_GAIN = "rgb_gain" # Custom RGB widget + + +@dataclass +class SettingMetadata: + """ + Metadata describing a setting for 
GUI generation. + + This allows the GUI to automatically create appropriate controls + for each setting without hardcoding knowledge of the settings. + """ + name: str # Parameter name (e.g., "exposure") + display_name: str # Human-readable name (e.g., "Exposure") + setting_type: SettingType # Type of control to render + description: str = "" # Tooltip/help text + + # For RANGE type + min_value: int | None = None + max_value: int | None = None + + # For DROPDOWN type + choices: list[str] | None = None + + # Grouping for organized GUI + group: str = "General" + + # Whether this setting can be changed while camera is running + runtime_changeable: bool = True + + +# ------------------------- +# Settings dataclass +# ------------------------- +@dataclass +class CameraSettings(ABC): + """ + Abstract base camera settings class with validation and hardware manipulation. + + This is an abstract base class that MUST be subclassed for each camera type. + Subclasses must implement all abstract methods and provide camera-specific configuration. + + Architecture: + - CameraSettings owns settings storage, validation, and hardware access + - For cameras with SDKs (like AmScope): subclass accesses camera._sdk directly + - For cameras without SDKs (like USB): subclass implements direct hardware access + - BaseCamera provides camera operations, SDK loading, and settings management + - CameraSettingsManager handles loading/saving settings from YAML files + + Responsibilities of CameraSettings: + 1. Storage of settings values (dataclass fields) + 2. Validation of settings (using metadata from get_metadata()) + 3. High-level API (set_* methods with validation) + 4. Low-level hardware access (directly to SDK/hardware via abstract methods) + 5. Applying settings to camera hardware + 6. Reading settings from camera hardware + 7. Providing metadata for GUI generation and validation (single source of truth) + + Requirements for Subclasses: + + 1. 
Implement get_metadata() class method (SINGLE SOURCE OF TRUTH): + - Return SettingMetadata list for GUI generation AND validation + - Include ranges, descriptions, groups, types for ALL settings + - This replaces the old get_ranges() method - no duplication needed + - Example: + @classmethod + def get_metadata(cls) -> list[SettingMetadata]: + return [ + SettingMetadata( + name="exposure", + display_name="Exposure Target", + setting_type=SettingType.RANGE, + description="Target brightness for auto exposure", + min_value=16, + max_value=220, + group="Exposure", + ), + SettingMetadata( + name="temp", + display_name="Color Temperature", + setting_type=SettingType.RANGE, + description="White balance temperature in Kelvin", + min_value=2000, + max_value=15000, + group="White Balance", + ), + # ... all other settings + ] + + 2. Implement all abstract setter methods (set_exposure, set_temp, etc.): + - Get validation ranges from get_metadata() + - Validate input against those ranges + - Update the corresponding dataclass field + - Access hardware directly to apply the setting + - Example: + def set_exposure(self, value: int) -> None: + # Get range from metadata + metadata = {m.name: m for m in self.get_metadata()} + meta = metadata['exposure'] + if not (meta.min_value <= value <= meta.max_value): + raise ValueError(f"exposure must be in [{meta.min_value}, {meta.max_value}]") + + self.exposure = value + if self._camera and hasattr(self._camera, '_sdk'): + self._camera._sdk.put_AutoExpoTarget(self._camera._device, value) + + 3. Implement refresh_from_camera(): + - Read all current settings from camera hardware + - Update all dataclass fields with hardware values + - Example: + def refresh_from_camera(self, camera: BaseCamera) -> None: + self._camera = camera + if hasattr(camera, '_device'): + self.auto_expo = bool(camera._sdk.get_AutoExpoEnable(camera._device)) + self.exposure = camera._sdk.get_AutoExpoTarget(camera._device) + # ... read all other settings + + 4. 
Implement resolution and exposure/gain methods: + - get_resolutions(), set_resolution(), get_current_resolution() + - get_exposure_time(), set_exposure_time() + - get_gain(), set_gain() + + Note on Default Values: + - Default values are loaded from YAML files by CameraSettingsManager + - Subclasses do NOT need a create_default() method + - The YAML file serves as the default configuration + + Example Complete Subclass: + class AmScopeSettings(CameraSettings): + @classmethod + def get_metadata(cls) -> list[SettingMetadata]: + return [ + SettingMetadata(name="exposure", display_name="Exposure", ...), + # ... all settings with ranges + ] + + def set_exposure(self, value: int) -> None: + # Validate using metadata, update field, access SDK + pass + + def refresh_from_camera(self, camera: BaseCamera) -> None: + # Read all settings from SDK + pass + + # ... implement all other abstract methods + """ + + # Version tracking + version: str + + # Image processing parameters (subclasses must provide defaults via factory methods) + auto_expo: bool + exposure: int # Auto Exposure Target + exposure_time_us: int # Manual exposure time in microseconds + gain_percent: int # Gain in percent + resolution_index: int # Selected resolution index + temp: int # White balance temperature + tint: int # White balance tint + contrast: int + hue: int + saturation: int + brightness: int + gamma: int + + # Complex parameters (subclasses must provide defaults via factory methods) + levelrange_low: RGBALevel + levelrange_high: RGBALevel + wbgain: RGBGain + + # Tone mapping and format (subclasses must provide defaults via factory methods) + fformat: FileFormat + + # Private - reference to camera (set by apply_to_camera) + _camera: BaseCamera | None = field(default=None, repr=False, compare=False) + + @classmethod + @abstractmethod + def get_metadata(cls) -> list[SettingMetadata]: + """ + Get metadata for all settings to enable GUI generation and validation. 
+ + This is the SINGLE SOURCE OF TRUTH for setting information including: + - Display names and descriptions + - Valid ranges (min/max values) + - Setting types (bool, range, dropdown, etc.) + - Grouping and organization + + Subclasses MUST override this method to provide metadata specific to their camera model. + + Returns: + List of SettingMetadata objects describing each setting + + Example implementation in AmScopeSettings: + @classmethod + def get_metadata(cls) -> list[SettingMetadata]: + return [ + SettingMetadata( + name="auto_expo", + display_name="Auto Exposure", + setting_type=SettingType.BOOL, + description="Enable automatic exposure control", + group="Exposure", + ), + SettingMetadata( + name="exposure", + display_name="Exposure Target", + setting_type=SettingType.RANGE, + description="Target brightness for auto exposure", + min_value=16, + max_value=220, + group="Exposure", + ), + SettingMetadata( + name="temp", + display_name="Color Temperature", + setting_type=SettingType.RANGE, + description="White balance temperature in Kelvin", + min_value=2000, + max_value=15000, + group="White Balance", + ), + # ... all other settings + ] + """ + pass + + def validate(self) -> None: + """ + Validate all settings are within acceptable ranges. + + Uses get_metadata() as the single source of truth for valid ranges. 
+ + Raises: + ValueError: If any parameter is outside its valid range + """ + metadata_list = self.get_metadata() + metadata_by_name = {m.name: m for m in metadata_list} + + # Validate simple numeric parameters + for name, meta in metadata_by_name.items(): + if meta.setting_type == SettingType.RANGE: + value = getattr(self, name, None) + if value is not None and meta.min_value is not None and meta.max_value is not None: + if not (meta.min_value <= value <= meta.max_value): + raise ValueError( + f"{name} = {value} is outside valid range [{meta.min_value}, {meta.max_value}]" + ) + + # Validate complex types + try: + self.levelrange_low.validate() + except ValueError as e: + raise ValueError(f"levelrange_low invalid: {e}") from e + + try: + self.levelrange_high.validate() + except ValueError as e: + raise ValueError(f"levelrange_high invalid: {e}") from e + + try: + self.wbgain.validate() + except ValueError as e: + raise ValueError(f"wbgain invalid: {e}") from e + + # Validate enum types + if not isinstance(self.fformat, FileFormat): + raise ValueError(f"fformat must be a FileFormat enum, got {type(self.fformat)}") + + def __post_init__(self) -> None: + """ + Post-initialization hook to ensure enums are converted from strings. + """ + # Convert string values to enums if needed + if isinstance(self.fformat, str): + self.fformat = FileFormat(self.fformat) + + # Convert tuples/lists to NamedTuples if needed + if isinstance(self.levelrange_low, (tuple, list)): + self.levelrange_low = RGBALevel(*self.levelrange_low) + if isinstance(self.levelrange_high, (tuple, list)): + self.levelrange_high = RGBALevel(*self.levelrange_high) + if isinstance(self.wbgain, (tuple, list)): + self.wbgain = RGBGain(*self.wbgain) + + # ------------------------- + # Camera Manipulation + # ------------------------- + + def apply_to_camera(self, camera: BaseCamera) -> None: + """ + Apply all settings to the camera hardware. + + This is the main entry point for pushing settings to the camera. 
+ It calls individual setter methods which handle the low-level + camera API calls. + + Args: + camera: The camera instance to apply settings to + + Example: + >>> settings = manager.load() + >>> settings.apply_to_camera(camera) + """ + self._camera = camera + info(f"Applying settings to camera {camera.model}") + + try: + # Apply each setting in logical order + self.set_auto_exposure(self.auto_expo) + self.set_exposure(self.exposure) + self.set_temperature(self.temp) + self.set_tint(self.tint) + self.set_wb_gain(self.wbgain) + self.set_contrast(self.contrast) + self.set_hue(self.hue) + self.set_saturation(self.saturation) + self.set_brightness(self.brightness) + self.set_gamma(self.gamma) + self.set_level_range(self.levelrange_low, self.levelrange_high) + + info("Successfully applied all settings to camera") + + except Exception as e: + exception(f"Failed to apply settings to camera: {e}") + raise + + # Abstract setter methods - subclasses MUST implement these + # Each method should: + # 1. Validate the input value + # 2. Update the corresponding field + # 3. Access the SDK/hardware directly to apply the change + # 4. Log the change with debug() + + @abstractmethod + def set_auto_exposure(self, enabled: bool) -> None: + """ + Enable/disable auto exposure. + + Subclasses must implement to access SDK/hardware directly. + + Example: + if self._camera and hasattr(self._camera, '_sdk'): + self._camera._sdk.put_AutoExpoEnable(self._camera._device, enabled) + """ + pass + + @abstractmethod + def set_exposure(self, value: int) -> None: + """ + Set exposure target. + + Subclasses must: + 1. Validate value against ranges from get_metadata() + 2. Update field: self.exposure = value + 3. Access hardware directly + """ + pass + + @abstractmethod + def set_temperature(self, value: int) -> None: + """ + Set white balance temperature. + + Subclasses must: + 1. Validate value against ranges from get_metadata() + 2. Update field: self.temp = value + 3. 
Access hardware directly + """ + pass + + @abstractmethod + def set_tint(self, value: int) -> None: + """ + Set white balance tint. + + Subclasses must: + 1. Validate value against ranges from get_metadata() + 2. Update field: self.tint = value + 3. Access hardware directly + """ + pass + + @abstractmethod + def set_wb_gain(self, gain: RGBGain) -> None: + """ + Set RGB white balance gains. + + Subclasses must: + 1. Validate: gain.validate() + 2. Update field: self.wbgain = gain + 3. Access hardware directly + """ + pass + + @abstractmethod + def set_contrast(self, value: int) -> None: + """ + Set contrast. + + Subclasses must: + 1. Validate value against ranges from get_metadata() + 2. Update field: self.contrast = value + 3. Access hardware directly + """ + pass + + @abstractmethod + def set_hue(self, value: int) -> None: + """ + Set hue. + + Subclasses must: + 1. Validate value against ranges from get_metadata() + 2. Update field: self.hue = value + 3. Access hardware directly + """ + pass + + @abstractmethod + def set_saturation(self, value: int) -> None: + """ + Set saturation. + + Subclasses must: + 1. Validate value against ranges from get_metadata() + 2. Update field: self.saturation = value + 3. Access hardware directly + """ + pass + + @abstractmethod + def set_brightness(self, value: int) -> None: + """ + Set brightness. + + Subclasses must: + 1. Validate value against ranges from get_metadata() + 2. Update field: self.brightness = value + 3. Access hardware directly + """ + pass + + @abstractmethod + def set_gamma(self, value: int) -> None: + """ + Set gamma correction. + + Subclasses must: + 1. Validate value against ranges from get_metadata() + 2. Update field: self.gamma = value + 3. Access hardware directly + """ + pass + + @abstractmethod + def set_level_range(self, low: RGBALevel, high: RGBALevel) -> None: + """ + Set level range mapping. + + Subclasses must: + 1. Validate: low.validate() and high.validate() + 2. 
Update fields: self.levelrange_low = low, self.levelrange_high = high + 3. Access hardware directly + """ + pass + + # Resolution settings + + @abstractmethod + def get_resolutions(self) -> list['CameraResolution']: + """ + Get available camera resolutions. + + Returns: + List of available resolutions + + Example: + return [ + CameraResolution(width=2592, height=1944), + CameraResolution(width=1920, height=1080), + CameraResolution(width=1280, height=720), + ] + """ + pass + + @abstractmethod + def get_current_resolution(self) -> tuple[int, int, int]: + """ + Get current resolution. + + Returns: + Tuple of (resolution_index, width, height) + """ + pass + + @abstractmethod + def set_resolution(self, resolution_index: int) -> bool: + """ + Set camera resolution. + + Args: + resolution_index: Index of resolution to use + + Returns: + True if successful, False otherwise + + Subclasses must: + 1. Validate resolution_index is valid + 2. Update field: self.resolution_index = resolution_index + 3. Access hardware to change resolution + """ + pass + + def get_still_resolutions(self) -> list['CameraResolution']: + """ + Get available still image resolutions. + + For cameras that support separate still image capture at + different resolutions than the video stream. + + Returns: + List of available still resolutions + """ + return [] + + # Exposure time settings (manual exposure) + + @abstractmethod + def get_exposure_time(self) -> int: + """ + Get current exposure time. + + Returns: + Current exposure time in microseconds + """ + pass + + @abstractmethod + def set_exposure_time(self, time_us: int) -> bool: + """ + Set exposure time (manual exposure control). + + Args: + time_us: Exposure time in microseconds + + Returns: + True if successful, False otherwise + + Subclasses must: + 1. Validate time_us against ranges from get_metadata() + 2. Update field: self.exposure_time_us = time_us + 3. 
Access hardware to set exposure time + """ + pass + + # Gain settings + + @abstractmethod + def get_gain(self) -> int: + """ + Get current gain. + + Returns: + Current gain in percent + """ + pass + + @abstractmethod + def set_gain(self, gain_percent: int) -> bool: + """ + Set gain. + + Args: + gain_percent: Gain in percent + + Returns: + True if successful, False otherwise + + Subclasses must: + 1. Validate gain_percent against ranges from get_metadata() + 2. Update field: self.gain_percent = gain_percent + 3. Access hardware to set gain + """ + pass + + # Getter methods - read current values from camera + + @abstractmethod + def refresh_from_camera(self, camera: BaseCamera) -> None: + """ + Read all current settings from camera hardware. + + Subclasses MUST override this method to read settings from their SDK/hardware. + + This is useful to sync the settings object with the actual + camera state, for example after manual adjustments or after + camera initialization. + + Args: + camera: The camera instance to read from + + Example implementation in AmScopeSettings: + def refresh_from_camera(self, camera: BaseCamera) -> None: + self._camera = camera + info(f"Refreshing settings from camera {camera.model}") + + if hasattr(camera, '_device'): + self.auto_expo = bool(camera._sdk.get_AutoExpoEnable(camera._device)) + self.exposure = camera._sdk.get_AutoExpoTarget(camera._device) + temp, tint = camera._sdk.get_TempTint(camera._device) + self.temp = temp + self.tint = tint + # ... read all other settings from SDK + + info("Successfully refreshed all settings from camera") + """ + pass + + +# ------------------------- +# Specialized manager +# ------------------------- +class CameraSettingsManager(ConfigManager[CameraSettings]): + """ + Specialized configuration manager for a single camera model. + + Each camera model should have its own manager instance. + This ensures settings don't bleed between incompatible models. 
+ + Example usage: + >>> # Create manager for MU500 + >>> manager = CameraSettingsManager(model="MU500") + >>> + >>> # Load settings and apply to camera + >>> settings = manager.load() + >>> settings.apply_to_camera(camera) + >>> + >>> # User changes settings via GUI... + >>> settings.set_exposure(150) + >>> settings.set_contrast(10) + >>> + >>> # Save when user clicks "Save" + >>> manager.save(settings) + >>> + >>> # Reset to saved settings + >>> settings = manager.load() + >>> settings.apply_to_camera(camera) + >>> + >>> # Reset to factory defaults + >>> settings = manager.restore_defaults() + >>> settings.apply_to_camera(camera) + """ + + def __init__( + self, + *, + model: str, + base_dir: Union[str, Path] = "./config/cameras", + default_filename: str = "default_settings.yaml", + backup_dirname: str = "backups", + backup_keep: int = 5, + ) -> None: + # Set root_dir to the model-specific directory + model_dir = Path(base_dir) / model + + super().__init__( + CameraSettings, + root_dir=model_dir, + default_filename=default_filename, + backup_dirname=backup_dirname, + backup_keep=backup_keep, + ) + + self.model = model + info(f"Initialized CameraSettingsManager for model '{model}' at {model_dir}") \ No newline at end of file From 4b437f6e8e8d03c0b29eb12cea9bd979601d8736 Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Tue, 3 Feb 2026 03:52:22 -0900 Subject: [PATCH 24/46] now happy with generic settings --- UI/main_window.py | 7 +- UI/settings/pages/camera_settings.py | 870 ++++++++++++++++++- UI/style.py | 5 - app_context.py | 22 +- camera/cameras/amscope_camera.py | 21 +- camera/cameras/base_camera.py | 26 +- camera/settings/amscope_settings.py | 506 +++-------- camera/settings/camera_settings.py | 559 ++---------- config/.gitignore | 2 + config/cameras/amscope/default_settings.yaml | 60 -- config/forge/default_settings.yaml | 4 +- forgeConfig.py | 107 ++- generic_config.py | 500 +++++++---- 13 files changed, 1537 insertions(+), 1152 deletions(-) create mode 100644 
config/.gitignore delete mode 100644 config/cameras/amscope/default_settings.yaml diff --git a/UI/main_window.py b/UI/main_window.py index 4bd789a..fbe1fd0 100644 --- a/UI/main_window.py +++ b/UI/main_window.py @@ -33,11 +33,8 @@ def __init__(self) -> None: # Register this main window with app context (initializes toast manager) self.app_context.register_main_window(self) - # Set window title with version from config - version = "Unknown" - if self.app_context.settings: - version = self.app_context.settings.version - self.setWindowTitle(f"Forge - v{version}") + # Set window title with version + self.setWindowTitle(f"Forge - v{self.app_context.current_version}") self.resize(1920, 1080) self._state = State() diff --git a/UI/settings/pages/camera_settings.py b/UI/settings/pages/camera_settings.py index f207ace..ab8a311 100644 --- a/UI/settings/pages/camera_settings.py +++ b/UI/settings/pages/camera_settings.py @@ -1,18 +1,874 @@ from __future__ import annotations +from pathlib import Path + from PySide6.QtWidgets import ( QVBoxLayout, QWidget, QFormLayout, QGroupBox, + QComboBox, + QSlider, + QCheckBox, + QSpinBox, + QDoubleSpinBox, + QLabel, + QHBoxLayout, + QPushButton, + QFileDialog, ) +from PySide6.QtCore import Qt, Signal, Slot + +from app_context import get_app_context +from logger import info, error, warning, debug + + +class CameraSettingsWidget(QWidget): + """Widget for displaying and editing camera settings""" + + settings_loaded = Signal(bool, object) # success, result + + def __init__(self, parent: QWidget | None = None) -> None: + super().__init__(parent) + + self.ctx = get_app_context() + self._settings_widgets: dict[str, QWidget] = {} + self._updating_from_camera = False + self._modified_settings: set[str] = set() # Track which settings have been modified + self._saved_values: dict[str, any] = {} # Store saved values for comparison + + self._setup_ui() + self._connect_signals() + self._populate_camera_list() + + def _setup_ui(self) -> None: + """Setup 
the user interface""" + layout = QVBoxLayout(self) + + # Camera selection group + camera_group = QGroupBox("Camera Device") + camera_layout = QFormLayout(camera_group) + + # Camera combo with refresh button on same line + camera_select_layout = QHBoxLayout() + self.camera_combo = QComboBox() + self.camera_combo.setMinimumWidth(300) + camera_select_layout.addWidget(self.camera_combo) + + self.refresh_btn = QPushButton("Refresh") + self.refresh_btn.setMaximumWidth(80) + camera_select_layout.addWidget(self.refresh_btn) + + camera_layout.addRow("Select Camera:", camera_select_layout) + + layout.addWidget(camera_group) + + # Camera settings groups (will be populated dynamically) + self.settings_container = QWidget() + self.settings_layout = QVBoxLayout(self.settings_container) + self.settings_layout.setContentsMargins(0, 0, 0, 0) + + layout.addWidget(self.settings_container) + layout.addStretch() + + # Save/Load buttons + button_layout = QHBoxLayout() + self.save_btn = QPushButton("Save Settings") + self.load_btn = QPushButton("Load Settings") + self.reset_btn = QPushButton("Reset to Defaults") + + button_layout.addWidget(self.save_btn) + button_layout.addWidget(self.load_btn) + button_layout.addWidget(self.reset_btn) + button_layout.addStretch() + + layout.addLayout(button_layout) + + def _connect_signals(self) -> None: + """Connect signals and slots""" + self.camera_combo.currentIndexChanged.connect(self._on_camera_changed) + self.refresh_btn.clicked.connect(lambda: self._populate_camera_list(force_enumerate=True)) + self.save_btn.clicked.connect(self._save_settings) + self.load_btn.clicked.connect(self._load_settings) + self.reset_btn.clicked.connect(self._reset_settings) + self.settings_loaded.connect(self._on_settings_loaded) + + # Connect to camera manager signals + if self.ctx.camera_manager: + self.ctx.camera_manager.camera_list_changed.connect( + self._on_camera_list_changed + ) + self.ctx.camera_manager.active_camera_changed.connect( + 
self._on_active_camera_changed + ) + + def _populate_camera_list(self, force_enumerate: bool = False) -> None: + """Populate the camera dropdown with available cameras + + Args: + force_enumerate: If True, force re-enumeration. Otherwise use cached list. + """ + self.camera_combo.blockSignals(True) + self.camera_combo.clear() + + if not self.ctx.camera_manager: + self.camera_combo.addItem("No camera manager available") + self.camera_combo.setEnabled(False) + self.camera_combo.blockSignals(False) + return + + # Use cached list unless forced to enumerate + if force_enumerate: + cameras = self.ctx.camera_manager.enumerate_cameras() + else: + cameras = self.ctx.camera_manager.available_cameras + # If no cached cameras, enumerate once + if not cameras: + cameras = self.ctx.camera_manager.enumerate_cameras() + + if not cameras: + self.camera_combo.addItem("No cameras detected") + self.camera_combo.setEnabled(False) + else: + self.camera_combo.setEnabled(True) + + # Add cameras to dropdown + for camera_info in cameras: + display_text = f"{camera_info.display_name} ({camera_info.model})" + self.camera_combo.addItem(display_text, camera_info) + + # Select the active camera if any + active_info = self.ctx.camera_manager.active_camera_info + if active_info: + for i in range(self.camera_combo.count()): + info_at_index = self.camera_combo.itemData(i) + if info_at_index and info_at_index.device_id == active_info.device_id: + self.camera_combo.setCurrentIndex(i) + break + + self.camera_combo.blockSignals(False) + + # Only refresh settings if we have an active camera + if self.ctx.camera and self.ctx.camera.underlying_camera.is_open: + self._refresh_settings_display() + + @Slot(int) + def _on_camera_changed(self, index: int) -> None: + """Handle camera selection change""" + if index < 0: + return + + camera_info = self.camera_combo.itemData(index) + if not camera_info: + return + + # Switch to the selected camera + info(f"Switching to camera: {camera_info.display_name}") + success 
= self.ctx.camera_manager.switch_camera(camera_info) + + if success: + self._refresh_settings_display() + else: + error(f"Failed to switch to camera: {camera_info.display_name}") + + @Slot() + def _on_camera_list_changed(self) -> None: + """Handle camera list changes from camera manager""" + # Use cached list since camera_list_changed is emitted after enumeration + self._populate_camera_list(force_enumerate=False) + + @Slot(object) + def _on_active_camera_changed(self, camera_info) -> None: + """Handle active camera changes from camera manager""" + self._refresh_settings_display() + + # Update combo box selection + if camera_info: + self.camera_combo.blockSignals(True) + for i in range(self.camera_combo.count()): + info_at_index = self.camera_combo.itemData(i) + if info_at_index and info_at_index.device_id == camera_info.device_id: + self.camera_combo.setCurrentIndex(i) + break + self.camera_combo.blockSignals(False) + + def _refresh_settings_display(self) -> None: + """Refresh the settings display based on current camera""" + # Clear existing settings widgets + self._clear_settings_display() + + camera = self.ctx.camera + if not camera: + self._show_no_camera_message() + return + + # Check if camera is open + if not camera.underlying_camera.is_open: + self._show_camera_not_open_message() + return + + # Get settings metadata + try: + settings = camera.settings + metadata_list = settings.get_metadata() + + # Store current values as "saved" baseline + self._saved_values.clear() + self._modified_settings.clear() + for meta in metadata_list: + current_value = getattr(settings, meta.name, None) + self._saved_values[meta.name] = current_value + + # Group settings by category + grouped_settings = self._group_settings(metadata_list) + + # Create UI for each group + for group_name, settings_in_group in grouped_settings.items(): + group_box = self._create_settings_group(group_name, settings_in_group) + self.settings_layout.addWidget(group_box) + + # Enable buttons + 
self.save_btn.setEnabled(True) + self.load_btn.setEnabled(True) + self.reset_btn.setEnabled(True) + + except Exception as e: + error(f"Error loading camera settings: {e}") + self._show_error_message(str(e)) + + def _clear_settings_display(self) -> None: + """Clear all settings widgets""" + while self.settings_layout.count(): + item = self.settings_layout.takeAt(0) + if item.widget(): + item.widget().deleteLater() + + self._settings_widgets.clear() + + # Disable buttons + self.save_btn.setEnabled(False) + self.load_btn.setEnabled(False) + self.reset_btn.setEnabled(False) + + def _show_no_camera_message(self) -> None: + """Show message when no camera is available""" + label = QLabel("No camera selected. Please select a camera from the dropdown above.") + label.setWordWrap(True) + label.setStyleSheet("color: gray; padding: 20px;") + self.settings_layout.addWidget(label) + + def _show_camera_not_open_message(self) -> None: + """Show message when camera is not open""" + label = QLabel("Camera is not open. 
Please open the camera first.") + label.setWordWrap(True) + label.setStyleSheet("color: orange; padding: 20px;") + self.settings_layout.addWidget(label) + + def _show_error_message(self, error_msg: str) -> None: + """Show error message""" + label = QLabel(f"Error loading settings: {error_msg}") + label.setWordWrap(True) + label.setStyleSheet("color: red; padding: 20px;") + self.settings_layout.addWidget(label) + + def _group_settings(self, metadata_list: list) -> dict[str, list]: + """Group settings by their group property""" + grouped = {} + + for meta in metadata_list: + group = meta.group if hasattr(meta, 'group') and meta.group else "General" + + if group not in grouped: + grouped[group] = [] + + grouped[group].append(meta) + + return grouped + + def _create_settings_group(self, group_name: str, settings_list: list) -> QGroupBox: + """Create a group box for a category of settings""" + group_box = QGroupBox(group_name) + layout = QFormLayout(group_box) + + for setting_meta in settings_list: + widget = self._create_setting_widget(setting_meta) + if widget: + # Create label with tooltip + label = QLabel(setting_meta.display_name + ":") + if hasattr(setting_meta, 'description') and setting_meta.description: + label.setToolTip(setting_meta.description) + + layout.addRow(label, widget) + + # Store both widget and label for styling + # Create a container that holds both for easier styling + widget_container = QWidget() + widget_container.setProperty("label", label) + widget_container.setProperty("control", widget) + self._settings_widgets[setting_meta.name] = widget_container + + return group_box + + def _create_setting_widget(self, meta) -> QWidget | None: + """Create appropriate widget for a setting based on its metadata""" + camera = self.ctx.camera + if not camera: + return None + + settings = camera.settings + setting_type = meta.setting_type if hasattr(meta, 'setting_type') else None + + # Handle both enum and string values + if setting_type is None: + 
warning(f"No setting type for {meta.name}") + return None + + # Convert enum to string value if needed + type_str = setting_type.value if hasattr(setting_type, 'value') else str(setting_type) + + # Create widget based on type + if type_str == "bool": + return self._create_bool_widget(meta, settings) + elif type_str == "range": + return self._create_range_widget(meta, settings) + elif type_str == "dropdown": + return self._create_dropdown_widget(meta, settings) + elif type_str == "rgba_level": + return self._create_rgba_level_widget(meta, settings) + elif type_str == "rgb_gain": + # TODO: Implement custom RGB gain widget + warning(f"RGB_GAIN widget not yet implemented for {meta.name}") + return None + else: + warning(f"Unknown setting type: {type_str} for {meta.name}") + return None + + def _create_bool_widget(self, meta, settings) -> QCheckBox: + """Create checkbox for boolean settings""" + checkbox = QCheckBox() + + # Get current value + current_value = getattr(settings, meta.name, False) + checkbox.setChecked(current_value) + + # Set tooltip + if hasattr(meta, 'description') and meta.description: + checkbox.setToolTip(meta.description) + + # Connect to setter + setter_name = f"set_{meta.name}" + if hasattr(settings, setter_name): + checkbox.stateChanged.connect( + lambda state: self._on_bool_changed(setter_name, state == Qt.CheckState.Checked) + ) + + return checkbox + + def _create_range_widget(self, meta, settings) -> QWidget: + """Create slider with value display for range settings""" + container = QWidget() + layout = QHBoxLayout(container) + layout.setContentsMargins(0, 0, 0, 0) + + # Determine if we need float or int + is_float = hasattr(meta, 'min_value') and isinstance(meta.min_value, float) + + if is_float: + # Use double spin box for float values + spinbox = QDoubleSpinBox() + spinbox.setDecimals(2) + else: + # Use regular spin box for int values + spinbox = QSpinBox() + + # Set fixed width to accommodate 6 digits plus decimals/sign + 
spinbox.setFixedWidth(90) + + # Set range + if hasattr(meta, 'min_value') and hasattr(meta, 'max_value'): + spinbox.setMinimum(meta.min_value) + spinbox.setMaximum(meta.max_value) + + # Get current value + current_value = getattr(settings, meta.name, 0) + spinbox.setValue(current_value) + + # Set tooltip + if hasattr(meta, 'description') and meta.description: + spinbox.setToolTip(meta.description) + + # Create slider + slider = QSlider(Qt.Orientation.Horizontal) + + if is_float: + # For float values, scale to int range for slider + slider.setMinimum(0) + slider.setMaximum(1000) + slider.setValue( + int((current_value - meta.min_value) / (meta.max_value - meta.min_value) * 1000) + ) + else: + slider.setMinimum(int(meta.min_value) if hasattr(meta, 'min_value') else 0) + slider.setMaximum(int(meta.max_value) if hasattr(meta, 'max_value') else 100) + slider.setValue(int(current_value)) + + # Connect signals + setter_name = f"set_{meta.name}" + if hasattr(settings, setter_name): + if is_float: + spinbox.valueChanged.connect( + lambda val: self._on_float_changed(setter_name, val, slider, meta) + ) + slider.valueChanged.connect( + lambda val: self._on_slider_changed_float(setter_name, val, spinbox, meta) + ) + else: + spinbox.valueChanged.connect( + lambda val: self._on_int_changed(setter_name, val, slider) + ) + slider.valueChanged.connect( + lambda val: self._on_slider_changed_int(setter_name, val, spinbox) + ) + + layout.addWidget(slider) + layout.addWidget(spinbox) + + return container + + def _create_dropdown_widget(self, meta, settings) -> QComboBox: + """Create dropdown for choice settings""" + combo = QComboBox() + + # Add choices + if hasattr(meta, 'choices') and meta.choices: + for choice in meta.choices: + combo.addItem(str(choice), choice) + + # Set current value + current_value = getattr(settings, meta.name, None) + if current_value is not None: + index = combo.findData(current_value) + if index >= 0: + combo.setCurrentIndex(index) + + # Set tooltip + if 
hasattr(meta, 'description') and meta.description: + combo.setToolTip(meta.description) + + # Connect to setter + setter_name = f"set_{meta.name}" + if hasattr(settings, setter_name): + combo.currentIndexChanged.connect( + lambda idx: self._on_dropdown_changed(setter_name, combo.itemData(idx)) + ) + + return combo + + def _create_rgba_level_widget(self, meta, settings) -> QWidget: + """Create RGBA level widget with four spinboxes for R, G, B, A""" + container = QWidget() + layout = QHBoxLayout(container) + layout.setContentsMargins(0, 0, 0, 0) + + # Get current value (should be an RGBALevel object) + current_value = getattr(settings, meta.name, None) + + # Create spinboxes for each channel + spinboxes = {} + for channel in ['r', 'g', 'b', 'a']: + channel_layout = QVBoxLayout() + channel_layout.setSpacing(2) + + label = QLabel(channel.upper()) + label.setAlignment(Qt.AlignmentFlag.AlignCenter) + + spinbox = QSpinBox() + spinbox.setMinimum(0) + spinbox.setMaximum(255) + spinbox.setFixedWidth(60) + + # Set current value if available + if current_value and hasattr(current_value, channel): + spinbox.setValue(getattr(current_value, channel)) + + channel_layout.addWidget(label) + channel_layout.addWidget(spinbox) + + layout.addLayout(channel_layout) + spinboxes[channel] = spinbox + + # Connect to setter + setter_name = f"set_{meta.name}" + if hasattr(settings, setter_name): + # Create a function that updates all values when any spinbox changes + def on_rgba_changed(): + if self._updating_from_camera: + return + + # Import RGBALevel here to avoid circular imports + try: + from camera.settings.camera_settings import RGBALevel + + new_value = RGBALevel( + r=spinboxes['r'].value(), + g=spinboxes['g'].value(), + b=spinboxes['b'].value(), + a=spinboxes['a'].value() + ) + + setter = getattr(settings, setter_name) + setter(new_value) + + # Mark as modified + setting_name = setter_name.replace("set_", "") + self._mark_setting_modified(setting_name, new_value) + + debug(f"Set 
{setter_name} to {new_value}") + except Exception as e: + error(f"Error setting {setter_name}: {e}") + + # Connect all spinboxes to the same handler + for spinbox in spinboxes.values(): + spinbox.valueChanged.connect(on_rgba_changed) + + layout.addStretch() + return container + + def _mark_setting_modified(self, setting_name: str, current_value) -> None: + """Mark a setting as modified and update its widget styling""" + # Check if value actually changed from saved value + saved_value = self._saved_values.get(setting_name) + + # Handle different value types for comparison + is_modified = False + if saved_value is None: + is_modified = current_value is not None + elif hasattr(saved_value, '__dict__'): + # For objects like RGBALevel, compare attributes + is_modified = str(saved_value) != str(current_value) + else: + is_modified = saved_value != current_value + + # Update modified tracking + if is_modified: + self._modified_settings.add(setting_name) + else: + self._modified_settings.discard(setting_name) + + # Update widget styling + self._update_widget_styling(setting_name, is_modified) + + def _update_widget_styling(self, setting_name: str, is_modified: bool) -> None: + """Update the visual styling of a widget to indicate modification""" + widget = self._settings_widgets.get(setting_name) + if not widget: + return + + if is_modified: + # Orange text and slider for modified settings + # Apply styling to all child widgets + self._apply_orange_styling(widget, True) + else: + # Clear custom styling to revert to default + self._apply_orange_styling(widget, False) + + def _apply_orange_styling(self, widget: QWidget, orange: bool) -> None: + """Apply or remove orange styling to a widget and its children""" + # Get the actual control widget and label from the container + label = widget.property("label") + control = widget.property("control") + + if not control: + return + + if orange: + # Color the label text + if label: + label.setStyleSheet("QLabel { color: #FFA500; }") + 
+ # For different widget types, apply orange color to slider only + if isinstance(control, QCheckBox): + control.setStyleSheet("QCheckBox { color: #FFA500; }") + elif isinstance(control, QComboBox): + control.setStyleSheet("QComboBox { color: #FFA500; }") + elif isinstance(control, QWidget): + # For container widgets (like the range widget container) + # Only style slider handle, not the groove + for child in control.findChildren(QSlider): + child.setStyleSheet(""" + QSlider::handle:horizontal { + background: #FFA500; + border: 1px solid #FFA500; + width: 18px; + margin: -2px 0; + border-radius: 3px; + } + """) + else: + # Clear styling + if label: + label.setStyleSheet("") + control.setStyleSheet("") + for child in control.findChildren(QWidget): + child.setStyleSheet("") + + def _clear_all_modifications(self) -> None: + """Clear all modification markers and update saved values""" + camera = self.ctx.camera + if not camera: + return + + settings = camera.settings + + # Update saved values to current values + for setting_name in list(self._modified_settings): + current_value = getattr(settings, setting_name, None) + self._saved_values[setting_name] = current_value + self._update_widget_styling(setting_name, False) + + self._modified_settings.clear() + + def _on_bool_changed(self, setter_name: str, value: bool) -> None: + """Handle boolean setting change""" + if self._updating_from_camera: + return + + camera = self.ctx.camera + if not camera: + return + + try: + setter = getattr(camera.settings, setter_name) + setter(value) + + # Extract setting name from setter name (remove "set_" prefix) + setting_name = setter_name.replace("set_", "") + self._mark_setting_modified(setting_name, value) + + debug(f"Set {setter_name} to {value}") + except Exception as e: + error(f"Error setting {setter_name}: {e}") + + def _on_int_changed(self, setter_name: str, value: int, slider: QSlider) -> None: + """Handle integer setting change from spinbox""" + if self._updating_from_camera: 
+ return + + # Update slider + slider.blockSignals(True) + slider.setValue(value) + slider.blockSignals(False) + + camera = self.ctx.camera + if not camera: + return + + try: + setter = getattr(camera.settings, setter_name) + setter(value) + + # Extract setting name from setter name (remove "set_" prefix) + setting_name = setter_name.replace("set_", "") + self._mark_setting_modified(setting_name, value) + + debug(f"Set {setter_name} to {value}") + except Exception as e: + error(f"Error setting {setter_name}: {e}") + + def _on_float_changed(self, setter_name: str, value: float, slider: QSlider, meta) -> None: + """Handle float setting change from spinbox""" + if self._updating_from_camera: + return + + # Update slider + slider.blockSignals(True) + slider_val = int((value - meta.min_value) / (meta.max_value - meta.min_value) * 1000) + slider.setValue(slider_val) + slider.blockSignals(False) + + camera = self.ctx.camera + if not camera: + return + + try: + setter = getattr(camera.settings, setter_name) + setter(value) + + # Extract setting name from setter name (remove "set_" prefix) + setting_name = setter_name.replace("set_", "") + self._mark_setting_modified(setting_name, value) + + debug(f"Set {setter_name} to {value}") + except Exception as e: + error(f"Error setting {setter_name}: {e}") + + def _on_slider_changed_int(self, setter_name: str, value: int, spinbox: QSpinBox) -> None: + """Handle integer setting change from slider""" + if self._updating_from_camera: + return + + # Update spinbox + spinbox.blockSignals(True) + spinbox.setValue(value) + spinbox.blockSignals(False) + + camera = self.ctx.camera + if not camera: + return + + try: + setter = getattr(camera.settings, setter_name) + setter(value) + + # Extract setting name from setter name (remove "set_" prefix) + setting_name = setter_name.replace("set_", "") + self._mark_setting_modified(setting_name, value) + + debug(f"Set {setter_name} to {value}") + except Exception as e: + error(f"Error setting 
{setter_name}: {e}") + + def _on_slider_changed_float(self, setter_name: str, slider_val: int, + spinbox: QDoubleSpinBox, meta) -> None: + """Handle float setting change from slider""" + if self._updating_from_camera: + return + + # Convert slider value to float + value = meta.min_value + (slider_val / 1000.0) * (meta.max_value - meta.min_value) + + # Update spinbox + spinbox.blockSignals(True) + spinbox.setValue(value) + spinbox.blockSignals(False) + + camera = self.ctx.camera + if not camera: + return + + try: + setter = getattr(camera.settings, setter_name) + setter(value) + + # Extract setting name from setter name (remove "set_" prefix) + setting_name = setter_name.replace("set_", "") + self._mark_setting_modified(setting_name, value) + + debug(f"Set {setter_name} to {value}") + except Exception as e: + error(f"Error setting {setter_name}: {e}") + + def _on_dropdown_changed(self, setter_name: str, value) -> None: + """Handle dropdown setting change""" + if self._updating_from_camera: + return + + camera = self.ctx.camera + if not camera: + return + + try: + setter = getattr(camera.settings, setter_name) + setter(value) + + # Extract setting name from setter name (remove "set_" prefix) + setting_name = setter_name.replace("set_", "") + self._mark_setting_modified(setting_name, value) + + debug(f"Set {setter_name} to {value}") + except Exception as e: + error(f"Error setting {setter_name}: {e}") + + @Slot() + def _save_settings(self) -> None: + """Save current camera settings""" + camera = self.ctx.camera + if not camera: + warning("No camera to save settings from") + return + + try: + camera.save_settings() + info("Camera settings saved successfully") + + # Clear modification markers + self._clear_all_modifications() + + self.ctx.toast.info("Settings saved successfully", duration=2000) + except Exception as e: + error(f"Error saving camera settings: {e}") + self.ctx.toast.info(f"Error saving settings: {e}", duration=3000) + + @Slot() + def _load_settings(self) 
-> None: + """Load camera settings from file""" + camera = self.ctx.camera + if not camera: + warning("No camera to load settings to") + return + + # Open file picker for YAML files + file_path, _ = QFileDialog.getOpenFileName( + self, + "Load Camera Settings", + "config/cameras", + "YAML Files (*.yaml *.yml);;All Files (*)" + ) + + # If no file was selected, do nothing + if not file_path: + return + + # Convert to Path for the callback + selected_path = Path(file_path) + + def on_load_complete(success: bool, result): + """Callback runs on camera thread - emit signal to UI thread""" + self.settings_loaded.emit(success, (result, str(selected_path))) + + # Load settings with callback + camera.load_settings(selected_path, on_complete=on_load_complete) + + @Slot(bool, object) + def _on_settings_loaded(self, success: bool, data: tuple) -> None: + """Handle settings loaded callback on UI thread""" + result, file_path = data + + if success: + info(f"Camera settings loaded successfully from {file_path}") + + # Refresh the display to show loaded values + self._refresh_settings_display() + + self.ctx.toast.success("Settings loaded successfully", duration=2000) + else: + error(f"Error loading camera settings from {file_path}: {result}") + self.ctx.toast.error(f"Error loading settings: {result}", duration=3000) -def camera_page() ->QWidget: - w = QWidget() - layout = QVBoxLayout(w) + @Slot() + def _reset_settings(self) -> None: + """Reset camera settings to defaults""" + camera = self.ctx.camera + if not camera: + warning("No camera to reset settings on") + return + + try: + # Refresh from camera hardware (gets defaults) + settings = camera.settings + settings.refresh_from_camera(camera.underlying_camera) + + info("Camera settings reset to defaults") + + # Refresh the display + self._refresh_settings_display() + + self.ctx.toast.info("Settings reset to defaults", duration=2000) + except Exception as e: + error(f"Error resetting camera settings: {e}") + 
self.ctx.toast.info(f"Error resetting settings: {e}", duration=3000) - top = QGroupBox("Camera Device") - form = QFormLayout(top) - layout.addWidget(top) - return w \ No newline at end of file +def camera_page() -> QWidget: + """Create and return the camera settings page widget""" + return CameraSettingsWidget() \ No newline at end of file diff --git a/UI/style.py b/UI/style.py index 928f22f..6d4b7bf 100644 --- a/UI/style.py +++ b/UI/style.py @@ -114,14 +114,11 @@ def apply_style(app: QApplication) -> None: QFrame#CollapsibleSection {{ background: rgba(255,255,255,0.85); border: 1px solid rgba(0,0,0,0.10); - border-radius: 10px; }} /* Full-width header strip: dark grey */ QFrame#SectionHeader {{ background: #5f6368; - border-top-left-radius: 10px; - border-top-right-radius: 10px; border-bottom: 1px solid rgba(0,0,0,0.10); }} QLabel#SectionHeaderTitle, QFrame#SectionHeader QLabel {{ @@ -131,8 +128,6 @@ def apply_style(app: QApplication) -> None: /* When collapsed: header rounds bottom corners too (prevents “sticking out” corners) */ QFrame#SectionHeader[collapsed="true"] {{ - border-bottom-left-radius: 10px; - border-bottom-right-radius: 10px; border-bottom: none; }} diff --git a/app_context.py b/app_context.py index bf3b2c5..81d6c91 100644 --- a/app_context.py +++ b/app_context.py @@ -15,6 +15,9 @@ from UI.settings.settings_main import SettingsDialog from UI.widgets.toast_widget import ToastManager +# Current Forge version - update this when releasing new versions +FORGE_VERSION = "1.2" + class AppContext: """ @@ -78,6 +81,11 @@ def settings(self) -> ForgeSettings | None: """Get the Forge settings""" return self._settings + @property + def settings_manager(self) -> ForgeSettingsManager | None: + """Get the Forge settings manager""" + return self._settings_manager + @property def settings_dialog(self) -> SettingsDialog | None: """Get the settings dialog instance""" @@ -88,6 +96,11 @@ def toast(self) -> ToastManager | None: """Get the toast manager instance""" return 
self._toast_manager + @property + def current_version(self) -> str: + """Get the current Forge version""" + return FORGE_VERSION + def register_main_window(self, window): """Register the main window instance""" self._main_window = window @@ -118,7 +131,13 @@ def _load_settings(self): try: self._settings_manager = ForgeSettingsManager() self._settings = self._settings_manager.load() - info(f"Forge settings loaded - version: {self._settings.version}") + + info(f"Forge settings loaded - running v{FORGE_VERSION}") + + # Check if we should show patch notes + if self._settings.show_patchnotes: + info("New version detected - patch notes should be displayed") + except Exception as e: error(f"Failed to load Forge settings: {e}") # Create default settings if loading fails @@ -155,7 +174,6 @@ def _initialize_camera_manager(self): def cleanup(self): """Cleanup resources""" if self._camera_manager: - info("Cleaning up camera manager") self._camera_manager.cleanup() self._camera_manager = None diff --git a/camera/cameras/amscope_camera.py b/camera/cameras/amscope_camera.py index 4236fa1..c25d999 100644 --- a/camera/cameras/amscope_camera.py +++ b/camera/cameras/amscope_camera.py @@ -55,6 +55,9 @@ def __init__(self, model: str): model: Camera model name (default "Amscope") """ super().__init__(model=model) + + # Set Settings class + self._settings_class = AmscopeSettings # Initialize logger self._logger = get_logger() @@ -67,13 +70,15 @@ def __init__(self, model: str): self._camera_info = None # Must be set via set_camera_info() before opening self._frame_buffer = None - # ------------------------- - # Settings Integration - # ------------------------- - - # Settings are now managed by the base class - # The base class will automatically use AmscopeSettings - # through the CameraSettingsManager factory system + def _get_settings_class(self): + """ + Get the settings class for Amscope cameras. 
+ + Returns: + AmscopeSettings class + """ + from camera.settings.amscope_settings import AmscopeSettings + return AmscopeSettings @property def settings(self) -> AmscopeSettings: @@ -207,6 +212,8 @@ def open(self, camera_id: str) -> bool: self._is_open = True # Set RGB byte order for Qt compatibility self._hcam.put_Option(self.OPTION_BYTEORDER, 0) + # Initialize settings + self.initialize_settings() return True return False except self._get_sdk().HRESULTException: diff --git a/camera/cameras/base_camera.py b/camera/cameras/base_camera.py index 5251dd0..c5b962c 100644 --- a/camera/cameras/base_camera.py +++ b/camera/cameras/base_camera.py @@ -101,6 +101,25 @@ def is_sdk_loaded(cls) -> bool: """ return cls._sdk_loaded + @abstractmethod + def _get_settings_class(self) -> type[CameraSettings]: + """ + Get the appropriate settings class for this camera. + + This method must be implemented by subclasses to return their + concrete settings class (e.g., AmscopeSettings, ToupcamSettings). + + Returns: + Concrete CameraSettings subclass for this camera type + + Example: + In AmscopeCamera: + >>> def _get_settings_class(self): + ... from camera.settings.amscope_settings import AmscopeSettings + ... return AmscopeSettings + """ + pass + def initialize_settings(self) -> None: """ Initialize the settings system for this camera. 
@@ -125,7 +144,10 @@ def initialize_settings(self) -> None: info(f"Initializing settings for {self.model}") # Create model-specific settings manager - self._settings_manager = CameraSettingsManager(model=self.model) + self._settings_manager = CameraSettingsManager( + model=self.model, + settings_class=self._get_settings_class() + ) # Load saved settings or create defaults self._settings = self._settings_manager.load() @@ -135,8 +157,6 @@ def initialize_settings(self) -> None: # Then apply settings to camera hardware self._settings.apply_to_camera(self) - - info("Settings initialized and applied to camera") @property def settings(self) -> CameraSettings: diff --git a/camera/settings/amscope_settings.py b/camera/settings/amscope_settings.py index eafbd55..386d02d 100644 --- a/camera/settings/amscope_settings.py +++ b/camera/settings/amscope_settings.py @@ -1,10 +1,3 @@ -""" -AmScope camera settings implementation. - -Provides settings management for AmScope cameras with hardware-specific -controls like fan, TEC, low noise mode, and demosaic settings. -""" - from __future__ import annotations from dataclasses import dataclass, field @@ -15,55 +8,44 @@ SettingMetadata, SettingType, RGBALevel, - RGBGain, FileFormat, ) -from logger import info, debug, error, exception +from logger import info, error, exception if TYPE_CHECKING: - from base_camera import BaseCamera, CameraResolution + from camera.cameras.base_camera import BaseCamera, CameraResolution @dataclass class AmscopeSettings(CameraSettings): - """ - Settings for AmScope cameras. 
+ version: str = "0" + auto_expo: bool = True + exposure: int = 128 + exposure_time_us: int = 50000 + resolution_index: int = 0 + temp: int = 6500 + tint: int = 1000 + contrast: int = 0 + hue: int = 0 + saturation: int = 128 + brightness: int = 0 + gamma: int = 100 + gain_percent: int = 100 + levelrange_low: RGBALevel = RGBALevel(0, 0, 0, 0) + levelrange_high: RGBALevel = RGBALevel(255, 255, 255, 255) + fformat: FileFormat = FileFormat.TIFF - Extends base CameraSettings with AmScope-specific hardware controls: - - Fan control for cooling - - TEC (Thermoelectric Cooler) control and target temperature - - Low noise mode and high full-well capacity - - Test pattern for diagnostics - - Demosaic algorithm selection - """ - - # AmScope-specific hardware controls fan_enabled: bool = field(default=False) - tec_enabled: bool = field(default=False) - tec_target: int = field(default=-10) # Target temperature in Celsius - low_noise_mode: bool = field(default=False) high_fullwell: bool = field(default=False) - test_pattern: bool = field(default=False) - demosaic_algorithm: int = field(default=0) # 0=RGGB, 1=BGGR, 2=GRBG, 3=GBRG - # Internal camera reference (not serialized to YAML) - _camera: BaseCamera | None = field(default=None, init=False, repr=False, compare=False) + _camera: BaseCamera | None = field(default=None, repr=False, compare=False) + + def __post_init__(self) -> None: + super().__post_init__() @classmethod def get_metadata(cls) -> list[SettingMetadata]: - """ - Get metadata for all AmScope settings. 
- - This is the SINGLE SOURCE OF TRUTH for: - - GUI generation (widget types, labels, groups) - - Validation (min/max ranges) - - Organization (grouping related settings) - - Returns: - List of SettingMetadata for all settings - """ return [ - # Exposure settings SettingMetadata( name="auto_expo", display_name="Auto Exposure", @@ -76,7 +58,7 @@ def get_metadata(cls) -> list[SettingMetadata]: name="exposure", display_name="Exposure Target", setting_type=SettingType.RANGE, - description="Target brightness for auto exposure (16-235)", + description="Target brightness for auto exposure", min_value=16, max_value=235, group="Exposure", @@ -96,19 +78,17 @@ def get_metadata(cls) -> list[SettingMetadata]: name="gain_percent", display_name="Gain (%)", setting_type=SettingType.RANGE, - description="Sensor gain in percent (100-1600)", + description="Sensor gain in percent", min_value=100, max_value=1600, group="Exposure", runtime_changeable=True, ), - - # White balance settings SettingMetadata( name="temp", display_name="Color Temperature", setting_type=SettingType.RANGE, - description="White balance temperature in Kelvin (2000-15000)", + description="White balance temperature in Kelvin", min_value=2000, max_value=15000, group="White Balance", @@ -118,27 +98,17 @@ def get_metadata(cls) -> list[SettingMetadata]: name="tint", display_name="Tint", setting_type=SettingType.RANGE, - description="White balance tint adjustment (200-2500)", + description="White balance tint adjustment", min_value=200, max_value=2500, group="White Balance", runtime_changeable=True, ), - SettingMetadata( - name="wbgain", - display_name="RGB Gain", - setting_type=SettingType.RGB_GAIN, - description="Fine-tune RGB white balance gains (-127 to 127)", - group="White Balance", - runtime_changeable=True, - ), - - # Color and image quality SettingMetadata( name="hue", display_name="Hue", setting_type=SettingType.RANGE, - description="Color hue adjustment (-180 to 180)", + description="Color hue adjustment", 
min_value=-180, max_value=180, group="Color", @@ -148,7 +118,7 @@ def get_metadata(cls) -> list[SettingMetadata]: name="saturation", display_name="Saturation", setting_type=SettingType.RANGE, - description="Color saturation (0-255)", + description="Color saturation", min_value=0, max_value=255, group="Color", @@ -158,7 +128,7 @@ def get_metadata(cls) -> list[SettingMetadata]: name="brightness", display_name="Brightness", setting_type=SettingType.RANGE, - description="Image brightness adjustment (-64 to 64)", + description="Image brightness adjustment", min_value=-64, max_value=64, group="Color", @@ -168,7 +138,7 @@ def get_metadata(cls) -> list[SettingMetadata]: name="contrast", display_name="Contrast", setting_type=SettingType.RANGE, - description="Image contrast adjustment (-100 to 100)", + description="Image contrast adjustment", min_value=-100, max_value=100, group="Color", @@ -178,19 +148,17 @@ def get_metadata(cls) -> list[SettingMetadata]: name="gamma", display_name="Gamma", setting_type=SettingType.RANGE, - description="Gamma correction (0-180)", + description="Gamma correction", min_value=0, max_value=180, group="Color", runtime_changeable=True, ), - - # Level range SettingMetadata( name="levelrange_low", display_name="Black Point", setting_type=SettingType.RGBA_LEVEL, - description="Output level for darkest input values (0-255)", + description="Output level for darkest input values", group="Levels", runtime_changeable=True, ), @@ -198,24 +166,20 @@ def get_metadata(cls) -> list[SettingMetadata]: name="levelrange_high", display_name="White Point", setting_type=SettingType.RGBA_LEVEL, - description="Output level for brightest input values (0-255)", + description="Output level for brightest input values", group="Levels", runtime_changeable=True, ), - - # Resolution SettingMetadata( name="resolution_index", display_name="Resolution", setting_type=SettingType.RANGE, description="Camera resolution index", min_value=0, - max_value=10, # Will be validated against 
actual camera resolutions + max_value=10, group="Capture", - runtime_changeable=False, # Requires restart + runtime_changeable=False, ), - - # File format SettingMetadata( name="fformat", display_name="File Format", @@ -225,39 +189,11 @@ def get_metadata(cls) -> list[SettingMetadata]: group="Capture", runtime_changeable=True, ), - - # AmScope-specific hardware controls SettingMetadata( name="fan_enabled", display_name="Cooling Fan", setting_type=SettingType.BOOL, - description="Enable camera cooling fan", - group="Hardware", - runtime_changeable=True, - ), - SettingMetadata( - name="tec_enabled", - display_name="TEC Cooler", - setting_type=SettingType.BOOL, - description="Enable thermoelectric cooler", - group="Hardware", - runtime_changeable=True, - ), - SettingMetadata( - name="tec_target", - display_name="TEC Target (°C)", - setting_type=SettingType.RANGE, - description="Target temperature for TEC in Celsius (-40 to 20)", - min_value=-40, - max_value=20, - group="Hardware", - runtime_changeable=True, - ), - SettingMetadata( - name="low_noise_mode", - display_name="Low Noise Mode", - setting_type=SettingType.BOOL, - description="Enable low noise mode (reduces read noise)", + description="Enable cooling fan", group="Hardware", runtime_changeable=True, ), @@ -269,221 +205,133 @@ def get_metadata(cls) -> list[SettingMetadata]: group="Hardware", runtime_changeable=True, ), - SettingMetadata( - name="test_pattern", - display_name="Test Pattern", - setting_type=SettingType.BOOL, - description="Enable test pattern for diagnostics", - group="Advanced", - runtime_changeable=True, - ), - SettingMetadata( - name="demosaic_algorithm", - display_name="Demosaic Algorithm", - setting_type=SettingType.RANGE, - description="Bayer demosaic algorithm (0=RGGB, 1=BGGR, 2=GRBG, 3=GBRG)", - min_value=0, - max_value=3, - group="Advanced", - runtime_changeable=True, - ), ] - def _validate_and_set(self, param_name: str, value: int | bool) -> None: - """ - Validate a parameter value 
against metadata ranges. - - Args: - param_name: Name of the parameter - value: Value to validate - - Raises: - ValueError: If value is outside valid range - """ - metadata_dict = {m.name: m for m in self.get_metadata()} - - if param_name not in metadata_dict: - raise ValueError(f"Unknown parameter: {param_name}") - - meta = metadata_dict[param_name] - - # Validate based on type - if meta.setting_type == SettingType.RANGE: - if not isinstance(value, (int, float)): - raise ValueError(f"{param_name} must be numeric") + def _get_metadata_map(self) -> dict[str, SettingMetadata]: + return {m.name: m for m in self.get_metadata()} + + def _validate_range(self, name: str, value: int) -> None: + meta = self._get_metadata_map().get(name) + if meta and meta.setting_type == SettingType.RANGE: if not (meta.min_value <= value <= meta.max_value): raise ValueError( - f"{param_name} = {value} is outside valid range " - f"[{meta.min_value}, {meta.max_value}]" + f"{name} must be in [{meta.min_value}, {meta.max_value}], got {value}" ) - elif meta.setting_type == SettingType.BOOL: - if not isinstance(value, bool): - raise ValueError(f"{param_name} must be boolean") - - def _apply_to_sdk(self, param_name: str, value) -> None: - """ - Apply a setting to the camera SDK. 
- - Args: - param_name: Parameter name - value: Value to apply - """ - if self._camera is None or not hasattr(self._camera, '_hcam'): - debug(f"Camera not available, skipping SDK update for {param_name}") - return - - hcam = self._camera._hcam - if hcam is None: - debug(f"Camera handle not available, skipping SDK update for {param_name}") - return - - try: - # Map parameter names to SDK calls - if param_name == "auto_expo": - hcam.put_AutoExpoEnable(1 if value else 0) - elif param_name == "exposure": - hcam.put_AutoExpoTarget(value) - elif param_name == "exposure_time_us": - hcam.put_ExpoTime(value) - elif param_name == "gain_percent": - hcam.put_ExpoAGain(value) - elif param_name == "temp" and hasattr(self, 'tint'): - hcam.put_TempTint(value, self.tint) - elif param_name == "tint" and hasattr(self, 'temp'): - hcam.put_TempTint(self.temp, value) - elif param_name == "hue": - hcam.put_Hue(value) - elif param_name == "saturation": - hcam.put_Saturation(value) - elif param_name == "brightness": - hcam.put_Brightness(value) - elif param_name == "contrast": - hcam.put_Contrast(value) - elif param_name == "gamma": - hcam.put_Gamma(value) - elif param_name == "wbgain": - hcam.put_WhiteBalanceGain([value.r, value.g, value.b]) - elif param_name in ["levelrange_low", "levelrange_high"]: - low = self.levelrange_low - high = self.levelrange_high - hcam.put_LevelRange([low.r, low.g, low.b, low.a], - [high.r, high.g, high.b, high.a]) - elif param_name == "fan_enabled": - hcam.put_Option(0x0a, 1 if value else 0) # OPTION_FAN - elif param_name == "tec_enabled": - hcam.put_Option(0x08, 1 if value else 0) # OPTION_TEC - elif param_name == "tec_target": - hcam.put_Option(0x0c, value) # OPTION_TECTARGET - elif param_name == "low_noise_mode": - hcam.put_Option(0x53, 1 if value else 0) # OPTION_LOW_NOISE - elif param_name == "high_fullwell": - hcam.put_Option(0x51, 1 if value else 0) # OPTION_HIGH_FULLWELL - elif param_name == "test_pattern": - hcam.put_Option(0x2c, 1 if value else 0) # 
OPTION_TESTPATTERN - elif param_name == "demosaic_algorithm": - hcam.put_Option(0x5a, value) # OPTION_DEMOSAIC - - debug(f"Applied {param_name} = {value} to camera SDK") - - except Exception as e: - error(f"Failed to apply {param_name} to camera: {e}") - - # Required abstract method implementations def set_auto_exposure(self, enabled: bool) -> None: - """Enable or disable automatic exposure.""" - self._validate_and_set("auto_expo", enabled) self.auto_expo = enabled - self._apply_to_sdk("auto_expo", enabled) + if self._camera and hasattr(self._camera, '_hcam'): + self._camera._hcam.put_AutoExpoEnable(1 if enabled else 0) def set_exposure(self, value: int) -> None: - """Set auto exposure target value.""" - self._validate_and_set("exposure", value) + self._validate_range("exposure", value) self.exposure = value - self._apply_to_sdk("exposure", value) + if self._camera and hasattr(self._camera, '_hcam'): + self._camera._hcam.put_AutoExpoTarget(value) + + def set_exposure_time(self, time_us: int) -> bool: + self._validate_range("exposure_time_us", time_us) + self.exposure_time_us = time_us + if self._camera and hasattr(self._camera, '_hcam'): + self._camera._hcam.put_ExpoTime(time_us) + return True + + def set_gain(self, gain_percent: int) -> None: + self._validate_range("gain_percent", gain_percent) + self.gain_percent = gain_percent + if self._camera and hasattr(self._camera, '_hcam'): + self._camera._hcam.put_ExpoAGain(gain_percent) def set_temp(self, value: int) -> None: - """Set white balance temperature.""" - self._validate_and_set("temp", value) + self._validate_range("temp", value) self.temp = value - self._apply_to_sdk("temp", value) + if self._camera and hasattr(self._camera, '_hcam'): + self._camera._hcam.put_TempTint(value, self.tint) def set_tint(self, value: int) -> None: - """Set white balance tint.""" - self._validate_and_set("tint", value) + self._validate_range("tint", value) self.tint = value - self._apply_to_sdk("tint", value) + if self._camera and 
hasattr(self._camera, '_hcam'): + self._camera._hcam.put_TempTint(self.temp, value) - def set_white_balance_gain(self, gain: RGBGain) -> None: - """Set RGB white balance gains.""" - gain.validate() - self.wbgain = gain - self._apply_to_sdk("wbgain", gain) + def set_temp_tint(self, temp: int, tint: int) -> None: + self._validate_range("temp", temp) + self._validate_range("tint", tint) + self.temp = temp + self.tint = tint + if self._camera and hasattr(self._camera, '_hcam'): + self._camera._hcam.put_TempTint(temp, tint) def set_hue(self, value: int) -> None: - """Set hue adjustment.""" - self._validate_and_set("hue", value) + self._validate_range("hue", value) self.hue = value - self._apply_to_sdk("hue", value) + if self._camera and hasattr(self._camera, '_hcam'): + self._camera._hcam.put_Hue(value) def set_saturation(self, value: int) -> None: - """Set saturation.""" - self._validate_and_set("saturation", value) + self._validate_range("saturation", value) self.saturation = value - self._apply_to_sdk("saturation", value) + if self._camera and hasattr(self._camera, '_hcam'): + self._camera._hcam.put_Saturation(value) def set_brightness(self, value: int) -> None: - """Set brightness.""" - self._validate_and_set("brightness", value) + self._validate_range("brightness", value) self.brightness = value - self._apply_to_sdk("brightness", value) + if self._camera and hasattr(self._camera, '_hcam'): + self._camera._hcam.put_Brightness(value) def set_contrast(self, value: int) -> None: - """Set contrast.""" - self._validate_and_set("contrast", value) + self._validate_range("contrast", value) self.contrast = value - self._apply_to_sdk("contrast", value) + if self._camera and hasattr(self._camera, '_hcam'): + self._camera._hcam.put_Contrast(value) def set_gamma(self, value: int) -> None: - """Set gamma correction.""" - self._validate_and_set("gamma", value) + self._validate_range("gamma", value) self.gamma = value - self._apply_to_sdk("gamma", value) + if self._camera and 
hasattr(self._camera, '_hcam'): + self._camera._hcam.put_Gamma(value) def set_level_range(self, low: RGBALevel, high: RGBALevel) -> None: - """Set level range mapping.""" low.validate() high.validate() self.levelrange_low = low self.levelrange_high = high - self._apply_to_sdk("levelrange_low", low) + if self._camera and hasattr(self._camera, '_hcam'): + self._camera._hcam.put_LevelRange( + (low.r, low.g, low.b, low.a), + (high.r, high.g, high.b, high.a) + ) - # Resolution methods + def set_fan(self, enabled: bool) -> None: + self.fan_enabled = enabled + if self._camera and hasattr(self._camera, '_hcam'): + self._camera._hcam.put_Option(0x0a, 1 if enabled else 0) + + def set_high_fullwell(self, enabled: bool) -> None: + self.high_fullwell = enabled + if self._camera and hasattr(self._camera, '_hcam'): + self._camera._hcam.put_Option(0x51, 1 if enabled else 0) def get_resolutions(self) -> list[CameraResolution]: - """Get available camera resolutions.""" if self._camera is None or not hasattr(self._camera, '_hcam'): return [] - from base_camera import CameraResolution + from camera.cameras.base_camera import CameraResolution try: resolutions = [] hcam = self._camera._hcam - - # AmScope cameras typically have multiple resolutions count = hcam.ResolutionNumber for i in range(count): width, height = hcam.get_Resolution(i) resolutions.append(CameraResolution(width=width, height=height)) - return resolutions except Exception as e: error(f"Failed to get resolutions: {e}") return [] def get_current_resolution(self) -> tuple[int, int, int]: - """Get current resolution as (index, width, height).""" if self._camera is None or not hasattr(self._camera, '_hcam'): return (0, 0, 0) @@ -497,53 +345,41 @@ def get_current_resolution(self) -> tuple[int, int, int]: return (0, 0, 0) def set_resolution(self, resolution_index: int) -> bool: - """Set camera resolution.""" if self._camera is None or not hasattr(self._camera, '_hcam'): return False try: hcam = self._camera._hcam - - # 
Validate index if not (0 <= resolution_index < hcam.ResolutionNumber): error(f"Invalid resolution index: {resolution_index}") return False - # Apply resolution hcam.put_eSize(resolution_index) self.resolution_index = resolution_index - info(f"Resolution set to index {resolution_index}") return True - except Exception as e: error(f"Failed to set resolution: {e}") return False def get_still_resolutions(self) -> list[CameraResolution]: - """Get available still image resolutions.""" if self._camera is None or not hasattr(self._camera, '_hcam'): return [] - from base_camera import CameraResolution + from camera.cameras.base_camera import CameraResolution try: resolutions = [] hcam = self._camera._hcam - count = hcam.StillResolutionNumber for i in range(count): width, height = hcam.get_StillResolution(i) resolutions.append(CameraResolution(width=width, height=height)) - return resolutions except Exception as e: error(f"Failed to get still resolutions: {e}") return [] - # Exposure time methods - def get_exposure_time(self) -> int: - """Get current exposure time in microseconds.""" if self._camera is None or not hasattr(self._camera, '_hcam'): return self.exposure_time_us @@ -553,106 +389,17 @@ def get_exposure_time(self) -> int: error(f"Failed to get exposure time: {e}") return self.exposure_time_us - def set_exposure_time(self, time_us: int) -> bool: - """Set exposure time in microseconds.""" - self._validate_and_set("exposure_time_us", time_us) - self.exposure_time_us = time_us - self._apply_to_sdk("exposure_time_us", time_us) - return True - - # Gain methods - - def get_gain(self) -> int: - """Get current gain in percent.""" - if self._camera is None or not hasattr(self._camera, '_hcam'): - return self.gain_percent - - try: - return self._camera._hcam.get_ExpoAGain() - except Exception as e: - error(f"Failed to get gain: {e}") - return self.gain_percent - - def set_gain(self, gain_percent: int) -> bool: - """Set gain in percent.""" - 
self._validate_and_set("gain_percent", gain_percent) - self.gain_percent = gain_percent - self._apply_to_sdk("gain_percent", gain_percent) - return True - - # AmScope-specific hardware control methods - - def set_fan(self, enabled: bool) -> None: - """Enable or disable cooling fan.""" - self._validate_and_set("fan_enabled", enabled) - self.fan_enabled = enabled - self._apply_to_sdk("fan_enabled", enabled) - info(f"Cooling fan {'enabled' if enabled else 'disabled'}") - - def set_tec(self, enabled: bool) -> None: - """Enable or disable TEC cooler.""" - self._validate_and_set("tec_enabled", enabled) - self.tec_enabled = enabled - self._apply_to_sdk("tec_enabled", enabled) - info(f"TEC cooler {'enabled' if enabled else 'disabled'}") - - def set_tec_target(self, temperature: int) -> None: - """Set TEC target temperature in Celsius.""" - self._validate_and_set("tec_target", temperature) - self.tec_target = temperature - self._apply_to_sdk("tec_target", temperature) - info(f"TEC target temperature set to {temperature}°C") - - def set_low_noise_mode(self, enabled: bool) -> None: - """Enable or disable low noise mode.""" - self._validate_and_set("low_noise_mode", enabled) - self.low_noise_mode = enabled - self._apply_to_sdk("low_noise_mode", enabled) - info(f"Low noise mode {'enabled' if enabled else 'disabled'}") - - def set_high_fullwell(self, enabled: bool) -> None: - """Enable or disable high full-well capacity mode.""" - self._validate_and_set("high_fullwell", enabled) - self.high_fullwell = enabled - self._apply_to_sdk("high_fullwell", enabled) - info(f"High full-well mode {'enabled' if enabled else 'disabled'}") - - def set_test_pattern(self, enabled: bool) -> None: - """Enable or disable test pattern.""" - self._validate_and_set("test_pattern", enabled) - self.test_pattern = enabled - self._apply_to_sdk("test_pattern", enabled) - info(f"Test pattern {'enabled' if enabled else 'disabled'}") - - def set_demosaic_algorithm(self, algorithm: int) -> None: - """Set 
demosaic algorithm (0=RGGB, 1=BGGR, 2=GRBG, 3=GBRG).""" - self._validate_and_set("demosaic_algorithm", algorithm) - self.demosaic_algorithm = algorithm - self._apply_to_sdk("demosaic_algorithm", algorithm) - info(f"Demosaic algorithm set to {algorithm}") - - # Apply and refresh methods - def apply_to_camera(self, camera: BaseCamera) -> None: - """ - Apply all settings to camera hardware. - - Args: - camera: Camera instance to apply settings to - """ self._camera = camera - info(f"Applying all settings to camera {camera.model}") + info(f"Applying settings to camera {camera.model}") try: - # Apply all settings in logical order self.set_auto_exposure(self.auto_expo) self.set_exposure(self.exposure) self.set_exposure_time(self.exposure_time_us) self.set_gain(self.gain_percent) - self.set_temp(self.temp) - self.set_tint(self.tint) - self.set_white_balance_gain(self.wbgain) + self.set_temp_tint(self.temp, self.tint) self.set_hue(self.hue) self.set_saturation(self.saturation) @@ -662,27 +409,15 @@ def apply_to_camera(self, camera: BaseCamera) -> None: self.set_level_range(self.levelrange_low, self.levelrange_high) - # AmScope-specific hardware controls self.set_fan(self.fan_enabled) - self.set_tec(self.tec_enabled) - self.set_tec_target(self.tec_target) - self.set_low_noise_mode(self.low_noise_mode) self.set_high_fullwell(self.high_fullwell) - self.set_test_pattern(self.test_pattern) - self.set_demosaic_algorithm(self.demosaic_algorithm) - - info("All settings applied successfully") + info("Successfully applied all settings to camera") except Exception as e: - exception(f"Failed to apply settings to camera") + exception(f"Failed to apply settings to camera: {e}") + raise def refresh_from_camera(self, camera: BaseCamera) -> None: - """ - Read all current settings from camera hardware. 
- - Args: - camera: Camera instance to read from - """ self._camera = camera info(f"Refreshing settings from camera {camera.model}") @@ -693,45 +428,30 @@ def refresh_from_camera(self, camera: BaseCamera) -> None: hcam = camera._hcam try: - # Read exposure settings self.auto_expo = bool(hcam.get_AutoExpoEnable()) self.exposure = hcam.get_AutoExpoTarget() self.exposure_time_us = hcam.get_ExpoTime() self.gain_percent = hcam.get_ExpoAGain() - # Read white balance temp, tint = hcam.get_TempTint() self.temp = temp self.tint = tint - wb_gains = hcam.get_WhiteBalanceGain() - self.wbgain = RGBGain(r=wb_gains[0], g=wb_gains[1], b=wb_gains[2]) - - # Read color adjustments self.hue = hcam.get_Hue() self.saturation = hcam.get_Saturation() self.brightness = hcam.get_Brightness() self.contrast = hcam.get_Contrast() self.gamma = hcam.get_Gamma() - # Read level range low, high = hcam.get_LevelRange() self.levelrange_low = RGBALevel(r=low[0], g=low[1], b=low[2], a=low[3]) self.levelrange_high = RGBALevel(r=high[0], g=high[1], b=high[2], a=high[3]) - # Read resolution self.resolution_index = hcam.get_eSize() - # Read AmScope-specific hardware settings - self.fan_enabled = bool(hcam.get_Option(0x0a)) # OPTION_FAN - self.tec_enabled = bool(hcam.get_Option(0x08)) # OPTION_TEC - self.tec_target = hcam.get_Option(0x0c) # OPTION_TECTARGET - self.low_noise_mode = bool(hcam.get_Option(0x53)) # OPTION_LOW_NOISE - self.high_fullwell = bool(hcam.get_Option(0x51)) # OPTION_HIGH_FULLWELL - self.test_pattern = bool(hcam.get_Option(0x2c)) # OPTION_TESTPATTERN - self.demosaic_algorithm = hcam.get_Option(0x5a) # OPTION_DEMOSAIC + self.fan_enabled = bool(hcam.get_Option(0x0a)) + self.high_fullwell = bool(hcam.get_Option(0x51)) info("Successfully refreshed all settings from camera") - except Exception as e: - exception(f"Failed to refresh settings from camera") \ No newline at end of file + exception(f"Failed to refresh settings from camera: {e}") \ No newline at end of file diff --git 
a/camera/settings/camera_settings.py b/camera/settings/camera_settings.py index 0275572..813c83a 100644 --- a/camera/settings/camera_settings.py +++ b/camera/settings/camera_settings.py @@ -1,11 +1,10 @@ from __future__ import annotations from abc import ABC, abstractmethod - from dataclasses import dataclass, field from enum import Enum -from typing import NamedTuple, Union, TYPE_CHECKING from pathlib import Path +from typing import Any, NamedTuple, TYPE_CHECKING from generic_config import ConfigManager from logger import info, debug, exception @@ -14,90 +13,45 @@ from camera.cameras.base_camera import BaseCamera, CameraResolution -# ------------------------- -# Enums for type safety -# ------------------------- - class FileFormat(str, Enum): - """Supported image file formats.""" PNG = 'png' TIFF = 'tiff' JPEG = 'jpeg' BMP = 'bmp' -# ------------------------- -# Type-safe tuples -# ------------------------- class RGBALevel(NamedTuple): - """RGBA level range values (0-255 each).""" r: int g: int b: int a: int def validate(self) -> None: - """Ensure all values are in valid range.""" for name, value in [('r', self.r), ('g', self.g), ('b', self.b), ('a', self.a)]: if not (0 <= value <= 255): raise ValueError(f"RGBALevel.{name} must be in range [0, 255], got {value}") -class RGBGain(NamedTuple): - """RGB white balance gain values (-127 to 127 each).""" - r: int - g: int - b: int - - def validate(self) -> None: - """Ensure all values are in valid range.""" - for name, value in [('r', self.r), ('g', self.g), ('b', self.b)]: - if not (-127 <= value <= 127): - raise ValueError(f"RGBGain.{name} must be in range [-127, 127], got {value}") - - -# ------------------------- -# GUI Metadata System -# ------------------------- class SettingType(str, Enum): - """Types of settings for GUI rendering.""" - BOOL = "bool" # Checkbox - RANGE = "range" # Slider with min/max - DROPDOWN = "dropdown" # Combo box with choices - RGBA_LEVEL = "rgba_level" # Custom RGBA widget - RGB_GAIN = 
"rgb_gain" # Custom RGB widget + BOOL = "bool" + RANGE = "range" + DROPDOWN = "dropdown" + RGBA_LEVEL = "rgba_level" @dataclass class SettingMetadata: - """ - Metadata describing a setting for GUI generation. - - This allows the GUI to automatically create appropriate controls - for each setting without hardcoding knowledge of the settings. - """ - name: str # Parameter name (e.g., "exposure") - display_name: str # Human-readable name (e.g., "Exposure") - setting_type: SettingType # Type of control to render - description: str = "" # Tooltip/help text - - # For RANGE type + name: str + display_name: str + setting_type: SettingType + description: str = "" min_value: int | None = None max_value: int | None = None - - # For DROPDOWN type choices: list[str] | None = None - - # Grouping for organized GUI group: str = "General" - - # Whether this setting can be changed while camera is running runtime_changeable: bool = True -# ------------------------- -# Settings dataclass -# ------------------------- @dataclass class CameraSettings(ABC): """ @@ -141,15 +95,6 @@ def get_metadata(cls) -> list[SettingMetadata]: max_value=220, group="Exposure", ), - SettingMetadata( - name="temp", - display_name="Color Temperature", - setting_type=SettingType.RANGE, - description="White balance temperature in Kelvin", - min_value=2000, - max_value=15000, - group="White Balance", - ), # ... all other settings ] @@ -160,7 +105,6 @@ def get_metadata(cls) -> list[SettingMetadata]: - Access hardware directly to apply the setting - Example: def set_exposure(self, value: int) -> None: - # Get range from metadata metadata = {m.name: m for m in self.get_metadata()} meta = metadata['exposure'] if not (meta.min_value <= value <= meta.max_value): @@ -173,136 +117,47 @@ def set_exposure(self, value: int) -> None: 3. 
Implement refresh_from_camera(): - Read all current settings from camera hardware - Update all dataclass fields with hardware values - - Example: - def refresh_from_camera(self, camera: BaseCamera) -> None: - self._camera = camera - if hasattr(camera, '_device'): - self.auto_expo = bool(camera._sdk.get_AutoExpoEnable(camera._device)) - self.exposure = camera._sdk.get_AutoExpoTarget(camera._device) - # ... read all other settings - - 4. Implement resolution and exposure/gain methods: + + 4. Implement resolution and exposure methods: - get_resolutions(), set_resolution(), get_current_resolution() - get_exposure_time(), set_exposure_time() - - get_gain(), set_gain() Note on Default Values: - Default values are loaded from YAML files by CameraSettingsManager - Subclasses do NOT need a create_default() method - The YAML file serves as the default configuration - - Example Complete Subclass: - class AmScopeSettings(CameraSettings): - @classmethod - def get_metadata(cls) -> list[SettingMetadata]: - return [ - SettingMetadata(name="exposure", display_name="Exposure", ...), - # ... all settings with ranges - ] - - def set_exposure(self, value: int) -> None: - # Validate using metadata, update field, access SDK - pass - - def refresh_from_camera(self, camera: BaseCamera) -> None: - # Read all settings from SDK - pass - - # ... 
implement all other abstract methods """ - # Version tracking version: str - - # Image processing parameters (subclasses must provide defaults via factory methods) auto_expo: bool - exposure: int # Auto Exposure Target - exposure_time_us: int # Manual exposure time in microseconds - gain_percent: int # Gain in percent - resolution_index: int # Selected resolution index - temp: int # White balance temperature - tint: int # White balance tint + exposure: int + exposure_time_us: int + resolution_index: int + tint: int contrast: int hue: int saturation: int brightness: int gamma: int - - # Complex parameters (subclasses must provide defaults via factory methods) levelrange_low: RGBALevel levelrange_high: RGBALevel - wbgain: RGBGain - - # Tone mapping and format (subclasses must provide defaults via factory methods) fformat: FileFormat - # Private - reference to camera (set by apply_to_camera) _camera: BaseCamera | None = field(default=None, repr=False, compare=False) - @classmethod - @abstractmethod - def get_metadata(cls) -> list[SettingMetadata]: - """ - Get metadata for all settings to enable GUI generation and validation. - - This is the SINGLE SOURCE OF TRUTH for setting information including: - - Display names and descriptions - - Valid ranges (min/max values) - - Setting types (bool, range, dropdown, etc.) - - Grouping and organization - - Subclasses MUST override this method to provide metadata specific to their camera model. 
- - Returns: - List of SettingMetadata objects describing each setting + def __post_init__(self) -> None: + if isinstance(self.fformat, str): + self.fformat = FileFormat(self.fformat) - Example implementation in AmScopeSettings: - @classmethod - def get_metadata(cls) -> list[SettingMetadata]: - return [ - SettingMetadata( - name="auto_expo", - display_name="Auto Exposure", - setting_type=SettingType.BOOL, - description="Enable automatic exposure control", - group="Exposure", - ), - SettingMetadata( - name="exposure", - display_name="Exposure Target", - setting_type=SettingType.RANGE, - description="Target brightness for auto exposure", - min_value=16, - max_value=220, - group="Exposure", - ), - SettingMetadata( - name="temp", - display_name="Color Temperature", - setting_type=SettingType.RANGE, - description="White balance temperature in Kelvin", - min_value=2000, - max_value=15000, - group="White Balance", - ), - # ... all other settings - ] - """ - pass + if isinstance(self.levelrange_low, (tuple, list)): + self.levelrange_low = RGBALevel(*self.levelrange_low) + if isinstance(self.levelrange_high, (tuple, list)): + self.levelrange_high = RGBALevel(*self.levelrange_high) def validate(self) -> None: - """ - Validate all settings are within acceptable ranges. - - Uses get_metadata() as the single source of truth for valid ranges. 
- - Raises: - ValueError: If any parameter is outside its valid range - """ metadata_list = self.get_metadata() metadata_by_name = {m.name: m for m in metadata_list} - # Validate simple numeric parameters for name, meta in metadata_by_name.items(): if meta.setting_type == SettingType.RANGE: value = getattr(self, name, None) @@ -312,7 +167,6 @@ def validate(self) -> None: f"{name} = {value} is outside valid range [{meta.min_value}, {meta.max_value}]" ) - # Validate complex types try: self.levelrange_low.validate() except ValueError as e: @@ -322,61 +176,23 @@ def validate(self) -> None: self.levelrange_high.validate() except ValueError as e: raise ValueError(f"levelrange_high invalid: {e}") from e - - try: - self.wbgain.validate() - except ValueError as e: - raise ValueError(f"wbgain invalid: {e}") from e - - # Validate enum types + if not isinstance(self.fformat, FileFormat): raise ValueError(f"fformat must be a FileFormat enum, got {type(self.fformat)}") - def __post_init__(self) -> None: - """ - Post-initialization hook to ensure enums are converted from strings. - """ - # Convert string values to enums if needed - if isinstance(self.fformat, str): - self.fformat = FileFormat(self.fformat) - - # Convert tuples/lists to NamedTuples if needed - if isinstance(self.levelrange_low, (tuple, list)): - self.levelrange_low = RGBALevel(*self.levelrange_low) - if isinstance(self.levelrange_high, (tuple, list)): - self.levelrange_high = RGBALevel(*self.levelrange_high) - if isinstance(self.wbgain, (tuple, list)): - self.wbgain = RGBGain(*self.wbgain) - - # ------------------------- - # Camera Manipulation - # ------------------------- + @classmethod + @abstractmethod + def get_metadata(cls) -> list[SettingMetadata]: + pass def apply_to_camera(self, camera: BaseCamera) -> None: - """ - Apply all settings to the camera hardware. - - This is the main entry point for pushing settings to the camera. 
- It calls individual setter methods which handle the low-level - camera API calls. - - Args: - camera: The camera instance to apply settings to - - Example: - >>> settings = manager.load() - >>> settings.apply_to_camera(camera) - """ self._camera = camera info(f"Applying settings to camera {camera.model}") try: - # Apply each setting in logical order self.set_auto_exposure(self.auto_expo) self.set_exposure(self.exposure) - self.set_temperature(self.temp) self.set_tint(self.tint) - self.set_wb_gain(self.wbgain) self.set_contrast(self.contrast) self.set_hue(self.hue) self.set_saturation(self.saturation) @@ -390,352 +206,119 @@ def apply_to_camera(self, camera: BaseCamera) -> None: exception(f"Failed to apply settings to camera: {e}") raise - # Abstract setter methods - subclasses MUST implement these - # Each method should: - # 1. Validate the input value - # 2. Update the corresponding field - # 3. Access the SDK/hardware directly to apply the change - # 4. Log the change with debug() - @abstractmethod def set_auto_exposure(self, enabled: bool) -> None: - """ - Enable/disable auto exposure. - - Subclasses must implement to access SDK/hardware directly. - - Example: - if self._camera and hasattr(self._camera, '_sdk'): - self._camera._sdk.put_AutoExpoEnable(self._camera._device, enabled) - """ pass @abstractmethod def set_exposure(self, value: int) -> None: - """ - Set exposure target. - - Subclasses must: - 1. Validate value against ranges from get_metadata() - 2. Update field: self.exposure = value - 3. Access hardware directly - """ - pass - - @abstractmethod - def set_temperature(self, value: int) -> None: - """ - Set white balance temperature. - - Subclasses must: - 1. Validate value against ranges from get_metadata() - 2. Update field: self.temp = value - 3. Access hardware directly - """ pass @abstractmethod def set_tint(self, value: int) -> None: - """ - Set white balance tint. - - Subclasses must: - 1. Validate value against ranges from get_metadata() - 2. 
Update field: self.tint = value - 3. Access hardware directly - """ - pass - - @abstractmethod - def set_wb_gain(self, gain: RGBGain) -> None: - """ - Set RGB white balance gains. - - Subclasses must: - 1. Validate: gain.validate() - 2. Update field: self.wbgain = gain - 3. Access hardware directly - """ pass @abstractmethod def set_contrast(self, value: int) -> None: - """ - Set contrast. - - Subclasses must: - 1. Validate value against ranges from get_metadata() - 2. Update field: self.contrast = value - 3. Access hardware directly - """ pass @abstractmethod def set_hue(self, value: int) -> None: - """ - Set hue. - - Subclasses must: - 1. Validate value against ranges from get_metadata() - 2. Update field: self.hue = value - 3. Access hardware directly - """ pass @abstractmethod def set_saturation(self, value: int) -> None: - """ - Set saturation. - - Subclasses must: - 1. Validate value against ranges from get_metadata() - 2. Update field: self.saturation = value - 3. Access hardware directly - """ pass @abstractmethod def set_brightness(self, value: int) -> None: - """ - Set brightness. - - Subclasses must: - 1. Validate value against ranges from get_metadata() - 2. Update field: self.brightness = value - 3. Access hardware directly - """ pass @abstractmethod def set_gamma(self, value: int) -> None: - """ - Set gamma correction. - - Subclasses must: - 1. Validate value against ranges from get_metadata() - 2. Update field: self.gamma = value - 3. Access hardware directly - """ pass @abstractmethod def set_level_range(self, low: RGBALevel, high: RGBALevel) -> None: - """ - Set level range mapping. - - Subclasses must: - 1. Validate: low.validate() and high.validate() - 2. Update fields: self.levelrange_low = low, self.levelrange_high = high - 3. Access hardware directly - """ pass - # Resolution settings - @abstractmethod def get_resolutions(self) -> list['CameraResolution']: - """ - Get available camera resolutions. 
- - Returns: - List of available resolutions - - Example: - return [ - CameraResolution(width=2592, height=1944), - CameraResolution(width=1920, height=1080), - CameraResolution(width=1280, height=720), - ] - """ pass @abstractmethod def get_current_resolution(self) -> tuple[int, int, int]: - """ - Get current resolution. - - Returns: - Tuple of (resolution_index, width, height) - """ pass @abstractmethod def set_resolution(self, resolution_index: int) -> bool: - """ - Set camera resolution. - - Args: - resolution_index: Index of resolution to use - - Returns: - True if successful, False otherwise - - Subclasses must: - 1. Validate resolution_index is valid - 2. Update field: self.resolution_index = resolution_index - 3. Access hardware to change resolution - """ pass def get_still_resolutions(self) -> list['CameraResolution']: - """ - Get available still image resolutions. - - For cameras that support separate still image capture at - different resolutions than the video stream. - - Returns: - List of available still resolutions - """ return [] - # Exposure time settings (manual exposure) - @abstractmethod def get_exposure_time(self) -> int: - """ - Get current exposure time. - - Returns: - Current exposure time in microseconds - """ pass @abstractmethod def set_exposure_time(self, time_us: int) -> bool: - """ - Set exposure time (manual exposure control). - - Args: - time_us: Exposure time in microseconds - - Returns: - True if successful, False otherwise - - Subclasses must: - 1. Validate time_us against ranges from get_metadata() - 2. Update field: self.exposure_time_us = time_us - 3. Access hardware to set exposure time - """ - pass - - # Gain settings - - @abstractmethod - def get_gain(self) -> int: - """ - Get current gain. - - Returns: - Current gain in percent - """ - pass - - @abstractmethod - def set_gain(self, gain_percent: int) -> bool: - """ - Set gain. 
- - Args: - gain_percent: Gain in percent - - Returns: - True if successful, False otherwise - - Subclasses must: - 1. Validate gain_percent against ranges from get_metadata() - 2. Update field: self.gain_percent = gain_percent - 3. Access hardware to set gain - """ pass - # Getter methods - read current values from camera - @abstractmethod def refresh_from_camera(self, camera: BaseCamera) -> None: - """ - Read all current settings from camera hardware. - - Subclasses MUST override this method to read settings from their SDK/hardware. - - This is useful to sync the settings object with the actual - camera state, for example after manual adjustments or after - camera initialization. - - Args: - camera: The camera instance to read from - - Example implementation in AmScopeSettings: - def refresh_from_camera(self, camera: BaseCamera) -> None: - self._camera = camera - info(f"Refreshing settings from camera {camera.model}") - - if hasattr(camera, '_device'): - self.auto_expo = bool(camera._sdk.get_AutoExpoEnable(camera._device)) - self.exposure = camera._sdk.get_AutoExpoTarget(camera._device) - temp, tint = camera._sdk.get_TempTint(camera._device) - self.temp = temp - self.tint = tint - # ... read all other settings from SDK - - info("Successfully refreshed all settings from camera") - """ pass -# ------------------------- -# Specialized manager -# ------------------------- class CameraSettingsManager(ConfigManager[CameraSettings]): """ - Specialized configuration manager for a single camera model. - - Each camera model should have its own manager instance. - This ensures settings don't bleed between incompatible models. - - Example usage: - >>> # Create manager for MU500 - >>> manager = CameraSettingsManager(model="MU500") - >>> - >>> # Load settings and apply to camera - >>> settings = manager.load() - >>> settings.apply_to_camera(camera) - >>> - >>> # User changes settings via GUI... 
- >>> settings.set_exposure(150) - >>> settings.set_contrast(10) - >>> - >>> # Save when user clicks "Save" - >>> manager.save(settings) - >>> - >>> # Reset to saved settings - >>> settings = manager.load() - >>> settings.apply_to_camera(camera) - >>> - >>> # Reset to factory defaults - >>> settings = manager.restore_defaults() - >>> settings.apply_to_camera(camera) + Settings manager for camera configurations. + + Manages camera-specific settings directories and handles serialization + of camera settings with custom types (RGBALevel, RGBGain, FileFormat). """ - def __init__( - self, - *, - model: str, - base_dir: Union[str, Path] = "./config/cameras", - default_filename: str = "default_settings.yaml", - backup_dirname: str = "backups", - backup_keep: int = 5, - ) -> None: - # Set root_dir to the model-specific directory - model_dir = Path(base_dir) / model + def __init__(self, model: str, settings_class: type[CameraSettings]): + self.model = model + self.settings_class = settings_class + + root_dir = Path("./config/cameras") / model super().__init__( - CameraSettings, - root_dir=model_dir, - default_filename=default_filename, - backup_dirname=backup_dirname, - backup_keep=backup_keep, + config_type=f"camera_settings_{model}", + root_dir=root_dir ) - self.model = model - info(f"Initialized CameraSettingsManager for model '{model}' at {model_dir}") \ No newline at end of file + debug(f"Initialized CameraSettingsManager for model '{model}' at {self.root_dir}") + + def from_dict(self, data: dict[str, Any]) -> CameraSettings: + processed_data = data.copy() + + if 'levelrange_low' in processed_data and isinstance(processed_data['levelrange_low'], dict): + processed_data['levelrange_low'] = RGBALevel(**processed_data['levelrange_low']) + + if 'levelrange_high' in processed_data and isinstance(processed_data['levelrange_high'], dict): + processed_data['levelrange_high'] = RGBALevel(**processed_data['levelrange_high']) + + settings = self.settings_class(**processed_data) + 
return settings + + def to_dict(self, settings: CameraSettings) -> dict[str, Any]: + data = {} + + for field_name in settings.__dataclass_fields__: + if field_name.startswith('_'): + continue + + value = getattr(settings, field_name) + + if isinstance(value, RGBALevel): + data[field_name] = value._asdict() + elif isinstance(value, FileFormat): + data[field_name] = value.value + elif isinstance(value, Enum): + data[field_name] = value.value + else: + data[field_name] = value + + return data \ No newline at end of file diff --git a/config/.gitignore b/config/.gitignore new file mode 100644 index 0000000..83a2cad --- /dev/null +++ b/config/.gitignore @@ -0,0 +1,2 @@ +/cameras/* +/forge/* \ No newline at end of file diff --git a/config/cameras/amscope/default_settings.yaml b/config/cameras/amscope/default_settings.yaml deleted file mode 100644 index 3990878..0000000 --- a/config/cameras/amscope/default_settings.yaml +++ /dev/null @@ -1,60 +0,0 @@ -# Camera configuration (values + ranges). Ranges mirror settings.py. -auto_expo: false - -# Auto Exposure Target -exposure: 16 -exposure_min: 16 -exposure_max: 220 - -# White balance temperature -temp: 10794 -temp_min: 2000 -temp_max: 15000 - -# White balance tint -tint: 925 -tint_min: 200 -tint_max: 2500 - -# Level ranges (RGBA). Min/max apply per channel. -levelrange_low: [0, 0, 0, 0] -levelrange_high: [255, 255, 255, 255] -levelrange_min: 0 -levelrange_max: 255 - -contrast: 10 -contrast_min: -100 -contrast_max: 100 - -hue: 0 -hue_min: -180 -hue_max: 180 - -saturation: 45 -saturation_min: 0 -saturation_max: 255 - -brightness: -3 -brightness_min: -64 -brightness_max: 64 - -gamma: 100 -gamma_min: 20 -gamma_max: 180 - -# White balance gains (R, G, B). Min/max apply per channel. 
-wbgain: [0, 0, 0] -wbgain_min: -127 -wbgain_max: 127 - -sharpening: 48 -sharpening_min: 0 -sharpening_max: 500 - -# 0/1; range is discrete but included for completeness -linear: 1 -linear_min: 0 -linear_max: 1 - -curve: Polynomial -fformat: tiff diff --git a/config/forge/default_settings.yaml b/config/forge/default_settings.yaml index daf26b3..3afb528 100644 --- a/config/forge/default_settings.yaml +++ b/config/forge/default_settings.yaml @@ -1 +1,3 @@ -version: "1.2" \ No newline at end of file +config_type: forge_settings +config_version: '1.2' +version: '1.2' diff --git a/forgeConfig.py b/forgeConfig.py index 84ff883..c721321 100644 --- a/forgeConfig.py +++ b/forgeConfig.py @@ -1,16 +1,18 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Union from pathlib import Path +from typing import Any, Union from generic_config import ConfigManager - +from logger import info @dataclass class ForgeSettings: - """Forge application settings.""" - version: str = "1.2" + """Forge application settings""" + + version: str = "1.2" # Version from last startup + show_patchnotes: bool = False # Runtime flag - set when version changes, not saved def validate(self) -> None: """ @@ -19,7 +21,6 @@ def validate(self) -> None: Raises: ValueError: If any setting is invalid """ - # Add validation logic as needed if not isinstance(self.version, str) or not self.version: raise ValueError("version must be a non-empty string") @@ -27,18 +28,9 @@ def validate(self) -> None: class ForgeSettingsManager(ConfigManager[ForgeSettings]): """ Configuration manager for Forge application settings. 
- - Directory structure: - config/forge/ - settings.yaml - default_settings.yaml - backups/ - - Example usage: - >>> forge_mgr = ForgeSettingsManager() - >>> settings = forge_mgr.load() - >>> settings.version = "1.2" - >>> forge_mgr.save(settings) + + When a version mismatch is detected during load, the migration + updates the stored version and sets show_patchnotes flag. """ def __init__( @@ -50,9 +42,86 @@ def __init__( backup_keep: int = 5, ) -> None: super().__init__( - ForgeSettings, + config_type="forge_settings", root_dir=root_dir, default_filename=default_filename, backup_dirname=backup_dirname, backup_keep=backup_keep, - ) \ No newline at end of file + ) + + def migrate( + self, + data: dict[str, Any], + from_version: str, + to_version: str + ) -> dict[str, Any]: + """ + Migrate Forge settings and update version. + + When version changes: + 1. Updates the stored version to current + 2. Sets show_patchnotes flag (handled after from_dict) + + Args: + data: Dictionary containing settings data + from_version: Version from the file + to_version: Current Forge version + + Returns: + Migrated dictionary with updated version + """ + info(f"Forge version changed: {from_version} -> {to_version}") + + # Update version to current + data["version"] = to_version + + # Add any future version-specific migrations here + + return data + + def from_dict(self, data: dict[str, Any]) -> ForgeSettings: + """ + Convert dictionary to ForgeSettings object. + + Sets show_patchnotes flag if migration occurred. 
+ + Args: + data: Dictionary containing settings data + + Returns: + ForgeSettings instance with show_patchnotes set if needed + """ + # Handle empty dict (fresh instance) + if not data: + settings = ForgeSettings() + else: + # Extract only valid fields for ForgeSettings + valid_fields = {"version"} + filtered_data = {k: v for k, v in data.items() if k in valid_fields} + settings = ForgeSettings(**filtered_data) + + # If migration happened, set the show_patchnotes flag + if settings.version != self.get_forge_version(): + settings.show_patchnotes = True + info("Patch notes flag set - new version detected") + + # Save the updated version + self.save(settings) + + return settings + + def to_dict(self, settings: ForgeSettings) -> dict[str, Any]: + """ + Convert ForgeSettings object to dictionary. + + Only includes fields that should be saved (excludes show_patchnotes). + + Args: + settings: ForgeSettings instance to convert + + Returns: + Dictionary representation + """ + return { + "version": settings.version, + } diff --git a/generic_config.py b/generic_config.py index c676c0a..ab70090 100644 --- a/generic_config.py +++ b/generic_config.py @@ -1,14 +1,15 @@ # config_manager.py from __future__ import annotations +from abc import ABC, abstractmethod from contextlib import contextmanager -from dataclasses import asdict, fields, is_dataclass from pathlib import Path -from typing import Any, Dict, Generic, Iterator, List, Type, TypeVar, Union +from typing import Any, Generic, Iterator, TypeVar, Union import shutil import time +import yaml -from logger import get_logger +from logger import info, debug, error, warning # File/dir names are generic—usable for ANY config ACTIVE_FILENAME = "settings.yaml" @@ -24,11 +25,15 @@ class ConfigValidationError(Exception): pass -class ConfigManager(Generic[S]): +class ConfigManager(Generic[S], ABC): """ Generic YAML-backed config manager for ANY dataclass-based settings. 
Manages a single configuration directory with active settings, defaults, and backups. + All config files include metadata fields: + - config_type: Identifies which config loader to use + - config_version: The Forge version that created/last modified this config + Directory structure: root_dir/ settings.yaml # Active settings @@ -37,6 +42,10 @@ class ConfigManager(Generic[S]): settings.20250128-143052.yaml settings.20250128-120301.yaml + Child classes must implement: + - from_dict(data: dict[str, Any]) -> S: Convert dictionary to settings object + - to_dict(settings: S) -> dict[str, Any]: Convert settings object to dictionary + Example: >>> @dataclass ... class MySettings: @@ -45,10 +54,20 @@ class ConfigManager(Generic[S]): ... if self.value < 0: ... raise ValueError("value must be non-negative") >>> - >>> manager = ConfigManager[MySettings]( - ... MySettings, - ... root_dir="./config/my_component" - ... ) + >>> class MySettingsManager(ConfigManager[MySettings]): + ... def __init__(self): + ... super().__init__( + ... config_type="my_settings", + ... root_dir="./config/my_component" + ... ) + ... + ... def from_dict(self, data: dict[str, Any]) -> MySettings: + ... return MySettings(**data) + ... + ... def to_dict(self, settings: MySettings) -> dict[str, Any]: + ... return asdict(settings) + >>> + >>> manager = MySettingsManager() >>> settings = manager.load() >>> settings.value = 20 >>> manager.save(settings) @@ -56,193 +75,353 @@ class ConfigManager(Generic[S]): def __init__( self, - schema_cls: Type[S], + config_type: str, *, root_dir: Union[str, Path] = "./config", default_filename: str = DEFAULT_FILENAME, backup_dirname: str = BACKUP_DIRNAME, backup_keep: int = BACKUP_KEEP, + save_defaults_on_init: bool = True, ) -> None: """ Initialize the config manager. 
Args: - schema_cls: Dataclass type defining the settings schema + config_type: Identifier for this config type (e.g., "camera_settings", "forge_settings") root_dir: Directory for config files (settings, defaults, backups) default_filename: Name for the defaults file backup_dirname: Name for the backups subdirectory backup_keep: Number of backup files to retain (oldest are deleted) + save_defaults_on_init: If True, saves default settings on initialization if none exist Raises: - TypeError: If schema_cls is not a dataclass + ValueError: If config_type is empty """ - if not is_dataclass(schema_cls): - logger = get_logger() - logger.error(f"Attempted to create ConfigManager with non-dataclass type: {schema_cls}") - raise TypeError(f"schema_cls must be a dataclass type, got {type(schema_cls).__name__}") + if not config_type: + raise ValueError("config_type must be a non-empty string") - self.schema_cls = schema_cls + self.config_type = config_type self.root_dir = Path(root_dir).resolve() self.root_dir.mkdir(parents=True, exist_ok=True) self.default_filename = default_filename self.backup_dirname = backup_dirname self.backup_keep = backup_keep - self._logger = get_logger() - self._logger.debug(f"Initialized ConfigManager for {schema_cls.__name__} at {self.root_dir}") - - # ------------------------- - # YAML (de)serialization - # ------------------------- - def _to_dict(self, settings: S) -> Dict[str, Any]: - """Convert settings dataclass to dictionary.""" - return asdict(settings) - - def _from_dict(self, data: Union[Dict[str, Any], None]) -> S: + debug(f"Initialized ConfigManager for '{config_type}' at {self.root_dir}") + + # Save default settings if no settings exist and save_defaults_on_init is True + if save_defaults_on_init: + dp = self.default_path() + ap = self.active_path() + if not dp.exists() and not ap.exists(): + try: + default_settings = self.from_dict({}) + self.write_defaults(default_settings) + info(f"Saved initial default settings for '{config_type}'") + 
except Exception as e: + warning(f"Failed to save initial default settings: {e}") + + @abstractmethod + def from_dict(self, data: dict[str, Any]) -> S: """ - Create settings instance from dictionary. - Only includes fields that are defined in the schema. + Convert a dictionary (loaded from YAML) into a settings object. + + The dictionary will NOT include config_type or config_version fields, + as those are handled separately by the base class. + + Args: + data: Dictionary containing the settings data + + Returns: + Settings object instance + + Raises: + Any exception appropriate for conversion failures """ - data = data or {} - allowed = {f.name for f in fields(self.schema_cls)} - filtered = {k: v for k, v in data.items() if k in allowed} - return self.schema_cls(**filtered) # type: ignore - - def _validate(self, settings: S, context: str = "") -> None: + pass + + @abstractmethod + def to_dict(self, settings: S) -> dict[str, Any]: """ - Validate settings if a validate() method exists. + Convert a settings object into a dictionary for YAML serialization. + + Do NOT include config_type or config_version in the returned dictionary, + as those are added automatically by the base class. 
Args: - settings: Settings instance to validate - context: Additional context for error messages + settings: Settings object to convert - Raises: - ConfigValidationError: If validation fails + Returns: + Dictionary containing the settings data """ - if hasattr(settings, 'validate') and callable(settings.validate): - try: - settings.validate() - self._logger.debug(f"Validation passed{' for ' + context if context else ''}") - except Exception as e: - error_msg = f"Settings validation failed{' for ' + context if context else ''}: {e}" - self._logger.error(error_msg) - raise ConfigValidationError(error_msg) from e - - # ------------------------- - # Path helpers - # ------------------------- + pass + + def migrate( + self, + data: dict[str, Any], + from_version: str, + to_version: str + ) -> dict[str, Any]: + """ + Migrate config data from one version to another. + + Override this method in child classes to handle version migrations. + The base implementation does nothing (no migration). + + Args: + data: Dictionary containing the config data (without metadata fields) + from_version: Version the config was created with + to_version: Current Forge version + + Returns: + Migrated dictionary (or original if no migration needed) + """ + return data + + def get_forge_version(self) -> str: + """Get the current Forge version.""" + from app_context import FORGE_VERSION + return FORGE_VERSION + def active_path(self) -> Path: - """Get the path to the active settings file.""" + """Return path to the active settings file.""" return self.root_dir / ACTIVE_FILENAME def default_path(self) -> Path: - """Get the path to the default settings file.""" + """Return path to the default settings file.""" return self.root_dir / self.default_filename def backup_dir(self) -> Path: - """Get the backup directory, creating it if needed.""" + """Return path to the backup directory.""" bd = self.root_dir / self.backup_dirname - bd.mkdir(parents=True, exist_ok=True) + bd.mkdir(exist_ok=True) return 
bd + + def _add_metadata(self, data: dict[str, Any]) -> dict[str, Any]: + """ + Add config_type and config_version to a data dictionary. + + Args: + data: Dictionary to add metadata to + + Returns: + New dictionary with metadata fields added + """ + return { + "config_type": self.config_type, + "config_version": self.get_forge_version(), + **data, + } + + def _extract_metadata( + self, data: dict[str, Any] + ) -> tuple[str | None, str | None, dict[str, Any]]: + """ + Extract metadata and migrate if needed. + + Args: + data: Dictionary loaded from YAML + + Returns: + Tuple of (config_type, config_version, remaining_data) + The remaining_data will be migrated if version mismatch detected + """ + data = data.copy() # Don't modify the original + + config_type = data.pop("config_type", None) + config_version = data.pop("config_version", None) + + # Validate config_type matches if present + if config_type is not None and config_type != self.config_type: + warning( + f"Config type mismatch: expected '{self.config_type}', " + f"got '{config_type}' in file" + ) + + # Handle migration if version mismatch + if config_version is not None: + current_version = self.get_forge_version() + + if config_version != current_version and current_version != "unknown": + info( + f"Config version mismatch: file has v{config_version}, " + f"current is v{current_version}. Running migration..." + ) + try: + data = self.migrate(data, config_version, current_version) + info("Migration completed successfully") + except Exception as e: + error(f"Migration failed: {e}") + # Continue with unmigrated data - child class should handle it + else: + debug(f"Config version matches: v{config_version}") + + return config_type, config_version, data + + def _load_dict_from_file(self, path: Path) -> dict[str, Any]: + """ + Load a dictionary from a YAML file. 
+ + Args: + path: Path to the YAML file + + Returns: + Dictionary loaded from the file (empty dict if file is empty) + + Raises: + IOError: If file cannot be read or parsed + """ + + try: + with open(path, "r") as f: + data = yaml.safe_load(f) + return data or {} + except Exception as e: + error(f"Failed to load YAML from {path}: {e}") + raise IOError(f"Failed to load config from {path}") from e + + def _save_dict_to_file(self, data: dict[str, Any], path: Path) -> None: + """ + Save a dictionary to a YAML file. + + Args: + data: Dictionary to save + path: Path to save to + + Raises: + IOError: If file cannot be written + """ + + try: + with open(path, "w") as f: + yaml.safe_dump(data, f, sort_keys=False) + debug(f"Saved config to {path.name}") + except Exception as e: + error(f"Failed to save YAML to {path}: {e}") + raise IOError(f"Failed to save config to {path}") from e + + # ------------------------- + # Validation + # ------------------------- + + def _validate(self, settings: S, context: str = "") -> None: + """ + Validate settings if validate() method exists. + + Args: + settings: Settings instance to validate + context: Context string for logging (e.g., "after load", "before save") + + Raises: + ConfigValidationError: If validation fails + """ + if not hasattr(settings, "validate"): + return + + try: + settings.validate() + debug(f"Validation passed{f' ({context})' if context else ''}") + except Exception as e: + error( + f"Validation failed{f' ({context})' if context else ''}: {e}" + ) + raise ConfigValidationError(f"Settings validation failed: {e}") from e # ------------------------- # Backup management # ------------------------- - def _backup_if_exists(self) -> None: + + def _backup_if_exists(self) -> Path | None: """ Create a timestamped backup of the active settings file if it exists. - Also prunes old backups to maintain backup_keep limit. + Cleans up old backups to maintain backup_keep limit. 
+ + Returns: + Path to the created backup, or None if no file to backup """ src = self.active_path() if not src.exists(): - return + return None - ts = time.strftime("%Y%m%d-%H%M%S") - dst = self.backup_dir() / f"{src.stem}.{ts}{src.suffix}" + backup_dir = self.backup_dir() + timestamp = time.strftime("%Y%m%d-%H%M%S") + backup_name = f"{ACTIVE_FILENAME.split('.')[0]}.{timestamp}.yaml" + dst = backup_dir / backup_name try: shutil.copy2(src, dst) - self._logger.info(f"Created backup: {dst.name}") + info(f"Created backup: {backup_name}") except Exception as e: - self._logger.error(f"Failed to create settings backup: {e}") - raise IOError("Failed to create backup") from e + warning(f"Failed to create backup: {e}") + return None - # Prune old backups - try: - backups: List[Path] = sorted( - self.backup_dir().glob(f"{src.stem}.*{src.suffix}"), - key=lambda p: p.stat().st_mtime, - reverse=True, - ) - - if len(backups) > self.backup_keep: - for old in backups[self.backup_keep:]: - try: - self._logger.debug(f"Pruning old backup: {old.name}") - old.unlink(missing_ok=True) - except Exception as e: - self._logger.warning(f"Failed to delete backup {old.name}: {e}") - - self._logger.info(f"Pruned {len(backups) - self.backup_keep} old backup(s)") - except Exception as e: - self._logger.warning(f"Failed to prune old backups: {e}") + # Clean up old backups + self._cleanup_old_backups() + + return dst + + def _cleanup_old_backups(self) -> None: + """Remove old backups beyond the configured limit.""" + backups = self.list_backups() + to_delete = backups[self.backup_keep:] + + for backup in to_delete: + try: + backup.unlink() + debug(f"Deleted old backup: {backup.name}") + except Exception as e: + warning(f"Failed to delete old backup {backup.name}: {e}") # ------------------------- # Public API # ------------------------- + def load(self) -> S: """ - Load settings from the active settings file. - - Attempts to load in order: - 1. Active settings file - 2. Default settings file - 3. 
Fresh instance from schema + Load settings with fallback chain: active → defaults → fresh instance. Returns: Settings instance (validated if validate() method exists) Raises: ConfigValidationError: If loaded settings fail validation + IOError: If all load attempts fail """ - import yaml - - p = self.active_path() - - # Try loading active settings - if p.exists(): + # Try active settings first + ap = self.active_path() + if ap.exists(): try: - with open(p, "r") as f: - data = yaml.safe_load(f) or {} - settings = self._from_dict(data) + data_dict = self._load_dict_from_file(ap) + _, _, clean_data = self._extract_metadata(data_dict) + settings = self.from_dict(clean_data) self._validate(settings, "active settings") - self._logger.info(f"Loaded active settings from {p.name}") + info(f"Loaded active settings from {ap.name}") return settings except ConfigValidationError: raise except Exception as e: - self._logger.error(f"Failed to load settings from {p}: {e}") + error(f"Failed to load active settings from {ap}: {e}") raise IOError("Failed to load active settings") from e # Fallback to defaults dp = self.default_path() if dp.exists(): try: - with open(dp, "r") as f: - data = yaml.safe_load(f) or {} - settings = self._from_dict(data) + data_dict = self._load_dict_from_file(dp) + _, _, clean_data = self._extract_metadata(data_dict) + settings = self.from_dict(clean_data) self._validate(settings, "default settings") - self._logger.info(f"Loaded default settings from {dp.name}") + info(f"Loaded default settings from {dp.name}") return settings except ConfigValidationError: raise except Exception as e: - self._logger.error(f"Failed to load default settings from {dp}: {e}") + error(f"Failed to load default settings from {dp}: {e}") raise IOError("Failed to load default settings") from e # Last resort: create fresh instance - self._logger.info("No existing settings found, using fresh instance") - settings = self.schema_cls() + info("No existing settings found, using fresh 
instance") + settings = self.from_dict({}) self._validate(settings, "fresh instance") return settings @@ -260,22 +439,34 @@ def load_from_file(self, path: Union[str, Path]) -> S: Raises: ConfigValidationError: If loaded settings fail validation - IOError: If file cannot be read + IOError: If file cannot be read or config type mismatch """ - import yaml - p = Path(path) try: - with open(p, "r") as f: - data = yaml.safe_load(f) or {} - settings = self._from_dict(data) + data_dict = self._load_dict_from_file(p) + file_config_type, _, clean_data = self._extract_metadata(data_dict) + + # Check if config_type matches + if file_config_type is not None and file_config_type != self.config_type: + error( + f"Config type mismatch when loading from {p}: " + f"expected '{self.config_type}', got '{file_config_type}'" + ) + raise IOError( + f"Config type mismatch: file is '{file_config_type}', " + f"but this manager expects '{self.config_type}'" + ) + + settings = self.from_dict(clean_data) self._validate(settings, f"file {p.name}") - self._logger.info(f"Loaded settings from file: {p}") + info(f"Loaded settings from file: {p}") return settings except ConfigValidationError: raise + except IOError: + raise except Exception as e: - self._logger.error(f"Failed to load settings from {p}: {e}") + error(f"Failed to load settings from {p}: {e}") raise IOError(f"Failed to load settings from {path}") from e def save(self, settings: S) -> None: @@ -283,6 +474,7 @@ def save(self, settings: S) -> None: Save settings to the active settings file. Creates a backup of existing settings before saving. + Automatically adds config_type and config_version metadata. 
Args: settings: Settings instance to save @@ -291,30 +483,27 @@ def save(self, settings: S) -> None: ConfigValidationError: If settings fail validation IOError: If file cannot be written """ - import yaml - # Validate before saving self._validate(settings, "before save") # Backup existing file self._backup_if_exists() - # Save new settings + # Convert to dict and add metadata + data = self.to_dict(settings) + data_with_metadata = self._add_metadata(data) + + # Save p = self.active_path() - try: - with open(p, "w") as f: - yaml.safe_dump(self._to_dict(settings), f, sort_keys=False) - self._logger.info(f"Saved settings to {p.name}") - except Exception as e: - self._logger.error(f"Failed to save settings to {p}: {e}") - raise IOError("Failed to save settings") from e + self._save_dict_to_file(data_with_metadata, p) + info(f"Saved settings to {p.name}") - def write_defaults(self, settings: Union[S, None] = None) -> Path: + def write_defaults(self, settings: S | None = None) -> Path: """ Write default settings file. Args: - settings: Settings to write as defaults. If None, uses fresh schema instance. + settings: Settings to write as defaults. 
If None, uses from_dict({}) Returns: Path to the written defaults file @@ -323,22 +512,16 @@ def write_defaults(self, settings: Union[S, None] = None) -> Path: ConfigValidationError: If settings fail validation IOError: If file cannot be written """ - import yaml - - settings_to_save = settings or self.schema_cls() + settings_to_save = settings if settings is not None else self.from_dict({}) self._validate(settings_to_save, "defaults") - payload = self._to_dict(settings_to_save) - dp = self.default_path() + data = self.to_dict(settings_to_save) + data_with_metadata = self._add_metadata(data) - try: - with open(dp, "w") as f: - yaml.safe_dump(payload, f, sort_keys=False) - self._logger.info(f"Wrote default settings to {dp.name}") - return dp - except Exception as e: - self._logger.error(f"Failed to write default settings to {dp}: {e}") - raise IOError("Failed to write default settings") from e + dp = self.default_path() + self._save_dict_to_file(data_with_metadata, dp) + info(f"Wrote default settings to {dp.name}") + return dp def restore_defaults(self) -> S: """ @@ -356,7 +539,7 @@ def restore_defaults(self) -> S: defaults = self.load_defaults() self._backup_if_exists() self.save(defaults) - self._logger.info("Restored defaults as active settings") + info("Restored defaults as active settings") return defaults def load_defaults(self) -> S: @@ -370,35 +553,28 @@ def load_defaults(self) -> S: ConfigValidationError: If default settings fail validation IOError: If defaults file cannot be read """ - import yaml - dp = self.default_path() if not dp.exists(): - self._logger.debug("No defaults file, using fresh instance") - settings = self.schema_cls() + debug("No defaults file, using fresh instance") + settings = self.from_dict({}) self._validate(settings, "fresh defaults") return settings try: - with open(dp, "r") as f: - data = yaml.safe_load(f) or {} - settings = self._from_dict(data) + data_dict = self._load_dict_from_file(dp) + _, _, clean_data = 
self._extract_metadata(data_dict) + settings = self.from_dict(clean_data) self._validate(settings, "defaults") - self._logger.info(f"Loaded default settings from {dp.name}") + info(f"Loaded default settings from {dp.name}") return settings except ConfigValidationError: raise except Exception as e: - self._logger.error(f"Failed to load default settings from {dp}: {e}") + error(f"Failed to load default settings from {dp}: {e}") raise IOError("Failed to load defaults") from e - def list_backups(self) -> List[Path]: - """ - List all backup files, most recent first. - - Returns: - List of backup file paths, sorted by modification time (newest first) - """ + def list_backups(self) -> list[Path]: + """List all backup files, sorted by last modified.""" bd = self.backup_dir() try: backups = sorted( @@ -406,10 +582,10 @@ def list_backups(self) -> List[Path]: key=lambda p: p.stat().st_mtime, reverse=True ) - self._logger.debug(f"Found {len(backups)} backup(s)") + debug(f"Found {len(backups)} backup(s)") return backups except Exception as e: - self._logger.warning(f"Failed to list backups: {e}") + warning(f"Failed to list backups: {e}") return [] @contextmanager @@ -432,13 +608,13 @@ def edit(self) -> Iterator[S]: ... 
settings.value = 150 # Auto-saves on successful exit """ - self._logger.debug("Starting edit transaction") + debug("Starting edit transaction") settings = self.load() try: yield settings except Exception as e: - self._logger.error(f"Edit transaction failed: {e}") + error(f"Edit transaction failed: {e}") raise else: self.save(settings) - self._logger.info("Edit transaction completed") + info("Edit transaction completed") From d0e8f31e2b2fe979b3edf65a30269ac55a6c781c Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Thu, 5 Feb 2026 14:04:23 -0900 Subject: [PATCH 25/46] expanded settings and fixed some camera settings --- UI/settings/pages/camera_settings.py | 153 ++++++++++++++---- UI/settings/settings_main.py | 222 +++++++++++++++++++++++---- UI/widgets/camera_preview.py | 22 ++- camera/cameras/amscope_camera.py | 12 +- camera/cameras/base_camera.py | 3 - camera/settings/amscope_settings.py | 83 ++++++---- camera/settings/camera_settings.py | 20 ++- 7 files changed, 414 insertions(+), 101 deletions(-) diff --git a/UI/settings/pages/camera_settings.py b/UI/settings/pages/camera_settings.py index ab8a311..02414ea 100644 --- a/UI/settings/pages/camera_settings.py +++ b/UI/settings/pages/camera_settings.py @@ -16,26 +16,33 @@ QHBoxLayout, QPushButton, QFileDialog, + QScrollArea, + QFrame, + QMessageBox, ) from PySide6.QtCore import Qt, Signal, Slot from app_context import get_app_context from logger import info, error, warning, debug - +from camera.cameras.base_camera import CameraResolution class CameraSettingsWidget(QWidget): """Widget for displaying and editing camera settings""" settings_loaded = Signal(bool, object) # success, result + modifications_changed = Signal(bool) # has_modifications - def __init__(self, parent: QWidget | None = None) -> None: + def __init__(self, parent_dialog=None, parent: QWidget | None = None) -> None: super().__init__(parent) + self.parent_dialog = parent_dialog self.ctx = get_app_context() self._settings_widgets: dict[str, QWidget] = {} 
self._updating_from_camera = False self._modified_settings: set[str] = set() # Track which settings have been modified self._saved_values: dict[str, any] = {} # Store saved values for comparison + self._group_names: list[str] = [] # Track group names in order + self._group_widgets: dict[str, QGroupBox] = {} # Map group names to widgets self._setup_ui() self._connect_signals() @@ -44,6 +51,25 @@ def __init__(self, parent: QWidget | None = None) -> None: def _setup_ui(self) -> None: """Setup the user interface""" layout = QVBoxLayout(self) + layout.setContentsMargins(0, 0, 0, 0) + layout.setSpacing(0) + + # Scrollable content area + scroll = QScrollArea() + scroll.setWidgetResizable(True) + scroll.setFrameShape(QFrame.Shape.NoFrame) + + # Content widget inside scroll area with white background + content = QWidget() + content.setStyleSheet("background: white;") + content_layout = QVBoxLayout(content) + content_layout.setContentsMargins(10, 10, 10, 10) + content_layout.setSpacing(10) + + # Camera title with larger font + camera_title = QLabel("Camera") + camera_title.setStyleSheet("font-size: 24px; font-weight: bold; color: #5f6368;") + content_layout.addWidget(camera_title) # Camera selection group camera_group = QGroupBox("Camera Device") @@ -61,38 +87,50 @@ def _setup_ui(self) -> None: camera_layout.addRow("Select Camera:", camera_select_layout) - layout.addWidget(camera_group) + content_layout.addWidget(camera_group) # Camera settings groups (will be populated dynamically) self.settings_container = QWidget() self.settings_layout = QVBoxLayout(self.settings_container) self.settings_layout.setContentsMargins(0, 0, 0, 0) + self.settings_layout.setSpacing(10) - layout.addWidget(self.settings_container) - layout.addStretch() + content_layout.addWidget(self.settings_container) + content_layout.addStretch() - # Save/Load buttons + # Reset and Load buttons at bottom button_layout = QHBoxLayout() - self.save_btn = QPushButton("Save Settings") - self.load_btn = 
QPushButton("Load Settings") self.reset_btn = QPushButton("Reset to Defaults") + self.reset_btn.setEnabled(False) + self.load_btn = QPushButton("Load Settings") + self.load_btn.setEnabled(False) - button_layout.addWidget(self.save_btn) - button_layout.addWidget(self.load_btn) button_layout.addWidget(self.reset_btn) + button_layout.addWidget(self.load_btn) button_layout.addStretch() - layout.addLayout(button_layout) + content_layout.addLayout(button_layout) + + scroll.setWidget(content) + layout.addWidget(scroll) def _connect_signals(self) -> None: """Connect signals and slots""" self.camera_combo.currentIndexChanged.connect(self._on_camera_changed) self.refresh_btn.clicked.connect(lambda: self._populate_camera_list(force_enumerate=True)) - self.save_btn.clicked.connect(self._save_settings) - self.load_btn.clicked.connect(self._load_settings) self.reset_btn.clicked.connect(self._reset_settings) + self.load_btn.clicked.connect(self._load_settings) self.settings_loaded.connect(self._on_settings_loaded) + # Connect to parent dialog's save button if available + if self.parent_dialog: + if hasattr(self.parent_dialog, 'save_btn'): + self.parent_dialog.save_btn.clicked.connect(self._save_settings) + # Connect modifications_changed signal to enable/disable save button + self.modifications_changed.connect(self.parent_dialog.save_btn.setEnabled) + if hasattr(self.parent_dialog, 'save_camera_settings'): + self.parent_dialog.save_camera_settings.connect(self._save_settings) + # Connect to camera manager signals if self.ctx.camera_manager: self.ctx.camera_manager.camera_list_changed.connect( @@ -222,15 +260,32 @@ def _refresh_settings_display(self) -> None: # Group settings by category grouped_settings = self._group_settings(metadata_list) + # Clear and rebuild group tracking + self._group_names.clear() + self._group_widgets.clear() + # Create UI for each group for group_name, settings_in_group in grouped_settings.items(): group_box = self._create_settings_group(group_name, 
settings_in_group) self.settings_layout.addWidget(group_box) + + # Track the group + self._group_names.append(group_name) + self._group_widgets[group_name] = group_box + + # Register with parent dialog for scrolling + if self.parent_dialog and hasattr(self.parent_dialog, 'register_group_box'): + self.parent_dialog.register_group_box("Camera", group_name, group_box) + + # Update tree items in parent dialog + if self.parent_dialog and hasattr(self.parent_dialog, '_update_camera_groups'): + self.parent_dialog._update_camera_groups(self._group_names) # Enable buttons - self.save_btn.setEnabled(True) - self.load_btn.setEnabled(True) + if self.parent_dialog and hasattr(self.parent_dialog, 'save_btn'): + self.parent_dialog.save_btn.setEnabled(True) self.reset_btn.setEnabled(True) + self.load_btn.setEnabled(True) except Exception as e: error(f"Error loading camera settings: {e}") @@ -246,9 +301,10 @@ def _clear_settings_display(self) -> None: self._settings_widgets.clear() # Disable buttons - self.save_btn.setEnabled(False) - self.load_btn.setEnabled(False) + if self.parent_dialog and hasattr(self.parent_dialog, 'save_btn'): + self.parent_dialog.save_btn.setEnabled(False) self.reset_btn.setEnabled(False) + self.load_btn.setEnabled(False) def _show_no_camera_message(self) -> None: """Show message when no camera is available""" @@ -361,6 +417,8 @@ def _create_bool_widget(self, meta, settings) -> QCheckBox: checkbox.stateChanged.connect( lambda state: self._on_bool_changed(setter_name, state == Qt.CheckState.Checked) ) + else: + warning(f"No setter found: {setter_name}") return checkbox @@ -459,8 +517,10 @@ def _create_dropdown_widget(self, meta, settings) -> QComboBox: setter_name = f"set_{meta.name}" if hasattr(settings, setter_name): combo.currentIndexChanged.connect( - lambda idx: self._on_dropdown_changed(setter_name, combo.itemData(idx)) + lambda idx: self._on_dropdown_changed(setter_name, idx, combo.itemData(idx)) ) + else: + warning(f"No setter found: {setter_name}") 
return combo @@ -557,6 +617,13 @@ def _mark_setting_modified(self, setting_name: str, current_value) -> None: # Update widget styling self._update_widget_styling(setting_name, is_modified) + + # Update category color in parent dialog + if self.parent_dialog and hasattr(self.parent_dialog, 'set_category_modified'): + self.parent_dialog.set_category_modified("Camera", len(self._modified_settings) > 0) + + # Emit signal about modification state change + self.modifications_changed.emit(len(self._modified_settings) > 0) def _update_widget_styling(self, setting_name: str, is_modified: bool) -> None: """Update the visual styling of a widget to indicate modification""" @@ -586,14 +653,12 @@ def _apply_orange_styling(self, widget: QWidget, orange: bool) -> None: if label: label.setStyleSheet("QLabel { color: #FFA500; }") - # For different widget types, apply orange color to slider only + # For different widget types, apply orange if isinstance(control, QCheckBox): control.setStyleSheet("QCheckBox { color: #FFA500; }") elif isinstance(control, QComboBox): control.setStyleSheet("QComboBox { color: #FFA500; }") elif isinstance(control, QWidget): - # For container widgets (like the range widget container) - # Only style slider handle, not the groove for child in control.findChildren(QSlider): child.setStyleSheet(""" QSlider::handle:horizontal { @@ -627,6 +692,21 @@ def _clear_all_modifications(self) -> None: self._update_widget_styling(setting_name, False) self._modified_settings.clear() + + # Update category color in parent dialog + if self.parent_dialog and hasattr(self.parent_dialog, 'set_category_modified'): + self.parent_dialog.set_category_modified("Camera", False) + + # Emit signal about modification state change + self.modifications_changed.emit(False) + + def has_unsaved_changes(self) -> bool: + """Check if there are unsaved changes""" + return len(self._modified_settings) > 0 + + def get_group_names(self) -> list[str]: + """Get list of group names in the settings""" + 
return self._group_names.copy() def _on_bool_changed(self, setter_name: str, value: bool) -> None: """Handle boolean setting change""" @@ -758,8 +838,14 @@ def _on_slider_changed_float(self, setter_name: str, slider_val: int, except Exception as e: error(f"Error setting {setter_name}: {e}") - def _on_dropdown_changed(self, setter_name: str, value) -> None: - """Handle dropdown setting change""" + def _on_dropdown_changed(self, setter_name: str, index: int, value) -> None: + """Handle dropdown setting change + + Args: + setter_name: Name of the setter method (e.g., 'set_resolution') + index: Index of the selected item in the dropdown + value: Value associated with the selected item + """ if self._updating_from_camera: return @@ -769,13 +855,14 @@ def _on_dropdown_changed(self, setter_name: str, value) -> None: try: setter = getattr(camera.settings, setter_name) - setter(value) + # Pass both index and value to the setter + setter(index=index, value=value) # Extract setting name from setter name (remove "set_" prefix) setting_name = setter_name.replace("set_", "") self._mark_setting_modified(setting_name, value) - debug(f"Set {setter_name} to {value}") + debug(f"Set {setter_name} to index={index}, value={value}") except Exception as e: error(f"Error setting {setter_name}: {e}") @@ -853,6 +940,18 @@ def _reset_settings(self) -> None: warning("No camera to reset settings on") return + # Confirm reset with user + reply = QMessageBox.question( + self, + "Reset to Defaults", + "Are you sure you want to reset all camera settings to their default values? 
This cannot be undone.", + QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No, + QMessageBox.StandardButton.No + ) + + if reply != QMessageBox.StandardButton.Yes: + return + try: # Refresh from camera hardware (gets defaults) settings = camera.settings @@ -869,6 +968,6 @@ def _reset_settings(self) -> None: self.ctx.toast.info(f"Error resetting settings: {e}", duration=3000) -def camera_page() -> QWidget: +def camera_page(parent_dialog=None) -> QWidget: """Create and return the camera settings page widget""" - return CameraSettingsWidget() \ No newline at end of file + return CameraSettingsWidget(parent_dialog=parent_dialog) \ No newline at end of file diff --git a/UI/settings/settings_main.py b/UI/settings/settings_main.py index e31f061..219ee6f 100644 --- a/UI/settings/settings_main.py +++ b/UI/settings/settings_main.py @@ -1,6 +1,6 @@ from __future__ import annotations -from PySide6.QtCore import Qt +from PySide6.QtCore import Qt, Signal, Slot from PySide6.QtWidgets import ( QHBoxLayout, QLabel, @@ -12,6 +12,12 @@ QListWidget, QListWidgetItem, QStackedWidget, + QFrame, + QMessageBox, + QPushButton, + QTreeWidget, + QTreeWidgetItem, + QScrollArea, ) from .pages.camera_settings import camera_page @@ -31,47 +37,207 @@ def __init__(self, tooltip: str = "Settings", parent: QWidget | None = None)-> N self.setFixedHeight(26) class SettingsDialog(QDialog): + # Signal emitted when user wants to save camera settings + save_camera_settings = Signal() + def __init__(self, parent: QWidget | None = None) -> None: super().__init__(parent) self.setWindowTitle("Settings") self.resize(860, 580) - root = QHBoxLayout(self) - - self.sidebar = QListWidget() + root = QVBoxLayout(self) + root.setContentsMargins(0, 0, 0, 0) + root.setSpacing(0) + + # Dark grey header bar spanning the top + header = QFrame() + header.setObjectName("SectionHeader") + header.setFixedHeight(40) + header_layout = QHBoxLayout(header) + header_layout.setContentsMargins(16, 0, 16, 0) + + header_title 
= QLabel("Categories") + header_title.setObjectName("SectionHeaderTitle") + header_layout.addWidget(header_title) + header_layout.addStretch() + + root.addWidget(header) + + # Main content area + content = QWidget() + content_layout = QHBoxLayout(content) + content_layout.setContentsMargins(0, 0, 0, 0) + content_layout.setSpacing(0) + + # Tree sidebar - flush with left and extends to bottom + self.sidebar = QTreeWidget() self.sidebar.setFixedWidth(220) + self.sidebar.setHeaderHidden(True) + self.sidebar.setIndentation(15) + # Pages container - flush with right edge and white background self.pages = QStackedWidget() - - buttons = QDialogButtonBox(QDialogButtonBox.StandardButton.Close) - buttons.rejected.connect(self.reject) - - left = QVBoxLayout() - left.addWidget(QLabel("Categories")) - left.addWidget(self.sidebar) - - right = QVBoxLayout() - right.addWidget(self.pages) - right.addWidget(buttons) - - root.addLayout(left) - root.addLayout(right) - - self._add_page("Camera", camera_page()) + self.pages.setStyleSheet("QStackedWidget { background: white; }") + + content_layout.addWidget(self.sidebar) + content_layout.addWidget(self.pages) + + root.addWidget(content) + + # Bottom button bar with margins + button_container = QWidget() + button_container_layout = QHBoxLayout(button_container) + button_container_layout.setContentsMargins(10, 10, 10, 10) + + button_box = QDialogButtonBox() + + self.save_btn = QPushButton("Save Settings") + close_btn = QPushButton("Close") + + button_box.addButton(self.save_btn, QDialogButtonBox.ButtonRole.ActionRole) + button_box.addButton(close_btn, QDialogButtonBox.ButtonRole.RejectRole) + + close_btn.clicked.connect(self._on_close_clicked) + + button_container_layout.addWidget(button_box) + + root.addWidget(button_container) + + # Store page widgets and their group boxes for scrolling + self._page_widgets = {} + self._group_boxes = {} # Maps (page_name, group_name) -> QGroupBox widget + + self._add_page("Camera", camera_page(self)) 
self._add_page("Navigation", navigation_page()) self._add_page("Automation", automation_page()) self._add_page("Machine Vision", machine_vision_page()) - self.sidebar.currentRowChanged.connect(self.pages.setCurrentIndex) - self.sidebar.setCurrentRow(0) + self.sidebar.itemClicked.connect(self._on_tree_item_clicked) + + # Expand first item and select it + if self.sidebar.topLevelItemCount() > 0: + first_item = self.sidebar.topLevelItem(0) + first_item.setExpanded(True) + self.sidebar.setCurrentItem(first_item) + self.pages.setCurrentIndex(0) + + # Initially disable save button + self.save_btn.setEnabled(False) def open_to(self, category: str) -> None: - for i in range(self.sidebar.count()): - item = self.sidebar.item(i) - if item and item.text() == category: - self.sidebar.setCurrentRow(i) + for i in range(self.sidebar.topLevelItemCount()): + item = self.sidebar.topLevelItem(i) + if item and item.text(0) == category: + self.sidebar.setCurrentItem(item) + self._on_tree_item_clicked(item, 0) + return + + def set_category_modified(self, category: str, modified: bool) -> None: + """Update category text color to indicate modifications""" + from PySide6.QtGui import QColor + for i in range(self.sidebar.topLevelItemCount()): + item = self.sidebar.topLevelItem(i) + if item and item.text(0) == category: + if modified: + # Use orange color (#f28c28 from style.py) + item.setForeground(0, QColor("#f28c28")) + else: + # Reset to default + item.setData(0, Qt.ItemDataRole.ForegroundRole, None) + return + + def _update_camera_groups(self, group_names: list[str]) -> None: + """Update the Camera category tree item with actual group names""" + # Find the Camera item + for i in range(self.sidebar.topLevelItemCount()): + item = self.sidebar.topLevelItem(i) + if item and item.text(0) == "Camera": + # Remove existing children + item.takeChildren() + + # Add new children for each group + page_index = item.data(0, Qt.ItemDataRole.UserRole) + for group_name in group_names: + child_item = 
QTreeWidgetItem([group_name]) + child_item.setData(0, Qt.ItemDataRole.UserRole, page_index) + child_item.setData(0, Qt.ItemDataRole.UserRole + 1, group_name) + item.addChild(child_item) + + # Re-expand the item if it was expanded + if item.isExpanded() or self.sidebar.currentItem() == item: + item.setExpanded(True) + return + + def register_group_box(self, page_name: str, group_name: str, group_box: QWidget) -> None: + """Register a group box widget for a page so we can scroll to it""" + self._group_boxes[(page_name, group_name)] = group_box + + @Slot(QTreeWidgetItem, int) + def _on_tree_item_clicked(self, item: QTreeWidgetItem, column: int) -> None: + """Handle tree item clicks""" + # Get the stored data + page_index = item.data(0, Qt.ItemDataRole.UserRole) + group_name = item.data(0, Qt.ItemDataRole.UserRole + 1) + + if page_index is not None: + # Switch to the page + self.pages.setCurrentIndex(page_index) + + # If it's a group item, scroll to that group + if group_name: + # Get the page name from parent + parent = item.parent() + if parent: + page_name = parent.text(0) + group_box = self._group_boxes.get((page_name, group_name)) + + if group_box: + # Find the scroll area in the current page + current_page = self.pages.currentWidget() + scroll_area = current_page.findChild(QScrollArea) + + if scroll_area: + # Scroll to the group box + scroll_area.ensureWidgetVisible(group_box) + + def _on_close_clicked(self) -> None: + """Handle close button click with confirmation if settings modified""" + # Check if camera page has unsaved changes + camera_widget = self._page_widgets.get("Camera") + if camera_widget and hasattr(camera_widget, 'has_unsaved_changes') and camera_widget.has_unsaved_changes(): + reply = QMessageBox.question( + self, + "Unsaved Changes", + "You have unsaved camera settings. 
Do you want to save them?", + QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No | QMessageBox.StandardButton.Cancel + ) + + if reply == QMessageBox.StandardButton.Cancel: + return # Don't close + elif reply == QMessageBox.StandardButton.Yes: + # Save settings before closing + self.save_camera_settings.emit() + + self.reject() def _add_page(self, name: str, page: QWidget) -> None: - self.pages.addWidget(page) - self.sidebar.addItem(QListWidgetItem(name)) \ No newline at end of file + """Add a page and create tree items for it and its groups""" + page_index = self.pages.addWidget(page) + self._page_widgets[name] = page + + # Create parent tree item for the category + parent_item = QTreeWidgetItem([name]) + parent_item.setData(0, Qt.ItemDataRole.UserRole, page_index) + parent_item.setData(0, Qt.ItemDataRole.UserRole + 1, None) # No group name for parent + self.sidebar.addTopLevelItem(parent_item) + + # Find all group boxes in the page and add them as child items + if hasattr(page, 'get_group_names'): + # If the page provides a method to get group names + groups = page.get_group_names() + for group_name in groups: + child_item = QTreeWidgetItem([group_name]) + child_item.setData(0, Qt.ItemDataRole.UserRole, page_index) + child_item.setData(0, Qt.ItemDataRole.UserRole + 1, group_name) + parent_item.addChild(child_item) \ No newline at end of file diff --git a/UI/widgets/camera_preview.py b/UI/widgets/camera_preview.py index 26a28a0..5e0d0bf 100644 --- a/UI/widgets/camera_preview.py +++ b/UI/widgets/camera_preview.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Optional, Any +from typing import Any import numpy as np from PySide6.QtCore import Qt, Signal, QTimer, Slot from PySide6.QtGui import QImage, QPixmap @@ -30,7 +30,7 @@ def __init__(self, parent: QWidget | None = None) -> None: self._camera_info = None self._img_width = 0 self._img_height = 0 - self._img_buffer: Optional[bytes] = None + self._img_buffer: bytes | None = None 
self._is_streaming = False self._no_camera_logged = False @@ -212,6 +212,22 @@ def _handle_image_event(self): return try: + # Check if resolution has changed + base_camera = self._camera.underlying_camera + _, current_width, current_height = base_camera.get_current_resolution() + + # If resolution changed, update buffer + if current_width != self._img_width or current_height != self._img_height: + info(f"Preview: Resolution changed from {self._img_width}x{self._img_height} to {current_width}x{current_height}") + self._img_width = current_width + self._img_height = current_height + + # Recalculate buffer size + base_camera_class = type(base_camera) + buffer_size = base_camera_class.calculate_buffer_size(current_width, current_height, 24) + self._img_buffer = bytes(buffer_size) + info(f"Preview: Buffer resized to {buffer_size} bytes") + # Pull image into buffer from underlying camera if self._camera.underlying_camera.pull_image(self._img_buffer, 24): # Calculate stride using base camera class method @@ -292,4 +308,4 @@ def cleanup(self): # Stop using camera self._close_camera() - info("Preview cleanup complete") + info("Preview cleanup complete") \ No newline at end of file diff --git a/camera/cameras/amscope_camera.py b/camera/cameras/amscope_camera.py index c25d999..c53950d 100644 --- a/camera/cameras/amscope_camera.py +++ b/camera/cameras/amscope_camera.py @@ -209,11 +209,11 @@ def open(self, camera_id: str) -> bool: try: self._hcam = amcam.Amcam.Open(camera_id) if self._hcam: - self._is_open = True # Set RGB byte order for Qt compatibility self._hcam.put_Option(self.OPTION_BYTEORDER, 0) # Initialize settings self.initialize_settings() + self._is_open = True return True return False except self._get_sdk().HRESULTException: @@ -230,6 +230,16 @@ def close(self): self._camera_info = None self._frame_buffer = None + def _reallocate_frame_buffer(self): + """Reallocate frame buffer based on current resolution.""" + try: + width, height = self._hcam.get_Size() + 
buffer_size = self.calculate_buffer_size(width, height, 24) + self._frame_buffer = bytes(buffer_size) + info(f"Reallocated frame buffer: {width}x{height}, size={buffer_size}") + except Exception as e: + error(f"Failed to reallocate frame buffer: {e}") + def start_capture(self, callback: Callable, context: Any) -> bool: """Start capturing frames with callback""" if not self._hcam: diff --git a/camera/cameras/base_camera.py b/camera/cameras/base_camera.py index c5b962c..6f1141a 100644 --- a/camera/cameras/base_camera.py +++ b/camera/cameras/base_camera.py @@ -152,9 +152,6 @@ def initialize_settings(self) -> None: # Load saved settings or create defaults self._settings = self._settings_manager.load() - # First refresh from camera to sync with current hardware state - self._settings.refresh_from_camera(self) - # Then apply settings to camera hardware self._settings.apply_to_camera(self) diff --git a/camera/settings/amscope_settings.py b/camera/settings/amscope_settings.py index 386d02d..c5035b1 100644 --- a/camera/settings/amscope_settings.py +++ b/camera/settings/amscope_settings.py @@ -10,7 +10,7 @@ RGBALevel, FileFormat, ) -from logger import info, error, exception +from logger import info, error, exception, debug if TYPE_CHECKING: from camera.cameras.base_camera import BaseCamera, CameraResolution @@ -19,10 +19,10 @@ @dataclass class AmscopeSettings(CameraSettings): version: str = "0" - auto_expo: bool = True + auto_exposure: bool = True exposure: int = 128 exposure_time_us: int = 50000 - resolution_index: int = 0 + resolution: int = 0 temp: int = 6500 tint: int = 1000 contrast: int = 0 @@ -35,7 +35,7 @@ class AmscopeSettings(CameraSettings): levelrange_high: RGBALevel = RGBALevel(255, 255, 255, 255) fformat: FileFormat = FileFormat.TIFF - fan_enabled: bool = field(default=False) + fan: bool = field(default=False) high_fullwell: bool = field(default=False) _camera: BaseCamera | None = field(default=None, repr=False, compare=False) @@ -43,11 +43,18 @@ class 
AmscopeSettings(CameraSettings): def __post_init__(self) -> None: super().__post_init__() - @classmethod - def get_metadata(cls) -> list[SettingMetadata]: + def get_metadata(self) -> list[SettingMetadata]: + """ + Get metadata for all settings with dynamically populated resolution choices. + """ + # Get available resolutions from camera + resolutions = self.get_resolutions() + resolution_choices = [f"{res.width}x{res.height}" + for idx, res in enumerate(resolutions)] if resolutions else [] + return [ SettingMetadata( - name="auto_expo", + name="auto_exposure", display_name="Auto Exposure", setting_type=SettingType.BOOL, description="Enable automatic exposure control", @@ -171,12 +178,11 @@ def get_metadata(cls) -> list[SettingMetadata]: runtime_changeable=True, ), SettingMetadata( - name="resolution_index", + name="resolution", display_name="Resolution", - setting_type=SettingType.RANGE, - description="Camera resolution index", - min_value=0, - max_value=10, + setting_type=SettingType.DROPDOWN, + description="Camera resolution", + choices=resolution_choices, # Dynamically populated from camera group="Capture", runtime_changeable=False, ), @@ -185,12 +191,12 @@ def get_metadata(cls) -> list[SettingMetadata]: display_name="File Format", setting_type=SettingType.DROPDOWN, description="Default file format for saved images", - choices=["png", "tiff", "jpeg", "bmp"], + choices=self._file_formats, group="Capture", runtime_changeable=True, ), SettingMetadata( - name="fan_enabled", + name="fan", display_name="Cooling Fan", setting_type=SettingType.BOOL, description="Enable cooling fan", @@ -219,7 +225,7 @@ def _validate_range(self, name: str, value: int) -> None: ) def set_auto_exposure(self, enabled: bool) -> None: - self.auto_expo = enabled + self.auto_exposure = enabled if self._camera and hasattr(self._camera, '_hcam'): self._camera._hcam.put_AutoExpoEnable(1 if enabled else 0) @@ -304,7 +310,7 @@ def set_level_range(self, low: RGBALevel, high: RGBALevel) -> None: ) def 
set_fan(self, enabled: bool) -> None: - self.fan_enabled = enabled + self.fan = enabled if self._camera and hasattr(self._camera, '_hcam'): self._camera._hcam.put_Option(0x0a, 1 if enabled else 0) @@ -322,7 +328,7 @@ def get_resolutions(self) -> list[CameraResolution]: try: resolutions = [] hcam = self._camera._hcam - count = hcam.ResolutionNumber + count = hcam.ResolutionNumber() for i in range(count): width, height = hcam.get_Resolution(i) resolutions.append(CameraResolution(width=width, height=height)) @@ -344,18 +350,30 @@ def get_current_resolution(self) -> tuple[int, int, int]: error(f"Failed to get current resolution: {e}") return (0, 0, 0) - def set_resolution(self, resolution_index: int) -> bool: - if self._camera is None or not hasattr(self._camera, '_hcam'): - return False - + def set_resolution(self, index: int, value: str = "") -> bool: + """Set camera resolution. Requires camera restart.""" try: - hcam = self._camera._hcam - if not (0 <= resolution_index < hcam.ResolutionNumber): - error(f"Invalid resolution index: {resolution_index}") + if not (0 <= index < len(self.get_resolutions())): + error(f"Invalid resolution index: {index}") return False - hcam.put_eSize(resolution_index) - self.resolution_index = resolution_index + camera_was_open = self._camera.is_open + saved_callback = self._camera._callback + saved_context = self._camera._callback_context + + if camera_was_open: + info("Camera is open, stopping to set resolution") + self._camera.stop_capture() + + # Set resolution on the underlying camera + self._camera._hcam.put_eSize(index) + self.resolution = index + + if camera_was_open: + info("Restarting camera to set resolution") + self._camera.start_capture(saved_callback, saved_context) + + debug(f"Successfully changed resolution to index {index}") return True except Exception as e: error(f"Failed to set resolution: {e}") @@ -370,7 +388,7 @@ def get_still_resolutions(self) -> list[CameraResolution]: try: resolutions = [] hcam = self._camera._hcam 
- count = hcam.StillResolutionNumber + count = hcam.StillResolutionNumber() for i in range(count): width, height = hcam.get_StillResolution(i) resolutions.append(CameraResolution(width=width, height=height)) @@ -394,7 +412,8 @@ def apply_to_camera(self, camera: BaseCamera) -> None: info(f"Applying settings to camera {camera.model}") try: - self.set_auto_exposure(self.auto_expo) + self.set_resolution(self.resolution) + self.set_auto_exposure(self.auto_exposure) self.set_exposure(self.exposure) self.set_exposure_time(self.exposure_time_us) self.set_gain(self.gain_percent) @@ -409,7 +428,7 @@ def apply_to_camera(self, camera: BaseCamera) -> None: self.set_level_range(self.levelrange_low, self.levelrange_high) - self.set_fan(self.fan_enabled) + self.set_fan(self.fan) self.set_high_fullwell(self.high_fullwell) info("Successfully applied all settings to camera") @@ -428,7 +447,7 @@ def refresh_from_camera(self, camera: BaseCamera) -> None: hcam = camera._hcam try: - self.auto_expo = bool(hcam.get_AutoExpoEnable()) + self.auto_exposure = bool(hcam.get_AutoExpoEnable()) self.exposure = hcam.get_AutoExpoTarget() self.exposure_time_us = hcam.get_ExpoTime() self.gain_percent = hcam.get_ExpoAGain() @@ -447,9 +466,9 @@ def refresh_from_camera(self, camera: BaseCamera) -> None: self.levelrange_low = RGBALevel(r=low[0], g=low[1], b=low[2], a=low[3]) self.levelrange_high = RGBALevel(r=high[0], g=high[1], b=high[2], a=high[3]) - self.resolution_index = hcam.get_eSize() + self.resolution = hcam.get_eSize() - self.fan_enabled = bool(hcam.get_Option(0x0a)) + self.fan = bool(hcam.get_Option(0x0a)) self.high_fullwell = bool(hcam.get_Option(0x51)) info("Successfully refreshed all settings from camera") diff --git a/camera/settings/camera_settings.py b/camera/settings/camera_settings.py index 813c83a..f73dbdb 100644 --- a/camera/settings/camera_settings.py +++ b/camera/settings/camera_settings.py @@ -7,7 +7,7 @@ from typing import Any, NamedTuple, TYPE_CHECKING from generic_config import 
ConfigManager -from logger import info, debug, exception +from logger import info, debug, exception, error if TYPE_CHECKING: from camera.cameras.base_camera import BaseCamera, CameraResolution @@ -17,7 +17,6 @@ class FileFormat(str, Enum): PNG = 'png' TIFF = 'tiff' JPEG = 'jpeg' - BMP = 'bmp' class RGBALevel(NamedTuple): @@ -129,10 +128,10 @@ def set_exposure(self, value: int) -> None: """ version: str - auto_expo: bool + auto_exposure: bool exposure: int exposure_time_us: int - resolution_index: int + resolution: int tint: int contrast: int hue: int @@ -144,6 +143,7 @@ def set_exposure(self, value: int) -> None: fformat: FileFormat _camera: BaseCamera | None = field(default=None, repr=False, compare=False) + _file_formats: tuple[str] = (f.value for f in FileFormat) def __post_init__(self) -> None: if isinstance(self.fformat, str): @@ -180,7 +180,6 @@ def validate(self) -> None: if not isinstance(self.fformat, FileFormat): raise ValueError(f"fformat must be a FileFormat enum, got {type(self.fformat)}") - @classmethod @abstractmethod def get_metadata(cls) -> list[SettingMetadata]: pass @@ -190,7 +189,7 @@ def apply_to_camera(self, camera: BaseCamera) -> None: info(f"Applying settings to camera {camera.model}") try: - self.set_auto_exposure(self.auto_expo) + self.set_auto_exposure(self.auto_exposure) self.set_exposure(self.exposure) self.set_tint(self.tint) self.set_contrast(self.contrast) @@ -242,6 +241,13 @@ def set_gamma(self, value: int) -> None: def set_level_range(self, low: RGBALevel, high: RGBALevel) -> None: pass + def set_fformat(self, value: str, index: int | None = None) -> None: + try: + format_enum = FileFormat(value) + self.fformat = format_enum + except ValueError as e: + raise ValueError(f"Invalid file format: {value}. 
Must be one of: png, tiff, jpeg") from e + @abstractmethod def get_resolutions(self) -> list['CameraResolution']: pass @@ -251,7 +257,7 @@ def get_current_resolution(self) -> tuple[int, int, int]: pass @abstractmethod - def set_resolution(self, resolution_index: int) -> bool: + def set_resolution(self, index: int, value: str | None = None) -> bool: pass def get_still_resolutions(self) -> list['CameraResolution']: From 5fff389b1118f4a3fd69dd4f37e342f990bc2451 Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Sat, 7 Feb 2026 03:46:39 -0900 Subject: [PATCH 26/46] fixed bunch of settings and added warnings --- UI/settings/pages/camera_settings.py | 142 +++++++++++++++------------ UI/widgets/camera_preview.py | 1 - camera/settings/amscope_settings.py | 86 +++++++++------- camera/settings/camera_settings.py | 34 +++---- 4 files changed, 147 insertions(+), 116 deletions(-) diff --git a/UI/settings/pages/camera_settings.py b/UI/settings/pages/camera_settings.py index 02414ea..849207f 100644 --- a/UI/settings/pages/camera_settings.py +++ b/UI/settings/pages/camera_settings.py @@ -399,8 +399,14 @@ def _create_setting_widget(self, meta) -> QWidget | None: warning(f"Unknown setting type: {type_str} for {meta.name}") return None - def _create_bool_widget(self, meta, settings) -> QCheckBox: + def _create_bool_widget(self, meta, settings) -> QCheckBox | None: """Create checkbox for boolean settings""" + # Check if setter exists first + setter_name = f"set_{meta.name}" + if not hasattr(settings, setter_name): + warning(f"No setter found: {setter_name} - skipping widget creation") + return None + checkbox = QCheckBox() # Get current value @@ -412,18 +418,20 @@ def _create_bool_widget(self, meta, settings) -> QCheckBox: checkbox.setToolTip(meta.description) # Connect to setter - setter_name = f"set_{meta.name}" - if hasattr(settings, setter_name): - checkbox.stateChanged.connect( - lambda state: self._on_bool_changed(setter_name, state == Qt.CheckState.Checked) - ) - else: - 
warning(f"No setter found: {setter_name}") + checkbox.stateChanged.connect( + lambda state: self._on_bool_changed(setter_name, state == Qt.CheckState.Checked) + ) return checkbox - def _create_range_widget(self, meta, settings) -> QWidget: + def _create_range_widget(self, meta, settings) -> QWidget | None: """Create slider with value display for range settings""" + # Check if setter exists first + setter_name = f"set_{meta.name}" + if not hasattr(settings, setter_name): + warning(f"No setter found: {setter_name} - skipping widget creation") + return None + container = QWidget() layout = QHBoxLayout(container) layout.setContentsMargins(0, 0, 0, 0) @@ -471,30 +479,34 @@ def _create_range_widget(self, meta, settings) -> QWidget: slider.setValue(int(current_value)) # Connect signals - setter_name = f"set_{meta.name}" - if hasattr(settings, setter_name): - if is_float: - spinbox.valueChanged.connect( - lambda val: self._on_float_changed(setter_name, val, slider, meta) - ) - slider.valueChanged.connect( - lambda val: self._on_slider_changed_float(setter_name, val, spinbox, meta) - ) - else: - spinbox.valueChanged.connect( - lambda val: self._on_int_changed(setter_name, val, slider) - ) - slider.valueChanged.connect( - lambda val: self._on_slider_changed_int(setter_name, val, spinbox) - ) + if is_float: + spinbox.valueChanged.connect( + lambda val: self._on_float_changed(setter_name, val, slider, meta) + ) + slider.valueChanged.connect( + lambda val: self._on_slider_changed_float(setter_name, val, spinbox, meta) + ) + else: + spinbox.valueChanged.connect( + lambda val: self._on_int_changed(setter_name, val, slider) + ) + slider.valueChanged.connect( + lambda val: self._on_slider_changed_int(setter_name, val, spinbox) + ) layout.addWidget(slider) layout.addWidget(spinbox) return container - def _create_dropdown_widget(self, meta, settings) -> QComboBox: + def _create_dropdown_widget(self, meta, settings) -> QComboBox | None: """Create dropdown for choice settings""" + # 
Check if setter exists first + setter_name = f"set_{meta.name}" + if not hasattr(settings, setter_name): + warning(f"No setter found: {setter_name} - skipping widget creation") + return None + combo = QComboBox() # Add choices @@ -514,18 +526,20 @@ def _create_dropdown_widget(self, meta, settings) -> QComboBox: combo.setToolTip(meta.description) # Connect to setter - setter_name = f"set_{meta.name}" - if hasattr(settings, setter_name): - combo.currentIndexChanged.connect( - lambda idx: self._on_dropdown_changed(setter_name, idx, combo.itemData(idx)) - ) - else: - warning(f"No setter found: {setter_name}") + combo.currentIndexChanged.connect( + lambda idx: self._on_dropdown_changed(setter_name, idx, combo.itemData(idx)) + ) return combo - def _create_rgba_level_widget(self, meta, settings) -> QWidget: + def _create_rgba_level_widget(self, meta, settings) -> QWidget | None: """Create RGBA level widget with four spinboxes for R, G, B, A""" + # Check if setter exists first + setter_name = f"set_{meta.name}" + if not hasattr(settings, setter_name): + warning(f"No setter found: {setter_name} - skipping widget creation") + return None + container = QWidget() layout = QHBoxLayout(container) layout.setContentsMargins(0, 0, 0, 0) @@ -558,38 +572,36 @@ def _create_rgba_level_widget(self, meta, settings) -> QWidget: spinboxes[channel] = spinbox # Connect to setter - setter_name = f"set_{meta.name}" - if hasattr(settings, setter_name): - # Create a function that updates all values when any spinbox changes - def on_rgba_changed(): - if self._updating_from_camera: - return + # Create a function that updates all values when any spinbox changes + def on_rgba_changed(): + if self._updating_from_camera: + return + + # Import RGBALevel here to avoid circular imports + try: + from camera.settings.camera_settings import RGBALevel - # Import RGBALevel here to avoid circular imports - try: - from camera.settings.camera_settings import RGBALevel - - new_value = RGBALevel( - 
r=spinboxes['r'].value(), - g=spinboxes['g'].value(), - b=spinboxes['b'].value(), - a=spinboxes['a'].value() - ) - - setter = getattr(settings, setter_name) - setter(new_value) - - # Mark as modified - setting_name = setter_name.replace("set_", "") - self._mark_setting_modified(setting_name, new_value) - - debug(f"Set {setter_name} to {new_value}") - except Exception as e: - error(f"Error setting {setter_name}: {e}") - - # Connect all spinboxes to the same handler - for spinbox in spinboxes.values(): - spinbox.valueChanged.connect(on_rgba_changed) + new_value = RGBALevel( + r=spinboxes['r'].value(), + g=spinboxes['g'].value(), + b=spinboxes['b'].value(), + a=spinboxes['a'].value() + ) + + setter = getattr(settings, setter_name) + setter(new_value) + + # Mark as modified + setting_name = setter_name.replace("set_", "") + self._mark_setting_modified(setting_name, new_value) + + debug(f"Set {setter_name} to {new_value}") + except Exception as e: + error(f"Error setting {setter_name}: {e}") + + # Connect all spinboxes to the same handler + for spinbox in spinboxes.values(): + spinbox.valueChanged.connect(on_rgba_changed) layout.addStretch() return container diff --git a/UI/widgets/camera_preview.py b/UI/widgets/camera_preview.py index 5e0d0bf..90edb5b 100644 --- a/UI/widgets/camera_preview.py +++ b/UI/widgets/camera_preview.py @@ -226,7 +226,6 @@ def _handle_image_event(self): base_camera_class = type(base_camera) buffer_size = base_camera_class.calculate_buffer_size(current_width, current_height, 24) self._img_buffer = bytes(buffer_size) - info(f"Preview: Buffer resized to {buffer_size} bytes") # Pull image into buffer from underlying camera if self._camera.underlying_camera.pull_image(self._img_buffer, 24): diff --git a/camera/settings/amscope_settings.py b/camera/settings/amscope_settings.py index c5035b1..a6b32ce 100644 --- a/camera/settings/amscope_settings.py +++ b/camera/settings/amscope_settings.py @@ -21,7 +21,7 @@ class AmscopeSettings(CameraSettings): 
version: str = "0" auto_exposure: bool = True exposure: int = 128 - exposure_time_us: int = 50000 + exposure_time: int = 50000 resolution: int = 0 temp: int = 6500 tint: int = 1000 @@ -30,9 +30,9 @@ class AmscopeSettings(CameraSettings): saturation: int = 128 brightness: int = 0 gamma: int = 100 - gain_percent: int = 100 - levelrange_low: RGBALevel = RGBALevel(0, 0, 0, 0) - levelrange_high: RGBALevel = RGBALevel(255, 255, 255, 255) + gain: int = 100 + level_range_low: RGBALevel = RGBALevel(0, 0, 0, 0) + level_range_high: RGBALevel = RGBALevel(255, 255, 255, 255) fformat: FileFormat = FileFormat.TIFF fan: bool = field(default=False) @@ -72,7 +72,7 @@ def get_metadata(self) -> list[SettingMetadata]: runtime_changeable=True, ), SettingMetadata( - name="exposure_time_us", + name="exposure_time", display_name="Exposure Time (µs)", setting_type=SettingType.RANGE, description="Manual exposure time in microseconds", @@ -82,12 +82,12 @@ def get_metadata(self) -> list[SettingMetadata]: runtime_changeable=True, ), SettingMetadata( - name="gain_percent", - display_name="Gain (%)", + name="gain", + display_name="Gain", setting_type=SettingType.RANGE, - description="Sensor gain in percent", + description="Sensor gain", min_value=100, - max_value=1600, + max_value=300, group="Exposure", runtime_changeable=True, ), @@ -162,7 +162,7 @@ def get_metadata(self) -> list[SettingMetadata]: runtime_changeable=True, ), SettingMetadata( - name="levelrange_low", + name="level_range_low", display_name="Black Point", setting_type=SettingType.RGBA_LEVEL, description="Output level for darkest input values", @@ -170,7 +170,7 @@ def get_metadata(self) -> list[SettingMetadata]: runtime_changeable=True, ), SettingMetadata( - name="levelrange_high", + name="level_range_high", display_name="White Point", setting_type=SettingType.RGBA_LEVEL, description="Output level for brightest input values", @@ -236,17 +236,17 @@ def set_exposure(self, value: int) -> None: 
self._camera._hcam.put_AutoExpoTarget(value) def set_exposure_time(self, time_us: int) -> bool: - self._validate_range("exposure_time_us", time_us) - self.exposure_time_us = time_us + self._validate_range("exposure_time", time_us) + self.exposure_time = time_us if self._camera and hasattr(self._camera, '_hcam'): self._camera._hcam.put_ExpoTime(time_us) return True - def set_gain(self, gain_percent: int) -> None: - self._validate_range("gain_percent", gain_percent) - self.gain_percent = gain_percent + def set_gain(self, gain: int) -> None: + self._validate_range("gain", gain) + self.gain = gain if self._camera and hasattr(self._camera, '_hcam'): - self._camera._hcam.put_ExpoAGain(gain_percent) + self._camera._hcam.put_ExpoAGain(gain) def set_temp(self, value: int) -> None: self._validate_range("temp", value) @@ -301,13 +301,34 @@ def set_gamma(self, value: int) -> None: def set_level_range(self, low: RGBALevel, high: RGBALevel) -> None: low.validate() high.validate() - self.levelrange_low = low - self.levelrange_high = high - if self._camera and hasattr(self._camera, '_hcam'): + self.level_range_low = low + self.level_range_high = high + if self._camera: + self._camera._hcam.put_LevelRange( + (low.r, low.g, low.b, low.a), + (high.r, high.g, high.b, high.a) + ) + + def set_level_range_low(self, low: RGBALevel) -> None: + low.validate() + if self._camera: + high = self.level_range_high self._camera._hcam.put_LevelRange( (low.r, low.g, low.b, low.a), (high.r, high.g, high.b, high.a) ) + self.level_range_low = low + + def set_level_range_high(self, high: RGBALevel) -> None: + high.validate() + if self._camera: + low = self.level_range_low + self._camera._hcam.put_LevelRange( + (low.r, low.g, low.b, low.a), + (high.r, high.g, high.b, high.a) + ) + self.level_range_high = high + def set_fan(self, enabled: bool) -> None: self.fan = enabled @@ -362,7 +383,7 @@ def set_resolution(self, index: int, value: str = "") -> bool: saved_context = self._camera._callback_context if 
camera_was_open: - info("Camera is open, stopping to set resolution") + debug("Camera is open, stopping to set resolution") self._camera.stop_capture() # Set resolution on the underlying camera @@ -370,7 +391,7 @@ def set_resolution(self, index: int, value: str = "") -> bool: self.resolution = index if camera_was_open: - info("Restarting camera to set resolution") + debug("Restarting camera to set resolution") self._camera.start_capture(saved_callback, saved_context) debug(f"Successfully changed resolution to index {index}") @@ -399,13 +420,13 @@ def get_still_resolutions(self) -> list[CameraResolution]: def get_exposure_time(self) -> int: if self._camera is None or not hasattr(self._camera, '_hcam'): - return self.exposure_time_us + return self.exposure_time try: return self._camera._hcam.get_ExpoTime() except Exception as e: error(f"Failed to get exposure time: {e}") - return self.exposure_time_us + return self.exposure_time def apply_to_camera(self, camera: BaseCamera) -> None: self._camera = camera @@ -415,8 +436,8 @@ def apply_to_camera(self, camera: BaseCamera) -> None: self.set_resolution(self.resolution) self.set_auto_exposure(self.auto_exposure) self.set_exposure(self.exposure) - self.set_exposure_time(self.exposure_time_us) - self.set_gain(self.gain_percent) + self.set_exposure_time(self.exposure_time) + self.set_gain(self.gain) self.set_temp_tint(self.temp, self.tint) @@ -426,12 +447,12 @@ def apply_to_camera(self, camera: BaseCamera) -> None: self.set_contrast(self.contrast) self.set_gamma(self.gamma) - self.set_level_range(self.levelrange_low, self.levelrange_high) + self.set_level_range(self.level_range_low, self.level_range_high) self.set_fan(self.fan) self.set_high_fullwell(self.high_fullwell) - info("Successfully applied all settings to camera") + debug("Successfully applied all settings to camera") except Exception as e: exception(f"Failed to apply settings to camera: {e}") raise @@ -449,9 +470,8 @@ def refresh_from_camera(self, camera: 
BaseCamera) -> None: try: self.auto_exposure = bool(hcam.get_AutoExpoEnable()) self.exposure = hcam.get_AutoExpoTarget() - self.exposure_time_us = hcam.get_ExpoTime() - self.gain_percent = hcam.get_ExpoAGain() - + self.exposure_time = hcam.get_ExpoTime() + self.gain = hcam.get_ExpoAGain() temp, tint = hcam.get_TempTint() self.temp = temp self.tint = tint @@ -463,8 +483,8 @@ def refresh_from_camera(self, camera: BaseCamera) -> None: self.gamma = hcam.get_Gamma() low, high = hcam.get_LevelRange() - self.levelrange_low = RGBALevel(r=low[0], g=low[1], b=low[2], a=low[3]) - self.levelrange_high = RGBALevel(r=high[0], g=high[1], b=high[2], a=high[3]) + self.level_range_low = RGBALevel(r=low[0], g=low[1], b=low[2], a=low[3]) + self.level_range_high = RGBALevel(r=high[0], g=high[1], b=high[2], a=high[3]) self.resolution = hcam.get_eSize() diff --git a/camera/settings/camera_settings.py b/camera/settings/camera_settings.py index f73dbdb..01e9205 100644 --- a/camera/settings/camera_settings.py +++ b/camera/settings/camera_settings.py @@ -130,7 +130,7 @@ def set_exposure(self, value: int) -> None: version: str auto_exposure: bool exposure: int - exposure_time_us: int + exposure_time: int resolution: int tint: int contrast: int @@ -138,8 +138,8 @@ def set_exposure(self, value: int) -> None: saturation: int brightness: int gamma: int - levelrange_low: RGBALevel - levelrange_high: RGBALevel + level_range_low: RGBALevel + level_range_high: RGBALevel fformat: FileFormat _camera: BaseCamera | None = field(default=None, repr=False, compare=False) @@ -149,10 +149,10 @@ def __post_init__(self) -> None: if isinstance(self.fformat, str): self.fformat = FileFormat(self.fformat) - if isinstance(self.levelrange_low, (tuple, list)): - self.levelrange_low = RGBALevel(*self.levelrange_low) - if isinstance(self.levelrange_high, (tuple, list)): - self.levelrange_high = RGBALevel(*self.levelrange_high) + if isinstance(self.level_range_low, (tuple, list)): + self.level_range_low = 
RGBALevel(*self.level_range_low) + if isinstance(self.level_range_high, (tuple, list)): + self.level_range_high = RGBALevel(*self.level_range_high) def validate(self) -> None: metadata_list = self.get_metadata() @@ -168,14 +168,14 @@ def validate(self) -> None: ) try: - self.levelrange_low.validate() + self.level_range_low.validate() except ValueError as e: - raise ValueError(f"levelrange_low invalid: {e}") from e + raise ValueError(f"level_range_low invalid: {e}") from e try: - self.levelrange_high.validate() + self.level_range_high.validate() except ValueError as e: - raise ValueError(f"levelrange_high invalid: {e}") from e + raise ValueError(f"level_range_high invalid: {e}") from e if not isinstance(self.fformat, FileFormat): raise ValueError(f"fformat must be a FileFormat enum, got {type(self.fformat)}") @@ -197,9 +197,9 @@ def apply_to_camera(self, camera: BaseCamera) -> None: self.set_saturation(self.saturation) self.set_brightness(self.brightness) self.set_gamma(self.gamma) - self.set_level_range(self.levelrange_low, self.levelrange_high) + self.set_level_range(self.level_range_low, self.level_range_high) - info("Successfully applied all settings to camera") + debug("Successfully applied all settings to camera") except Exception as e: exception(f"Failed to apply settings to camera: {e}") @@ -300,11 +300,11 @@ def __init__(self, model: str, settings_class: type[CameraSettings]): def from_dict(self, data: dict[str, Any]) -> CameraSettings: processed_data = data.copy() - if 'levelrange_low' in processed_data and isinstance(processed_data['levelrange_low'], dict): - processed_data['levelrange_low'] = RGBALevel(**processed_data['levelrange_low']) + if 'level_range_low' in processed_data and isinstance(processed_data['level_range_low'], dict): + processed_data['level_range_low'] = RGBALevel(**processed_data['level_range_low']) - if 'levelrange_high' in processed_data and isinstance(processed_data['levelrange_high'], dict): - processed_data['levelrange_high'] = 
RGBALevel(**processed_data['levelrange_high']) + if 'level_range_high' in processed_data and isinstance(processed_data['level_range_high'], dict): + processed_data['level_range_high'] = RGBALevel(**processed_data['level_range_high']) settings = self.settings_class(**processed_data) return settings From 66fdb2c581932799fc2842ca6a9efbb7a2e4d61b Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Sat, 7 Feb 2026 04:16:55 -0900 Subject: [PATCH 27/46] fixed some camera options --- camera/cameras/amscope_camera.py | 27 +++++++-------------------- camera/settings/amscope_settings.py | 4 ++-- 2 files changed, 9 insertions(+), 22 deletions(-) diff --git a/camera/cameras/amscope_camera.py b/camera/cameras/amscope_camera.py index c53950d..f181910 100644 --- a/camera/cameras/amscope_camera.py +++ b/camera/cameras/amscope_camera.py @@ -14,7 +14,7 @@ import gc from camera.cameras.base_camera import BaseCamera, CameraResolution, CameraInfo -from logger import info, debug, error, exception, get_logger +from logger import info, debug, error, exception, warning from camera.settings.amscope_settings import AmscopeSettings # Module-level reference to the loaded SDK @@ -37,15 +37,6 @@ class AmscopeCamera(BaseCamera): # Class-level flag to track SDK loading _sdk_loaded = False - # Option constants (from amcam SDK documentation) - OPTION_FAN = 0x0a - OPTION_TEC = 0x08 - OPTION_TECTARGET = 0x0c - OPTION_LOW_NOISE = 0x53 - OPTION_HIGH_FULLWELL = 0x51 - OPTION_TESTPATTERN = 0x2c - OPTION_DEMOSAIC = 0x5a - OPTION_BYTEORDER = 0x01 def __init__(self, model: str): """ @@ -59,14 +50,12 @@ def __init__(self, model: str): # Set Settings class self._settings_class = AmscopeSettings - # Initialize logger - self._logger = get_logger() - + self._hcam = None # Will be amcam.Amcam after SDK loads + # Ensure SDK is loaded before instantiating if not AmscopeCamera._sdk_loaded: AmscopeCamera.ensure_sdk_loaded() - - self._hcam = None # Will be amcam.Amcam after SDK loads + self._camera_info = None # Must be set 
via set_camera_info() before opening self._frame_buffer = None @@ -210,7 +199,7 @@ def open(self, camera_id: str) -> bool: self._hcam = amcam.Amcam.Open(camera_id) if self._hcam: # Set RGB byte order for Qt compatibility - self._hcam.put_Option(self.OPTION_BYTEORDER, 0) + self._hcam.put_Option(_amcam.AMCAM_OPTION_BYTEORDER, 0) # Initialize settings self.initialize_settings() self._is_open = True @@ -282,8 +271,7 @@ def pull_image(self, buffer: ctypes.Array, bits_per_pixel: int = 24, timeout_ms: True if successful, False otherwise """ if not self._hcam: - logger = get_logger() - logger.error("Cannot pull image: camera handle is None") + error("Cannot pull image: camera handle is None") return False amcam = self._get_sdk() @@ -294,8 +282,7 @@ def pull_image(self, buffer: ctypes.Array, bits_per_pixel: int = 24, timeout_ms: return True except amcam.HRESULTException as e: # If timeout or no frame available, log the error - logger = get_logger() - logger.error(f"Failed to pull image: {e}") + error(f"Failed to pull image: {e}") return False def snap_image(self, resolution_index: int = 0) -> bool: diff --git a/camera/settings/amscope_settings.py b/camera/settings/amscope_settings.py index a6b32ce..b237c45 100644 --- a/camera/settings/amscope_settings.py +++ b/camera/settings/amscope_settings.py @@ -333,12 +333,12 @@ def set_level_range_high(self, high: RGBALevel) -> None: def set_fan(self, enabled: bool) -> None: self.fan = enabled if self._camera and hasattr(self._camera, '_hcam'): - self._camera._hcam.put_Option(0x0a, 1 if enabled else 0) + self._camera._hcam.put_Option(self._camera._hcam.AMCAM_OPTION_FAN, 1 if enabled else 0) def set_high_fullwell(self, enabled: bool) -> None: self.high_fullwell = enabled if self._camera and hasattr(self._camera, '_hcam'): - self._camera._hcam.put_Option(0x51, 1 if enabled else 0) + self._camera._hcam.put_Option(self._camera._hcam.AMCAM_OPTION_HIGH_FULLWELL, 1 if enabled else 0) def get_resolutions(self) -> list[CameraResolution]: if 
self._camera is None or not hasattr(self._camera, '_hcam'): From f2b79ad560a734bbf7d7e4dfa4b17607dfb72925 Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Sat, 7 Feb 2026 04:28:56 -0900 Subject: [PATCH 28/46] removed bad camera settings --- camera/settings/amscope_settings.py | 36 ----------------------------- 1 file changed, 36 deletions(-) diff --git a/camera/settings/amscope_settings.py b/camera/settings/amscope_settings.py index b237c45..f669292 100644 --- a/camera/settings/amscope_settings.py +++ b/camera/settings/amscope_settings.py @@ -35,9 +35,6 @@ class AmscopeSettings(CameraSettings): level_range_high: RGBALevel = RGBALevel(255, 255, 255, 255) fformat: FileFormat = FileFormat.TIFF - fan: bool = field(default=False) - high_fullwell: bool = field(default=False) - _camera: BaseCamera | None = field(default=None, repr=False, compare=False) def __post_init__(self) -> None: @@ -195,22 +192,6 @@ def get_metadata(self) -> list[SettingMetadata]: group="Capture", runtime_changeable=True, ), - SettingMetadata( - name="fan", - display_name="Cooling Fan", - setting_type=SettingType.BOOL, - description="Enable cooling fan", - group="Hardware", - runtime_changeable=True, - ), - SettingMetadata( - name="high_fullwell", - display_name="High Full-Well", - setting_type=SettingType.BOOL, - description="Enable high full-well capacity mode", - group="Hardware", - runtime_changeable=True, - ), ] def _get_metadata_map(self) -> dict[str, SettingMetadata]: @@ -328,17 +309,6 @@ def set_level_range_high(self, high: RGBALevel) -> None: (high.r, high.g, high.b, high.a) ) self.level_range_high = high - - - def set_fan(self, enabled: bool) -> None: - self.fan = enabled - if self._camera and hasattr(self._camera, '_hcam'): - self._camera._hcam.put_Option(self._camera._hcam.AMCAM_OPTION_FAN, 1 if enabled else 0) - - def set_high_fullwell(self, enabled: bool) -> None: - self.high_fullwell = enabled - if self._camera and hasattr(self._camera, '_hcam'): - 
self._camera._hcam.put_Option(self._camera._hcam.AMCAM_OPTION_HIGH_FULLWELL, 1 if enabled else 0) def get_resolutions(self) -> list[CameraResolution]: if self._camera is None or not hasattr(self._camera, '_hcam'): @@ -449,9 +419,6 @@ def apply_to_camera(self, camera: BaseCamera) -> None: self.set_level_range(self.level_range_low, self.level_range_high) - self.set_fan(self.fan) - self.set_high_fullwell(self.high_fullwell) - debug("Successfully applied all settings to camera") except Exception as e: exception(f"Failed to apply settings to camera: {e}") @@ -488,9 +455,6 @@ def refresh_from_camera(self, camera: BaseCamera) -> None: self.resolution = hcam.get_eSize() - self.fan = bool(hcam.get_Option(0x0a)) - self.high_fullwell = bool(hcam.get_Option(0x51)) - info("Successfully refreshed all settings from camera") except Exception as e: exception(f"Failed to refresh settings from camera: {e}") \ No newline at end of file From d0ca69ef38bd13fcd04e3ce10491a840e4d700f5 Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Sun, 8 Feb 2026 23:14:55 -0900 Subject: [PATCH 29/46] fixed tiff bgr problem --- camera/cameras/amscope_camera.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/camera/cameras/amscope_camera.py b/camera/cameras/amscope_camera.py index f181910..2478a70 100644 --- a/camera/cameras/amscope_camera.py +++ b/camera/cameras/amscope_camera.py @@ -392,7 +392,6 @@ def get_camera_metadata(self) -> dict[str, Any]: # Get metadata from settings if available if self._settings is not None: metadata['exposure_time_us'] = self._settings.get_exposure_time() - metadata['gain_percent'] = self._settings.get_gain() metadata['temperature'] = self._settings.temp metadata['tint'] = self._settings.tint @@ -487,9 +486,6 @@ def still_callback(event, ctx): stride = amcam.TDIBWIDTHBYTES(w * 24) image_data = np.frombuffer(pData, dtype=np.uint8).reshape((h, stride))[:, :w*3].reshape((h, w, 3)).copy() - # Convert BGR to RGB - image_data = image_data[:, :, ::-1].copy() - del pData # Save 
with metadata From 3e6496545bc1f234dc7e92c1e567af930c767b62 Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Mon, 9 Feb 2026 19:53:17 -0900 Subject: [PATCH 30/46] removed leftover/redundant code --- camera/camera_enumerator.py | 18 ++++----- camera/camera_manager.py | 36 ++++++------------ camera/cameras/amscope_camera.py | 64 +++++++------------------------- camera/cameras/base_camera.py | 8 ---- 4 files changed, 35 insertions(+), 91 deletions(-) diff --git a/camera/camera_enumerator.py b/camera/camera_enumerator.py index 5b41624..b06e163 100644 --- a/camera/camera_enumerator.py +++ b/camera/camera_enumerator.py @@ -8,7 +8,7 @@ from abc import ABC, abstractmethod from dataclasses import dataclass from enum import Enum -from typing import List, Optional, Dict, Any +from typing import Any from logger import error, exception, debug from camera.cameras.amscope_camera import AmscopeCamera @@ -28,11 +28,11 @@ class CameraInfo: camera_type: CameraType device_id: str display_name: str - model: Optional[str] = None - manufacturer: Optional[str] = None - serial_number: Optional[str] = None - max_resolution: Optional[tuple[int, int]] = None - metadata: Optional[Dict[str, Any]] = None + model: str | None = None + manufacturer: str | None = None + serial_number: str | None = None + max_resolution: tuple[int, int] | None = None + metadata: dict[str, Any] | None = None def __str__(self) -> str: parts = [self.display_name] @@ -53,7 +53,7 @@ class CameraEnumerator(ABC): """ @abstractmethod - def enumerate(self) -> List[CameraInfo]: + def enumerate(self) -> list[CameraInfo]: """ Enumerate all cameras of this type. 
@@ -137,7 +137,7 @@ def is_available(self) -> bool: self._sdk = None return False - def enumerate(self) -> List[CameraInfo]: + def enumerate(self) -> list[CameraInfo]: """Enumerate Amscope cameras""" # Ensure SDK is available before enumerating if not self.is_available(): @@ -212,7 +212,7 @@ def is_available(self) -> bool: except ImportError: return False - def enumerate(self) -> List[CameraInfo]: + def enumerate(self) -> list[CameraInfo]: """Enumerate generic USB cameras (placeholder)""" # For now, return empty list # Future: Implement using OpenCV or platform-specific APIs diff --git a/camera/camera_manager.py b/camera/camera_manager.py index 158059b..811c031 100644 --- a/camera/camera_manager.py +++ b/camera/camera_manager.py @@ -5,7 +5,6 @@ from __future__ import annotations -from typing import Optional, List, Callable from PySide6.QtCore import QObject, Signal from camera.cameras.base_camera import BaseCamera @@ -39,34 +38,34 @@ def __init__(self): super().__init__() # Available camera enumerators (plugin architecture) - self._enumerators: List[CameraEnumerator] = [ + self._enumerators: list[CameraEnumerator] = [ AmscopeEnumerator(), GenericUSBEnumerator(), # Future: Add more enumerators here ] # Available cameras (from last enumeration) - self._available_cameras: List[CameraInfo] = [] + self._available_cameras: list[CameraInfo] = [] # Active camera - self._active_camera: Optional[BaseCamera] = None - self._active_camera_info: Optional[CameraInfo] = None + self._active_camera: BaseCamera | None = None + self._active_camera_info: CameraInfo | None = None self._camera_thread_started = False info("Camera manager initialized") @property - def available_cameras(self) -> List[CameraInfo]: + def available_cameras(self) -> list[CameraInfo]: """Get list of available cameras from last enumeration""" return self._available_cameras.copy() @property - def active_camera(self) -> Optional[BaseCamera]: + def active_camera(self) -> BaseCamera | None: """Get the currently 
active camera (may be None)""" return self._active_camera @property - def active_camera_info(self) -> Optional[CameraInfo]: + def active_camera_info(self) -> CameraInfo | None: """Get info about the currently active camera""" return self._active_camera_info @@ -75,7 +74,7 @@ def has_active_camera(self) -> bool: """Check if there is an active camera""" return self._active_camera is not None - def enumerate_cameras(self) -> List[CameraInfo]: + def enumerate_cameras(self) -> list[CameraInfo]: """ Enumerate all available cameras across all enumerators. @@ -112,7 +111,7 @@ def enumerate_cameras(self) -> List[CameraInfo]: return cameras - def get_camera_by_id(self, device_id: str) -> Optional[CameraInfo]: + def get_camera_by_id(self, device_id: str) -> CameraInfo | None: """ Find a camera by its device ID. @@ -127,7 +126,7 @@ def get_camera_by_id(self, device_id: str) -> Optional[CameraInfo]: return camera_info return None - def get_cameras_by_type(self, camera_type: CameraType) -> List[CameraInfo]: + def get_cameras_by_type(self, camera_type: CameraType) -> list[CameraInfo]: """ Get all cameras of a specific type. 
@@ -163,17 +162,6 @@ def switch_camera(self, camera_info: CameraInfo) -> bool: error(f"Failed to create camera instance for {camera_info}") return False - # Set camera info if the camera supports it - if hasattr(camera, 'set_camera_info'): - # Create the old-style CameraInfo from our new CameraInfo - from camera.cameras.base_camera import CameraInfo as OldCameraInfo - old_camera_info = OldCameraInfo( - id=camera_info.device_id, - displayname=camera_info.display_name, - model=camera_info.metadata.get('model_info') if camera_info.metadata else None - ) - camera.set_camera_info(old_camera_info) - # Wrap in threaded camera threaded_camera = ThreadedCamera(camera) threaded_camera.start_thread() @@ -277,7 +265,7 @@ def close_camera(self) -> bool: self.active_camera_changed.emit(None) return False - def _create_camera_instance(self, camera_info: CameraInfo) -> Optional[BaseCamera]: + def _create_camera_instance(self, camera_info: CameraInfo) -> BaseCamera | None: """ Factory method to create camera instance based on camera info. 
@@ -318,4 +306,4 @@ def cleanup(self): # Clear available cameras self._available_cameras.clear() - self.camera_list_changed.emit() + self.camera_list_changed.emit() \ No newline at end of file diff --git a/camera/cameras/amscope_camera.py b/camera/cameras/amscope_camera.py index 2478a70..e555918 100644 --- a/camera/cameras/amscope_camera.py +++ b/camera/cameras/amscope_camera.py @@ -13,7 +13,7 @@ import threading import gc -from camera.cameras.base_camera import BaseCamera, CameraResolution, CameraInfo +from camera.cameras.base_camera import BaseCamera, CameraResolution from logger import info, debug, error, exception, warning from camera.settings.amscope_settings import AmscopeSettings @@ -56,7 +56,6 @@ def __init__(self, model: str): if not AmscopeCamera._sdk_loaded: AmscopeCamera.ensure_sdk_loaded() - self._camera_info = None # Must be set via set_camera_info() before opening self._frame_buffer = None def _get_settings_class(self): @@ -216,7 +215,6 @@ def close(self): self._is_open = False self._callback = None self._callback_context = None - self._camera_info = None self._frame_buffer = None def _reallocate_frame_buffer(self): @@ -300,62 +298,28 @@ def snap_image(self, resolution_index: int = 0) -> bool: # Resolution Management # ------------------------- - def set_camera_info(self, info: CameraInfo): - """Set camera information (needed before get_resolutions works)""" - self._camera_info = info - def get_resolutions(self) -> list[CameraResolution]: """Get available preview resolutions""" - if not self._camera_info or not self._camera_info.model: - return [] - - resolutions = [] - for i in range(self._camera_info.model.preview): - res = self._camera_info.model.res[i] - resolutions.append(CameraResolution(width=res.width, height=res.height)) - - return resolutions - - def get_current_resolution(self) -> Tuple[int, int, int]: + return self.settings.get_resolutions() + + def get_current_resolution(self) -> tuple[int, int, int]: """Get current resolution index, 
width, and height""" - if not self._hcam or not self._camera_info: - return 0, 0, 0 - - res_index = self._hcam.get_eSize() - res = self._camera_info.model.res[res_index] - return res_index, res.width, res.height - + return self.settings.get_current_resolution() + def set_resolution(self, resolution_index: int) -> bool: """Set camera resolution""" - if not self._hcam: - return False - - try: - self._hcam.put_eSize(resolution_index) - return True - except: - return False - + return self.settings.set_resolution(resolution_index) + def supports_still_capture(self) -> bool: """Check if camera supports separate still image capture""" - if not self._camera_info or not self._camera_info.model: - return False - - return self._camera_info.model.still > 0 - + return len(self.settings.get_still_resolutions()) > 0 + def get_still_resolutions(self) -> list[CameraResolution]: """Get available still image resolutions""" - if not self._camera_info or not self._camera_info.model: - return [] - - resolutions = [] - for i in range(self._camera_info.model.still): - res = self._camera_info.model.res[i] - resolutions.append(CameraResolution(width=res.width, height=res.height)) - - return resolutions - - def pull_still_image(self, buffer: ctypes.Array, bits_per_pixel: int = 24) -> Tuple[bool, int, int]: + return self.settings.get_still_resolutions() + + + def pull_still_image(self, buffer: ctypes.Array, bits_per_pixel: int = 24) -> tuple[bool, int, int]: """ Pull a still image into buffer diff --git a/camera/cameras/base_camera.py b/camera/cameras/base_camera.py index 6f1141a..9540c5c 100644 --- a/camera/cameras/base_camera.py +++ b/camera/cameras/base_camera.py @@ -28,14 +28,6 @@ def __str__(self): return f"{self.width}*{self.height}" -@dataclass -class CameraInfo: - """Basic camera information""" - id: str - displayname: str - model: Any # Model-specific information - - class BaseCamera(ABC): """ Abstract base class for camera operations. 
From 0b472b9f3c356a2109e7c6ad076574f6ebe35d62 Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Mon, 9 Feb 2026 20:27:57 -0900 Subject: [PATCH 31/46] resolutions dropdown now properly display in camera settings --- UI/settings/pages/camera_settings.py | 2 +- UI/widgets/camera_preview.py | 6 -- camera/cameras/amscope_camera.py | 2 +- camera/settings/amscope_settings.py | 129 +++++++++++++++++++++------ camera/settings/camera_settings.py | 10 ++- 5 files changed, 114 insertions(+), 35 deletions(-) diff --git a/UI/settings/pages/camera_settings.py b/UI/settings/pages/camera_settings.py index 849207f..053e7ad 100644 --- a/UI/settings/pages/camera_settings.py +++ b/UI/settings/pages/camera_settings.py @@ -854,7 +854,7 @@ def _on_dropdown_changed(self, setter_name: str, index: int, value) -> None: """Handle dropdown setting change Args: - setter_name: Name of the setter method (e.g., 'set_resolution') + setter_name: Name of the setter method (e.g., 'set_preview_resolution') index: Index of the selected item in the dropdown value: Value associated with the selected item """ diff --git a/UI/widgets/camera_preview.py b/UI/widgets/camera_preview.py index 90edb5b..3dc5a1c 100644 --- a/UI/widgets/camera_preview.py +++ b/UI/widgets/camera_preview.py @@ -135,12 +135,6 @@ def _start_streaming(self): self._video_label.setText("Camera has no resolutions available") return - # Use first resolution (typically highest quality) - if not base_camera.set_resolution(0): - error("Preview: Failed to set resolution") - self._video_label.setText("Failed to set camera resolution") - return - # Get resolution again after setting res_index, width, height = base_camera.get_current_resolution() diff --git a/camera/cameras/amscope_camera.py b/camera/cameras/amscope_camera.py index e555918..78806dd 100644 --- a/camera/cameras/amscope_camera.py +++ b/camera/cameras/amscope_camera.py @@ -308,7 +308,7 @@ def get_current_resolution(self) -> tuple[int, int, int]: def set_resolution(self, resolution_index: 
int) -> bool: """Set camera resolution""" - return self.settings.set_resolution(resolution_index) + return self.settings.set_still_resolution(resolution_index) def supports_still_capture(self) -> bool: """Check if camera supports separate still image capture""" diff --git a/camera/settings/amscope_settings.py b/camera/settings/amscope_settings.py index f669292..50b976a 100644 --- a/camera/settings/amscope_settings.py +++ b/camera/settings/amscope_settings.py @@ -22,7 +22,8 @@ class AmscopeSettings(CameraSettings): auto_exposure: bool = True exposure: int = 128 exposure_time: int = 50000 - resolution: int = 0 + preview_resolution: str = "" + still_resolution: str = "" temp: int = 6500 tint: int = 1000 contrast: int = 0 @@ -46,9 +47,11 @@ def get_metadata(self) -> list[SettingMetadata]: """ # Get available resolutions from camera resolutions = self.get_resolutions() - resolution_choices = [f"{res.width}x{res.height}" - for idx, res in enumerate(resolutions)] if resolutions else [] - + resolution_choices = [f"{res.width}x{res.height}" for res in resolutions] + + still_resolutions = self.get_still_resolutions() + still_resolution_choices = [f"{res.width}x{res.height}" for res in still_resolutions] + return [ SettingMetadata( name="auto_exposure", @@ -175,14 +178,23 @@ def get_metadata(self) -> list[SettingMetadata]: runtime_changeable=True, ), SettingMetadata( - name="resolution", - display_name="Resolution", + name="preview_resolution", + display_name="Preview Resolution", setting_type=SettingType.DROPDOWN, - description="Camera resolution", - choices=resolution_choices, # Dynamically populated from camera + description="Camera preview resolution", + choices=resolution_choices, group="Capture", runtime_changeable=False, ), + SettingMetadata( + name="still_resolution", + display_name="Still Resolution", + setting_type=SettingType.DROPDOWN, + description="Resolution used when capturing a still image", + choices=still_resolution_choices, + group="Capture", + 
runtime_changeable=True, + ), SettingMetadata( name="fformat", display_name="File Format", @@ -341,35 +353,86 @@ def get_current_resolution(self) -> tuple[int, int, int]: error(f"Failed to get current resolution: {e}") return (0, 0, 0) - def set_resolution(self, index: int, value: str = "") -> bool: - """Set camera resolution. Requires camera restart.""" + def set_preview_resolution(self, value: str, index: int | None = None) -> bool: + """ + Set camera preview resolution. Requires camera restart. + + The dropdown supplies both the string label (e.g. "1280x960") and the index + of that label in the choices list. When ``index`` is provided it is used + directly; otherwise it is derived from ``value``. + """ try: - if not (0 <= index < len(self.get_resolutions())): - error(f"Invalid resolution index: {index}") - return False - + resolutions = self.get_resolutions() + choices = [f"{r.width}x{r.height}" for r in resolutions] + + if index is None: + if value not in choices: + error(f"Invalid resolution value: {value!r}. Available: {choices}") + return False + index = choices.index(value) + else: + if not (0 <= index < len(choices)): + error(f"Invalid resolution index: {index}. 
Valid range: 0-{len(choices) - 1}") + return False + value = choices[index] + camera_was_open = self._camera.is_open saved_callback = self._camera._callback saved_context = self._camera._callback_context - + if camera_was_open: debug("Camera is open, stopping to set resolution") self._camera.stop_capture() - - # Set resolution on the underlying camera + self._camera._hcam.put_eSize(index) - self.resolution = index - + self.preview_resolution = value + if camera_was_open: - debug("Restarting camera to set resolution") + debug("Restarting camera after resolution change") self._camera.start_capture(saved_callback, saved_context) - - debug(f"Successfully changed resolution to index {index}") + + debug(f"Successfully changed preview resolution to {value} (index {index})") return True except Exception as e: error(f"Failed to set resolution: {e}") return False - + + def set_still_resolution(self, value: str, index: int | None = None) -> bool: + """ + Set the still-capture resolution. + + The dropdown supplies both the string label (e.g. "2592x1944") and the index + of that label in the choices list. When ``index`` is provided it is used + directly; otherwise it is derived from ``value``. The camera does not need + to be restarted; the stored value is used as the index argument at Snap() time. + """ + try: + still_resolutions = self.get_still_resolutions() + choices = [f"{r.width}x{r.height}" for r in still_resolutions] + + if not choices: + # Camera doesn't support distinct still resolutions; store as-is. + self.still_resolution = value + return True + + if index is None: + if value not in choices: + error(f"Invalid still resolution value: {value!r}. Available: {choices}") + return False + index = choices.index(value) + else: + if not (0 <= index < len(choices)): + error(f"Invalid still resolution index: {index}. 
Valid range: 0-{len(choices) - 1}") + return False + value = choices[index] + + self.still_resolution = value + debug(f"Successfully changed still resolution to {value} (index {index})") + return True + except Exception as e: + error(f"Failed to set still resolution: {e}") + return False + def get_still_resolutions(self) -> list[CameraResolution]: if self._camera is None or not hasattr(self._camera, '_hcam'): return [] @@ -403,7 +466,10 @@ def apply_to_camera(self, camera: BaseCamera) -> None: info(f"Applying settings to camera {camera.model}") try: - self.set_resolution(self.resolution) + if self.preview_resolution: + self.set_preview_resolution(self.preview_resolution) + if self.still_resolution: + self.set_still_resolution(self.still_resolution) self.set_auto_exposure(self.auto_exposure) self.set_exposure(self.exposure) self.set_exposure_time(self.exposure_time) @@ -453,7 +519,20 @@ def refresh_from_camera(self, camera: BaseCamera) -> None: self.level_range_low = RGBALevel(r=low[0], g=low[1], b=low[2], a=low[3]) self.level_range_high = RGBALevel(r=high[0], g=high[1], b=high[2], a=high[3]) - self.resolution = hcam.get_eSize() + index = hcam.get_eSize() + resolutions = self.get_resolutions() + if 0 <= index < len(resolutions): + r = resolutions[index] + self.preview_resolution = f"{r.width}x{r.height}" + else: + self.preview_resolution = "" + + still_resolutions = self.get_still_resolutions() + if still_resolutions: + # The SDK has no dedicated "get current still resolution" call; default + # to the highest-resolution option (index 0) when refreshing. + r = still_resolutions[0] + self.still_resolution = f"{r.width}x{r.height}" info("Successfully refreshed all settings from camera") except Exception as e: diff --git a/camera/settings/camera_settings.py b/camera/settings/camera_settings.py index 01e9205..ddef203 100644 --- a/camera/settings/camera_settings.py +++ b/camera/settings/camera_settings.py @@ -119,6 +119,7 @@ def set_exposure(self, value: int) -> None: 4. 
Implement resolution and exposure methods: - get_resolutions(), set_resolution(), get_current_resolution() + - get_still_resolutions(), set_still_resolution() - get_exposure_time(), set_exposure_time() Note on Default Values: @@ -131,7 +132,8 @@ def set_exposure(self, value: int) -> None: auto_exposure: bool exposure: int exposure_time: int - resolution: int + preview_resolution: str + still_resolution: str tint: int contrast: int hue: int @@ -257,7 +259,11 @@ def get_current_resolution(self) -> tuple[int, int, int]: pass @abstractmethod - def set_resolution(self, index: int, value: str | None = None) -> bool: + def set_preview_resolution(self, value: str, index: int | None = None) -> bool: + pass + + @abstractmethod + def set_still_resolution(self, value: str, index: int | None = None) -> bool: pass def get_still_resolutions(self) -> list['CameraResolution']: From e7b5ef3507882cfe44058f6fe615e10c2929737a Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Mon, 9 Feb 2026 21:07:26 -0900 Subject: [PATCH 32/46] changed order of settings menu --- camera/settings/amscope_settings.py | 54 ++++++++++++++--------------- 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/camera/settings/amscope_settings.py b/camera/settings/amscope_settings.py index 50b976a..bc1c82f 100644 --- a/camera/settings/amscope_settings.py +++ b/camera/settings/amscope_settings.py @@ -53,6 +53,33 @@ def get_metadata(self) -> list[SettingMetadata]: still_resolution_choices = [f"{res.width}x{res.height}" for res in still_resolutions] return [ + SettingMetadata( + name="preview_resolution", + display_name="Preview Resolution", + setting_type=SettingType.DROPDOWN, + description="Camera preview resolution", + choices=resolution_choices, + group="Capture", + runtime_changeable=False, + ), + SettingMetadata( + name="still_resolution", + display_name="Still Resolution", + setting_type=SettingType.DROPDOWN, + description="Resolution used when capturing a still image", + 
choices=still_resolution_choices, + group="Capture", + runtime_changeable=True, + ), + SettingMetadata( + name="fformat", + display_name="File Format", + setting_type=SettingType.DROPDOWN, + description="Default file format for saved images", + choices=self._file_formats, + group="Capture", + runtime_changeable=True, + ), SettingMetadata( name="auto_exposure", display_name="Auto Exposure", @@ -177,33 +204,6 @@ def get_metadata(self) -> list[SettingMetadata]: group="Levels", runtime_changeable=True, ), - SettingMetadata( - name="preview_resolution", - display_name="Preview Resolution", - setting_type=SettingType.DROPDOWN, - description="Camera preview resolution", - choices=resolution_choices, - group="Capture", - runtime_changeable=False, - ), - SettingMetadata( - name="still_resolution", - display_name="Still Resolution", - setting_type=SettingType.DROPDOWN, - description="Resolution used when capturing a still image", - choices=still_resolution_choices, - group="Capture", - runtime_changeable=True, - ), - SettingMetadata( - name="fformat", - display_name="File Format", - setting_type=SettingType.DROPDOWN, - description="Default file format for saved images", - choices=self._file_formats, - group="Capture", - runtime_changeable=True, - ), ] def _get_metadata_map(self) -> dict[str, SettingMetadata]: From 35380c98a06bc56c4cafdfc5709db0d67bcca2ff Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Mon, 9 Feb 2026 21:14:03 -0900 Subject: [PATCH 33/46] fixed autoexposure checkbox not working --- UI/settings/pages/camera_settings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/UI/settings/pages/camera_settings.py b/UI/settings/pages/camera_settings.py index 053e7ad..b704eb0 100644 --- a/UI/settings/pages/camera_settings.py +++ b/UI/settings/pages/camera_settings.py @@ -418,7 +418,7 @@ def _create_bool_widget(self, meta, settings) -> QCheckBox | None: checkbox.setToolTip(meta.description) # Connect to setter - checkbox.stateChanged.connect( + 
checkbox.checkStateChanged.connect( lambda state: self._on_bool_changed(setter_name, state == Qt.CheckState.Checked) ) From 29da7772d1281f03016bf12796190671cdc32e86 Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Wed, 11 Feb 2026 00:56:31 -0900 Subject: [PATCH 34/46] exposure settings now automatically adjust according to auto exposure --- UI/settings/pages/camera_settings.py | 205 +++++++++++++++++++++++++-- camera/settings/amscope_settings.py | 53 ++++++- camera/settings/camera_settings.py | 133 ++++++++--------- 3 files changed, 302 insertions(+), 89 deletions(-) diff --git a/UI/settings/pages/camera_settings.py b/UI/settings/pages/camera_settings.py index b704eb0..2198a49 100644 --- a/UI/settings/pages/camera_settings.py +++ b/UI/settings/pages/camera_settings.py @@ -20,12 +20,16 @@ QFrame, QMessageBox, ) -from PySide6.QtCore import Qt, Signal, Slot +from PySide6.QtCore import Qt, Signal, Slot, QTimer from app_context import get_app_context from logger import info, error, warning, debug from camera.cameras.base_camera import CameraResolution +# Interval (ms) between live-value polls for hardware-controlled fields. +_LIVE_POLL_INTERVAL_MS = 500 + + class CameraSettingsWidget(QWidget): """Widget for displaying and editing camera settings""" @@ -43,6 +47,15 @@ def __init__(self, parent_dialog=None, parent: QWidget | None = None) -> None: self._saved_values: dict[str, any] = {} # Store saved values for comparison self._group_names: list[str] = [] # Track group names in order self._group_widgets: dict[str, QGroupBox] = {} # Map group names to widgets + + # Maps field_name -> (controller_field_name, controlled_when) for all fields + # with controlled_by set. Populated in _refresh_settings_display(). + self._controlled_fields: dict[str, tuple[str, bool]] = {} + + # Live-value polling timer — fires when at least one controller is True. 
+ self._live_poll_timer = QTimer(self) + self._live_poll_timer.setInterval(_LIVE_POLL_INTERVAL_MS) + self._live_poll_timer.timeout.connect(self._poll_live_values) self._setup_ui() self._connect_signals() @@ -232,6 +245,9 @@ def _on_active_camera_changed(self, camera_info) -> None: def _refresh_settings_display(self) -> None: """Refresh the settings display based on current camera""" + self._live_poll_timer.stop() + self._controlled_fields.clear() + # Clear existing settings widgets self._clear_settings_display() @@ -256,6 +272,12 @@ def _refresh_settings_display(self) -> None: for meta in metadata_list: current_value = getattr(settings, meta.name, None) self._saved_values[meta.name] = current_value + + # Build controlled-field index from metadata + for meta in metadata_list: + if meta.controlled_by: + controlled_when = getattr(meta, 'controlled_when', True) + self._controlled_fields[meta.name] = (meta.controlled_by, controlled_when) # Group settings by category grouped_settings = self._group_settings(metadata_list) @@ -277,6 +299,13 @@ def _refresh_settings_display(self) -> None: if self.parent_dialog and hasattr(self.parent_dialog, 'register_group_box'): self.parent_dialog.register_group_box("Camera", group_name, group_box) + # Apply initial controlled-field state (greyed-out / locked if controller is on) + self._apply_all_controlled_states(settings) + + # Start live polling if any controller is currently active + if self._any_controller_active(settings): + self._live_poll_timer.start() + # Update tree items in parent dialog if self.parent_dialog and hasattr(self.parent_dialog, '_update_camera_groups'): self.parent_dialog._update_camera_groups(self._group_names) @@ -356,8 +385,8 @@ def _create_settings_group(self, group_name: str, settings_list: list) -> QGroup layout.addRow(label, widget) - # Store both widget and label for styling - # Create a container that holds both for easier styling + # Store a container that holds references to both label and control + # 
for later styling and enable/disable operations. widget_container = QWidget() widget_container.setProperty("label", label) widget_container.setProperty("control", widget) @@ -605,6 +634,113 @@ def on_rgba_changed(): layout.addStretch() return container + + # ------------------------------------------------------------------ + # Controlled-field helpers + # ------------------------------------------------------------------ + + def _any_controller_active(self, settings) -> bool: + """Return True if at least one field is currently in its controlled (locked) state.""" + for field_name, (controller_name, controlled_when) in self._controlled_fields.items(): + controller_value = bool(getattr(settings, controller_name, False)) + if controller_value == controlled_when: + return True + return False + + def _apply_all_controlled_states(self, settings) -> None: + """Apply grey-out / lock state for every controlled field based on current settings.""" + for field_name, (controller_name, controlled_when) in self._controlled_fields.items(): + controller_value = bool(getattr(settings, controller_name, False)) + is_locked = controller_value == controlled_when + self._set_field_controlled(field_name, is_locked) + + def _set_field_controlled(self, field_name: str, controlled: bool) -> None: + """Grey out (and lock) or restore a controlled field widget.""" + container = self._settings_widgets.get(field_name) + if not container: + return + + label = container.property("label") + control = container.property("control") + + if controlled: + # Visually dim the label + if label: + label.setStyleSheet("QLabel { color: #aaaaaa; }") + # Disable all interactive child widgets so the user cannot edit + if control: + for child in control.findChildren(QWidget): + child.setEnabled(False) + control.setEnabled(False) + else: + # Restore normal appearance and re-enable + if label: + label.setStyleSheet("") + if control: + control.setEnabled(True) + for child in control.findChildren(QWidget): + 
child.setEnabled(True) + # Re-apply any existing "modified" orange styling + is_modified = field_name in self._modified_settings + if is_modified: + self._apply_orange_styling(container, True) + + def _update_display_value(self, field_name: str, value: int) -> None: + """Update only the visual display of a controlled field without touching settings.""" + container = self._settings_widgets.get(field_name) + if not container: + return + + control = container.property("control") + if not control: + return + + self._updating_from_camera = True + try: + # Range widgets: container holds a slider and a spinbox + spinboxes = control.findChildren(QSpinBox) + dbl_spinboxes = control.findChildren(QDoubleSpinBox) + sliders = control.findChildren(QSlider) + + for sb in spinboxes: + sb.blockSignals(True) + sb.setValue(int(value)) + sb.blockSignals(False) + for sb in dbl_spinboxes: + sb.blockSignals(True) + sb.setValue(float(value)) + sb.blockSignals(False) + for sl in sliders: + sl.blockSignals(True) + sl.setValue(int(value)) + sl.blockSignals(False) + finally: + self._updating_from_camera = False + + @Slot() + def _poll_live_values(self) -> None: + """Timer slot: read live hardware values and update display widgets.""" + camera = self.ctx.camera + if not camera: + self._live_poll_timer.stop() + return + + settings = camera.settings + try: + live = settings.get_live_values() + except Exception as e: + error(f"Error polling live values: {e}") + return + + if not live: + # No controlled fields are active; stop polling. 
+ self._live_poll_timer.stop() + return + + for field_name, value in live.items(): + self._update_display_value(field_name, value) + + # ------------------------------------------------------------------ def _mark_setting_modified(self, setting_name: str, current_value) -> None: """Mark a setting as modified and update its widget styling""" @@ -627,13 +763,25 @@ def _mark_setting_modified(self, setting_name: str, current_value) -> None: else: self._modified_settings.discard(setting_name) - # Update widget styling + # Update widget styling (skip if still greyed out / controlled) + entry = self._controlled_fields.get(setting_name) + if entry: + controller_name, controlled_when = entry + camera = self.ctx.camera + if camera: + controller_value = bool(getattr(camera.settings, controller_name, False)) + if controller_value == controlled_when: + # Field is still locked — don't apply orange yet + self._emit_modifications_changed() + return + self._update_widget_styling(setting_name, is_modified) - + self._emit_modifications_changed() + + def _emit_modifications_changed(self) -> None: # Update category color in parent dialog if self.parent_dialog and hasattr(self.parent_dialog, 'set_category_modified'): self.parent_dialog.set_category_modified("Camera", len(self._modified_settings) > 0) - # Emit signal about modification state change self.modifications_changed.emit(len(self._modified_settings) > 0) @@ -645,7 +793,6 @@ def _update_widget_styling(self, setting_name: str, is_modified: bool) -> None: if is_modified: # Orange text and slider for modified settings - # Apply styling to all child widgets self._apply_orange_styling(widget, True) else: # Clear custom styling to revert to default @@ -728,18 +875,50 @@ def _on_bool_changed(self, setter_name: str, value: bool) -> None: camera = self.ctx.camera if not camera: return - + + field_name = setter_name.removeprefix("set_") + + # Determine if this boolean is a controller for other fields. 
+ controlled_by_this = [ + (fn, controlled_when) + for fn, (ctrl, controlled_when) in self._controlled_fields.items() + if ctrl == field_name + ] + + # If we are turning the controller OFF, flush live-value fields (controlled_when=True). + if not value and controlled_by_this: + try: + camera.settings.on_controller_disabled(field_name) + except Exception as e: + error(f"Error flushing controlled values for {field_name}: {e}") + try: setter = getattr(camera.settings, setter_name) setter(value) - - # Extract setting name from setter name (remove "set_" prefix) - setting_name = setter_name.replace("set_", "") - self._mark_setting_modified(setting_name, value) - + self._mark_setting_modified(field_name, value) debug(f"Set {setter_name} to {value}") except Exception as e: error(f"Error setting {setter_name}: {e}") + return + + # Update controlled field state and live polling + if controlled_by_this: + for fn, controlled_when in controlled_by_this: + is_locked = value == controlled_when + self._set_field_controlled(fn, is_locked) + + if not is_locked and controlled_when: + # A live-value field just became editable — flush its display and mark modified. 
+ flushed_value = getattr(camera.settings, fn, None) + if flushed_value is not None: + self._update_display_value(fn, flushed_value) + self._mark_setting_modified(fn, flushed_value) + + if self._any_controller_active(camera.settings): + if not self._live_poll_timer.isActive(): + self._live_poll_timer.start() + else: + self._live_poll_timer.stop() def _on_int_changed(self, setter_name: str, value: int, slider: QSlider) -> None: """Handle integer setting change from spinbox""" diff --git a/camera/settings/amscope_settings.py b/camera/settings/amscope_settings.py index bc1c82f..f408b68 100644 --- a/camera/settings/amscope_settings.py +++ b/camera/settings/amscope_settings.py @@ -97,6 +97,8 @@ def get_metadata(self) -> list[SettingMetadata]: max_value=235, group="Exposure", runtime_changeable=True, + controlled_by="auto_exposure", + controlled_when=False, ), SettingMetadata( name="exposure_time", @@ -107,6 +109,7 @@ def get_metadata(self) -> list[SettingMetadata]: max_value=1000000, group="Exposure", runtime_changeable=True, + controlled_by="auto_exposure", ), SettingMetadata( name="gain", @@ -117,6 +120,7 @@ def get_metadata(self) -> list[SettingMetadata]: max_value=300, group="Exposure", runtime_changeable=True, + controlled_by="auto_exposure", ), SettingMetadata( name="temp", @@ -216,8 +220,51 @@ def _validate_range(self, name: str, value: int) -> None: raise ValueError( f"{name} must be in [{meta.min_value}, {meta.max_value}], got {value}" ) + + # ------------------------------------------------------------------ + # Live-value protocol + # ------------------------------------------------------------------ + + def get_live_values(self) -> dict[str, int]: + """Return live hardware exposure_time and gain while auto_exposure is on.""" + if not self.auto_exposure: + return {} + if not (self._camera and hasattr(self._camera, '_hcam')): + return {} + try: + hcam = self._camera._hcam + return { + "exposure_time": hcam.get_ExpoTime(), + "gain": hcam.get_ExpoAGain(), + } + 
except Exception as e: + error(f"Failed to read live exposure values: {e}") + return {} + + def on_controller_disabled(self, controller_name: str) -> None: + """Flush live exposure_time / gain into stored settings when auto_exposure turns off.""" + if controller_name != "auto_exposure": + super().on_controller_disabled(controller_name) + return + + if not (self._camera and hasattr(self._camera, '_hcam')): + return + try: + hcam = self._camera._hcam + self.exposure_time = hcam.get_ExpoTime() + self.gain = hcam.get_ExpoAGain() + debug( + f"Flushed auto-exposure values: exposure_time={self.exposure_time}, gain={self.gain}" + ) + except Exception as e: + error(f"Failed to flush live exposure values: {e}") + + # ------------------------------------------------------------------ def set_auto_exposure(self, enabled: bool) -> None: + if not enabled and self.auto_exposure: + # Flush hardware values before turning off so stored settings are up-to-date. + self.on_controller_disabled("auto_exposure") self.auto_exposure = enabled if self._camera and hasattr(self._camera, '_hcam'): self._camera._hcam.put_AutoExpoEnable(1 if enabled else 0) @@ -403,15 +450,13 @@ def set_still_resolution(self, value: str, index: int | None = None) -> bool: The dropdown supplies both the string label (e.g. "2592x1944") and the index of that label in the choices list. When ``index`` is provided it is used - directly; otherwise it is derived from ``value``. The camera does not need - to be restarted; the stored value is used as the index argument at Snap() time. + directly; otherwise it is derived from ``value``. """ try: still_resolutions = self.get_still_resolutions() choices = [f"{r.width}x{r.height}" for r in still_resolutions] if not choices: - # Camera doesn't support distinct still resolutions; store as-is. 
self.still_resolution = value return True @@ -529,8 +574,6 @@ def refresh_from_camera(self, camera: BaseCamera) -> None: still_resolutions = self.get_still_resolutions() if still_resolutions: - # The SDK has no dedicated "get current still resolution" call; default - # to the highest-resolution option (index 0) when refreshing. r = still_resolutions[0] self.still_resolution = f"{r.width}x{r.height}" diff --git a/camera/settings/camera_settings.py b/camera/settings/camera_settings.py index ddef203..1d04ba4 100644 --- a/camera/settings/camera_settings.py +++ b/camera/settings/camera_settings.py @@ -49,83 +49,32 @@ class SettingMetadata: choices: list[str] | None = None group: str = "General" runtime_changeable: bool = True + # When set, this field is greyed out (and, for live-value fields, polled from + # hardware) while the named boolean setting equals *controlled_when*. + # + # controlled_when=True (default): grey out while the controller is ON. + # Example: exposure_time / gain are greyed while auto_exposure is True. + # controlled_when=False: grey out while the controller is OFF. + # Example: exposure target is greyed while auto_exposure is False. + controlled_by: str | None = None + controlled_when: bool = True @dataclass class CameraSettings(ABC): """ Abstract base camera settings class with validation and hardware manipulation. - - This is an abstract base class that MUST be subclassed for each camera type. - Subclasses must implement all abstract methods and provide camera-specific configuration. - - Architecture: - - CameraSettings owns settings storage, validation, and hardware access - - For cameras with SDKs (like AmScope): subclass accesses camera._sdk directly - - For cameras without SDKs (like USB): subclass implements direct hardware access - - BaseCamera provides camera operations, SDK loading, and settings management - - CameraSettingsManager handles loading/saving settings from YAML files - - Responsibilities of CameraSettings: - 1. 
Storage of settings values (dataclass fields) - 2. Validation of settings (using metadata from get_metadata()) - 3. High-level API (set_* methods with validation) - 4. Low-level hardware access (directly to SDK/hardware via abstract methods) - 5. Applying settings to camera hardware - 6. Reading settings from camera hardware - 7. Providing metadata for GUI generation and validation (single source of truth) - - Requirements for Subclasses: - - 1. Implement get_metadata() class method (SINGLE SOURCE OF TRUTH): - - Return SettingMetadata list for GUI generation AND validation - - Include ranges, descriptions, groups, types for ALL settings - - This replaces the old get_ranges() method - no duplication needed - - Example: - @classmethod - def get_metadata(cls) -> list[SettingMetadata]: - return [ - SettingMetadata( - name="exposure", - display_name="Exposure Target", - setting_type=SettingType.RANGE, - description="Target brightness for auto exposure", - min_value=16, - max_value=220, - group="Exposure", - ), - # ... all other settings - ] - - 2. Implement all abstract setter methods (set_exposure, set_temp, etc.): - - Get validation ranges from get_metadata() - - Validate input against those ranges - - Update the corresponding dataclass field - - Access hardware directly to apply the setting - - Example: - def set_exposure(self, value: int) -> None: - metadata = {m.name: m for m in self.get_metadata()} - meta = metadata['exposure'] - if not (meta.min_value <= value <= meta.max_value): - raise ValueError(f"exposure must be in [{meta.min_value}, {meta.max_value}]") - - self.exposure = value - if self._camera and hasattr(self._camera, '_sdk'): - self._camera._sdk.put_AutoExpoTarget(self._camera._device, value) - - 3. Implement refresh_from_camera(): - - Read all current settings from camera hardware - - Update all dataclass fields with hardware values - - 4. 
Implement resolution and exposure methods: - - get_resolutions(), set_resolution(), get_current_resolution() - - get_still_resolutions(), set_still_resolution() - - get_exposure_time(), set_exposure_time() - - Note on Default Values: - - Default values are loaded from YAML files by CameraSettingsManager - - Subclasses do NOT need a create_default() method - - The YAML file serves as the default configuration + + Live-value protocol (for settings driven by automatic hardware control): + - Mark controlled fields with ``controlled_by="<controller_name>"`` in + SettingMetadata. + - Override ``get_live_values()`` to return {field_name: current_hw_value} for + all fields currently being driven by hardware. Return an empty dict when no + field is under hardware control. + - Override ``on_controller_disabled()`` if you need custom flush logic; the + default calls ``get_live_values()`` and writes each value to self. + - The GUI polls ``get_live_values()`` on a timer, updates display widgets only, + and calls ``on_controller_disabled()`` when the controlling boolean turns off. """ version: str @@ -185,6 +134,48 @@ def validate(self) -> None: @abstractmethod def get_metadata(cls) -> list[SettingMetadata]: pass + + # ------------------------------------------------------------------ + # Live-value protocol + # ------------------------------------------------------------------ + + def get_live_values(self) -> dict[str, int]: + """Return the current hardware values for any fields under automatic control. + + Returns a mapping of ``{field_name: current_hardware_value}`` for fields + whose controlling boolean is currently True. Return an empty dict when no + field is actively being driven by hardware. + + The GUI polls this on a short interval and updates display widgets without + writing back to the stored settings object. + """ + return {} + + def on_controller_disabled(self, controller_name: str) -> None: + """Flush current hardware values for fields controlled by *controller_name*. 
+ + Called by the GUI immediately after the user turns off a controlling + boolean (e.g. ``auto_exposure``). The default implementation reads + ``get_live_values()`` and writes any value whose metadata ``controlled_by`` + matches *controller_name* back into self, so that the stored settings + reflect the actual hardware state the moment control was released. + + Subclasses may override for clamping, extra register reads, etc. + """ + live = self.get_live_values() + metadata_map = {m.name: m for m in self.get_metadata()} + for field_name, value in live.items(): + meta = metadata_map.get(field_name) + if ( + meta + and meta.controlled_by == controller_name + and meta.controlled_when # only flush live-value fields (controlled_when=True) + and hasattr(self, field_name) + ): + setattr(self, field_name, value) + debug(f"Flushed live value {field_name}={value} after {controller_name} disabled") + + # ------------------------------------------------------------------ def apply_to_camera(self, camera: BaseCamera) -> None: self._camera = camera From c07fe1b7df6ec26df5b0530eea545c6fb0840b44 Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Wed, 11 Feb 2026 14:27:53 -0900 Subject: [PATCH 35/46] modified styling, and added camera flipping and rotation --- UI/settings/pages/camera_settings.py | 3 +- UI/style.py | 22 +++++ UI/widgets/camera_preview.py | 8 +- camera/settings/amscope_settings.py | 122 ++++++++++++++++++++++++++- 4 files changed, 148 insertions(+), 7 deletions(-) diff --git a/UI/settings/pages/camera_settings.py b/UI/settings/pages/camera_settings.py index 2198a49..7767fdb 100644 --- a/UI/settings/pages/camera_settings.py +++ b/UI/settings/pages/camera_settings.py @@ -74,7 +74,8 @@ def _setup_ui(self) -> None: # Content widget inside scroll area with white background content = QWidget() - content.setStyleSheet("background: white;") + content.setObjectName("CameraSettingsContent") + content.setStyleSheet("QWidget#CameraSettingsContent { background: white; }") 
content_layout = QVBoxLayout(content) content_layout.setContentsMargins(10, 10, 10, 10) content_layout.setSpacing(10) diff --git a/UI/style.py b/UI/style.py index 6d4b7bf..ffc3390 100644 --- a/UI/style.py +++ b/UI/style.py @@ -69,6 +69,28 @@ def apply_style(app: QApplication) -> None: background : transparent; }} + /* Push Buttons - Grey styling */ + QPushButton {{ + background-color: #d0d3d6; + border: 1px solid #b0b3b6; + border-radius: 0px; + padding: 2px 8px; + color: #2c2c2c; + }} + QPushButton:hover {{ + background-color: #c0c3c6; + border-color: #a0a3a6; + }} + QPushButton:pressed {{ + background-color: #b0b3b6; + border-color: #909396; + }} + QPushButton:disabled {{ + background-color: #e0e3e6; + border-color: #d0d3d6; + color: #a0a3a6; + }} + /* Status panel in tab corner */ QFrame#StatusBar {{ diff --git a/UI/widgets/camera_preview.py b/UI/widgets/camera_preview.py index 3dc5a1c..6c2806f 100644 --- a/UI/widgets/camera_preview.py +++ b/UI/widgets/camera_preview.py @@ -138,6 +138,10 @@ def _start_streaming(self): # Get resolution again after setting res_index, width, height = base_camera.get_current_resolution() + # Use final (post-rotation) dimensions for buffer and QImage. + # For 90/270-degree rotations the SDK transposes width and height + # before delivering frames; get_output_dimensions() reflects this. 
+ width, height = self._camera.settings.get_output_dimensions() self._img_width = width self._img_height = height @@ -206,9 +210,9 @@ def _handle_image_event(self): return try: - # Check if resolution has changed + # Check if resolution has changed (use final post-rotation dimensions) base_camera = self._camera.underlying_camera - _, current_width, current_height = base_camera.get_current_resolution() + current_width, current_height = self._camera.settings.get_output_dimensions() # If resolution changed, update buffer if current_width != self._img_width or current_height != self._img_height: diff --git a/camera/settings/amscope_settings.py b/camera/settings/amscope_settings.py index f408b68..1e22282 100644 --- a/camera/settings/amscope_settings.py +++ b/camera/settings/amscope_settings.py @@ -35,6 +35,9 @@ class AmscopeSettings(CameraSettings): level_range_low: RGBALevel = RGBALevel(0, 0, 0, 0) level_range_high: RGBALevel = RGBALevel(255, 255, 255, 255) fformat: FileFormat = FileFormat.TIFF + rotate: int = 0 + hflip: bool = False + vflip: bool = False _camera: BaseCamera | None = field(default=None, repr=False, compare=False) @@ -80,6 +83,31 @@ def get_metadata(self) -> list[SettingMetadata]: group="Capture", runtime_changeable=True, ), + SettingMetadata( + name="rotate", + display_name="Rotation", + setting_type=SettingType.DROPDOWN, + description="Rotate the camera image clockwise. 
Requires camera restart to apply.", + choices=["0", "90", "180", "270"], + group="Capture", + runtime_changeable=False, + ), + SettingMetadata( + name="hflip", + display_name="Flip Horizontal", + setting_type=SettingType.BOOL, + description="Mirror the image horizontally", + group="Capture", + runtime_changeable=True, + ), + SettingMetadata( + name="vflip", + display_name="Flip Vertical", + setting_type=SettingType.BOOL, + description="Mirror the image vertically", + group="Capture", + runtime_changeable=True, + ), SettingMetadata( name="auto_exposure", display_name="Auto Exposure", @@ -369,6 +397,72 @@ def set_level_range_high(self, high: RGBALevel) -> None: ) self.level_range_high = high + def set_rotate(self, value: int | str, index: int | None = None) -> bool: + """ + Set the camera image rotation (0, 90, 180, 270 degrees clockwise). + + AMCAM_OPTION_ROTATE cannot be changed while the camera is running, so + this method follows the same stop/restart pattern as set_preview_resolution. + The dropdown supplies both the string label (e.g. "90") and the index of + that label in the choices list. When ``index`` is provided it is used + directly; otherwise it is derived from ``value``. + """ + valid_degrees = [0, 90, 180, 270] + + if index is not None: + if not (0 <= index < len(valid_degrees)): + error(f"Invalid rotation index: {index}. Valid range: 0-{len(valid_degrees) - 1}") + return False + degrees = valid_degrees[index] + else: + try: + degrees = int(value) + except (ValueError, TypeError): + error(f"Invalid rotation value: {value!r}. Must be one of {valid_degrees}") + return False + if degrees not in valid_degrees: + error(f"Invalid rotation value: {degrees}. 
Must be one of {valid_degrees}") + return False + + try: + self.rotate = degrees + + if not (self._camera and hasattr(self._camera, '_hcam')): + return True + + camera_was_open = self._camera.is_open + saved_callback = self._camera._callback + saved_context = self._camera._callback_context + + if camera_was_open: + debug("Camera is open, stopping to set rotation") + self._camera.stop_capture() + + amcam = self._camera._get_sdk() + self._camera._hcam.put_Option(amcam.AMCAM_OPTION_ROTATE, degrees) + + if camera_was_open: + debug("Restarting camera after rotation change") + self._camera.start_capture(saved_callback, saved_context) + + debug(f"Successfully changed rotation to {degrees} degrees") + return True + except Exception as e: + error(f"Failed to set rotation: {e}") + return False + + def set_hflip(self, enabled: bool) -> None: + """Flip the image horizontally.""" + self.hflip = enabled + if self._camera and hasattr(self._camera, '_hcam'): + self._camera._hcam.put_HFlip(1 if enabled else 0) + + def set_vflip(self, enabled: bool) -> None: + """Flip the image vertically.""" + self.vflip = enabled + if self._camera and hasattr(self._camera, '_hcam'): + self._camera._hcam.put_VFlip(1 if enabled else 0) + def get_resolutions(self) -> list[CameraResolution]: if self._camera is None or not hasattr(self._camera, '_hcam'): return [] @@ -399,14 +493,25 @@ def get_current_resolution(self) -> tuple[int, int, int]: except Exception as e: error(f"Failed to get current resolution: {e}") return (0, 0, 0) + + def get_output_dimensions(self) -> tuple[int, int]: + """ + Return the final (width, height) of frames delivered by the SDK. 
+ """ + if self._camera is None or not hasattr(self._camera, '_hcam'): + return (0, 0) + try: + width, height = self._camera._hcam.get_FinalSize() + return (width, height) + except Exception: + pass + # Fallback: raw sensor resolution (no rotation compensation) + _, width, height = self.get_current_resolution() + return (width, height) def set_preview_resolution(self, value: str, index: int | None = None) -> bool: """ Set camera preview resolution. Requires camera restart. - - The dropdown supplies both the string label (e.g. "1280x960") and the index - of that label in the choices list. When ``index`` is provided it is used - directly; otherwise it is derived from ``value``. """ try: resolutions = self.get_resolutions() @@ -530,6 +635,10 @@ def apply_to_camera(self, camera: BaseCamera) -> None: self.set_level_range(self.level_range_low, self.level_range_high) + self.set_rotate(self.rotate) + self.set_hflip(self.hflip) + self.set_vflip(self.vflip) + debug("Successfully applied all settings to camera") except Exception as e: exception(f"Failed to apply settings to camera: {e}") @@ -576,6 +685,11 @@ def refresh_from_camera(self, camera: BaseCamera) -> None: if still_resolutions: r = still_resolutions[0] self.still_resolution = f"{r.width}x{r.height}" + + rotate_raw = hcam.get_Option(camera._get_sdk().AMCAM_OPTION_ROTATE) + self.rotate = rotate_raw if rotate_raw in (0, 90, 180, 270) else 0 + self.hflip = bool(hcam.get_HFlip()) + self.vflip = bool(hcam.get_VFlip()) info("Successfully refreshed all settings from camera") except Exception as e: From b53a13b43a4213adb5beb9960a3c1f0df271d873 Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Wed, 11 Feb 2026 17:34:09 -0900 Subject: [PATCH 36/46] moved frame handling to the camera manager. 
--- UI/widgets/camera_preview.py | 345 +++++++++++------------------------ app_context.py | 8 +- camera/camera_manager.py | 276 +++++++++++++++++++++++++++- 3 files changed, 382 insertions(+), 247 deletions(-) diff --git a/UI/widgets/camera_preview.py b/UI/widgets/camera_preview.py index 6c2806f..2ec93be 100644 --- a/UI/widgets/camera_preview.py +++ b/UI/widgets/camera_preview.py @@ -1,38 +1,27 @@ from __future__ import annotations -from typing import Any import numpy as np -from PySide6.QtCore import Qt, Signal, QTimer, Slot +from PySide6.QtCore import Qt, Slot from PySide6.QtGui import QImage, QPixmap from PySide6.QtWidgets import QFrame, QLabel, QVBoxLayout, QWidget, QSizePolicy from app_context import get_app_context -from camera.cameras.base_camera import BaseCamera from logger import info, error, warning + class CameraPreview(QFrame): """ - Camera-agnostic Preview Area with live streaming. + Camera preview widget that displays frames from the camera manager. + This widget only handles display - it does not manage camera lifecycle. 
""" - - # Signal for camera events (thread-safe) - camera_event = Signal(int) - - # Signal when new frame is available for capture - frame_ready = Signal(np.ndarray) def __init__(self, parent: QWidget | None = None) -> None: super().__init__(parent) self.setFrameShape(QFrame.Shape.NoFrame) - # Camera state - self._camera: BaseCamera | None = None - self._camera_info = None - self._img_width = 0 - self._img_height = 0 - self._img_buffer: bytes | None = None - self._is_streaming = False - self._no_camera_logged = False + # Display state + self._current_width = 0 + self._current_height = 0 # UI elements self._video_label = QLabel() @@ -41,7 +30,7 @@ def __init__(self, parent: QWidget | None = None) -> None: self._video_label.setMinimumSize(1, 1) self._video_label.setSizePolicy(QSizePolicy.Policy.Ignored, QSizePolicy.Policy.Ignored) self._video_label.setStyleSheet("color: #888; font-size: 16px;") - self._video_label.setText("Initializing camera...") + self._video_label.setText("No camera stream") layout = QVBoxLayout(self) layout.setContentsMargins(0, 0, 0, 0) @@ -50,259 +39,141 @@ def __init__(self, parent: QWidget | None = None) -> None: self.setStyleSheet("QFrame { background: #000000; }") - # Timer for checking camera availability - self._init_timer = QTimer(self) - self._init_timer.timeout.connect(self._try_initialize_camera) - - # Connect camera event signal - self.camera_event.connect(self._on_camera_event) - - # Start initialization - self._init_timer.start(500) + # Connect to camera manager signals + self._connect_to_camera_manager() - @Slot() - def _try_initialize_camera(self): - """Try to initialize and connect to camera""" - self._init_timer.stop() - - # Get camera manager from app context + def _connect_to_camera_manager(self): + """Connect to camera manager signals""" ctx = get_app_context() camera_manager = ctx.camera_manager - # Check if camera is already active - if camera_manager.has_active_camera: - info("Preview: Using active camera") - self._camera 
= ctx.camera - - if self._camera: - # Get camera info from manager - self._camera_info = camera_manager.active_camera_info - self._start_streaming() - else: - error("Preview: Camera manager says camera is active but ctx.camera is None") - self._init_timer.start(3000) - return + # Connect to frame ready signal + camera_manager.frame_ready.connect(self._on_frame_ready) - # No active camera, enumerate and open - info("Preview: No active camera, enumerating...") - cameras = camera_manager.enumerate_cameras() - - if not cameras: - self._video_label.setText("No camera detected") - if not self._no_camera_logged: - warning("Preview: No cameras found") - self._no_camera_logged = True - # Retry in a few seconds - self._init_timer.start(3000) - return + # Connect to streaming status signals + camera_manager.streaming_started.connect(self._on_streaming_started) + camera_manager.streaming_stopped.connect(self._on_streaming_stopped) - # Camera found, reset flag - self._no_camera_logged = False + # Connect to camera status signals + camera_manager.camera_error.connect(self._on_camera_error) + camera_manager.camera_disconnected.connect(self._on_camera_disconnected) + camera_manager.active_camera_changed.connect(self._on_active_camera_changed) - # Open first camera - if camera_manager.switch_camera(cameras[0]): - info(f"Preview: Opened camera: {cameras[0]}") - self._camera = ctx.camera - self._camera_info = cameras[0] - self._start_streaming() + # Update initial state + if camera_manager.is_streaming: + width, height = camera_manager.frame_dimensions + self._on_streaming_started(width, height) + elif camera_manager.has_active_camera: + self._video_label.setText("Camera ready - not streaming") else: - self._video_label.setText("Failed to open camera") - error("Preview: Failed to open camera") - # Retry - self._init_timer.start(3000) + self._video_label.setText("No camera connected") - def _start_streaming(self): - """Start camera streaming after camera is opened""" - if not 
self._camera: - error("Preview: Cannot start streaming - no camera") + @Slot(int, int) + def _on_frame_ready(self, width: int, height: int): + """Handle new frame available from camera manager""" + ctx = get_app_context() + camera_manager = ctx.camera_manager + + # Get frame buffer from camera manager + frame_buffer = camera_manager.get_current_frame() + if not frame_buffer: return try: - # Get underlying camera - base_camera = self._camera.underlying_camera + # Check if dimensions changed + if width != self._current_width or height != self._current_height: + self._current_width = width + self._current_height = height - # Get current resolution from underlying camera - res_index, width, height = base_camera.get_current_resolution() - - # If no resolution set (0x0), set to first resolution - if width == 0 or height == 0: - info("Preview: Setting default resolution...") - - # Get available resolutions - resolutions = base_camera.get_resolutions() - if not resolutions: - error("Preview: No resolutions available") - self._video_label.setText("Camera has no resolutions available") - return - - # Get resolution again after setting - res_index, width, height = base_camera.get_current_resolution() - - # Use final (post-rotation) dimensions for buffer and QImage. - # For 90/270-degree rotations the SDK transposes width and height - # before delivering frames; get_output_dimensions() reflects this. 
- width, height = self._camera.settings.get_output_dimensions() - self._img_width = width - self._img_height = height + # Calculate stride + camera = camera_manager.active_camera + if not camera: + return - # Calculate buffer size using base camera class method + base_camera = camera.underlying_camera base_camera_class = type(base_camera) - buffer_size = base_camera_class.calculate_buffer_size(width, height, 24) - self._img_buffer = bytes(buffer_size) + stride = base_camera_class.calculate_stride(width, 24) - # Start capture - use underlying camera directly - success = base_camera.start_capture( - self._camera_callback, - self + # Create QImage from buffer + image = QImage( + frame_buffer, + width, + height, + stride, + QImage.Format.Format_RGB888 ) - if not success: - error("Preview: start_capture returned False") - self._video_label.setText("Failed to start camera stream") - return - - self._is_streaming = True - # Clear text when streaming starts - self._video_label.setText("") - info(f"Preview: Streaming started ({width}x{height})") - - except Exception as e: - self._video_label.setText(f"Error: {str(e)}") - error(f"Preview: Camera start streaming error: {e}") - import traceback - error(traceback.format_exc()) - - @staticmethod - def _camera_callback(event: int, context: Any): - """ - Camera event callback (called from camera thread). - Forward to UI thread via signal. 
- """ - if isinstance(context, CameraPreview): - context.camera_event.emit(event) - - @Slot(int) - def _on_camera_event(self, event: int): - """Handle camera events in UI thread""" - if not self._camera: - return - - # Get underlying camera - base_camera = self._camera.underlying_camera - - # Check if camera is open - if not base_camera.is_open: - return - - # Get event constants from camera - events = base_camera.get_event_constants() - - if event == events.IMAGE: - self._handle_image_event() - elif event == events.ERROR: - self._handle_error() - elif event == events.DISCONNECTED: - self._handle_disconnected() - - def _handle_image_event(self): - """Handle new image from camera""" - if not self._camera or not self._img_buffer: - return - - try: - # Check if resolution has changed (use final post-rotation dimensions) - base_camera = self._camera.underlying_camera - current_width, current_height = self._camera.settings.get_output_dimensions() - - # If resolution changed, update buffer - if current_width != self._img_width or current_height != self._img_height: - info(f"Preview: Resolution changed from {self._img_width}x{self._img_height} to {current_width}x{current_height}") - self._img_width = current_width - self._img_height = current_height - - # Recalculate buffer size - base_camera_class = type(base_camera) - buffer_size = base_camera_class.calculate_buffer_size(current_width, current_height, 24) - self._img_buffer = bytes(buffer_size) + # Make a deep copy for display + image = image.copy() - # Pull image into buffer from underlying camera - if self._camera.underlying_camera.pull_image(self._img_buffer, 24): - # Calculate stride using base camera class method - base_camera_class = type(self._camera.underlying_camera) - stride = base_camera_class.calculate_stride(self._img_width, 24) - - # Create QImage from buffer - image = QImage( - self._img_buffer, - self._img_width, - self._img_height, - stride, - QImage.Format.Format_RGB888 + # Scale to fit label while 
maintaining aspect ratio + if self._video_label.width() > 0 and self._video_label.height() > 0: + scaled_image = image.scaled( + self._video_label.width(), + self._video_label.height(), + Qt.AspectRatioMode.KeepAspectRatio, + Qt.TransformationMode.FastTransformation ) + self._video_label.setPixmap(QPixmap.fromImage(scaled_image)) - # Make a deep copy for display - image = image.copy() - - # Scale to fit label while maintaining aspect ratio - if self._video_label.width() > 0 and self._video_label.height() > 0: - scaled_image = image.scaled( - self._video_label.width(), - self._video_label.height(), - Qt.AspectRatioMode.KeepAspectRatio, - Qt.TransformationMode.FastTransformation - ) - self._video_label.setPixmap(QPixmap.fromImage(scaled_image)) except Exception as e: - error(f"Preview: Error handling image: {e}") + error(f"Preview: Error displaying frame: {e}") + + @Slot(int, int) + def _on_streaming_started(self, width: int, height: int): + """Handle streaming started signal""" + info(f"Preview: Streaming started ({width}x{height})") + self._current_width = width + self._current_height = height + self._video_label.setText("") # Clear any text when streaming starts - def _handle_error(self): + @Slot() + def _on_streaming_stopped(self): + """Handle streaming stopped signal""" + info("Preview: Streaming stopped") + self._video_label.setText("Camera stream stopped") + + @Slot() + def _on_camera_error(self): """Handle camera error""" self._video_label.setText("Camera error occurred") error("Preview: Camera error occurred") - self._close_camera() - # Try to reconnect - self._init_timer.start(3000) - def _handle_disconnected(self): + @Slot() + def _on_camera_disconnected(self): """Handle camera disconnection""" self._video_label.setText("Camera disconnected") warning("Preview: Camera disconnected") - self._close_camera() - # Try to reconnect - self._init_timer.start(3000) - def _close_camera(self): - """Close camera and cleanup""" - self._is_streaming = False - - if 
self._camera: - try: - # Stop capture first (use underlying camera for immediate effect) - info("Preview: Stopping camera capture...") - if self._camera.underlying_camera.is_open: - self._camera.underlying_camera.stop_capture() - - info("Preview: Stopped using camera") - - except Exception as e: - error(f"Preview: Error stopping camera: {e}") - - self._img_buffer = None - self._camera = None - - def closeEvent(self, event): - """Handle widget close event""" - self._close_camera() - super().closeEvent(event) + @Slot(object) + def _on_active_camera_changed(self, camera_info): + """Handle active camera changed""" + if camera_info is None: + self._video_label.setText("No camera connected") + info("Preview: No active camera") + else: + info(f"Preview: Active camera changed to {camera_info.display_name}") + # Don't clear text yet - wait for streaming to start + ctx = get_app_context() + if not ctx.camera_manager.is_streaming: + self._video_label.setText("Camera ready - not streaming") def cleanup(self): """Cleanup resources when widget is being destroyed""" info("Preview: cleanup starting...") - # Stop the initialization timer first - self._init_timer.stop() - - # Stop using camera - self._close_camera() + # Disconnect from camera manager signals + try: + ctx = get_app_context() + camera_manager = ctx.camera_manager + + camera_manager.frame_ready.disconnect(self._on_frame_ready) + camera_manager.streaming_started.disconnect(self._on_streaming_started) + camera_manager.streaming_stopped.disconnect(self._on_streaming_stopped) + camera_manager.camera_error.disconnect(self._on_camera_error) + camera_manager.camera_disconnected.disconnect(self._on_camera_disconnected) + camera_manager.active_camera_changed.disconnect(self._on_active_camera_changed) + except Exception as e: + error(f"Preview: Error disconnecting signals: {e}") info("Preview cleanup complete") \ No newline at end of file diff --git a/app_context.py b/app_context.py index 81d6c91..393f287 100644 --- 
a/app_context.py +++ b/app_context.py @@ -53,7 +53,7 @@ def __init__(self): def camera_manager(self) -> CameraManager: """ Get the camera manager instance. - Use this to enumerate cameras, switch cameras, etc. + Use this to enumerate cameras, switch cameras, start/stop streaming, etc. """ if self._camera_manager is None: self._initialize_camera_manager() @@ -158,10 +158,10 @@ def _initialize_camera_manager(self): cameras = self._camera_manager.enumerate_cameras() if cameras: - # Auto-open the first camera + # Auto-open the first camera and start streaming info("Auto-opening first available camera...") - if self._camera_manager.open_first_available(): - debug("Camera opened successfully during initialization") + if self._camera_manager.open_first_available(start_streaming=True): + debug("Camera opened and streaming started successfully") else: warning("Failed to auto-open first camera") else: diff --git a/camera/camera_manager.py b/camera/camera_manager.py index 811c031..abcb265 100644 --- a/camera/camera_manager.py +++ b/camera/camera_manager.py @@ -1,11 +1,13 @@ """ Camera manager for handling camera enumeration, selection, and lifecycle. -Provides plugin architecture for multiple camera types. +Provides plugin architecture for multiple camera types and manages frame acquisition. """ from __future__ import annotations -from PySide6.QtCore import QObject, Signal +from typing import Any +import numpy as np +from PySide6.QtCore import QObject, Signal, Slot from camera.cameras.base_camera import BaseCamera from camera.cameras.amscope_camera import AmscopeCamera @@ -22,17 +24,30 @@ class CameraManager(QObject): """ - Manages camera enumeration, selection, and lifecycle. + Manages camera enumeration, selection, lifecycle, and frame acquisition. 
Signals: camera_list_changed: Emitted when available cameras change active_camera_changed: Emitted when active camera changes (camera_info or None) enumeration_complete: Emitted when camera enumeration completes (camera_count) + frame_ready: Emitted when a new frame is available (width, height) + streaming_started: Emitted when camera streaming starts (width, height) + streaming_stopped: Emitted when camera streaming stops + camera_error: Emitted when a camera error occurs + camera_disconnected: Emitted when camera is disconnected """ camera_list_changed = Signal() active_camera_changed = Signal(object) # CameraInfo or None enumeration_complete = Signal(int) # count + frame_ready = Signal(int, int) # width, height + streaming_started = Signal(int, int) # width, height + streaming_stopped = Signal() + camera_error = Signal() + camera_disconnected = Signal() + + # Internal signal for forwarding camera events to UI thread + _camera_event = Signal(int) def __init__(self): super().__init__() @@ -52,6 +67,15 @@ def __init__(self): self._active_camera_info: CameraInfo | None = None self._camera_thread_started = False + # Frame management + self._current_frame_buffer: bytes | None = None + self._frame_width = 0 + self._frame_height = 0 + self._is_streaming = False + + # Connect internal camera event signal + self._camera_event.connect(self._on_camera_event) + info("Camera manager initialized") @property @@ -74,6 +98,16 @@ def has_active_camera(self) -> bool: """Check if there is an active camera""" return self._active_camera is not None + @property + def is_streaming(self) -> bool: + """Check if camera is currently streaming""" + return self._is_streaming + + @property + def frame_dimensions(self) -> tuple[int, int]: + """Get current frame dimensions (width, height)""" + return (self._frame_width, self._frame_height) + def enumerate_cameras(self) -> list[CameraInfo]: """ Enumerate all available cameras across all enumerators. 
@@ -138,13 +172,14 @@ def get_cameras_by_type(self, camera_type: CameraType) -> list[CameraInfo]: """ return [cam for cam in self._available_cameras if cam.camera_type == camera_type] - def switch_camera(self, camera_info: CameraInfo) -> bool: + def switch_camera(self, camera_info: CameraInfo, start_streaming: bool = True) -> bool: """ Switch to a different camera. Closes the current camera if any, then opens the new one. Args: camera_info: Information about the camera to switch to + start_streaming: If True, automatically start streaming after opening Returns: True if switch was successful, False otherwise @@ -185,6 +220,11 @@ def switch_camera(self, camera_info: CameraInfo) -> bool: debug(f"Successfully switched to camera: {camera_info}") self.active_camera_changed.emit(camera_info) + + # Start streaming if requested + if start_streaming: + self.start_streaming() + return True except Exception as e: @@ -195,10 +235,13 @@ def switch_camera(self, camera_info: CameraInfo) -> bool: exception(f"Error stopping thread: {stop_error}") return False - def open_first_available(self) -> bool: + def open_first_available(self, start_streaming: bool = True) -> bool: """ Convenience method to enumerate and open the first available camera. + Args: + start_streaming: If True, automatically start streaming after opening + Returns: True if a camera was opened, False otherwise """ @@ -209,7 +252,222 @@ def open_first_available(self) -> bool: return False # Try to open the first camera - return self.switch_camera(cameras[0]) + return self.switch_camera(cameras[0], start_streaming=start_streaming) + + def start_streaming(self) -> bool: + """ + Start streaming from the active camera. 
+ + Returns: + True if streaming started successfully, False otherwise + """ + if not self._active_camera: + error("Cannot start streaming - no active camera") + return False + + if self._is_streaming: + debug("Streaming already active") + return True + + try: + # Get underlying camera + base_camera = self._active_camera.underlying_camera + + # Get current resolution from underlying camera + res_index, width, height = base_camera.get_current_resolution() + + # If no resolution set (0x0), set to first resolution + if width == 0 or height == 0: + info("Setting default resolution...") + + # Get available resolutions + resolutions = base_camera.get_resolutions() + if not resolutions: + error("No resolutions available") + return False + + # Get resolution again after setting + res_index, width, height = base_camera.get_current_resolution() + + # Use final (post-rotation) dimensions for buffer. + # For 90/270-degree rotations the SDK transposes width and height + # before delivering frames; get_output_dimensions() reflects this. + width, height = self._active_camera.settings.get_output_dimensions() + self._frame_width = width + self._frame_height = height + + # Calculate buffer size using base camera class method + base_camera_class = type(base_camera) + buffer_size = base_camera_class.calculate_buffer_size(width, height, 24) + self._current_frame_buffer = bytes(buffer_size) + + # Start capture - use underlying camera directly + success = base_camera.start_capture( + self._camera_callback, + self + ) + + if not success: + error("start_capture returned False") + return False + + self._is_streaming = True + info(f"Streaming started ({width}x{height})") + self.streaming_started.emit(width, height) + return True + + except Exception as e: + error(f"Camera start streaming error: {e}") + import traceback + error(traceback.format_exc()) + return False + + def stop_streaming(self) -> bool: + """ + Stop streaming from the active camera. 
+ + Returns: + True if streaming stopped successfully, False otherwise + """ + if not self._is_streaming: + debug("Streaming not active") + return True + + try: + if self._active_camera and self._active_camera.underlying_camera.is_open: + info("Stopping camera streaming...") + self._active_camera.underlying_camera.stop_capture() + + self._is_streaming = False + self._current_frame_buffer = None + info("Streaming stopped") + self.streaming_stopped.emit() + return True + + except Exception as e: + error(f"Error stopping streaming: {e}") + return False + + def get_current_frame(self) -> bytes | None: + """ + Get the current frame buffer. + + Returns: + Frame buffer as bytes, or None if no frame is available + """ + return self._current_frame_buffer + + def copy_current_frame_to_numpy(self) -> np.ndarray | None: + """ + Copy the current frame to a numpy array. + + Returns: + Frame as numpy array (height, width, 3) or None if no frame available + """ + if not self._current_frame_buffer or self._frame_width == 0 or self._frame_height == 0: + return None + + try: + # Create numpy array from buffer + # Calculate stride + base_camera = self._active_camera.underlying_camera + base_camera_class = type(base_camera) + stride = base_camera_class.calculate_stride(self._frame_width, 24) + + # Create view of buffer + arr = np.frombuffer(self._current_frame_buffer, dtype=np.uint8) + + # Reshape to image dimensions + # Note: stride may be larger than width*3 due to alignment + bytes_per_pixel = 3 + if stride == self._frame_width * bytes_per_pixel: + # No padding, simple reshape + return arr.reshape((self._frame_height, self._frame_width, bytes_per_pixel)).copy() + else: + # Has padding, need to account for it + # Reshape to include stride, then slice off padding + arr_2d = arr.reshape((self._frame_height, stride)) + return arr_2d[:, :self._frame_width * bytes_per_pixel].reshape( + (self._frame_height, self._frame_width, bytes_per_pixel) + ).copy() + + except Exception as e: + 
error(f"Error converting frame to numpy: {e}") + return None + + @staticmethod + def _camera_callback(event: int, context: Any): + """ + Camera event callback (called from camera thread). + Forward to UI thread via signal. + """ + if isinstance(context, CameraManager): + # Emit signal to forward to UI thread + context._camera_event.emit(event) + + @Slot(int) + def _on_camera_event(self, event: int): + """Handle camera events in UI thread""" + if not self._active_camera: + return + + # Get underlying camera + base_camera = self._active_camera.underlying_camera + + # Check if camera is open + if not base_camera.is_open: + return + + # Get event constants from camera + events = base_camera.get_event_constants() + + if event == events.IMAGE: + self._handle_image_event() + elif event == events.ERROR: + self._handle_error() + elif event == events.DISCONNECTED: + self._handle_disconnected() + + def _handle_image_event(self): + """Handle new image from camera""" + if not self._active_camera or not self._current_frame_buffer: + return + + try: + # Check if resolution has changed (use final post-rotation dimensions) + base_camera = self._active_camera.underlying_camera + current_width, current_height = self._active_camera.settings.get_output_dimensions() + + # If resolution changed, update buffer + if current_width != self._frame_width or current_height != self._frame_height: + info(f"Resolution changed from {self._frame_width}x{self._frame_height} to {current_width}x{current_height}") + self._frame_width = current_width + self._frame_height = current_height + + # Recalculate buffer size + base_camera_class = type(base_camera) + buffer_size = base_camera_class.calculate_buffer_size(current_width, current_height, 24) + self._current_frame_buffer = bytes(buffer_size) + + # Pull image into buffer from underlying camera + if base_camera.pull_image(self._current_frame_buffer, 24): + # Emit signal that frame is ready + self.frame_ready.emit(self._frame_width, self._frame_height) + 
+ except Exception as e: + error(f"Error handling image: {e}") + + def _handle_error(self): + """Handle camera error""" + error("Camera error occurred") + self.camera_error.emit() + self.stop_streaming() + + def _handle_disconnected(self): + """Handle camera disconnection""" + warning("Camera disconnected") + self.camera_disconnected.emit() + self.stop_streaming() def close_camera(self) -> bool: """ @@ -224,6 +482,9 @@ def close_camera(self) -> bool: info(f"Closing camera: {self._active_camera_info}") + # Stop streaming first + self.stop_streaming() + try: # Close the camera result = self._active_camera.close(wait=True) @@ -301,6 +562,9 @@ def cleanup(self): """Cleanup camera manager resources""" info("Cleaning up camera manager") + # Stop streaming + self.stop_streaming() + # Close active camera self.close_camera() From fe31c82cdb5b16b82bb46c9f20914aa34f544dfe Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Wed, 11 Feb 2026 18:10:26 -0900 Subject: [PATCH 37/46] Added Camera Overlays --- UI/style.py | 32 +++++++++ UI/widgets/camera_preview.py | 135 +++++++++++++++++++++++++++++++++-- 2 files changed, 160 insertions(+), 7 deletions(-) diff --git a/UI/style.py b/UI/style.py index ffc3390..feecccc 100644 --- a/UI/style.py +++ b/UI/style.py @@ -188,6 +188,38 @@ def apply_style(app: QApplication) -> None: color: rgba(0,0,0,0.62); }} + /* Camera Preview */ + QFrame#CameraPreview {{ + background: #000000; + }} + + QLabel#VideoLabel {{ + color: #888888; + font-size: 16px; + }} + + /* Camera Preview Overlay Buttons */ + QPushButton#OverlayButton, QPushButton#CrosshairButton {{ + background-color: rgba(240, 240, 240, 180); + color: #000; + border: 1px solid rgba(200, 200, 200, 255); + border-radius: 4px; + font-size: 18px; + font-weight: bold; + }} + QPushButton#OverlayButton:hover, QPushButton#CrosshairButton:hover {{ + background-color: rgba(255, 255, 255, 200); + }} + QPushButton#OverlayButton:checked, QPushButton#CrosshairButton:checked {{ + background-color: rgba(100, 
150, 200, 200); + color: white; + border: 2px solid rgba(150, 200, 255, 255); + }} + + QPushButton#CrosshairButton {{ + padding-bottom: 4px; + }} + """ ) \ No newline at end of file diff --git a/UI/widgets/camera_preview.py b/UI/widgets/camera_preview.py index 2ec93be..7921773 100644 --- a/UI/widgets/camera_preview.py +++ b/UI/widgets/camera_preview.py @@ -1,14 +1,102 @@ from __future__ import annotations import numpy as np -from PySide6.QtCore import Qt, Slot -from PySide6.QtGui import QImage, QPixmap -from PySide6.QtWidgets import QFrame, QLabel, QVBoxLayout, QWidget, QSizePolicy +from PySide6.QtCore import Qt, Slot, QRect +from PySide6.QtGui import QImage, QPixmap, QPainter, QPen, QColor +from PySide6.QtWidgets import ( + QFrame, QLabel, QVBoxLayout, QWidget, QSizePolicy, + QPushButton, QHBoxLayout +) from app_context import get_app_context from logger import info, error, warning +class OverlayLabel(QLabel): + """Custom QLabel that can draw overlays on top of the image""" + + def __init__(self, parent: QWidget | None = None) -> None: + super().__init__(parent) + self.show_grid = False + self.show_crosshair = False + + def paintEvent(self, event): + """Override paint event to draw overlays""" + # First draw the base image + super().paintEvent(event) + + # Only draw overlays if we have a pixmap + if self.pixmap() is None or self.pixmap().isNull(): + return + + painter = QPainter(self) + painter.setRenderHint(QPainter.RenderHint.Antialiasing) + + # Get the actual image rect (considering aspect ratio) + pixmap = self.pixmap() + if pixmap.width() == 0 or pixmap.height() == 0: + return + + # Calculate the displayed image rect + widget_rect = self.rect() + pixmap_rect = pixmap.rect() + + # Calculate scaled rect maintaining aspect ratio + scale = min( + widget_rect.width() / pixmap_rect.width(), + widget_rect.height() / pixmap_rect.height() + ) + + scaled_width = int(pixmap_rect.width() * scale) + scaled_height = int(pixmap_rect.height() * scale) + + x = 
(widget_rect.width() - scaled_width) // 2 + y = (widget_rect.height() - scaled_height) // 2 + + image_rect = QRect(x, y, scaled_width, scaled_height) + + # Set up pen for drawing overlays + pen = QPen(QColor(0, 0, 0, 180)) # Black with transparency + pen.setWidth(2) + painter.setPen(pen) + + # Draw grid if enabled + if self.show_grid: + self._draw_grid(painter, image_rect) + + # Draw crosshair if enabled + if self.show_crosshair: + self._draw_crosshair(painter, image_rect) + + painter.end() + + def _draw_grid(self, painter: QPainter, rect: QRect): + """Draw a 3x3 grid""" + x, y, w, h = rect.x(), rect.y(), rect.width(), rect.height() + + # Vertical lines + for i in range(1, 3): + x_pos = x + (w * i // 3) + painter.drawLine(x_pos, y, x_pos, y + h) + + # Horizontal lines + for i in range(1, 3): + y_pos = y + (h * i // 3) + painter.drawLine(x, y_pos, x + w, y_pos) + + def _draw_crosshair(self, painter: QPainter, rect: QRect): + """Draw a crosshair at the center""" + center_x = rect.x() + rect.width() // 2 + center_y = rect.y() + rect.height() // 2 + + # Draw horizontal line - smaller (1/24 of smaller dimension) + line_length = min(rect.width(), rect.height()) // 24 + painter.drawLine(center_x - line_length, center_y, center_x + line_length, center_y) + + # Draw vertical line + painter.drawLine(center_x, center_y - line_length, center_x, center_y + line_length) + + class CameraPreview(QFrame): """ Camera preview widget that displays frames from the camera manager. 
@@ -18,26 +106,45 @@ class CameraPreview(QFrame): def __init__(self, parent: QWidget | None = None) -> None: super().__init__(parent) self.setFrameShape(QFrame.Shape.NoFrame) + self.setObjectName("CameraPreview") # Display state self._current_width = 0 self._current_height = 0 - # UI elements - self._video_label = QLabel() + # UI elements - use custom overlay label + self._video_label = OverlayLabel() + self._video_label.setObjectName("VideoLabel") self._video_label.setAlignment(Qt.AlignmentFlag.AlignCenter) self._video_label.setScaledContents(False) self._video_label.setMinimumSize(1, 1) self._video_label.setSizePolicy(QSizePolicy.Policy.Ignored, QSizePolicy.Policy.Ignored) - self._video_label.setStyleSheet("color: #888; font-size: 16px;") self._video_label.setText("No camera stream") + # Main layout layout = QVBoxLayout(self) layout.setContentsMargins(0, 0, 0, 0) layout.setSpacing(0) layout.addWidget(self._video_label, 1) - self.setStyleSheet("QFrame { background: #000000; }") + # Create overlay control buttons as direct children (true overlay) + self._crosshair_button = QPushButton("+", self) + self._crosshair_button.setObjectName("CrosshairButton") + self._crosshair_button.setCheckable(True) + self._crosshair_button.setFixedSize(30, 30) + self._crosshair_button.setToolTip("Toggle Crosshair") + self._crosshair_button.clicked.connect(self._toggle_crosshair) + self._crosshair_button.move(10, 10) # Position in top left + self._crosshair_button.raise_() # Ensure it's on top + + self._grid_button = QPushButton("⌗", self) + self._grid_button.setObjectName("OverlayButton") + self._grid_button.setCheckable(True) + self._grid_button.setFixedSize(30, 30) + self._grid_button.setToolTip("Toggle Grid") + self._grid_button.clicked.connect(self._toggle_grid) + self._grid_button.move(10, 45) # Position below crosshair button (10 + 30 + 5) + self._grid_button.raise_() # Ensure it's on top # Connect to camera manager signals self._connect_to_camera_manager() @@ -68,6 +175,20 @@ 
def _connect_to_camera_manager(self): else: self._video_label.setText("No camera connected") + @Slot(bool) + def _toggle_crosshair(self, checked: bool): + """Toggle crosshair overlay""" + self._video_label.show_crosshair = checked + self._video_label.update() # Trigger repaint + info(f"Preview: Crosshair {'enabled' if checked else 'disabled'}") + + @Slot(bool) + def _toggle_grid(self, checked: bool): + """Toggle grid overlay""" + self._video_label.show_grid = checked + self._video_label.update() # Trigger repaint + info(f"Preview: Grid {'enabled' if checked else 'disabled'}") + @Slot(int, int) def _on_frame_ready(self, width: int, height: int): """Handle new frame available from camera manager""" From 9e37681e48c58d3dbe6a8ab830ad405d6761befd Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Wed, 11 Feb 2026 22:13:10 -0900 Subject: [PATCH 38/46] Added Dark Field Correction --- UI/settings/pages/camera_settings.py | 245 +++++++++++++++++++++++++++ camera/cameras/amscope_camera.py | 9 + camera/settings/amscope_settings.py | 235 ++++++++++++++++++++++++- camera/settings/camera_settings.py | 3 + 4 files changed, 491 insertions(+), 1 deletion(-) diff --git a/UI/settings/pages/camera_settings.py b/UI/settings/pages/camera_settings.py index 7767fdb..f85334f 100644 --- a/UI/settings/pages/camera_settings.py +++ b/UI/settings/pages/camera_settings.py @@ -35,6 +35,7 @@ class CameraSettingsWidget(QWidget): settings_loaded = Signal(bool, object) # success, result modifications_changed = Signal(bool) # has_modifications + external_setting_changed = Signal(str, object) # field_name, value def __init__(self, parent_dialog=None, parent: QWidget | None = None) -> None: super().__init__(parent) @@ -135,6 +136,7 @@ def _connect_signals(self) -> None: self.reset_btn.clicked.connect(self._reset_settings) self.load_btn.clicked.connect(self._load_settings) self.settings_loaded.connect(self._on_settings_loaded) + self.external_setting_changed.connect(self._handle_external_setting_change) # 
Connect to parent dialog's save button if available if self.parent_dialog: @@ -302,6 +304,13 @@ def _refresh_settings_display(self) -> None: # Apply initial controlled-field state (greyed-out / locked if controller is on) self._apply_all_controlled_states(settings) + + # Register callback for external setting changes (e.g., async DFC completion) + if hasattr(settings, '_ui_update_callback'): + settings._ui_update_callback = self._on_external_setting_change + debug("Registered UI update callback for external setting changes") + else: + debug("Settings object does not support _ui_update_callback") # Start live polling if any controller is currently active if self._any_controller_active(settings): @@ -421,6 +430,12 @@ def _create_setting_widget(self, meta) -> QWidget | None: return self._create_dropdown_widget(meta, settings) elif type_str == "rgba_level": return self._create_rgba_level_widget(meta, settings) + elif type_str == "button": + return self._create_button_widget(meta, settings) + elif type_str == "file_picker_button": + return self._create_file_picker_button_widget(meta, settings) + elif type_str == "number_picker": + return self._create_number_picker_widget(meta, settings) elif type_str == "rgb_gain": # TODO: Implement custom RGB gain widget warning(f"RGB_GAIN widget not yet implemented for {meta.name}") @@ -635,6 +650,188 @@ def on_rgba_changed(): layout.addStretch() return container + + def _create_button_widget(self, meta, settings) -> QPushButton | None: + """Create a button that calls a setter method without arguments""" + setter_name = f"set_{meta.name}" + if not hasattr(settings, setter_name): + warning(f"No setter found: {setter_name} - skipping widget creation") + return None + + button = QPushButton(meta.display_name) + + # Set tooltip + if hasattr(meta, 'description') and meta.description: + button.setToolTip(meta.description) + + # Connect to setter + def on_button_clicked(): + if self._updating_from_camera: + return + + camera = 
self.ctx.camera + if not camera: + return + + try: + setter = getattr(camera.settings, setter_name) + setter() + debug(f"Called {setter_name}") + + # Refresh controlled states in case this button enabled other controls + self._apply_all_controlled_states(camera.settings) + + if self.ctx and hasattr(self.ctx, 'toast'): + self.ctx.toast.success(f"{meta.display_name} completed", duration=2000) + except Exception as e: + error(f"Error calling {setter_name}: {e}") + if self.ctx and hasattr(self.ctx, 'toast'): + self.ctx.toast.error(f"Error: {e}", duration=3000) + + button.clicked.connect(on_button_clicked) + return button + + def _create_file_picker_button_widget(self, meta, settings) -> QPushButton | None: + """Create a file picker button that calls a setter method with a filepath""" + setter_name = f"set_{meta.name}" + if not hasattr(settings, setter_name): + warning(f"No setter found: {setter_name} - skipping widget creation") + return None + + button = QPushButton(meta.display_name) + + # Set tooltip + if hasattr(meta, 'description') and meta.description: + button.setToolTip(meta.description) + + # Determine if this is an import or export button based on name + is_export = 'export' in meta.name.lower() + + # Connect to setter + def on_button_clicked(): + if self._updating_from_camera: + return + + camera = self.ctx.camera + if not camera: + return + + # Determine file extension from metadata name (e.g., dfc_import -> .dfc) + name_parts = meta.name.split('_') + if len(name_parts) >= 2: + file_ext = name_parts[0] # e.g., 'dfc' + else: + file_ext = 'dat' + + # Get default directory and filename + # Try to use stored filepath if available, otherwise use config directory + from pathlib import Path + default_path = "" + + # Look for a filepath field (e.g., dfc_filepath for dfc_import/dfc_export) + filepath_field = f"{file_ext}_filepath" + if hasattr(camera.settings, filepath_field): + stored_path = getattr(camera.settings, filepath_field) + if stored_path: + default_path 
= stored_path + + # If no stored path, use config directory + if not default_path: + config_dir = Path("./config/cameras") / camera.underlying_camera.model + config_dir.mkdir(parents=True, exist_ok=True) + if is_export: + # Suggest a timestamped filename for exports + from datetime import datetime + timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") + default_path = str(config_dir / f"{file_ext}_{timestamp}.{file_ext}") + else: + # Just use the directory for imports + default_path = str(config_dir) + + # Open file dialog + if is_export: + file_path, _ = QFileDialog.getSaveFileName( + self, + f"Export {meta.display_name}", + default_path, + f"{file_ext.upper()} Files (*.{file_ext});;All Files (*)" + ) + else: + file_path, _ = QFileDialog.getOpenFileName( + self, + f"Import {meta.display_name}", + default_path, + f"{file_ext.upper()} Files (*.{file_ext});;All Files (*)" + ) + + if not file_path: + return + + try: + setter = getattr(camera.settings, setter_name) + setter(file_path) + debug(f"Called {setter_name} with {file_path}") + + # Refresh controlled states in case this button enabled other controls + self._apply_all_controlled_states(camera.settings) + + action = "exported to" if is_export else "imported from" + if self.ctx and hasattr(self.ctx, 'toast'): + self.ctx.toast.success(f"Successfully {action} {file_path}", duration=2000) + except Exception as e: + error(f"Error calling {setter_name}: {e}") + if self.ctx and hasattr(self.ctx, 'toast'): + self.ctx.toast.error(f"Error: {e}", duration=3000) + + button.clicked.connect(on_button_clicked) + return button + + def _create_number_picker_widget(self, meta, settings) -> QSpinBox | None: + """Create a number picker (spinbox only, no slider)""" + setter_name = f"set_{meta.name}" + if not hasattr(settings, setter_name): + warning(f"No setter found: {setter_name} - skipping widget creation") + return None + + spinbox = QSpinBox() + spinbox.setFixedWidth(90) + + # Set range + if hasattr(meta, 'min_value') and 
hasattr(meta, 'max_value'): + spinbox.setMinimum(meta.min_value) + spinbox.setMaximum(meta.max_value) + + # Get current value + current_value = getattr(settings, meta.name, 0) + spinbox.setValue(current_value) + + # Set tooltip + if hasattr(meta, 'description') and meta.description: + spinbox.setToolTip(meta.description) + + # Connect to setter + def on_value_changed(value: int): + if self._updating_from_camera: + return + + camera = self.ctx.camera + if not camera: + return + + try: + setter = getattr(camera.settings, setter_name) + setter(value) + + # Extract setting name from setter name (remove "set_" prefix) + setting_name = setter_name.replace("set_", "") + self._mark_setting_modified(setting_name, value) + + debug(f"Set {setter_name} to {value}") + except Exception as e: + error(f"Error setting {setter_name}: {e}") + + spinbox.valueChanged.connect(on_value_changed) + return spinbox # ------------------------------------------------------------------ # Controlled-field helpers @@ -740,6 +937,54 @@ def _poll_live_values(self) -> None: for field_name, value in live.items(): self._update_display_value(field_name, value) + + def _on_external_setting_change(self, field_name: str, value) -> None: + """Handle setting changes that occur externally (e.g., async callbacks). + + This is called from a camera thread and needs to be marshalled to the UI thread. + We emit a signal which will be delivered to the UI thread automatically. + """ + debug(f"_on_external_setting_change called: {field_name} = {value}") + self.external_setting_changed.emit(field_name, value) + + @Slot(str, object) + def _handle_external_setting_change(self, field_name: str, value) -> None: + """Handle external setting change on the UI thread (connected to signal). + + This runs on the UI thread after the signal is emitted from the camera thread. 
+ """ + camera = self.ctx.camera + if not camera: + debug(f"External setting change for '{field_name}': no camera") + return + + # Update the actual setting value in the widget (if it has one) + container = self._settings_widgets.get(field_name) + if container: + control = container.property("control") + if control and isinstance(control, QCheckBox): + self._updating_from_camera = True + try: + control.blockSignals(True) + control.setChecked(value) + control.blockSignals(False) + debug(f"Updated checkbox widget for '{field_name}' to {value}") + finally: + self._updating_from_camera = False + else: + debug(f"No widget found for '{field_name}' (this is normal for controller-only fields)") + + # If this field controls others, update their state + controlled_by_this = [ + (fn, controlled_when) + for fn, (ctrl, controlled_when) in self._controlled_fields.items() + if ctrl == field_name + ] + + if controlled_by_this: + for fn, controlled_when in controlled_by_this: + is_locked = value == controlled_when + self._set_field_controlled(fn, is_locked) # ------------------------------------------------------------------ diff --git a/camera/cameras/amscope_camera.py b/camera/cameras/amscope_camera.py index 78806dd..0b4e64d 100644 --- a/camera/cameras/amscope_camera.py +++ b/camera/cameras/amscope_camera.py @@ -57,6 +57,7 @@ def __init__(self, model: str): AmscopeCamera.ensure_sdk_loaded() self._frame_buffer = None + self._dfc_completion_callback = None # Callback for DFC completion def _get_settings_class(self): """ @@ -541,12 +542,20 @@ def enable_gige(cls, callback: Callable | None = None, context: Any = None): def _event_callback_wrapper(self, event: int, context: Any): """Internal wrapper for camera events.""" + amcam = self._get_sdk() + # Update frame buffer on IMAGE events if event == self.EVENT_IMAGE and hasattr(self, '_frame_buffer') and self._frame_buffer is not None: try: self._hcam.PullImageV4(self._frame_buffer, 0, 24, 0, None) + debug("Frame captured and pulled to 
buffer") except: pass + elif event == amcam.AMCAM_EVENT_DFC: + # DFC event received - call completion callback if registered + debug("DFC event received") + if hasattr(self, '_dfc_completion_callback') and self._dfc_completion_callback: + self._dfc_completion_callback() # Call registered callback if self._callback: diff --git a/camera/settings/amscope_settings.py b/camera/settings/amscope_settings.py index 1e22282..805f8a9 100644 --- a/camera/settings/amscope_settings.py +++ b/camera/settings/amscope_settings.py @@ -2,6 +2,7 @@ from dataclasses import dataclass, field from typing import TYPE_CHECKING +from pathlib import Path from camera.settings.camera_settings import ( CameraSettings, @@ -10,7 +11,7 @@ RGBALevel, FileFormat, ) -from logger import info, error, exception, debug +from logger import info, error, exception, debug, warning if TYPE_CHECKING: from camera.cameras.base_camera import BaseCamera, CameraResolution @@ -39,7 +40,14 @@ class AmscopeSettings(CameraSettings): hflip: bool = False vflip: bool = False + # Dark Field Correction + dfc_enable: bool = False + _dfc_initialized: bool = False # Track if DFC has been captured or imported + dfc_quantity: int = 10 + dfc_filepath: str = "" # Path to the DFC file + _camera: BaseCamera | None = field(default=None, repr=False, compare=False) + _ui_update_callback: callable | None = field(default=None, repr=False, compare=False) def __post_init__(self) -> None: super().__post_init__() @@ -236,6 +244,50 @@ def get_metadata(self) -> list[SettingMetadata]: group="Levels", runtime_changeable=True, ), + SettingMetadata( + name="dfc_enable", + display_name="Enable", + setting_type=SettingType.BOOL, + description="Enable dark field correction (must capture or import DFC data first)", + group="Dark Field Correction", + runtime_changeable=True, + controlled_by="_dfc_initialized", + controlled_when=False, + ), + SettingMetadata( + name="dfc_capture", + display_name="Capture", + setting_type=SettingType.BUTTON, + 
description="Capture dark field correction frames", + group="Dark Field Correction", + runtime_changeable=True, + ), + SettingMetadata( + name="dfc_import", + display_name="Import", + setting_type=SettingType.FILE_PICKER_BUTTON, + description="Import dark field correction from file", + group="Dark Field Correction", + runtime_changeable=True, + ), + SettingMetadata( + name="dfc_export", + display_name="Export", + setting_type=SettingType.FILE_PICKER_BUTTON, + description="Export dark field correction to file", + group="Dark Field Correction", + runtime_changeable=True, + ), + SettingMetadata( + name="dfc_quantity", + display_name="Quantity", + setting_type=SettingType.NUMBER_PICKER, + description="Number of frames to average for dark field correction", + min_value=1, + max_value=255, + group="Dark Field Correction", + runtime_changeable=True, + ), ] def _get_metadata_map(self) -> dict[str, SettingMetadata]: @@ -463,6 +515,150 @@ def set_vflip(self, enabled: bool) -> None: if self._camera and hasattr(self._camera, '_hcam'): self._camera._hcam.put_VFlip(1 if enabled else 0) + # Dark Field Correction methods + def set_dfc_enable(self, enabled: bool) -> None: + """Enable or disable dark field correction.""" + self.dfc_enable = enabled + if self._camera and hasattr(self._camera, '_hcam'): + try: + amcam = self._camera._get_sdk() + self._camera._hcam.put_Option(amcam.AMCAM_OPTION_DFC, 1 if enabled else 0) + debug(f"Set DFC enable to {enabled}") + except Exception as e: + error(f"Failed to set DFC enable: {e}") + + def set_dfc_capture(self) -> None: + """Capture dark field correction frames and save to config directory.""" + if self._camera and hasattr(self._camera, '_hcam'): + try: + # Save whether DFC was enabled before capture + dfc_was_enabled = self.dfc_enable + + # Reset initialized flag when starting new capture + self._dfc_initialized = False + + # Reset DFC to clear any existing data before capturing new frames + amcam = self._camera._get_sdk() + 
self._camera._hcam.put_Option(amcam.AMCAM_OPTION_DFC, -1) + info("Reset DFC before capturing new frames") + + # Set the average number + self._camera._hcam.put_Option(amcam.AMCAM_OPTION_DFC, 0xff000000 | self.dfc_quantity) + + info(f"Starting DFC capture with {self.dfc_quantity} frames...") + + # Store completion handler on camera + logged_sequences = set() + + def on_dfc_event(): + """Called when DFC event fires - check if we're done""" + try: + # Query the current DFC state + dfc_val = self._camera._hcam.get_Option(amcam.AMCAM_OPTION_DFC) + dfc_state = dfc_val & 0xff # 0=disabled, 1=enabled, 2=inited + dfc_sequence = (dfc_val & 0xff00) >> 8 # Current sequence number + + # Log frame capture if we haven't logged this sequence yet + if dfc_sequence > 0 and dfc_sequence not in logged_sequences: + info(f"DFC frame {dfc_sequence}/{self.dfc_quantity} captured") + logged_sequences.add(dfc_sequence) + + # Check if DFC is initialized (state == 2) - means all frames captured + if dfc_state == 2: + info(f"All {self.dfc_quantity} DFC frames captured and processed") + + # Generate timestamped filename + filename = f"dark_field_correction.dfc" + + # Get config directory + config_dir = Path("./config/cameras") / self._camera.model + config_dir.mkdir(parents=True, exist_ok=True) + + filepath = config_dir / filename + + # Export the captured DFC to the file + self._camera._hcam.DfcExport(str(filepath)) + + # Store the filepath + self.dfc_filepath = str(filepath) + self._dfc_initialized = True + + info(f"DFC successfully exported to {filepath}") + + # Re-enable DFC if it was enabled before capture + if dfc_was_enabled: + self._camera._hcam.put_Option(amcam.AMCAM_OPTION_DFC, 1) + self.dfc_enable = True + info("Re-enabled DFC after capture completion") + + # Clean up - remove the callback + self._camera._dfc_completion_callback = None + + # Notify UI that _dfc_initialized has changed + # This will enable the dfc_enable checkbox in the UI + if self._ui_update_callback: + try: + 
debug(f"Calling UI update callback for _dfc_initialized=True") + self._ui_update_callback('_dfc_initialized', True) + debug(f"UI update callback completed successfully") + except Exception as e: + error(f"Failed to notify UI of DFC initialization: {e}") + else: + warning("No UI update callback registered - UI won't be notified of DFC initialization") + + except Exception as e: + error(f"Failed to process DFC completion: {e}") + # Clean up on error + self._camera._dfc_completion_callback = None + + # Register the callback + self._camera._dfc_completion_callback = on_dfc_event + + # Trigger the capture (async - will complete via events) + self._camera._hcam.DfcOnce() + + info("DFC capture started (will complete asynchronously)") + + except Exception as e: + error(f"Failed to start DFC capture: {e}") + raise + + def set_dfc_import(self, filepath: str) -> None: + """Import dark field correction from file.""" + if self._camera and hasattr(self._camera, '_hcam'): + try: + self._camera._hcam.DfcImport(filepath) + self.dfc_filepath = filepath + self._dfc_initialized = True + info(f"Imported DFC from {filepath} - DFC initialized") + + # Notify UI that _dfc_initialized has changed + if self._ui_update_callback: + try: + self._ui_update_callback('_dfc_initialized', True) + except Exception as e: + error(f"Failed to notify UI of DFC initialization: {e}") + except Exception as e: + error(f"Failed to import DFC: {e}") + raise # Re-raise so UI can show error + + def set_dfc_export(self, filepath: str) -> None: + """Export dark field correction to file.""" + if self._camera and hasattr(self._camera, '_hcam'): + try: + self._camera._hcam.DfcExport(filepath) + info(f"Exported DFC to {filepath}") + except Exception as e: + error(f"Failed to export DFC: {e}") + + def set_dfc_quantity(self, value: int) -> None: + """Set the number of frames to average for dark field correction.""" + if not (1 <= value <= 255): + error(f"DFC quantity must be between 1 and 255, got {value}") + return + 
self.dfc_quantity = value + debug(f"Set DFC quantity to {value}") + def get_resolutions(self) -> list[CameraResolution]: if self._camera is None or not hasattr(self._camera, '_hcam'): return [] @@ -639,6 +835,33 @@ def apply_to_camera(self, camera: BaseCamera) -> None: self.set_hflip(self.hflip) self.set_vflip(self.vflip) + # Dark Field Correction + # Load DFC file if filepath is set and file exists + if self.dfc_filepath: + dfc_path = Path(self.dfc_filepath) + if dfc_path.exists(): + try: + self._camera._hcam.DfcImport(str(dfc_path)) + self._dfc_initialized = True + info(f"Loaded DFC from {dfc_path}") + except Exception as e: + error(f"Failed to load DFC from {dfc_path}: {e}") + self._dfc_initialized = False + self.dfc_filepath = "" + else: + warning(f"DFC file not found: {dfc_path}") + self._dfc_initialized = False + self.dfc_filepath = "" + else: + self._dfc_initialized = False + + if self.dfc_enable and not self._dfc_initialized: + warning("Cannot enable DFC: no DFC data available. Disabling DFC.") + self.dfc_enable = False + + self.set_dfc_quantity(self.dfc_quantity) + self.set_dfc_enable(self.dfc_enable) + debug("Successfully applied all settings to camera") except Exception as e: exception(f"Failed to apply settings to camera: {e}") @@ -691,6 +914,16 @@ def refresh_from_camera(self, camera: BaseCamera) -> None: self.hflip = bool(hcam.get_HFlip()) self.vflip = bool(hcam.get_VFlip()) + # Dark Field Correction + amcam = camera._get_sdk() + dfc_val = hcam.get_Option(amcam.AMCAM_OPTION_DFC) + dfc_state = dfc_val & 0xff + self.dfc_enable = (dfc_state == 1) # 0=disabled, 1=enabled, 2=inited + self._dfc_initialized = (dfc_state >= 1) + dfc_avg = (dfc_val & 0xff0000) >> 16 + if dfc_avg > 0: + self.dfc_quantity = dfc_avg + info("Successfully refreshed all settings from camera") except Exception as e: exception(f"Failed to refresh settings from camera: {e}") \ No newline at end of file diff --git a/camera/settings/camera_settings.py b/camera/settings/camera_settings.py 
index 1d04ba4..70efcda 100644 --- a/camera/settings/camera_settings.py +++ b/camera/settings/camera_settings.py @@ -36,6 +36,9 @@ class SettingType(str, Enum): RANGE = "range" DROPDOWN = "dropdown" RGBA_LEVEL = "rgba_level" + BUTTON = "button" + FILE_PICKER_BUTTON = "file_picker_button" + NUMBER_PICKER = "number_picker" @dataclass From ffb08476165eae147a142c0dd8573472100490c1 Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Thu, 12 Feb 2026 01:23:37 -0900 Subject: [PATCH 39/46] fixed default image type --- UI/widgets/camera_controls_widget.py | 4 +- camera/settings/amscope_settings.py | 88 ++++++++++++++-------------- 2 files changed, 47 insertions(+), 45 deletions(-) diff --git a/UI/widgets/camera_controls_widget.py b/UI/widgets/camera_controls_widget.py index 18b8986..a3027ae 100644 --- a/UI/widgets/camera_controls_widget.py +++ b/UI/widgets/camera_controls_widget.py @@ -93,7 +93,9 @@ def _create_capture_group(self) -> QGroupBox: self._format_combo = QComboBox() self._format_combo.addItems(self._image_formats.keys()) - self._format_combo.setCurrentText("TIFF") # Default to TIFF + + # Set default format from camera settings + self._format_combo.setCurrentText(get_app_context().camera.settings.fformat.upper()) self._open_folder_button = QPushButton("Browse Output") self._open_folder_button.clicked.connect(self._open_folder) diff --git a/camera/settings/amscope_settings.py b/camera/settings/amscope_settings.py index 805f8a9..d7def72 100644 --- a/camera/settings/amscope_settings.py +++ b/camera/settings/amscope_settings.py @@ -158,6 +158,50 @@ def get_metadata(self) -> list[SettingMetadata]: runtime_changeable=True, controlled_by="auto_exposure", ), + SettingMetadata( + name="dfc_enable", + display_name="Enable", + setting_type=SettingType.BOOL, + description="Enable dark field correction (must capture or import DFC data first)", + group="Dark Field Correction", + runtime_changeable=True, + controlled_by="_dfc_initialized", + controlled_when=False, + ), + 
SettingMetadata( + name="dfc_capture", + display_name="Capture", + setting_type=SettingType.BUTTON, + description="Capture dark field correction frames", + group="Dark Field Correction", + runtime_changeable=True, + ), + SettingMetadata( + name="dfc_import", + display_name="Import", + setting_type=SettingType.FILE_PICKER_BUTTON, + description="Import dark field correction from file", + group="Dark Field Correction", + runtime_changeable=True, + ), + SettingMetadata( + name="dfc_export", + display_name="Export", + setting_type=SettingType.FILE_PICKER_BUTTON, + description="Export dark field correction to file", + group="Dark Field Correction", + runtime_changeable=True, + ), + SettingMetadata( + name="dfc_quantity", + display_name="Quantity", + setting_type=SettingType.NUMBER_PICKER, + description="Number of frames to average for dark field correction", + min_value=1, + max_value=255, + group="Dark Field Correction", + runtime_changeable=True, + ), SettingMetadata( name="temp", display_name="Color Temperature", @@ -244,50 +288,6 @@ def get_metadata(self) -> list[SettingMetadata]: group="Levels", runtime_changeable=True, ), - SettingMetadata( - name="dfc_enable", - display_name="Enable", - setting_type=SettingType.BOOL, - description="Enable dark field correction (must capture or import DFC data first)", - group="Dark Field Correction", - runtime_changeable=True, - controlled_by="_dfc_initialized", - controlled_when=False, - ), - SettingMetadata( - name="dfc_capture", - display_name="Capture", - setting_type=SettingType.BUTTON, - description="Capture dark field correction frames", - group="Dark Field Correction", - runtime_changeable=True, - ), - SettingMetadata( - name="dfc_import", - display_name="Import", - setting_type=SettingType.FILE_PICKER_BUTTON, - description="Import dark field correction from file", - group="Dark Field Correction", - runtime_changeable=True, - ), - SettingMetadata( - name="dfc_export", - display_name="Export", - 
setting_type=SettingType.FILE_PICKER_BUTTON, - description="Export dark field correction to file", - group="Dark Field Correction", - runtime_changeable=True, - ), - SettingMetadata( - name="dfc_quantity", - display_name="Quantity", - setting_type=SettingType.NUMBER_PICKER, - description="Number of frames to average for dark field correction", - min_value=1, - max_value=255, - group="Dark Field Correction", - runtime_changeable=True, - ), ] def _get_metadata_map(self) -> dict[str, SettingMetadata]: From b2d9a65923dff7e18736ce0c63309d0e4feb5416 Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Fri, 13 Feb 2026 00:55:57 -0900 Subject: [PATCH 40/46] Added placeholder navigation widget --- UI/settings/pages/camera_settings.py | 1 - UI/tabs/navigate_tab.py | 11 +- UI/widgets/navigation_widget.py | 544 +++++++++++++++++++++++++++ 3 files changed, 549 insertions(+), 7 deletions(-) create mode 100644 UI/widgets/navigation_widget.py diff --git a/UI/settings/pages/camera_settings.py b/UI/settings/pages/camera_settings.py index f85334f..db626e4 100644 --- a/UI/settings/pages/camera_settings.py +++ b/UI/settings/pages/camera_settings.py @@ -24,7 +24,6 @@ from app_context import get_app_context from logger import info, error, warning, debug -from camera.cameras.base_camera import CameraResolution # Interval (ms) between live-value polls for hardware-controlled fields. 
_LIVE_POLL_INTERVAL_MS = 500 diff --git a/UI/tabs/navigate_tab.py b/UI/tabs/navigate_tab.py index 09aa7b1..887cc0a 100644 --- a/UI/tabs/navigate_tab.py +++ b/UI/tabs/navigate_tab.py @@ -1,12 +1,6 @@ from __future__ import annotations from PySide6.QtWidgets import ( - QCheckBox, - QFormLayout, - QGroupBox, - QHBoxLayout, - QLineEdit, - QPushButton, QVBoxLayout, QWidget, QScrollArea, @@ -18,6 +12,7 @@ from UI.widgets.camera_preview import CameraPreview from UI.widgets.collapsible_section import CollapsibleSection from UI.widgets.camera_controls_widget import CameraControlsWidget +from UI.widgets.navigation_widget import NavigationWidget from app_context import open_settings @@ -40,6 +35,10 @@ def _make_sidebar(self) -> QWidget: # Start Widgets + navigation = CollapsibleSection("Navigation", on_settings=lambda: open_settings("Navigation")) + navigation.layout_for_content().addWidget(NavigationWidget()) + content_layout.addWidget(navigation) + camera_controls = CollapsibleSection("Camera Controls", on_settings=lambda: open_settings("Camera")) camera_controls.layout_for_content().addWidget(CameraControlsWidget()) content_layout.addWidget(camera_controls) diff --git a/UI/widgets/navigation_widget.py b/UI/widgets/navigation_widget.py new file mode 100644 index 0000000..bb5a9a7 --- /dev/null +++ b/UI/widgets/navigation_widget.py @@ -0,0 +1,544 @@ +from __future__ import annotations + +import math +from PySide6.QtWidgets import ( + QWidget, + QVBoxLayout, + QHBoxLayout, + QPushButton, + QLabel, + QFrame, +) +from PySide6.QtGui import QPainter, QColor, QFont, QPen, QPainterPath, QTransform, QRegion, QPolygon +from PySide6.QtCore import Qt, QRectF, QPointF + +def clamp(v: int, lo: int = 0, hi: int = 255) -> int: + return max(lo, min(hi, v)) + + +def adjust_color(c: QColor, factor: float) -> QColor: + return QColor( + clamp(int(c.red() * factor)), + clamp(int(c.green() * factor)), + clamp(int(c.blue() * factor)), + ) + + +class DiamondButton(QPushButton): + def __init__( + 
self, + label: str = "", + parent: QWidget | None = None, + base_color: QColor = QColor(208, 211, 214), + font_px: int = 28, + size: int = 90, + text_offset_y: int = 0 + ): + super().__init__("", parent) + self.setFixedSize(size, size) + self.setStyleSheet("border: none; background: transparent;") + self.setMouseTracking(True) + + # Enable mouse tracking on parent to handle pass-through + self.setAttribute(Qt.WidgetAttribute.WA_TransparentForMouseEvents, False) + + self._base = QColor(base_color) + self._hover = False + self._label = label + self._font_px = font_px + self._text_offset_y = text_offset_y + + def enterEvent(self, event): + # Don't automatically set hover - check in mouseMoveEvent + super().enterEvent(event) + + def leaveEvent(self, event): + self._hover = False + self.unsetCursor() + self.update() + super().leaveEvent(event) + + def mouseMoveEvent(self, event): + # Update hover state based on whether mouse is over diamond + was_hover = self._hover + is_over_diamond = self.hitButton(event.position().toPoint()) + self._hover = is_over_diamond + + # Set or unset cursor based on position + if is_over_diamond: + self.setCursor(Qt.CursorShape.PointingHandCursor) + else: + self.unsetCursor() + + if was_hover != self._hover: + self.update() + super().mouseMoveEvent(event) + + def mousePressEvent(self, event): + # Check if click is inside diamond + if self.hitButton(event.position().toPoint()): + super().mousePressEvent(event) + else: + # Pass the event to the parent by ignoring it + event.ignore() + + def mouseReleaseEvent(self, event): + if self.hitButton(event.position().toPoint()): + super().mouseReleaseEvent(event) + else: + event.ignore() + + def hitButton(self, pos): + w = self.width() + h = self.height() + + cx = w / 2 + cy = h / 2 + + # Translate point to origin + x = pos.x() - cx + y = pos.y() - cy + + # Rotate point by -45 degrees (inverse of the 45 degree rotation in paintEvent) + angle = -45 * math.pi / 180 + cos_a = math.cos(angle) + sin_a = 
math.sin(angle) + + rotated_x = x * cos_a - y * sin_a + rotated_y = x * sin_a + y * cos_a + + # Check if the rotated point is inside the square + side = min(w, h) / math.sqrt(2) + half_side = side / 2 + + return (abs(rotated_x) <= half_side and abs(rotated_y) <= half_side) + + def paintEvent(self, event): + painter = QPainter(self) + painter.setRenderHint(QPainter.RenderHint.Antialiasing, True) + + w = self.width() + h = self.height() + + color = QColor(self._base) + if self._hover: + color = adjust_color(color, 0.90) # Darken on hover + if self.isDown(): + color = adjust_color(color, 0.85) + + # Move origin to center + painter.translate(w / 2, h / 2) + + # Rotate 45 degrees + painter.rotate(45) + + # Define square centered at origin + side = min(w, h) / math.sqrt(2) # scale factor controls diamond size + rect = QRectF(-side / 2, -side / 2, side, side) + + # Fill + painter.setBrush(color) + painter.setPen(Qt.PenStyle.NoPen) + painter.drawRect(rect) + + # Border + pen = QPen(QColor(120, 120, 120)) + pen.setWidth(2) + pen.setJoinStyle(Qt.PenJoinStyle.MiterJoin) + painter.setPen(pen) + painter.setBrush(Qt.BrushStyle.NoBrush) + painter.drawRect(rect) + + # Reset transform for text + painter.resetTransform() + + # Draw label normally with offset + font = self.font() + font.setPixelSize(self._font_px) + font.setBold(True) + painter.setFont(font) + painter.setPen(Qt.GlobalColor.black) + + # Apply vertical offset to text rect + text_rect = self.rect() + if self._text_offset_y != 0: + text_rect = text_rect.adjusted(0, self._text_offset_y, 0, self._text_offset_y) + + painter.drawText(text_rect, Qt.AlignmentFlag.AlignCenter, self._label) + + + +class NavigationWidget(QWidget): + def __init__(self, parent: QWidget | None = None): + super().__init__(parent) + + # Mock position data + self._x_pos = 123.45 + self._y_pos = 678.90 + self._z_pos = 42.00 + + # Step size in mm + self._step_size = 0.4 # Default to 0.4mm + + self._setup_ui() + + def _setup_ui(self) -> None: + 
main_layout = QVBoxLayout(self) + main_layout.setContentsMargins(10, 10, 10, 10) + main_layout.setSpacing(15) + + # Set white background + self.setStyleSheet(""" + NavigationWidget { + background: white; + border-radius: 8px; + } + """) + + # Step size controls + step_size_controls = self._create_step_size_controls() + main_layout.addWidget(step_size_controls) + + # Combined jog controls + jog_controls = self._create_jog_controls() + main_layout.addWidget(jog_controls) + + main_layout.addStretch(1) + + def _create_step_size_controls(self) -> QWidget: + """Create step size selection buttons with position display""" + from PySide6.QtWidgets import QGroupBox + + group = QGroupBox("Step Size") + group_layout = QVBoxLayout(group) + group_layout.setContentsMargins(10, 5, 10, 5) + group_layout.setSpacing(10) + + # Buttons row + buttons_row = QWidget() + buttons_layout = QHBoxLayout(buttons_row) + buttons_layout.setContentsMargins(0, 0, 0, 0) + buttons_layout.setSpacing(8) + + # Step size buttons + step_sizes = [0.04, 0.4, 2.0, 10.0] + self.step_buttons = [] + + for size in step_sizes: + btn = QPushButton(f"{size}mm") + btn.setFixedHeight(30) + btn.setCheckable(True) + btn.setStyleSheet("padding: 0px;") # Smaller padding + btn.clicked.connect(lambda checked, s=size: self._set_step_size(s)) + buttons_layout.addWidget(btn) + self.step_buttons.append((btn, size)) + + # Set default button as checked (0.4mm) + self.step_buttons[1][0].setChecked(True) + + group_layout.addWidget(buttons_row) + + # Position display + self.position_label = QLabel() + self.position_label.setAlignment(Qt.AlignmentFlag.AlignCenter) + self.position_label.setStyleSheet(""" + QLabel { + font-size: 13px; + padding: 2px; + } + """) + self._update_position_display() + group_layout.addWidget(self.position_label) + + return group + + def _set_step_size(self, size: float) -> None: + """Set the step size and update button states""" + self._step_size = size + + # Update button checked states + for btn, btn_size 
in self.step_buttons: + btn.setChecked(btn_size == size) + + print(f"Step size set to {size}mm") + + def _create_jog_controls(self) -> QWidget: + """Create combined jog controls with diamond navigation and Z-axis""" + from PySide6.QtWidgets import QGroupBox + + group = QGroupBox("Jog") + group_layout = QVBoxLayout(group) + group_layout.setContentsMargins(0, 0, 0, 0) + group_layout.setSpacing(0) + + # Top row: diamond and z-axis controls + top_row = QWidget() + top_layout = QHBoxLayout(top_row) + top_layout.setContentsMargins(0, 0, 0, 0) + top_layout.setSpacing(15) + + # Diamond panel + diamond_container = self._create_diamond_panel() + top_layout.addWidget(diamond_container, 0, Qt.AlignmentFlag.AlignCenter) + + # Z-axis controls + z_container = self._create_z_controls() + top_layout.addWidget(z_container, 0, Qt.AlignmentFlag.AlignCenter) + + top_layout.addStretch(1) + + group_layout.addWidget(top_row) + + return group + + def _create_diamond_panel(self) -> QWidget: + """Create the diamond navigation with home button in center""" + container = QWidget() + container.setFixedSize(240, 200) # Slightly larger for better spacing + + # Outer arrows (Unicode) - larger size + self.top_btn = DiamondButton("↑", parent=container, font_px=32, size=90) + self.left_btn = DiamondButton("←", parent=container, font_px=32, size=90, text_offset_y=-3) + self.right_btn = DiamondButton("→", parent=container, font_px=32, size=90, text_offset_y=-3) + self.bot_btn = DiamondButton("↓", parent=container, font_px=32, size=90) + + # Center home icon - smaller and orange + self.center_btn = DiamondButton( + "H", + parent=container, + font_px=20, + size=60 # Smaller than outer buttons + ) + + # Install event filters on all buttons for click pass-through + for btn in [self.top_btn, self.left_btn, self.right_btn, self.bot_btn, self.center_btn]: + btn.installEventFilter(self) + + # Connect buttons to placeholder functions + self.top_btn.clicked.connect(self._move_up) + 
self.left_btn.clicked.connect(self._move_left) + self.right_btn.clicked.connect(self._move_right) + self.bot_btn.clicked.connect(self._move_down) + self.center_btn.clicked.connect(self._go_home) + + self.center_btn.raise_() + + # Position buttons when container is shown + container.resizeEvent = lambda event: self._layout_diamond_buttons(container) + + return container + + def eventFilter(self, obj, event): + """Intercept button events and pass through if in corner regions""" + from PySide6.QtCore import QEvent + + # Only filter events on DiamondButtons + if not isinstance(obj, DiamondButton): + return super().eventFilter(obj, event) + + # Handle mouse move events for hover + if event.type() == QEvent.Type.MouseMove: + btn_local_pos = event.position().toPoint() + is_over_obj_diamond = obj.hitButton(btn_local_pos) + global_pos = obj.mapToGlobal(btn_local_pos) + + # Define buttons in z-order (home button is on top) + buttons = [self.center_btn, self.top_btn, self.left_btn, + self.right_btn, self.bot_btn] + + # Find which button should be hovered + hovered_btn = None + + if is_over_obj_diamond: + # Mouse is over this button's diamond - it should be hovered + hovered_btn = obj + else: + # Mouse is in corner region - check buttons beneath + for btn in buttons: + if btn is obj: + continue + + btn_local = btn.mapFromGlobal(global_pos) + if btn.geometry().contains(btn.mapToParent(btn_local)): + if btn.hitButton(btn_local): + hovered_btn = btn + break # Stop at first match (top-most button) + + # Update hover state on all buttons + for btn in buttons: + if btn is hovered_btn: + # Set hover on this button + if not btn._hover: + btn._hover = True + btn.setCursor(Qt.CursorShape.PointingHandCursor) + btn.update() + else: + # Clear hover on all other buttons + if btn._hover: + btn._hover = False + btn.unsetCursor() + btn.update() + + return False # Don't consume move events + + # Handle mouse button press events + if event.type() == QEvent.Type.MouseButtonPress: + if 
event.button() == Qt.MouseButton.LeftButton: + # Check if click is actually inside the diamond shape + btn_local_pos = event.position().toPoint() + if not obj.hitButton(btn_local_pos): + # Click is in corner region - manually pass to buttons beneath + global_pos = obj.mapToGlobal(btn_local_pos) + + # Define buttons in z-order (home button is on top) + buttons = [self.center_btn, self.top_btn, self.left_btn, + self.right_btn, self.bot_btn] + + for btn in buttons: + if btn is obj: + continue # Skip the button we're filtering + + # Check if this button is beneath the click + btn_local = btn.mapFromGlobal(global_pos) + if btn.geometry().contains(btn.mapToParent(btn_local)): + if btn.hitButton(btn_local): + # Manually trigger this button (first match due to z-order) + btn.clicked.emit() + return True # Consume the event + + # No button beneath, just consume the event (don't trigger anything) + return True + + return super().eventFilter(obj, event) + + def _layout_diamond_buttons(self, container: QWidget) -> None: + """Layout the diamond buttons in proper positions""" + # Container center + cx = container.width() // 2 + cy = container.height() // 2 + + # Simple positioning: place outer buttons at fixed distance from center + # Distance should be enough to have visible gap between buttons + distance = 50 # Distance from center to outer button centers + + def place(btn: QPushButton, x: int, y: int) -> None: + """Place button centered at x, y""" + btn.move(x - btn.width() // 2, y - btn.height() // 2) + + # Place outer buttons in cardinal directions + place(self.top_btn, cx, cy - distance) + place(self.bot_btn, cx, cy + distance) + place(self.left_btn, cx - distance, cy) + place(self.right_btn, cx + distance, cy) + + # Place home button at center + place(self.center_btn, cx, cy) + + self.center_btn.raise_() + + def _create_z_controls(self) -> QWidget: + """Create Z-axis increase/decrease buttons""" + container = QWidget() + container.setFixedHeight(200) # Match diamond 
container height + layout = QVBoxLayout(container) + layout.setContentsMargins(0, 0, 0, 0) + layout.setSpacing(10) + + # Add stretch to center vertically + layout.addStretch(1) + + # Increase button - smaller with border + self.z_up_btn = QPushButton("▲") + self.z_up_btn.setFixedSize(55, 55) # Smaller square + self.z_up_btn.setStyleSheet(""" + QPushButton { + background-color: rgb(208, 211, 214); + border: 2px solid rgb(120, 120, 120); + border-radius: 4px; + font-size: 14px; + } + QPushButton:hover { + background-color: rgb(187, 190, 193); + } + QPushButton:pressed { + background-color: rgb(177, 180, 182); + } + """) + self.z_up_btn.clicked.connect(self._z_increase) + layout.addWidget(self.z_up_btn, 0, Qt.AlignmentFlag.AlignCenter) + + # Decrease button - smaller with border + self.z_down_btn = QPushButton("▼") + self.z_down_btn.setFixedSize(55, 55) # Smaller square + self.z_down_btn.setStyleSheet(""" + QPushButton { + background-color: rgb(208, 211, 214); + border: 2px solid rgb(120, 120, 120); + border-radius: 4px; + font-size: 14px; + } + QPushButton:hover { + background-color: rgb(187, 190, 193); + } + QPushButton:pressed { + background-color: rgb(177, 180, 182); + } + """) + self.z_down_btn.clicked.connect(self._z_decrease) + layout.addWidget(self.z_down_btn, 0, Qt.AlignmentFlag.AlignCenter) + + # Add stretch to center vertically + layout.addStretch(1) + + return container + + def _update_position_display(self) -> None: + """Update the position display label""" + self.position_label.setText( + f"X: {self._x_pos:.2f} Y: {self._y_pos:.2f} Z: {self._z_pos:.2f} mm" + ) + + # Placeholder movement functions + def _move_up(self) -> None: + """Move stage up (positive Y)""" + print(f"Moving up (Y+) by {self._step_size}mm") + self._y_pos += self._step_size + self._update_position_display() + + def _move_down(self) -> None: + """Move stage down (negative Y)""" + print(f"Moving down (Y-) by {self._step_size}mm") + self._y_pos -= self._step_size + 
self._update_position_display() + + def _move_left(self) -> None: + """Move stage left (negative X)""" + print(f"Moving left (X-) by {self._step_size}mm") + self._x_pos -= self._step_size + self._update_position_display() + + def _move_right(self) -> None: + """Move stage right (positive X)""" + print(f"Moving right (X+) by {self._step_size}mm") + self._x_pos += self._step_size + self._update_position_display() + + def _go_home(self) -> None: + """Return stage to home position""" + print("Going to home position") + self._x_pos = 0.0 + self._y_pos = 0.0 + self._z_pos = 0.0 + self._update_position_display() + + def _z_increase(self) -> None: + """Increase Z height""" + print(f"Increasing Z height by {self._step_size}mm") + self._z_pos += self._step_size + self._update_position_display() + + def _z_decrease(self) -> None: + """Decrease Z height""" + print(f"Decreasing Z height by {self._step_size}mm") + self._z_pos -= self._step_size + self._update_position_display() \ No newline at end of file From 039f4435f161a432ea0d78733d37e5cb252c1e4a Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Fri, 13 Feb 2026 01:35:50 -0900 Subject: [PATCH 41/46] Added Machine vision button --- UI/style.py | 11 ++++++++--- UI/widgets/camera_preview.py | 37 +++++++++++++++++++++++++++++++++++- 2 files changed, 44 insertions(+), 4 deletions(-) diff --git a/UI/style.py b/UI/style.py index feecccc..358d945 100644 --- a/UI/style.py +++ b/UI/style.py @@ -199,7 +199,7 @@ def apply_style(app: QApplication) -> None: }} /* Camera Preview Overlay Buttons */ - QPushButton#OverlayButton, QPushButton#CrosshairButton {{ + QPushButton#OverlayButton, QPushButton#CrosshairButton, QPushButton#FocusButton {{ background-color: rgba(240, 240, 240, 180); color: #000; border: 1px solid rgba(200, 200, 200, 255); @@ -207,10 +207,10 @@ def apply_style(app: QApplication) -> None: font-size: 18px; font-weight: bold; }} - QPushButton#OverlayButton:hover, QPushButton#CrosshairButton:hover {{ + 
QPushButton#OverlayButton:hover, QPushButton#CrosshairButton:hover, QPushButton#FocusButton:hover {{ background-color: rgba(255, 255, 255, 200); }} - QPushButton#OverlayButton:checked, QPushButton#CrosshairButton:checked {{ + QPushButton#OverlayButton:checked, QPushButton#CrosshairButton:checked, QPushButton#FocusButton:checked {{ background-color: rgba(100, 150, 200, 200); color: white; border: 2px solid rgba(150, 200, 255, 255); @@ -219,6 +219,11 @@ def apply_style(app: QApplication) -> None: QPushButton#CrosshairButton {{ padding-bottom: 4px; }} + + QLabel#FocusOverlayLabel {{ + font-size: 18px; + font-weight: normal; + }} """ diff --git a/UI/widgets/camera_preview.py b/UI/widgets/camera_preview.py index 7921773..985b5c4 100644 --- a/UI/widgets/camera_preview.py +++ b/UI/widgets/camera_preview.py @@ -128,7 +128,7 @@ def __init__(self, parent: QWidget | None = None) -> None: layout.addWidget(self._video_label, 1) # Create overlay control buttons as direct children (true overlay) - self._crosshair_button = QPushButton("+", self) + self._crosshair_button = QPushButton("⌖", self) self._crosshair_button.setObjectName("CrosshairButton") self._crosshair_button.setCheckable(True) self._crosshair_button.setFixedSize(30, 30) @@ -146,6 +146,36 @@ def __init__(self, parent: QWidget | None = None) -> None: self._grid_button.move(10, 45) # Position below crosshair button (10 + 30 + 5) self._grid_button.raise_() # Ensure it's on top + # Create focus button with custom overlaid text + self._focus_button = QPushButton(self) + self._focus_button.setObjectName("FocusButton") + self._focus_button.setCheckable(True) + self._focus_button.setFixedSize(30, 30) + self._focus_button.setToolTip("Toggle Focus Overlay") + self._focus_button.clicked.connect(self._toggle_focus) + self._focus_button.move(10, 80) # Position below grid button (45 + 30 + 5) + + # Create labels for the overlaid symbols - each line separate + focus_top_corners = QLabel("⌜⌝", self._focus_button) + 
focus_top_corners.setObjectName("FocusOverlayLabel") + focus_top_corners.setAlignment(Qt.AlignmentFlag.AlignCenter) + focus_top_corners.setGeometry(0, -2, 30, 30) + focus_top_corners.setAttribute(Qt.WidgetAttribute.WA_TransparentForMouseEvents) + + focus_bottom_corners = QLabel("⌞⌟", self._focus_button) + focus_bottom_corners.setObjectName("FocusOverlayLabel") + focus_bottom_corners.setAlignment(Qt.AlignmentFlag.AlignCenter) + focus_bottom_corners.setGeometry(0, 2, 30, 30) + focus_bottom_corners.setAttribute(Qt.WidgetAttribute.WA_TransparentForMouseEvents) + + focus_center = QLabel("⌖", self._focus_button) + focus_center.setObjectName("FocusOverlayLabel") + focus_center.setAlignment(Qt.AlignmentFlag.AlignCenter) + focus_center.setGeometry(0, 0, 30, 30) + focus_center.setAttribute(Qt.WidgetAttribute.WA_TransparentForMouseEvents) + + self._focus_button.raise_() # Ensure it's on top + # Connect to camera manager signals self._connect_to_camera_manager() @@ -189,6 +219,11 @@ def _toggle_grid(self, checked: bool): self._video_label.update() # Trigger repaint info(f"Preview: Grid {'enabled' if checked else 'disabled'}") + @Slot(bool) + def _toggle_focus(self, checked: bool): + """Toggle focus overlay""" + info(f"Focus Overlay Toggled {'on' if checked else 'off'}") + @Slot(int, int) def _on_frame_ready(self, width: int, height: int): """Handle new frame available from camera manager""" From f6b513a7c15a11e957b5bae71f7cc2c7c1b7a64f Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Fri, 13 Feb 2026 15:16:05 -0900 Subject: [PATCH 42/46] renamed to FieldWeave --- .gitignore | 7 +++- README.md | 42 +++++++++---------- UI/main_window.py | 4 +- UI/settings/pages/about_settings.py | 18 ++++++++ UI/settings/settings_main.py | 2 + UI/widgets/toast_widget.py | 2 +- app_context.py | 33 +++++++-------- camera/cameras/base_camera.py | 6 +-- .../default_settings.yaml | 2 +- forgeConfig.py => fieldweaveConfig.py | 40 +++++++++--------- generic_config.py | 18 ++++---- logger.py | 6 +-- 
main.py | 4 +- printer/automated_controller.py | 8 ++-- printer/base_controller.py | 12 +++--- 15 files changed, 113 insertions(+), 91 deletions(-) create mode 100644 UI/settings/pages/about_settings.py rename config/{forge => fieldweave}/default_settings.yaml (52%) rename forgeConfig.py => fieldweaveConfig.py (71%) diff --git a/.gitignore b/.gitignore index 4424d7d..29b6c74 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,9 @@ ./input/ output/ +misc/test_img/ +misc/input/ + config/cameras/amscope/backups/ config/cameras/amscope/settings.yaml @@ -10,8 +13,8 @@ config/printers/Ender3/settings.yaml config/automation/backups/ config/automation/settings.yaml -config/forge/backups/ -config/forge/settings.yaml +config/fieldweave/backups/ +config/fieldweave/settings.yaml # Libraries focus-stack/ diff --git a/README.md b/README.md index aee529c..14adf98 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,11 @@ -# Forge - Low Cost Gigapixel Scanner +# FieldWeave - Low Cost Gigapixel Scanner [![Windows](https://custom-icon-badges.demolab.com/badge/Windows-0078D6?logo=windows11&logoColor=white)](#) [![Linux](https://img.shields.io/badge/Linux-FCC624?logo=linux&logoColor=black)](#) ![Python](https://img.shields.io/badge/Python-3.12-blue?logo=python&logoColor=white) -Forge is an opensource, gigapixel imaging system designed to scan tree core samples with high precision. Built upon a modified off the shelf 3D printer, it automates the imaging of multiple samples, producing high resolution images suitable for dendrochronology and related research. +FieldWeave is an opensource, gigapixel imaging system designed to scan tree core samples with high precision. Built upon a modified off the shelf 3D printer, it automates the imaging of multiple samples, producing high resolution images suitable for dendrochronology and related research. 
@@ -24,7 +24,7 @@ Forge is an opensource, gigapixel imaging system designed to scan tree core samp - A picture of Forge using a heavily moddeed Ender 3 3D printer. + A picture of FieldWeave using a heavily modded Ender 3 3D printer. @@ -34,31 +34,31 @@ Forge is an opensource, gigapixel imaging system designed to scan tree core samp - Forge on a heavily modded Ender 3 3D printer. - Forge's GUI + FieldWeave on a heavily modded Ender 3 3D printer. + FieldWeave's GUI - An example of a single image taken using Forge + An example of a single image taken using FieldWeave - A M2 Heatset insert tip imaged using Forge and then stitched using an external program. + A M2 Heatset insert tip imaged using FieldWeave and then stitched using an external program. The end of a tree core sample taken using a MU1000 HS camera - A M2 Heatset Insert Tip taken using a MU500 Camera. Typical Forge outputs for tree core samples are significantly larger. Click to view full resolution + A M2 Heatset Insert Tip taken using a MU500 Camera. Typical FieldWeave outputs for tree core samples are significantly larger. Click to view full resolution -## Forge for Reflected Light Microscopy +## FieldWeave for Reflected Light Microscopy > **Reflected Light Microscopy (alternate lens configuration)** - These images were captured using Forge with a different lens and illumination setup. The same capture pipeline was used, with image stacking and stitching currently performed using external tools. - Forge does not officially support high magnification imaging yet, but it is being worked on. See github issue [#45](https://github.com/AnthonyvW/FORGE/issues/45) + These images were captured using FieldWeave with a different lens and illumination setup. The same capture pipeline was used, with image stacking and stitching currently performed using external tools. + FieldWeave does not officially support high magnification imaging yet, but it is being worked on. 
See github issue [#45](https://github.com/AnthonyvW/FORGE/issues/45) @@ -109,7 +109,7 @@ Forge is an opensource, gigapixel imaging system designed to scan tree core samp ## Printer Modification -Before using Forge, your 3D printer must be modified to mount the camera system in place of the print head. +Before using FieldWeave, your 3D printer must be modified to mount the camera system in place of the print head. ### Required Printed Parts @@ -151,7 +151,7 @@ Before modifying your printer, you must 3D print the following components: - Screw on the imaging lens securely. 7. **Install Light** - Install the light you will be using with Forge. + Install the light you will be using with FieldWeave. > If using the Amscope ring light, place the light pads onto the metal tips of the screws that hold the light in place before putting the light on the lens. 8. **Plug Everything in** @@ -187,7 +187,7 @@ Prerequisites\. Ensure you have the latest version of python installed, and you 3\.2\. Move the downloaded zipped folder into 3rd_party_imports -4\. Configure the camera settings using `amscope_camera_configuration.yaml`. For now, you can copy settings from TRIM until I get around to properly implementing this functionality into Forge. +4\. Configure the camera settings using `amscope_camera_configuration.yaml`. For now, you can copy settings from TRIM until I get around to properly implementing this functionality into FieldWeave. 5\. Run the main application: @@ -197,7 +197,7 @@ Prerequisites\. Ensure you have the latest version of python installed, and you --- ## Confirmed Compatible Cameras -Forge supports USB cameras through a modular driver architecture. +FieldWeave supports USB cameras through a modular driver architecture. | Camera Model | Notes | |-------------------------|-----------------------------| @@ -211,7 +211,7 @@ Forge supports USB cameras through a modular driver architecture. 
### Adding Support for New Cameras -Users are encouraged to contribute new camera interfaces by implementing the Forge camera interface and submitting them as pull requests. +Users are encouraged to contribute new camera interfaces by implementing the FieldWeave camera interface and submitting them as pull requests. If your camera is not currently supported or you would like to contribute an interfaces, please open an issue or submit a pull request. @@ -222,9 +222,9 @@ Alternatively, contributions of driver implementations with thorough documentati ## 3D Printer Compatibility -Forge is designed to run on 3D printers using **Marlin firmware**, which supports standard G-code over USB serial. Compatibility with other firmware types varies and may require additional configuration or is not currently supported. +FieldWeave is designed to run on 3D printers using **Marlin firmware**, which supports standard G-code over USB serial. Compatibility with other firmware types varies and may require additional configuration or is not currently supported. -> Not sure if your 3D printer will work? Plug your printer into your computer via USB, and then start Forge. If the printer homes then it is compatible with Forge. +> Not sure if your 3D printer will work? Plug your printer into your computer via USB, and then start FieldWeave. If the printer homes then it is compatible with FieldWeave. ## Confirmed Compatible Printers @@ -240,10 +240,10 @@ Forge is designed to run on 3D printers using **Marlin firmware**, which support | Printer / Firmware | Status | Reason | |----------------------------------|---------------|------------------------------------------------------------------------| | **Klipper-based printers** | ❓ Unverified | Serial responses (e.g., `ok`, `M400`) may differ. Needs testing. 
| -| **RepRapFirmware (e.g., Duet)** | ❌ Incompatible | Different G-code syntax; not supported by Forge | +| **RepRapFirmware (e.g., Duet)** | ❌ Incompatible | Different G-code syntax; not supported by FieldWeave | | **Sailfish Firmware (e.g., FlashForge)** | ❌ Incompatible | Proprietary, non-standard G-code | | **Proprietary OEM firmware** | ❌ Incompatible | Often locked or limited (e.g., XYZprinting); lacks serial G-code input | -| **Non-G-code motion platforms** | ❌ Incompatible | Forge requires G-code over USB for motion control | +| **Non-G-code motion platforms** | ❌ Incompatible | FieldWeave requires G-code over USB for motion control | > Want to help verify compatibility with other printers, firmware, or cameras? > [Open an issue](https://github.com/AnthonyvW/FORGE/issues) with your setup details and test results! diff --git a/UI/main_window.py b/UI/main_window.py index fbe1fd0..2ca656a 100644 --- a/UI/main_window.py +++ b/UI/main_window.py @@ -34,9 +34,9 @@ def __init__(self) -> None: self.app_context.register_main_window(self) # Set window title with version - self.setWindowTitle(f"Forge - v{self.app_context.current_version}") + self.setWindowTitle(f"FieldWeave - v{self.app_context.current_version}") self.resize(1920, 1080) - + self.move(500,200) # Move window to a more convenient position. 
self._state = State() # Create and register settings dialog diff --git a/UI/settings/pages/about_settings.py b/UI/settings/pages/about_settings.py new file mode 100644 index 0000000..a29559c --- /dev/null +++ b/UI/settings/pages/about_settings.py @@ -0,0 +1,18 @@ +from __future__ import annotations + +from PySide6.QtWidgets import ( + QVBoxLayout, + QWidget, + QFormLayout, + QGroupBox, +) + +def about_page() -> QWidget: + w = QWidget() + layout = QVBoxLayout(w) + + top = QGroupBox("About FieldWeave") + form = QFormLayout(top) + layout.addWidget(top) + + return w diff --git a/UI/settings/settings_main.py b/UI/settings/settings_main.py index 219ee6f..a5f7054 100644 --- a/UI/settings/settings_main.py +++ b/UI/settings/settings_main.py @@ -24,6 +24,7 @@ from .pages.automation_settings import automation_page from .pages.machine_vision_settings import machine_vision_page from .pages.navigation_settings import navigation_page +from .pages.about_settings import about_page class SettingsButton(QToolButton): def __init__(self, tooltip: str = "Settings", parent: QWidget | None = None)-> None: @@ -111,6 +112,7 @@ def __init__(self, parent: QWidget | None = None) -> None: self._add_page("Navigation", navigation_page()) self._add_page("Automation", automation_page()) self._add_page("Machine Vision", machine_vision_page()) + self._add_page("About FieldWeave", about_page()) self.sidebar.itemClicked.connect(self._on_tree_item_clicked) diff --git a/UI/widgets/toast_widget.py b/UI/widgets/toast_widget.py index 1ef03a8..ef9b281 100644 --- a/UI/widgets/toast_widget.py +++ b/UI/widgets/toast_widget.py @@ -1,5 +1,5 @@ """ -Toast notification widget for Forge microscope application. +Toast notification widget for FieldWeave microscope application. Provides temporary, color-coded notifications that stack and auto-dismiss. Integrates with the logging system for consistent message handling. 
diff --git a/app_context.py b/app_context.py index 393f287..9bf8a0e 100644 --- a/app_context.py +++ b/app_context.py @@ -9,14 +9,13 @@ from camera.camera_manager import CameraManager from camera.cameras.base_camera import BaseCamera from logger import info, error, warning, debug -from forgeConfig import ForgeSettingsManager, ForgeSettings +from fieldweaveConfig import FieldWeaveSettingsManager, FieldWeaveSettings if TYPE_CHECKING: from UI.settings.settings_main import SettingsDialog from UI.widgets.toast_widget import ToastManager -# Current Forge version - update this when releasing new versions -FORGE_VERSION = "1.2" +FIELDWEAVE_VERSION = "1.2" class AppContext: @@ -37,8 +36,8 @@ def __init__(self): self._camera_manager: CameraManager | None = None self._settings_dialog: SettingsDialog | None = None - self._settings_manager: ForgeSettingsManager | None = None - self._settings: ForgeSettings | None = None + self._settings_manager: FieldWeaveSettingsManager | None = None + self._settings: FieldWeaveSettings | None = None self._toast_manager: ToastManager | None = None self._main_window = None self._initialized = True @@ -77,13 +76,13 @@ def has_camera(self) -> bool: return self.camera is not None @property - def settings(self) -> ForgeSettings | None: - """Get the Forge settings""" + def settings(self) -> FieldWeaveSettings | None: + """Get the FieldWeave settings""" return self._settings @property - def settings_manager(self) -> ForgeSettingsManager | None: - """Get the Forge settings manager""" + def settings_manager(self) -> FieldWeaveSettingsManager | None: + """Get the FieldWeave settings manager""" return self._settings_manager @property @@ -98,8 +97,8 @@ def toast(self) -> ToastManager | None: @property def current_version(self) -> str: - """Get the current Forge version""" - return FORGE_VERSION + """Get the current FieldWeave version""" + return FIELDWEAVE_VERSION def register_main_window(self, window): """Register the main window instance""" @@ -127,22 
+126,22 @@ def open_settings(self, category: str): self._settings_dialog.activateWindow() def _load_settings(self): - """Load Forge application settings""" + """Load FieldWeave application settings""" try: - self._settings_manager = ForgeSettingsManager() + self._settings_manager = FieldWeaveSettingsManager() self._settings = self._settings_manager.load() - info(f"Forge settings loaded - running v{FORGE_VERSION}") + info(f"FieldWeave settings loaded - running v{FIELDWEAVE_VERSION}") # Check if we should show patch notes if self._settings.show_patchnotes: info("New version detected - patch notes should be displayed") except Exception as e: - error(f"Failed to load Forge settings: {e}") + error(f"Failed to load FieldWeave settings: {e}") # Create default settings if loading fails - self._settings = ForgeSettings() - warning("Using default Forge settings") + self._settings = FieldWeaveSettings() + warning("Using default FieldWeave settings") def _initialize_camera_manager(self): """Initialize the camera manager and open first available camera""" diff --git a/camera/cameras/base_camera.py b/camera/cameras/base_camera.py index 9540c5c..82471e8 100644 --- a/camera/cameras/base_camera.py +++ b/camera/cameras/base_camera.py @@ -556,7 +556,7 @@ def _save_tiff_with_metadata( # Add software information from app_context import get_app_context - exif[base_tags['Software']] = f"Forge - v{get_app_context().settings.version}" + exif[base_tags['Software']] = f"FieldWeave - v{get_app_context().settings.version}" # Add timestamp timestamp = metadata.get("timestamp", datetime.now().isoformat()) @@ -621,7 +621,7 @@ def _save_jpeg_with_metadata( # Add software information from app_context import get_app_context - exif[base_tags['Software']] = f"Forge - v{get_app_context().settings.version}" + exif[base_tags['Software']] = f"FieldWeave - v{get_app_context().settings.version}" # Add timestamp timestamp = metadata.get("timestamp", datetime.now().isoformat()) @@ -678,7 +678,7 @@ def 
_save_png_with_metadata( # Add software info from app_context import get_app_context - pnginfo.add_text("Software", f"Forge - v{get_app_context().settings.version}") + pnginfo.add_text("Software", f"FieldWeave - v{get_app_context().settings.version}") pnginfo.add_text("Metadata", json.dumps(metadata, indent=2)) # Add individual camera settings as separate chunks diff --git a/config/forge/default_settings.yaml b/config/fieldweave/default_settings.yaml similarity index 52% rename from config/forge/default_settings.yaml rename to config/fieldweave/default_settings.yaml index 3afb528..39ad112 100644 --- a/config/forge/default_settings.yaml +++ b/config/fieldweave/default_settings.yaml @@ -1,3 +1,3 @@ -config_type: forge_settings +config_type: fieldweave_settings config_version: '1.2' version: '1.2' diff --git a/forgeConfig.py b/fieldweaveConfig.py similarity index 71% rename from forgeConfig.py rename to fieldweaveConfig.py index c721321..36cc9e8 100644 --- a/forgeConfig.py +++ b/fieldweaveConfig.py @@ -8,15 +8,15 @@ from logger import info @dataclass -class ForgeSettings: - """Forge application settings""" +class FieldWeaveSettings: + """FieldWeave application settings""" version: str = "1.2" # Version from last startup show_patchnotes: bool = False # Runtime flag - set when version changes, not saved def validate(self) -> None: """ - Validate Forge settings. + Validate FieldWeave settings. Raises: ValueError: If any setting is invalid @@ -25,9 +25,9 @@ def validate(self) -> None: raise ValueError("version must be a non-empty string") -class ForgeSettingsManager(ConfigManager[ForgeSettings]): +class FieldWeaveSettingsManager(ConfigManager[FieldWeaveSettings]): """ - Configuration manager for Forge application settings. + Configuration manager for FieldWeave application settings. When a version mismatch is detected during load, the migration updates the stored version and sets show_patchnotes flag. 
@@ -36,13 +36,13 @@ class ForgeSettingsManager(ConfigManager[ForgeSettings]): def __init__( self, *, - root_dir: Union[str, Path] = "./config/forge", + root_dir: Union[str, Path] = "./config/fieldweave", default_filename: str = "default_settings.yaml", backup_dirname: str = "backups", backup_keep: int = 5, ) -> None: super().__init__( - config_type="forge_settings", + config_type="fieldweave_settings", root_dir=root_dir, default_filename=default_filename, backup_dirname=backup_dirname, @@ -56,7 +56,7 @@ def migrate( to_version: str ) -> dict[str, Any]: """ - Migrate Forge settings and update version. + Migrate FieldWeave settings and update version. When version changes: 1. Updates the stored version to current @@ -65,12 +65,12 @@ def migrate( Args: data: Dictionary containing settings data from_version: Version from the file - to_version: Current Forge version + to_version: Current FieldWeave version Returns: Migrated dictionary with updated version """ - info(f"Forge version changed: {from_version} -> {to_version}") + info(f"FieldWeave version changed: {from_version} -> {to_version}") # Update version to current data["version"] = to_version @@ -79,9 +79,9 @@ def migrate( return data - def from_dict(self, data: dict[str, Any]) -> ForgeSettings: + def from_dict(self, data: dict[str, Any]) -> FieldWeaveSettings: """ - Convert dictionary to ForgeSettings object. + Convert dictionary to FieldWeaveSettings object. Sets show_patchnotes flag if migration occurred. 
@@ -89,19 +89,19 @@ def from_dict(self, data: dict[str, Any]) -> ForgeSettings: data: Dictionary containing settings data Returns: - ForgeSettings instance with show_patchnotes set if needed + FieldWeaveSettings instance with show_patchnotes set if needed """ # Handle empty dict (fresh instance) if not data: - settings = ForgeSettings() + settings = FieldWeaveSettings() else: - # Extract only valid fields for ForgeSettings + # Extract only valid fields for FieldWeaveSettings valid_fields = {"version"} filtered_data = {k: v for k, v in data.items() if k in valid_fields} - settings = ForgeSettings(**filtered_data) + settings = FieldWeaveSettings(**filtered_data) # If migration happened, set the show_patchnotes flag - if settings.version != self.get_forge_version(): + if settings.version != self.get_fieldweave_version(): settings.show_patchnotes = True info("Patch notes flag set - new version detected") @@ -110,14 +110,14 @@ def from_dict(self, data: dict[str, Any]) -> ForgeSettings: return settings - def to_dict(self, settings: ForgeSettings) -> dict[str, Any]: + def to_dict(self, settings: FieldWeaveSettings) -> dict[str, Any]: """ - Convert ForgeSettings object to dictionary. + Convert FieldWeaveSettings object to dictionary. Only includes fields that should be saved (excludes show_patchnotes). Args: - settings: ForgeSettings instance to convert + settings: FieldWeaveSettings instance to convert Returns: Dictionary representation diff --git a/generic_config.py b/generic_config.py index ab70090..00bd7d7 100644 --- a/generic_config.py +++ b/generic_config.py @@ -32,7 +32,7 @@ class ConfigManager(Generic[S], ABC): All config files include metadata fields: - config_type: Identifies which config loader to use - - config_version: The Forge version that created/last modified this config + - config_version: The FieldWeave version that created/last modified this config Directory structure: root_dir/ @@ -87,7 +87,7 @@ def __init__( Initialize the config manager. 
Args: - config_type: Identifier for this config type (e.g., "camera_settings", "forge_settings") + config_type: Identifier for this config type (e.g., "camera_settings", "fieldweave_settings") root_dir: Directory for config files (settings, defaults, backups) default_filename: Name for the defaults file backup_dirname: Name for the backups subdirectory @@ -171,17 +171,17 @@ def migrate( Args: data: Dictionary containing the config data (without metadata fields) from_version: Version the config was created with - to_version: Current Forge version + to_version: Current FieldWeave version Returns: Migrated dictionary (or original if no migration needed) """ return data - def get_forge_version(self) -> str: - """Get the current Forge version.""" - from app_context import FORGE_VERSION - return FORGE_VERSION + def get_fieldweave_version(self) -> str: + """Get the current FieldWeave version.""" + from app_context import FIELDWEAVE_VERSION + return FIELDWEAVE_VERSION def active_path(self) -> Path: """Return path to the active settings file.""" @@ -209,7 +209,7 @@ def _add_metadata(self, data: dict[str, Any]) -> dict[str, Any]: """ return { "config_type": self.config_type, - "config_version": self.get_forge_version(), + "config_version": self.get_fieldweave_version(), **data, } @@ -240,7 +240,7 @@ def _extract_metadata( # Handle migration if version mismatch if config_version is not None: - current_version = self.get_forge_version() + current_version = self.get_fieldweave_version() if config_version != current_version and current_version != "unknown": info( diff --git a/logger.py b/logger.py index c5830d0..639ae40 100644 --- a/logger.py +++ b/logger.py @@ -30,7 +30,7 @@ def __init__(self): return self._log_callbacks: list[Callable[[str, str], None]] = [] - self._logger = logging.getLogger('ForgeApp') + self._logger = logging.getLogger('FieldWeaveApp') self._logger.setLevel(logging.DEBUG) # Default log directory @@ -48,7 +48,7 @@ def __init__(self): def 
_setup_file_handler(self): """Setup rotating file handler""" timestamp = datetime.now().strftime('%Y%m%d_%H%M%S') - log_file = self._log_dir / f"Forge_{timestamp}.log" + log_file = self._log_dir / f"FieldWeave_{timestamp}.log" # Store current log file path self._current_log_file = log_file @@ -83,7 +83,7 @@ def _cleanup_old_logs(self): try: # Get all base log files (without .1, .2, etc extensions) log_files = sorted( - [f for f in self._log_dir.glob("Forge_*.log") + [f for f in self._log_dir.glob("FieldWeave_*.log") if not f.stem.split('.')[-1].isdigit()], key=lambda x: x.stat().st_mtime, reverse=True diff --git a/main.py b/main.py index 636f892..b4a7d15 100644 --- a/main.py +++ b/main.py @@ -21,7 +21,7 @@ # Initialize app context (this will load camera SDK and config) ctx = get_app_context() - info("Forge application starting") + info("FieldWeave application starting") win = MainWindow() win.show() @@ -29,7 +29,7 @@ exit_code = app.exec() # Cleanup - info("Forge application shutting down") + info("FieldWeave application shutting down") ctx.cleanup() sys.exit(exit_code) diff --git a/printer/automated_controller.py b/printer/automated_controller.py index c5276d3..239cb28 100644 --- a/printer/automated_controller.py +++ b/printer/automated_controller.py @@ -14,8 +14,8 @@ from UI.input.toggle_button import ToggleButton from UI.input.text_field import TextField -from forgeConfig import ( - ForgeSettings, +from fieldweaveConfig import ( + FieldWeaveSettings, ) from .automation_config import ( AutomationSettings, @@ -32,8 +32,8 @@ class AutomatedPrinter(CameraCalibrationMixin, AutofocusMixin, BasePrinterController): """Extended printer controller with automation capabilities""" AUTOMATION_CONFIG_SUBDIR = "" - def __init__(self, forgeConfig: ForgeSettings, camera): - super().__init__(forgeConfig) + def __init__(self, fieldweaveConfig: FieldWeaveSettings, camera): + super().__init__(fieldweaveConfig) AutomationSettingsManager.scope_dir(self.AUTOMATION_CONFIG_SUBDIR) 
self.automation_settings: AutomationSettings = AutomationSettingsManager.load(self.AUTOMATION_CONFIG_SUBDIR) diff --git a/printer/base_controller.py b/printer/base_controller.py index f53a414..33f4c8c 100644 --- a/printer/base_controller.py +++ b/printer/base_controller.py @@ -15,8 +15,8 @@ PrinterSettings, PrinterSettingsManager ) -from forgeConfig import ( - ForgeSettings, +from fieldweaveConfig import ( + FieldWeaveSettings, ) def _probe_port(port_device, baud, indicators, request=b"M115\r\n", read_window_s=10, min_lines=3): @@ -98,7 +98,7 @@ class command: class BasePrinterController: CONFIG_SUBDIR = "Ender3" """Base class for 3D printer control""" - def __init__(self, forgeConfig: ForgeSettings): + def __init__(self, fieldweaveConfig: FieldWeaveSettings): self.config = PrinterSettings() PrinterSettingsManager.scope_dir(self.CONFIG_SUBDIR) self.config = PrinterSettingsManager.load(self.CONFIG_SUBDIR) @@ -127,13 +127,13 @@ def __init__(self, forgeConfig: ForgeSettings): # Initialize serial connection - self._initialize_printer(forgeConfig) + self._initialize_printer(fieldweaveConfig) # Start command processing thread self._processing_thread = threading.Thread(target=self._process_commands, daemon=True) self._processing_thread.start() - def _initialize_printer(self, forgeConfig): + def _initialize_printer(self, fieldweaveConfig): """Initialize printer serial connection""" baud = self.config.baud_rate indicators = getattr(self.config, "valid_response_indicators", None) or [ @@ -146,7 +146,7 @@ def _initialize_printer(self, forgeConfig): raise RuntimeError("No serial ports found. 
Is the printer connected?") preferred = [] - cfg_port = getattr(forgeConfig, "serial_port", None) + cfg_port = getattr(fieldweaveConfig, "serial_port", None) if cfg_port: preferred = [cfg_port] remaining = [p for p in detected if p not in set(preferred)] From deeeffd8ce4b3865f0d7b3399f1098e7b5636455 Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Sat, 14 Feb 2026 01:12:38 -0900 Subject: [PATCH 43/46] made setting objects more available --- UI/settings/pages/camera_settings.py | 59 +++++++++++++--------------- camera/settings/camera_settings.py | 53 +------------------------ common/setting_types.py | 55 ++++++++++++++++++++++++++ 3 files changed, 84 insertions(+), 83 deletions(-) create mode 100644 common/setting_types.py diff --git a/UI/settings/pages/camera_settings.py b/UI/settings/pages/camera_settings.py index db626e4..b86f6e7 100644 --- a/UI/settings/pages/camera_settings.py +++ b/UI/settings/pages/camera_settings.py @@ -24,6 +24,7 @@ from app_context import get_app_context from logger import info, error, warning, debug +from common.setting_types import SettingMetadata # Interval (ms) between live-value polls for hardware-controlled fields. 
_LIVE_POLL_INTERVAL_MS = 500 @@ -365,12 +366,12 @@ def _show_error_message(self, error_msg: str) -> None: label.setStyleSheet("color: red; padding: 20px;") self.settings_layout.addWidget(label) - def _group_settings(self, metadata_list: list) -> dict[str, list]: + def _group_settings(self, metadata_list: list[SettingMetadata]) -> dict[str, list[SettingMetadata]]: """Group settings by their group property""" - grouped = {} + grouped: dict[str, list[SettingMetadata]] = {} for meta in metadata_list: - group = meta.group if hasattr(meta, 'group') and meta.group else "General" + group = meta.group # Always present with default "General" if group not in grouped: grouped[group] = [] @@ -379,7 +380,7 @@ def _group_settings(self, metadata_list: list) -> dict[str, list]: return grouped - def _create_settings_group(self, group_name: str, settings_list: list) -> QGroupBox: + def _create_settings_group(self, group_name: str, settings_list: list[SettingMetadata]) -> QGroupBox: """Create a group box for a category of settings""" group_box = QGroupBox(group_name) layout = QFormLayout(group_box) @@ -389,7 +390,7 @@ def _create_settings_group(self, group_name: str, settings_list: list) -> QGroup if widget: # Create label with tooltip label = QLabel(setting_meta.display_name + ":") - if hasattr(setting_meta, 'description') and setting_meta.description: + if setting_meta.description: label.setToolTip(setting_meta.description) layout.addRow(label, widget) @@ -403,22 +404,16 @@ def _create_settings_group(self, group_name: str, settings_list: list) -> QGroup return group_box - def _create_setting_widget(self, meta) -> QWidget | None: + def _create_setting_widget(self, meta: SettingMetadata) -> QWidget | None: """Create appropriate widget for a setting based on its metadata""" camera = self.ctx.camera if not camera: return None settings = camera.settings - setting_type = meta.setting_type if hasattr(meta, 'setting_type') else None - - # Handle both enum and string values - if setting_type 
is None: - warning(f"No setting type for {meta.name}") - return None # Convert enum to string value if needed - type_str = setting_type.value if hasattr(setting_type, 'value') else str(setting_type) + type_str = meta.setting_type.value if hasattr(meta.setting_type, 'value') else str(meta.setting_type) # Create widget based on type if type_str == "bool": @@ -443,7 +438,7 @@ def _create_setting_widget(self, meta) -> QWidget | None: warning(f"Unknown setting type: {type_str} for {meta.name}") return None - def _create_bool_widget(self, meta, settings) -> QCheckBox | None: + def _create_bool_widget(self, meta: SettingMetadata, settings) -> QCheckBox | None: """Create checkbox for boolean settings""" # Check if setter exists first setter_name = f"set_{meta.name}" @@ -458,7 +453,7 @@ def _create_bool_widget(self, meta, settings) -> QCheckBox | None: checkbox.setChecked(current_value) # Set tooltip - if hasattr(meta, 'description') and meta.description: + if meta.description: checkbox.setToolTip(meta.description) # Connect to setter @@ -468,7 +463,7 @@ def _create_bool_widget(self, meta, settings) -> QCheckBox | None: return checkbox - def _create_range_widget(self, meta, settings) -> QWidget | None: + def _create_range_widget(self, meta: SettingMetadata, settings) -> QWidget | None: """Create slider with value display for range settings""" # Check if setter exists first setter_name = f"set_{meta.name}" @@ -481,7 +476,7 @@ def _create_range_widget(self, meta, settings) -> QWidget | None: layout.setContentsMargins(0, 0, 0, 0) # Determine if we need float or int - is_float = hasattr(meta, 'min_value') and isinstance(meta.min_value, float) + is_float = meta.min_value is not None and isinstance(meta.min_value, float) if is_float: # Use double spin box for float values @@ -495,7 +490,7 @@ def _create_range_widget(self, meta, settings) -> QWidget | None: spinbox.setFixedWidth(90) # Set range - if hasattr(meta, 'min_value') and hasattr(meta, 'max_value'): + if meta.min_value is 
not None and meta.max_value is not None: spinbox.setMinimum(meta.min_value) spinbox.setMaximum(meta.max_value) @@ -504,7 +499,7 @@ def _create_range_widget(self, meta, settings) -> QWidget | None: spinbox.setValue(current_value) # Set tooltip - if hasattr(meta, 'description') and meta.description: + if meta.description: spinbox.setToolTip(meta.description) # Create slider @@ -518,8 +513,8 @@ def _create_range_widget(self, meta, settings) -> QWidget | None: int((current_value - meta.min_value) / (meta.max_value - meta.min_value) * 1000) ) else: - slider.setMinimum(int(meta.min_value) if hasattr(meta, 'min_value') else 0) - slider.setMaximum(int(meta.max_value) if hasattr(meta, 'max_value') else 100) + slider.setMinimum(int(meta.min_value) if meta.min_value is not None else 0) + slider.setMaximum(int(meta.max_value) if meta.max_value is not None else 100) slider.setValue(int(current_value)) # Connect signals @@ -543,7 +538,7 @@ def _create_range_widget(self, meta, settings) -> QWidget | None: return container - def _create_dropdown_widget(self, meta, settings) -> QComboBox | None: + def _create_dropdown_widget(self, meta: SettingMetadata, settings) -> QComboBox | None: """Create dropdown for choice settings""" # Check if setter exists first setter_name = f"set_{meta.name}" @@ -554,7 +549,7 @@ def _create_dropdown_widget(self, meta, settings) -> QComboBox | None: combo = QComboBox() # Add choices - if hasattr(meta, 'choices') and meta.choices: + if meta.choices: for choice in meta.choices: combo.addItem(str(choice), choice) @@ -566,7 +561,7 @@ def _create_dropdown_widget(self, meta, settings) -> QComboBox | None: combo.setCurrentIndex(index) # Set tooltip - if hasattr(meta, 'description') and meta.description: + if meta.description: combo.setToolTip(meta.description) # Connect to setter @@ -576,7 +571,7 @@ def _create_dropdown_widget(self, meta, settings) -> QComboBox | None: return combo - def _create_rgba_level_widget(self, meta, settings) -> QWidget | None: + def 
_create_rgba_level_widget(self, meta: SettingMetadata, settings) -> QWidget | None: """Create RGBA level widget with four spinboxes for R, G, B, A""" # Check if setter exists first setter_name = f"set_{meta.name}" @@ -650,7 +645,7 @@ def on_rgba_changed(): layout.addStretch() return container - def _create_button_widget(self, meta, settings) -> QPushButton | None: + def _create_button_widget(self, meta: SettingMetadata, settings) -> QPushButton | None: """Create a button that calls a setter method without arguments""" setter_name = f"set_{meta.name}" if not hasattr(settings, setter_name): @@ -660,7 +655,7 @@ def _create_button_widget(self, meta, settings) -> QPushButton | None: button = QPushButton(meta.display_name) # Set tooltip - if hasattr(meta, 'description') and meta.description: + if meta.description: button.setToolTip(meta.description) # Connect to setter @@ -690,7 +685,7 @@ def on_button_clicked(): button.clicked.connect(on_button_clicked) return button - def _create_file_picker_button_widget(self, meta, settings) -> QPushButton | None: + def _create_file_picker_button_widget(self, meta: SettingMetadata, settings) -> QPushButton | None: """Create a file picker button that calls a setter method with a filepath""" setter_name = f"set_{meta.name}" if not hasattr(settings, setter_name): @@ -700,7 +695,7 @@ def _create_file_picker_button_widget(self, meta, settings) -> QPushButton | Non button = QPushButton(meta.display_name) # Set tooltip - if hasattr(meta, 'description') and meta.description: + if meta.description: button.setToolTip(meta.description) # Determine if this is an import or export button based on name @@ -785,7 +780,7 @@ def on_button_clicked(): button.clicked.connect(on_button_clicked) return button - def _create_number_picker_widget(self, meta, settings) -> QSpinBox | None: + def _create_number_picker_widget(self, meta: SettingMetadata, settings) -> QSpinBox | None: """Create a number picker (spinbox only, no slider)""" setter_name = 
f"set_{meta.name}" if not hasattr(settings, setter_name): @@ -796,7 +791,7 @@ def _create_number_picker_widget(self, meta, settings) -> QSpinBox | None: spinbox.setFixedWidth(90) # Set range - if hasattr(meta, 'min_value') and hasattr(meta, 'max_value'): + if meta.min_value is not None and meta.max_value is not None: spinbox.setMinimum(meta.min_value) spinbox.setMaximum(meta.max_value) @@ -805,7 +800,7 @@ def _create_number_picker_widget(self, meta, settings) -> QSpinBox | None: spinbox.setValue(current_value) # Set tooltip - if hasattr(meta, 'description') and meta.description: + if meta.description: spinbox.setToolTip(meta.description) # Connect to setter diff --git a/camera/settings/camera_settings.py b/camera/settings/camera_settings.py index 70efcda..edc0ea8 100644 --- a/camera/settings/camera_settings.py +++ b/camera/settings/camera_settings.py @@ -9,60 +9,11 @@ from generic_config import ConfigManager from logger import info, debug, exception, error +from common.setting_types import FileFormat, RGBALevel, SettingType, SettingMetadata + if TYPE_CHECKING: from camera.cameras.base_camera import BaseCamera, CameraResolution - -class FileFormat(str, Enum): - PNG = 'png' - TIFF = 'tiff' - JPEG = 'jpeg' - - -class RGBALevel(NamedTuple): - r: int - g: int - b: int - a: int - - def validate(self) -> None: - for name, value in [('r', self.r), ('g', self.g), ('b', self.b), ('a', self.a)]: - if not (0 <= value <= 255): - raise ValueError(f"RGBALevel.{name} must be in range [0, 255], got {value}") - - -class SettingType(str, Enum): - BOOL = "bool" - RANGE = "range" - DROPDOWN = "dropdown" - RGBA_LEVEL = "rgba_level" - BUTTON = "button" - FILE_PICKER_BUTTON = "file_picker_button" - NUMBER_PICKER = "number_picker" - - -@dataclass -class SettingMetadata: - name: str - display_name: str - setting_type: SettingType - description: str = "" - min_value: int | None = None - max_value: int | None = None - choices: list[str] | None = None - group: str = "General" - 
runtime_changeable: bool = True - # When set, this field is greyed out (and, for live-value fields, polled from - # hardware) while the named boolean setting equals *controlled_when*. - # - # controlled_when=True (default): grey out while the controller is ON. - # Example: exposure_time / gain are greyed while auto_exposure is True. - # controlled_when=False: grey out while the controller is OFF. - # Example: exposure target is greyed while auto_exposure is False. - controlled_by: str | None = None - controlled_when: bool = True - - @dataclass class CameraSettings(ABC): """ diff --git a/common/setting_types.py b/common/setting_types.py new file mode 100644 index 0000000..56be4b5 --- /dev/null +++ b/common/setting_types.py @@ -0,0 +1,55 @@ +from __future__ import annotations + +from dataclasses import dataclass + +from typing import NamedTuple +from enum import Enum + +class FileFormat(str, Enum): + PNG = 'png' + TIFF = 'tiff' + JPEG = 'jpeg' + + +class RGBALevel(NamedTuple): + r: int + g: int + b: int + a: int + + def validate(self) -> None: + for name, value in [('r', self.r), ('g', self.g), ('b', self.b), ('a', self.a)]: + if not (0 <= value <= 255): + raise ValueError(f"RGBALevel.{name} must be in range [0, 255], got {value}") + + +class SettingType(str, Enum): + BOOL = "bool" + RANGE = "range" + DROPDOWN = "dropdown" + RGBA_LEVEL = "rgba_level" + BUTTON = "button" + FILE_PICKER_BUTTON = "file_picker_button" + NUMBER_PICKER = "number_picker" + + +@dataclass +class SettingMetadata: + name: str + display_name: str + setting_type: SettingType + description: str = "" + min_value: int | None = None + max_value: int | None = None + choices: list[str] | None = None + group: str = "General" + runtime_changeable: bool = True + # When set, this field is greyed out (and, for live-value fields, polled from + # hardware) while the named boolean setting equals *controlled_when*. + # + # controlled_when=True (default): grey out while the controller is ON. 
+ # Example: exposure_time / gain are greyed while auto_exposure is True. + # controlled_when=False: grey out while the controller is OFF. + # Example: exposure target is greyed while auto_exposure is False. + controlled_by: str | None = None + controlled_when: bool = True \ No newline at end of file From 88e0a923d59ba3c20867cb6bd42bcdebc53bc16f Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Sun, 15 Feb 2026 17:27:01 -0900 Subject: [PATCH 44/46] changed setting close prompt --- UI/settings/pages/camera_settings.py | 55 ++++++++++++++++++++++++++-- UI/settings/settings_main.py | 53 ++++++++++++++++++++++----- color.py => misc/color.py | 0 3 files changed, 94 insertions(+), 14 deletions(-) rename color.py => misc/color.py (100%) diff --git a/UI/settings/pages/camera_settings.py b/UI/settings/pages/camera_settings.py index b86f6e7..d485e0b 100644 --- a/UI/settings/pages/camera_settings.py +++ b/UI/settings/pages/camera_settings.py @@ -46,6 +46,7 @@ def __init__(self, parent_dialog=None, parent: QWidget | None = None) -> None: self._updating_from_camera = False self._modified_settings: set[str] = set() # Track which settings have been modified self._saved_values: dict[str, any] = {} # Store saved values for comparison + self._default_values: dict[str, any] = {} # Store default values for reset self._group_names: list[str] = [] # Track group names in order self._group_widgets: dict[str, QGroupBox] = {} # Map group names to widgets @@ -269,12 +270,16 @@ def _refresh_settings_display(self) -> None: settings = camera.settings metadata_list = settings.get_metadata() - # Store current values as "saved" baseline + # Store current values as "saved" baseline and also as defaults self._saved_values.clear() + self._default_values.clear() self._modified_settings.clear() for meta in metadata_list: current_value = getattr(settings, meta.name, None) self._saved_values[meta.name] = current_value + # Only set default values if not already set (preserve first load) + if meta.name not in 
self._default_values: + self._default_values[meta.name] = current_value # Build controlled-field index from metadata for meta in metadata_list: @@ -1371,6 +1376,12 @@ def _reset_settings(self) -> None: warning("No camera to reset settings on") return + # Check if we have default values stored + if not self._default_values: + warning("No default values stored - cannot reset") + self.ctx.toast.warning("No default values available to reset to", duration=3000) + return + # Confirm reset with user reply = QMessageBox.question( self, @@ -1384,19 +1395,55 @@ def _reset_settings(self) -> None: return try: - # Refresh from camera hardware (gets defaults) settings = camera.settings - settings.refresh_from_camera(camera.underlying_camera) + metadata_list = settings.get_metadata() + + # Create a lookup dict for metadata + meta_dict = {meta.name: meta for meta in metadata_list} + + # Reset each setting to its default value + for field_name, default_value in self._default_values.items(): + if default_value is None: + continue + + meta = meta_dict.get(field_name) + if not meta: + continue + + try: + setter_name = f"set_{field_name}" + if hasattr(settings, setter_name): + setter = getattr(settings, setter_name) + + # Convert enum to string value if needed + type_str = meta.setting_type.value if hasattr(meta.setting_type, 'value') else str(meta.setting_type) + + # Handle dropdown settings that need index and value + if type_str == "dropdown" and hasattr(meta, 'choices') and meta.choices: + # Find the index of the default value + try: + default_index = meta.choices.index(default_value) + setter(index=default_index, value=default_value) + except (ValueError, AttributeError): + setter(default_value) + else: + setter(default_value) + debug(f"Reset {field_name} to default: {default_value}") + except Exception as e: + warning(f"Could not reset {field_name}: {e}") info("Camera settings reset to defaults") + # Clear modification markers + self._clear_all_modifications() + # Refresh the 
display self._refresh_settings_display() self.ctx.toast.info("Settings reset to defaults", duration=2000) except Exception as e: error(f"Error resetting camera settings: {e}") - self.ctx.toast.info(f"Error resetting settings: {e}", duration=3000) + self.ctx.toast.error(f"Error resetting settings: {e}", duration=3000) def camera_page(parent_dialog=None) -> QWidget: diff --git a/UI/settings/settings_main.py b/UI/settings/settings_main.py index a5f7054..3f67d34 100644 --- a/UI/settings/settings_main.py +++ b/UI/settings/settings_main.py @@ -1,6 +1,7 @@ from __future__ import annotations from PySide6.QtCore import Qt, Signal, Slot +from PySide6.QtGui import QCloseEvent from PySide6.QtWidgets import ( QHBoxLayout, QLabel, @@ -205,23 +206,55 @@ def _on_tree_item_clicked(self, item: QTreeWidgetItem, column: int) -> None: def _on_close_clicked(self) -> None: """Handle close button click with confirmation if settings modified""" + if self._handle_close_with_unsaved_changes(): + self.reject() + + def _handle_close_with_unsaved_changes(self) -> bool: + """Handle closing with unsaved changes. Returns True if close should proceed.""" # Check if camera page has unsaved changes camera_widget = self._page_widgets.get("Camera") if camera_widget and hasattr(camera_widget, 'has_unsaved_changes') and camera_widget.has_unsaved_changes(): - reply = QMessageBox.question( - self, - "Unsaved Changes", - "You have unsaved camera settings. 
Do you want to save them?", - QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No | QMessageBox.StandardButton.Cancel - ) + # Create custom message box with Yes, Reset to Defaults, No, Cancel buttons + msg_box = QMessageBox(self) + msg_box.setWindowTitle("Unsaved Changes") + msg_box.setText("You have unsaved camera settings.") + msg_box.setInformativeText("Would you like to save your settings?") + msg_box.setIcon(QMessageBox.Icon.Question) + + # Add custom buttons + yes_btn = msg_box.addButton("Yes", QMessageBox.ButtonRole.YesRole) + reset_btn = msg_box.addButton("Reset to Defaults", QMessageBox.ButtonRole.DestructiveRole) + no_btn = msg_box.addButton("No", QMessageBox.ButtonRole.NoRole) + cancel_btn = msg_box.addButton("Cancel", QMessageBox.ButtonRole.RejectRole) + + msg_box.setDefaultButton(cancel_btn) - if reply == QMessageBox.StandardButton.Cancel: - return # Don't close - elif reply == QMessageBox.StandardButton.Yes: + msg_box.exec() + clicked = msg_box.clickedButton() + + if clicked == cancel_btn: + return False # Don't close + elif clicked == yes_btn: # Save settings before closing self.save_camera_settings.emit() + return True + elif clicked == reset_btn: + # Reset to defaults + if hasattr(camera_widget, '_reset_settings'): + camera_widget._reset_settings() + return True + elif clicked == no_btn: + # Don't save, just close + return True - self.reject() + return True # No unsaved changes, proceed with close + + def closeEvent(self, event: QCloseEvent) -> None: + """Handle window close event (X button)""" + if self._handle_close_with_unsaved_changes(): + event.accept() + else: + event.ignore() def _add_page(self, name: str, page: QWidget) -> None: """Add a page and create tree items for it and its groups""" diff --git a/color.py b/misc/color.py similarity index 100% rename from color.py rename to misc/color.py From 514e2b2f20ba6964097202c21cad177529947212 Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Sun, 15 Feb 2026 17:34:31 -0900 Subject: [PATCH 
45/46] moved files to common folder --- UI/main_window.py | 2 +- UI/settings/pages/camera_settings.py | 4 +- UI/tabs/logs_tab.py | 2 +- UI/tabs/navigate_tab.py | 2 +- UI/widgets/camera_controls_widget.py | 4 +- UI/widgets/camera_preview.py | 4 +- camera/camera_enumerator.py | 2 +- camera/camera_manager.py | 2 +- camera/cameras/amscope_camera.py | 2 +- camera/cameras/base_camera.py | 8 +- camera/sdk_loaders/amscope_sdk_loader.py | 2 +- camera/settings/amscope_settings.py | 2 +- camera/settings/camera_settings.py | 4 +- camera/threaded_camera.py | 2 +- app_context.py => common/app_context.py | 4 +- .../fieldweaveConfig.py | 4 +- generic_config.py => common/generic_config.py | 4 +- logger.py => common/logger.py | 0 main.py | 5 +- misc/.gitignore | 3 + misc/usb_camera.py | 247 ++++++++++++++++++ printer/automated_controller.py | 2 +- printer/automation_config.py | 2 +- printer/base_controller.py | 2 +- printer/printerConfig.py | 2 +- 25 files changed, 283 insertions(+), 34 deletions(-) rename app_context.py => common/app_context.py (97%) rename fieldweaveConfig.py => common/fieldweaveConfig.py (98%) rename generic_config.py => common/generic_config.py (99%) rename logger.py => common/logger.py (100%) create mode 100644 misc/.gitignore create mode 100644 misc/usb_camera.py diff --git a/UI/main_window.py b/UI/main_window.py index 2ca656a..0eaf67b 100644 --- a/UI/main_window.py +++ b/UI/main_window.py @@ -20,7 +20,7 @@ from .state import State from .settings.settings_main import SettingsButton, SettingsDialog -from app_context import get_app_context +from common.app_context import get_app_context class MainWindow(QMainWindow): diff --git a/UI/settings/pages/camera_settings.py b/UI/settings/pages/camera_settings.py index d485e0b..6fec457 100644 --- a/UI/settings/pages/camera_settings.py +++ b/UI/settings/pages/camera_settings.py @@ -22,8 +22,8 @@ ) from PySide6.QtCore import Qt, Signal, Slot, QTimer -from app_context import get_app_context -from logger import info, error, 
warning, debug +from common.app_context import get_app_context +from common.logger import info, error, warning, debug from common.setting_types import SettingMetadata # Interval (ms) between live-value polls for hardware-controlled fields. diff --git a/UI/tabs/logs_tab.py b/UI/tabs/logs_tab.py index 1300660..604c5d3 100644 --- a/UI/tabs/logs_tab.py +++ b/UI/tabs/logs_tab.py @@ -15,7 +15,7 @@ QLabel, ) -from logger import get_logger +from common.logger import get_logger class LogsTab(QWidget): diff --git a/UI/tabs/navigate_tab.py b/UI/tabs/navigate_tab.py index 887cc0a..6df3ba8 100644 --- a/UI/tabs/navigate_tab.py +++ b/UI/tabs/navigate_tab.py @@ -14,7 +14,7 @@ from UI.widgets.camera_controls_widget import CameraControlsWidget from UI.widgets.navigation_widget import NavigationWidget -from app_context import open_settings +from common.app_context import open_settings class NavigateTab(CameraWithSidebarPage): def __init__(self, parent: QWidget | None = None) -> None: diff --git a/UI/widgets/camera_controls_widget.py b/UI/widgets/camera_controls_widget.py index a3027ae..a47265c 100644 --- a/UI/widgets/camera_controls_widget.py +++ b/UI/widgets/camera_controls_widget.py @@ -7,8 +7,8 @@ QPushButton, QLineEdit, QLabel, QFileDialog, QMessageBox, QComboBox ) from PySide6.QtCore import Slot, Signal -from logger import info, error, warning, debug -from app_context import get_app_context +from common.logger import info, error, warning, debug +from common.app_context import get_app_context class CameraControlsWidget(QWidget): diff --git a/UI/widgets/camera_preview.py b/UI/widgets/camera_preview.py index 985b5c4..f54e700 100644 --- a/UI/widgets/camera_preview.py +++ b/UI/widgets/camera_preview.py @@ -8,8 +8,8 @@ QPushButton, QHBoxLayout ) -from app_context import get_app_context -from logger import info, error, warning +from common.app_context import get_app_context +from common.logger import info, error, warning class OverlayLabel(QLabel): diff --git 
a/camera/camera_enumerator.py b/camera/camera_enumerator.py index b06e163..28fe511 100644 --- a/camera/camera_enumerator.py +++ b/camera/camera_enumerator.py @@ -9,7 +9,7 @@ from dataclasses import dataclass from enum import Enum from typing import Any -from logger import error, exception, debug +from common.logger import error, exception, debug from camera.cameras.amscope_camera import AmscopeCamera diff --git a/camera/camera_manager.py b/camera/camera_manager.py index abcb265..29830d8 100644 --- a/camera/camera_manager.py +++ b/camera/camera_manager.py @@ -19,7 +19,7 @@ AmscopeEnumerator, GenericUSBEnumerator ) -from logger import info, error, warning, exception, debug +from common.logger import info, error, warning, exception, debug class CameraManager(QObject): diff --git a/camera/cameras/amscope_camera.py b/camera/cameras/amscope_camera.py index 0b4e64d..bc74b8e 100644 --- a/camera/cameras/amscope_camera.py +++ b/camera/cameras/amscope_camera.py @@ -14,7 +14,7 @@ import gc from camera.cameras.base_camera import BaseCamera, CameraResolution -from logger import info, debug, error, exception, warning +from common.logger import info, debug, error, exception, warning from camera.settings.amscope_settings import AmscopeSettings # Module-level reference to the loaded SDK diff --git a/camera/cameras/base_camera.py b/camera/cameras/base_camera.py index 82471e8..2199321 100644 --- a/camera/cameras/base_camera.py +++ b/camera/cameras/base_camera.py @@ -14,7 +14,7 @@ from PIL import PngImagePlugin import json -from logger import info, debug, error, exception +from common.logger import info, debug, error, exception from camera.settings.camera_settings import CameraSettings, CameraSettingsManager @@ -555,7 +555,7 @@ def _save_tiff_with_metadata( exif = Exif() # Add software information - from app_context import get_app_context + from common.app_context import get_app_context exif[base_tags['Software']] = f"FieldWeave - v{get_app_context().settings.version}" # Add timestamp 
@@ -620,7 +620,7 @@ def _save_jpeg_with_metadata( exif = Exif() # Add software information - from app_context import get_app_context + from common.app_context import get_app_context exif[base_tags['Software']] = f"FieldWeave - v{get_app_context().settings.version}" # Add timestamp @@ -677,7 +677,7 @@ def _save_png_with_metadata( pnginfo = PngImagePlugin.PngInfo() # Add software info - from app_context import get_app_context + from common.app_context import get_app_context pnginfo.add_text("Software", f"FieldWeave - v{get_app_context().settings.version}") pnginfo.add_text("Metadata", json.dumps(metadata, indent=2)) diff --git a/camera/sdk_loaders/amscope_sdk_loader.py b/camera/sdk_loaders/amscope_sdk_loader.py index 2477196..988cc9b 100644 --- a/camera/sdk_loaders/amscope_sdk_loader.py +++ b/camera/sdk_loaders/amscope_sdk_loader.py @@ -16,7 +16,7 @@ from pathlib import Path from typing import Optional -from logger import get_logger +from common.logger import get_logger class AmscopeSdkLoader: """ diff --git a/camera/settings/amscope_settings.py b/camera/settings/amscope_settings.py index d7def72..acc5e00 100644 --- a/camera/settings/amscope_settings.py +++ b/camera/settings/amscope_settings.py @@ -11,7 +11,7 @@ RGBALevel, FileFormat, ) -from logger import info, error, exception, debug, warning +from common.logger import info, error, exception, debug, warning if TYPE_CHECKING: from camera.cameras.base_camera import BaseCamera, CameraResolution diff --git a/camera/settings/camera_settings.py b/camera/settings/camera_settings.py index edc0ea8..e044f03 100644 --- a/camera/settings/camera_settings.py +++ b/camera/settings/camera_settings.py @@ -6,8 +6,8 @@ from pathlib import Path from typing import Any, NamedTuple, TYPE_CHECKING -from generic_config import ConfigManager -from logger import info, debug, exception, error +from common.generic_config import ConfigManager +from common.logger import info, debug, exception, error from common.setting_types import FileFormat, 
RGBALevel, SettingType, SettingMetadata diff --git a/camera/threaded_camera.py b/camera/threaded_camera.py index 83485f7..013a094 100644 --- a/camera/threaded_camera.py +++ b/camera/threaded_camera.py @@ -11,7 +11,7 @@ from PySide6.QtCore import QObject, Signal from camera.cameras.base_camera import BaseCamera -from logger import info, error, warning, debug, exception +from common.logger import info, error, warning, debug, exception T = TypeVar('T', bound=BaseCamera) diff --git a/app_context.py b/common/app_context.py similarity index 97% rename from app_context.py rename to common/app_context.py index 9bf8a0e..b5fc5e1 100644 --- a/app_context.py +++ b/common/app_context.py @@ -8,8 +8,8 @@ from typing import TYPE_CHECKING from camera.camera_manager import CameraManager from camera.cameras.base_camera import BaseCamera -from logger import info, error, warning, debug -from fieldweaveConfig import FieldWeaveSettingsManager, FieldWeaveSettings +from common.logger import info, error, warning, debug +from common.fieldweaveConfig import FieldWeaveSettingsManager, FieldWeaveSettings if TYPE_CHECKING: from UI.settings.settings_main import SettingsDialog diff --git a/fieldweaveConfig.py b/common/fieldweaveConfig.py similarity index 98% rename from fieldweaveConfig.py rename to common/fieldweaveConfig.py index 36cc9e8..82c0530 100644 --- a/fieldweaveConfig.py +++ b/common/fieldweaveConfig.py @@ -4,8 +4,8 @@ from pathlib import Path from typing import Any, Union -from generic_config import ConfigManager -from logger import info +from common.generic_config import ConfigManager +from common.logger import info @dataclass class FieldWeaveSettings: diff --git a/generic_config.py b/common/generic_config.py similarity index 99% rename from generic_config.py rename to common/generic_config.py index 00bd7d7..196a8e1 100644 --- a/generic_config.py +++ b/common/generic_config.py @@ -9,7 +9,7 @@ import time import yaml -from logger import info, debug, error, warning +from common.logger 
import info, debug, error, warning # File/dir names are generic—usable for ANY config ACTIVE_FILENAME = "settings.yaml" @@ -180,7 +180,7 @@ def migrate( def get_fieldweave_version(self) -> str: """Get the current FieldWeave version.""" - from app_context import FIELDWEAVE_VERSION + from common.app_context import FIELDWEAVE_VERSION return FIELDWEAVE_VERSION def active_path(self) -> Path: diff --git a/logger.py b/common/logger.py similarity index 100% rename from logger.py rename to common/logger.py diff --git a/main.py b/main.py index b4a7d15..63d17da 100644 --- a/main.py +++ b/main.py @@ -8,8 +8,8 @@ from UI.style import apply_style # Initialize app context early -from app_context import get_app_context -from logger import info +from common.app_context import get_app_context +from common.logger import info if __name__ == "__main__": @@ -19,7 +19,6 @@ app = QApplication(sys.argv) apply_style(app) - # Initialize app context (this will load camera SDK and config) ctx = get_app_context() info("FieldWeave application starting") diff --git a/misc/.gitignore b/misc/.gitignore new file mode 100644 index 0000000..b51236f --- /dev/null +++ b/misc/.gitignore @@ -0,0 +1,3 @@ +/qttest/ +/calculateDPIError.py +/color_test.py \ No newline at end of file diff --git a/misc/usb_camera.py b/misc/usb_camera.py new file mode 100644 index 0000000..239453a --- /dev/null +++ b/misc/usb_camera.py @@ -0,0 +1,247 @@ +#!/usr/bin/env python3 +""" +Camera Detection Script +Detects all available cameras using OpenCV and displays their capabilities. 
+""" + +from __future__ import annotations + +import cv2 +import sys +import os + +try: + from cv2_enumerate_cameras import enumerate_cameras +except ImportError: + print("Error: cv2_enumerate_cameras is not installed") + print("Install it with: pip install opencv-camera-enumeration") + sys.exit(1) + + +def get_camera_properties(cap: cv2.VideoCapture, camera_info: dict) -> dict[str, float | str | int]: + """Get detailed properties of a camera.""" + vid = camera_info.get('vid') + pid = camera_info.get('pid') + + properties = { + 'Index': camera_info['index'], + 'Name': camera_info.get('name', 'Unknown'), + 'Path': camera_info.get('path', 'Unknown'), + 'VID': f"0x{vid:04X}" if vid is not None else "N/A", + 'PID': f"0x{pid:04X}" if pid is not None else "N/A", + 'Backend': camera_info.get('backend_name', 'Unknown'), + 'Width': cap.get(cv2.CAP_PROP_FRAME_WIDTH), + 'Height': cap.get(cv2.CAP_PROP_FRAME_HEIGHT), + 'FPS': cap.get(cv2.CAP_PROP_FPS), + 'Codec': int(cap.get(cv2.CAP_PROP_FOURCC)), + 'Brightness': cap.get(cv2.CAP_PROP_BRIGHTNESS), + 'Contrast': cap.get(cv2.CAP_PROP_CONTRAST), + 'Saturation': cap.get(cv2.CAP_PROP_SATURATION), + 'Hue': cap.get(cv2.CAP_PROP_HUE), + 'Gain': cap.get(cv2.CAP_PROP_GAIN), + 'Exposure': cap.get(cv2.CAP_PROP_EXPOSURE), + } + + # Convert FOURCC code to readable format + fourcc = properties['Codec'] + if fourcc > 0: + properties['Codec_String'] = "".join([chr((fourcc >> 8 * i) & 0xFF) for i in range(4)]) + else: + properties['Codec_String'] = 'Unknown' + + return properties + + +def test_resolutions(cap: cv2.VideoCapture) -> list[tuple[int, int]]: + """Test common resolutions to see which ones are supported.""" + common_resolutions = [ + (320, 240), + (640, 480), + (800, 600), + (1024, 768), + (1280, 720), + (1280, 1024), + (1920, 1080), + (2560, 1440), + (3840, 2160), + ] + + supported = [] + + for width, height in common_resolutions: + cap.set(cv2.CAP_PROP_FRAME_WIDTH, width) + cap.set(cv2.CAP_PROP_FRAME_HEIGHT, height) + + actual_width = 
int(cap.get(cv2.CAP_PROP_FRAME_WIDTH)) + actual_height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT)) + + if actual_width == width and actual_height == height: + supported.append((width, height)) + + return supported + + +def get_backend_name(backend: int) -> str: + """Convert backend ID to name.""" + backend_names = { + cv2.CAP_ANY: "CAP_ANY", + cv2.CAP_VFW: "CAP_VFW", + cv2.CAP_V4L: "CAP_V4L", + cv2.CAP_V4L2: "CAP_V4L2", + cv2.CAP_FIREWIRE: "CAP_FIREWIRE", + cv2.CAP_FIREWARE: "CAP_FIREWARE", + cv2.CAP_IEEE1394: "CAP_IEEE1394", + cv2.CAP_DC1394: "CAP_DC1394", + cv2.CAP_CMU1394: "CAP_CMU1394", + cv2.CAP_DSHOW: "CAP_DSHOW", + cv2.CAP_PVAPI: "CAP_PVAPI", + cv2.CAP_OPENNI: "CAP_OPENNI", + cv2.CAP_OPENNI_ASUS: "CAP_OPENNI_ASUS", + cv2.CAP_ANDROID: "CAP_ANDROID", + cv2.CAP_XIAPI: "CAP_XIAPI", + cv2.CAP_AVFOUNDATION: "CAP_AVFOUNDATION", + cv2.CAP_GIGANETIX: "CAP_GIGANETIX", + cv2.CAP_MSMF: "CAP_MSMF", + cv2.CAP_WINRT: "CAP_WINRT", + cv2.CAP_INTELPERC: "CAP_INTELPERC", + cv2.CAP_OPENNI2: "CAP_OPENNI2", + cv2.CAP_OPENNI2_ASUS: "CAP_OPENNI2_ASUS", + cv2.CAP_GPHOTO2: "CAP_GPHOTO2", + cv2.CAP_GSTREAMER: "CAP_GSTREAMER", + cv2.CAP_FFMPEG: "CAP_FFMPEG", + cv2.CAP_IMAGES: "CAP_IMAGES", + cv2.CAP_ARAVIS: "CAP_ARAVIS", + cv2.CAP_OPENCV_MJPEG: "CAP_OPENCV_MJPEG", + cv2.CAP_INTEL_MFX: "CAP_INTEL_MFX", + cv2.CAP_XINE: "CAP_XINE", + } + return backend_names.get(backend, f"Unknown ({backend})") + + +def detect_cameras(backend: int = cv2.CAP_ANY) -> list[dict]: + """Detect available cameras using cv2_enumerate_cameras.""" + cameras = [] + + # Suppress OpenCV error messages temporarily + original_stderr = sys.stderr + sys.stderr = open(os.devnull, 'w') + + try: + for camera_info in enumerate_cameras(backend): + cameras.append({ + 'index': camera_info.index, + 'name': camera_info.name, + 'path': camera_info.path, + 'vid': camera_info.vid, + 'pid': camera_info.pid, + 'backend': camera_info.backend, + 'backend_name': get_backend_name(camera_info.backend), + }) + finally: + # Restore stderr + 
sys.stderr.close() + sys.stderr = original_stderr + + return cameras + + +def main() -> None: + """Main function to detect and display camera information.""" + print("=" * 80) + print("OpenCV Camera Detection Script") + print("=" * 80) + print(f"OpenCV Version: {cv2.__version__}") + print() + + # Detect available cameras + print("Scanning for cameras...") + all_cameras = detect_cameras() + + # Filter out cameras with None VID/PID (virtual cameras, VR headsets, etc.) + cameras = [cam for cam in all_cameras if cam['vid'] is not None and cam['pid'] is not None] + + if not cameras: + print("No physical cameras detected!") + sys.exit(1) + + print(f"Found {len(cameras)} camera instance(s)") + print() + + # Group cameras by VID/PID to identify duplicates across backends + vid_pid_groups = {} + for cam in cameras: + key = (cam['vid'], cam['pid']) + if key not in vid_pid_groups: + vid_pid_groups[key] = [] + vid_pid_groups[key].append(cam) + + print(f"Unique physical cameras: {len(vid_pid_groups)}") + for (vid, pid), cam_list in vid_pid_groups.items(): + vid_str = f"0x{vid:04X}" + pid_str = f"0x{pid:04X}" + print(f" VID: {vid_str}, PID: {pid_str} - {cam_list[0]['name']}") + if len(cam_list) > 1: + backends = ', '.join([c['backend_name'] for c in cam_list]) + print(f" Available on {len(cam_list)} backend(s): {backends}") + print() + + # Process only one instance per unique VID/PID combination + # Prefer the first instance found for each unique camera + unique_cameras = {} + for cam in cameras: + key = (cam['vid'], cam['pid']) + if key not in unique_cameras: + unique_cameras[key] = cam + + # Get detailed information for each unique camera + for (vid, pid), camera_info in unique_cameras.items(): + print("=" * 80) + print(f"Camera: {camera_info['name']}") + print("=" * 80) + + cap = cv2.VideoCapture(camera_info['index'], camera_info['backend']) + + if not cap.isOpened(): + print(f"Error: Could not open camera {camera_info['index']}") + continue + + # Get camera properties + 
props = get_camera_properties(cap, camera_info) + + print(f"Name: {props['Name']}") + print(f"Path: {props['Path']}") + print(f"VID: {props['VID']}") + print(f"PID: {props['PID']}") + print(f"Backend: {props['Backend']}") + print(f"Resolution: {int(props['Width'])}x{int(props['Height'])}") + print(f"FPS: {props['FPS']}") + print(f"Codec: {props['Codec_String']} (FOURCC: {props['Codec']})") + print(f"Brightness: {props['Brightness']}") + print(f"Contrast: {props['Contrast']}") + print(f"Saturation: {props['Saturation']}") + print(f"Hue: {props['Hue']}") + print(f"Gain: {props['Gain']}") + print(f"Exposure: {props['Exposure']}") + print() + + # Test supported resolutions + print("Testing supported resolutions...") + supported_resolutions = test_resolutions(cap) + + if supported_resolutions: + print("Supported resolutions:") + for width, height in supported_resolutions: + print(f" - {width}x{height}") + else: + print("No standard resolutions detected") + + cap.release() + print() + + print("=" * 80) + print("Camera detection complete!") + print("=" * 80) + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/printer/automated_controller.py b/printer/automated_controller.py index 239cb28..fa4736d 100644 --- a/printer/automated_controller.py +++ b/printer/automated_controller.py @@ -14,7 +14,7 @@ from UI.input.toggle_button import ToggleButton from UI.input.text_field import TextField -from fieldweaveConfig import ( +from common.fieldweaveConfig import ( FieldWeaveSettings, ) from .automation_config import ( diff --git a/printer/automation_config.py b/printer/automation_config.py index 7d4b4e8..7e602d6 100644 --- a/printer/automation_config.py +++ b/printer/automation_config.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from generic_config import ConfigManager, DEFAULT_FILENAME, ACTIVE_FILENAME +from common.generic_config import ConfigManager, DEFAULT_FILENAME, ACTIVE_FILENAME @dataclass class 
AutomationSettings: diff --git a/printer/base_controller.py b/printer/base_controller.py index 33f4c8c..9ce608d 100644 --- a/printer/base_controller.py +++ b/printer/base_controller.py @@ -15,7 +15,7 @@ PrinterSettings, PrinterSettingsManager ) -from fieldweaveConfig import ( +from common.fieldweaveConfig import ( FieldWeaveSettings, ) diff --git a/printer/printerConfig.py b/printer/printerConfig.py index 74b995d..5e97547 100644 --- a/printer/printerConfig.py +++ b/printer/printerConfig.py @@ -2,7 +2,7 @@ from dataclasses import dataclass, field -from generic_config import ConfigManager, DEFAULT_FILENAME, ACTIVE_FILENAME +from common.generic_config import ConfigManager, DEFAULT_FILENAME, ACTIVE_FILENAME @dataclass class PrinterSettings(): From cb61e2357f4652c464f6320a0c283c4c0631c391 Mon Sep 17 00:00:00 2001 From: AnthonyvW Date: Wed, 18 Feb 2026 00:24:36 -0900 Subject: [PATCH 46/46] state changes --- UI/main_window.py | 4 ++-- {UI => common}/state.py | 34 +++++++++++++++++++++++----------- 2 files changed, 25 insertions(+), 13 deletions(-) rename {UI => common}/state.py (54%) diff --git a/UI/main_window.py b/UI/main_window.py index 0eaf67b..eb98a16 100644 --- a/UI/main_window.py +++ b/UI/main_window.py @@ -17,7 +17,7 @@ from .tabs.calibration_tab import CalibrationTab from .tabs.logs_tab import LogsTab -from .state import State +from common.state import State from .settings.settings_main import SettingsButton, SettingsDialog from common.app_context import get_app_context @@ -138,7 +138,7 @@ def _apply_status(self) -> None: percent = int(round(100.0 * self._state.progress_current / max(1, self._state.progress_total))) self.progress.setValue(max(0, min(100, percent))) - self.status_bar.setProperty("kind", self._state.status_type()) + self.status_bar.setProperty("kind", self._state.automation_state) self.status_bar.style().unpolish(self.status_bar) self.status_bar.style().polish(self.status_bar) diff --git a/UI/state.py b/common/state.py similarity index 54% rename 
from UI/state.py rename to common/state.py index 3b2e7f6..cf74f90 100644 --- a/UI/state.py +++ b/common/state.py @@ -1,11 +1,31 @@ from __future__ import annotations from dataclasses import dataclass +from enum import Enum + +class MachineState(str, Enum): + DISCONNECTED: str = "Disconnected" + CONNECTED: str = "Connected" + + def __str__(self) -> str: + return self.value + +class AutomationState(str, Enum): + IDLE: str = "Idle" + COMPLETE: str = "Completed" + RUNNING: str = "Running" + PAUSED: str = "Paused" + + def __str__(self) -> str: + return self.value + + @dataclass(frozen=True) class State: - machine_state: str = "Disconnected" - automation_state: str = "Idle" + machine_state: str = MachineState.DISCONNECTED + automation_state: str = AutomationState.IDLE + camera_state: str = "" activity: str = "-" job_name: str = "-" @@ -21,12 +41,4 @@ def format_status_text(self) -> str: parts.append(self.activity) if self.progress_total > 0: parts.append(f"{self.progress_current}/{self.progress_total}") - return " | ".join(parts) - - def status_type(self) -> str: - a = self.automation_state.strip().lower() - if a in ("finished", "done", "complete", "completed"): - return "done" - if a in ("running", "busy", "capturing", "moving", "scanning", "paused"): - return "active" - return "idle" \ No newline at end of file + return " | ".join(parts) \ No newline at end of file
@@ -87,7 +87,7 @@ Forge is an opensource, gigapixel imaging system designed to scan tree core samp
- Picture of the full width of a tree core sample taken using Forge and a 10x microscope objective. + Picture of the full width of a tree core sample taken using FieldWeave and a 10x microscope objective.