diff --git a/Dockerfile b/Dockerfile
index 65ecadf..d1f1301 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -40,9 +40,6 @@ RUN pip install gunicorn
 # Copy built frontend from previous stage
 COPY --from=frontend-build /app/dist ./dist
 
-# Create necessary directories
-RUN mkdir -p saved_graphs plots
-
 # Create non-root user for security
 RUN useradd --create-home --shell /bin/bash app \
     && chown -R app:app /app
diff --git a/src/App.jsx b/src/App.jsx
index a17d5aa..7af9104 100644
--- a/src/App.jsx
+++ b/src/App.jsx
@@ -22,6 +22,7 @@ import EventsTab from './components/EventsTab.jsx';
 import GlobalVariablesTab from './components/GlobalVariablesTab.jsx';
 import { makeEdge } from './components/CustomEdge';
 import { nodeTypes } from './nodeConfig.js';
+import LogDock from './components/LogDock.jsx';
 
 // * Declaring variables *
 
@@ -51,6 +52,12 @@ const DnDFlow = () => {
   const { screenToFlowPosition } = useReactFlow();
   const [type] = useDnD();
+  // for the log dock
+  const [dockOpen, setDockOpen] = useState(false);
+  const [logLines, setLogLines] = useState([]);
+  const sseRef = useRef(null);
+  const append = (line) => setLogLines((prev) => [...prev, line]);
+
   //
   const onConnect = useCallback((params) => setEdges((eds) => addEdge(params, eds)), []);
 
   const onDragOver = useCallback((event) => {
@@ -634,6 +641,17 @@ const DnDFlow = () => {
   };
   // Function to run pathsim simulation
   const runPathsim = async () => {
+    setDockOpen(true);
+    setLogLines([]);
+
+    if (sseRef.current) sseRef.current.close();
+    const es = new EventSource(getApiEndpoint('/logs/stream'));
+    sseRef.current = es;
+
+    es.addEventListener('start', () => append('log stream connected…'));
+    es.onmessage = (evt) => append(evt.data);
+    es.onerror = () => { append('log stream error'); es.close(); sseRef.current = null; };
+
     try {
       const graphData = {
         nodes,
@@ -654,6 +672,8 @@
 
       const result = await response.json();
 
+      if (sseRef.current) { sseRef.current.close(); sseRef.current = null; }
+
       if (result.success) {
         // Store results and switch to results tab
         setSimulationResults(result.plot);
@@ -1801,6 +1821,12 @@ const DnDFlow = () => {
         )}
+      <LogDock
+        open={dockOpen}
+        onClose={() => { setDockOpen(false); if (sseRef.current) sseRef.current.close(); }}
+        lines={logLines}
+        progress={null}
+      />
   );
 }
diff --git a/src/backend.py b/src/backend.py
index 33af927..07dde98 100644
--- a/src/backend.py
+++ b/src/backend.py
@@ -18,6 +18,12 @@
 # Sphinx imports for docstring processing
 from docutils.core import publish_parts
 
+# imports for logging progress
+from flask import Response, stream_with_context
+import time
+import logging
+from queue import Queue, Empty
+
 
 def docstring_to_html(docstring):
     """Convert a Python docstring to HTML using docutils (like Sphinx does)."""
@@ -80,9 +86,42 @@
 )
 
 
-# Creates directory for saved graphs
-SAVE_DIR = "saved_graphs"
-os.makedirs(SAVE_DIR, exist_ok=True)
+### for capturing logs from pathsim
+
+
+@app.get("/logs/stream")
+def logs_stream():
+    def gen():
+        # ask clients to reconnect quickly if the stream drops
+        yield "retry: 500\n\n"
+        # named event that the frontend's 'start' listener reacts to
+        yield "event: start\ndata: connected\n\n"
+        while True:
+            try:
+                line = log_queue.get(timeout=15)
+            except Empty:
+                # SSE comment as a keep-alive while the simulation is quiet
+                yield ": keep-alive\n\n"
+                continue
+            # split carriage-return progress updates into separate data lines
+            for chunk in line.replace("\r", "\n").splitlines():
+                yield f"data: {chunk}\n\n"
+
+    return Response(stream_with_context(gen()), mimetype="text/event-stream")
+
+
+log_queue = Queue()
+
+
+class QueueHandler(logging.Handler):
+    """Logging handler that pushes formatted records onto the SSE queue."""
+
+    def emit(self, record):
+        try:
+            msg = self.format(record)
+            log_queue.put_nowait(msg)
+        except Exception:
+            pass
+
+
+qhandler = QueueHandler()
+qhandler.setLevel(logging.INFO)
+qhandler.setFormatter(logging.Formatter("%(asctime)s - %(levelname)s - %(message)s"))
+
+root = logging.getLogger()
+root.setLevel(logging.INFO)
+root.addHandler(qhandler)
+
+### log backend ends
 
 
 # Serve React frontend for production
@@ -218,42 +257,6 @@
         return jsonify({"error": f"Could not get docs for {node_type}: {str(e)}"}), 400
 
 
-# Function to save graphs
-@app.route("/save", methods=["POST"])
-def save_graph():
-    data = request.json
-    filename = data.get(
-        "filename", "file_1"
-    )  # sets file_1 as default filename if not provided
-    graph_data = data.get("graph")
-
-    # Enforces .json extension and valid filenames
-    valid_name = f"{filename}.json" if not filename.endswith(".json") else filename
-    file_path = os.path.join(SAVE_DIR, valid_name)
-
-    with open(file_path, "w") as f:
-        json.dump(graph_data, f, indent=2)
-
-    return jsonify({"message": f"Graph saved as {valid_name}"})
-
-
-# Function to load saved graphs
-@app.route("/load", methods=["POST"])
-def load_graph():
-    data = request.json
-    filename = data.get("filename")
-    validname = filename if not filename.endswith(".json") else filename[:-5]
-    filepath = os.path.join(SAVE_DIR, f"{validname}.json")
-
-    if not os.path.exists(filepath):
-        return jsonify({"error": "File not found"}), 404
-
-    with open(filepath, "r") as f:
-        graph_data = json.load(f)
-
-    return jsonify(graph_data)
-
-
 # Function to convert graph to Python script
 @app.route("/convert-to-python", methods=["POST"])
 def convert_to_python():
@@ -306,6 +309,10 @@ def run_pathsim():
 
         my_simulation, duration = make_pathsim_model(graph_data)
 
+        # get the pathsim logger and add the queue handler
+        logger = my_simulation.logger
+        if qhandler not in logger.handlers:  # avoid adding a duplicate handler on repeated runs
+            logger.addHandler(qhandler)
+
         # Run the simulation
         my_simulation.run(duration)
 
diff --git a/src/components/LogDock.jsx b/src/components/LogDock.jsx
new file mode 100644
index 0000000..08a5c6e
--- /dev/null
+++ b/src/components/LogDock.jsx
@@ -0,0 +1,48 @@
+// components/LogDock.jsx
+import React from 'react';
+
+export default function LogDock({ open, onClose, lines, progress }) {
+  if (!open) return null; // don’t render if it's closed
+
+  // NOTE: the inline styles below are approximate placeholders; adjust to the app's design
+  return (
+    <div
+      style={{
+        position: 'fixed',
+        bottom: 0,
+        left: 0,
+        right: 0,
+        height: '30vh',
+        display: 'flex',
+        flexDirection: 'column',
+        background: '#1e1e1e',
+        color: '#ddd',
+        fontFamily: 'monospace',
+        zIndex: 1000,
+      }}
+    >
+      {/* Header */}
+      <div style={{ display: 'flex', alignItems: 'center', gap: 8, padding: '4px 8px' }}>
+        <span style={{ fontWeight: 'bold' }}>Simulation Logs</span>
+        {typeof progress === 'number' && (
+          <div style={{ flex: 1, height: 6, background: '#333', borderRadius: 3 }}>
+            <div style={{ width: `${progress}%`, height: '100%', background: '#4caf50', borderRadius: 3 }} />
+          </div>
+        )}
+        <button onClick={onClose} style={{ marginLeft: 'auto' }}>
+          ×
+        </button>
+      </div>
+
+      {/* Log Lines */}
+      <pre style={{ flex: 1, margin: 0, padding: 8, overflowY: 'auto', whiteSpace: 'pre-wrap' }}>
+        {lines.length ? lines.join('\n') : 'Waiting for output…'}
+      </pre>
+    </div>
+  );
+}
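
For a quick manual check of the new `/logs/stream` endpoint outside the React UI, a minimal SSE consumer sketch is shown below. It assumes the Flask backend is reachable at `http://localhost:5000` and that the `requests` package is installed; neither detail comes from the diff itself.

```python
# Minimal sketch: follow the /logs/stream SSE endpoint from a terminal.
# ASSUMPTIONS: backend at http://localhost:5000, `requests` available.
import requests

with requests.get("http://localhost:5000/logs/stream", stream=True) as resp:
    for raw in resp.iter_lines(decode_unicode=True):
        # SSE frames look like "data: <payload>"; skip retry/keep-alive lines
        if raw and raw.startswith("data: "):
            print(raw[len("data: "):])
```

Note that `log_queue` is a single shared queue, so each log record is delivered to only one connected client; run this script instead of (not alongside) the UI's EventSource when testing.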