From 15c889d73f0b82c088116fe35f83e9433ff31270 Mon Sep 17 00:00:00 2001 From: sid <35936587+siddharth-shringarpure@users.noreply.github.com> Date: Sun, 23 Feb 2025 05:48:41 +0000 Subject: [PATCH 1/3] =?UTF-8?q?=F0=9F=9A=91=EF=B8=8FFix=20broken=20code?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/learn/chat/page.tsx | 12 +- backend/app.py | 66 ++++-- .../{Requirements.txt => requirements.txt} | 0 package-lock.json | 210 +++++++++--------- 4 files changed, 158 insertions(+), 130 deletions(-) rename backend/{Requirements.txt => requirements.txt} (100%) diff --git a/app/learn/chat/page.tsx b/app/learn/chat/page.tsx index bbb1051..b499345 100644 --- a/app/learn/chat/page.tsx +++ b/app/learn/chat/page.tsx @@ -58,8 +58,10 @@ const Chat: React.FC = () => { const savedResponse = localStorage.getItem("chatResponse"); if (savedResponse) { const parsedResponse = JSON.parse(savedResponse); - const moduleContent = - parsedResponse.data[0]?.module || "No content available"; + const moduleContent = parsedResponse.explanation || + parsedResponse.response || + parsedResponse.summary || + "No content available"; setMessages([{ id: Date.now(), content: moduleContent, sender: "ai" }]); localStorage.removeItem("chatResponse"); } @@ -88,7 +90,11 @@ const Chat: React.FC = () => { files: [] // Future enhancement: add file upload support }); - const aiContent = response.data.data[0]?.module || "Sorry, I couldn't generate a response"; + const aiContent = response.data.explanation || + response.data.response || + response.data.summary || + response.data.learning_plan || + "Sorry, I couldn't generate a response"; const aiMessage: Message = { id: Date.now() + 1, diff --git a/backend/app.py b/backend/app.py index 82072ba..4c506ab 100644 --- a/backend/app.py +++ b/backend/app.py @@ -110,6 +110,21 @@ def process_interaction(): current_topic = data.get('current_topic') active_subtopic = data.get('active_subtopic') session_history = data.get('session_history') + + # Process the interaction through the agent service + response = agent_service.start_new_topic(user_input, current_topic=current_topic, active_subtopic=active_subtopic, session_history=session_history) + + # Convert the response to a dictionary + response_dict = response.to_dict() + + return jsonify(response_dict) + + except Exception as e: + print(f"Error processing interaction: {e}") + return jsonify({ + 'error': str(e) + }), 500 + def generate_audio(text): generator = pipeline( text, voice='af_heart', # <= change voice here @@ -150,8 +165,6 @@ def process_text2speech(): if not text: return jsonify({"error": "No text provided"}), 400 - if not text: - return jsonify({"error": "No text provided"}), 400 audio = generate_audio(text) wav_file = io.BytesIO() @@ -160,20 +173,6 @@ def process_text2speech(): return send_file(wav_file, mimetype='audio/wav', as_attachment=False) - # Process the interaction through the agent service - response = agent_service.start_new_topic(user_input, current_topic=current_topic, active_subtopic=active_subtopic, session_history=session_history) - - # Convert the response to a dictionary - response_dict = response.to_dict() - - return jsonify(response_dict) - - except Exception as e: - print(f"Error processing interaction: {e}") - return jsonify({ - 'error': str(e) - }), 500 - @app.route('/process-content', methods=['POST']) def process_content(): """Process uploaded content.""" @@ -184,18 +183,41 @@ def process_content(): # Process files if any processed_files = [] + all_text = [] + + # 
Add notes if provided + if notes.strip(): + all_text.append(notes) + + # Process each file for file_url in files: local_file = download_file(file_url) if local_file: processed_files.append(local_file) + text = extract_text_from_pdf(local_file) + if text: + all_text.append(text) - # TODO: Process the content and generate learning plan - # For now, return a mock response - response = [{ - 'learning_plan': f"Generated learning plan from {len(processed_files)} files and notes: {notes[:100]}..." - }] + # If no content was processed, return error + if not all_text: + return jsonify({ + 'error': 'No content could be processed' + }), 400 - return jsonify(response) + # Combine all text and process with Gemini + combined_text = "\n\n".join(all_text) + processed_content = process_with_gemini(combined_text) + + if not processed_content: + return jsonify({ + 'error': 'Failed to process content with AI' + }), 500 + + # Return the processed content + return jsonify({ + 'response': processed_content, + 'status': 'success' + }) except Exception as e: print(f"Error processing content: {e}") diff --git a/backend/Requirements.txt b/backend/requirements.txt similarity index 100% rename from backend/Requirements.txt rename to backend/requirements.txt diff --git a/package-lock.json b/package-lock.json index 2373768..145f00f 100644 --- a/package-lock.json +++ b/package-lock.json @@ -440,6 +440,111 @@ "node": ">= 10" } }, + "node_modules/@next/swc-darwin-x64": { + "version": "15.1.7", + "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-15.1.7.tgz", + "integrity": "sha512-2qoas+fO3OQKkU0PBUfwTiw/EYpN+kdAx62cePRyY1LqKtP09Vp5UcUntfZYajop5fDFTjSxCHfZVRxzi+9FYQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-arm64-gnu": { + "version": "15.1.7", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-15.1.7.tgz", + "integrity": "sha512-sKLLwDX709mPdzxMnRIXLIT9zaX2w0GUlkLYQnKGoXeWUhcvpCrK+yevcwCJPdTdxZEUA0mOXGLdPsGkudGdnA==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-arm64-musl": { + "version": "15.1.7", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-15.1.7.tgz", + "integrity": "sha512-zblK1OQbQWdC8fxdX4fpsHDw+VSpBPGEUX4PhSE9hkaWPrWoeIJn+baX53vbsbDRaDKd7bBNcXRovY1hEhFd7w==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-x64-gnu": { + "version": "15.1.7", + "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-15.1.7.tgz", + "integrity": "sha512-GOzXutxuLvLHFDAPsMP2zDBMl1vfUHHpdNpFGhxu90jEzH6nNIgmtw/s1MDwpTOiM+MT5V8+I1hmVFeAUhkbgQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-x64-musl": { + "version": "15.1.7", + "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-15.1.7.tgz", + "integrity": "sha512-WrZ7jBhR7ATW1z5iEQ0ZJfE2twCNSXbpCSaAunF3BKcVeHFADSI/AW1y5Xt3DzTqPF1FzQlwQTewqetAABhZRQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-win32-arm64-msvc": { + "version": "15.1.7", + "resolved": 
"https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-15.1.7.tgz", + "integrity": "sha512-LDnj1f3OVbou1BqvvXVqouJZKcwq++mV2F+oFHptToZtScIEnhNRJAhJzqAtTE2dB31qDYL45xJwrc+bLeKM2Q==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-win32-x64-msvc": { + "version": "15.1.7", + "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-15.1.7.tgz", + "integrity": "sha512-dC01f1quuf97viOfW05/K8XYv2iuBgAxJZl7mbCKEjMgdQl5JjAKJ0D2qMKZCgPWDeFbFT0Q0nYWwytEW0DWTQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -6874,111 +6979,6 @@ "type": "github", "url": "https://github.com/sponsors/wooorm" } - }, - "node_modules/@next/swc-darwin-x64": { - "version": "15.1.7", - "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-15.1.7.tgz", - "integrity": "sha512-2qoas+fO3OQKkU0PBUfwTiw/EYpN+kdAx62cePRyY1LqKtP09Vp5UcUntfZYajop5fDFTjSxCHfZVRxzi+9FYQ==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-linux-arm64-gnu": { - "version": "15.1.7", - "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-15.1.7.tgz", - "integrity": "sha512-sKLLwDX709mPdzxMnRIXLIT9zaX2w0GUlkLYQnKGoXeWUhcvpCrK+yevcwCJPdTdxZEUA0mOXGLdPsGkudGdnA==", - "cpu": [ - "arm64" - ], - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-linux-arm64-musl": { - "version": "15.1.7", - "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-15.1.7.tgz", - "integrity": "sha512-zblK1OQbQWdC8fxdX4fpsHDw+VSpBPGEUX4PhSE9hkaWPrWoeIJn+baX53vbsbDRaDKd7bBNcXRovY1hEhFd7w==", - "cpu": [ - "arm64" - ], - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-linux-x64-gnu": { - "version": "15.1.7", - "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-15.1.7.tgz", - "integrity": "sha512-GOzXutxuLvLHFDAPsMP2zDBMl1vfUHHpdNpFGhxu90jEzH6nNIgmtw/s1MDwpTOiM+MT5V8+I1hmVFeAUhkbgQ==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-linux-x64-musl": { - "version": "15.1.7", - "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-15.1.7.tgz", - "integrity": "sha512-WrZ7jBhR7ATW1z5iEQ0ZJfE2twCNSXbpCSaAunF3BKcVeHFADSI/AW1y5Xt3DzTqPF1FzQlwQTewqetAABhZRQ==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-win32-arm64-msvc": { - "version": "15.1.7", - "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-15.1.7.tgz", - "integrity": "sha512-LDnj1f3OVbou1BqvvXVqouJZKcwq++mV2F+oFHptToZtScIEnhNRJAhJzqAtTE2dB31qDYL45xJwrc+bLeKM2Q==", - "cpu": [ - "arm64" - ], - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-win32-x64-msvc": { - "version": "15.1.7", - "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-15.1.7.tgz", - "integrity": 
"sha512-dC01f1quuf97viOfW05/K8XYv2iuBgAxJZl7mbCKEjMgdQl5JjAKJ0D2qMKZCgPWDeFbFT0Q0nYWwytEW0DWTQ==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 10" - } } } } From ca5ca1f082ae986be4efb58c4247c91e833a224d Mon Sep 17 00:00:00 2001 From: sid <35936587+siddharth-shringarpure@users.noreply.github.com> Date: Sun, 23 Feb 2025 06:00:18 +0000 Subject: [PATCH 2/3] =?UTF-8?q?=F0=9F=92=AC=20Update=20frontend?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/learn/page.tsx | 113 +++++++++++++++++++++++++++++++++++++++------ app/page.tsx | 2 +- backend/app.py | 35 ++++++++++++-- 3 files changed, 133 insertions(+), 17 deletions(-) diff --git a/app/learn/page.tsx b/app/learn/page.tsx index 2ef112b..5bf81ff 100644 --- a/app/learn/page.tsx +++ b/app/learn/page.tsx @@ -50,9 +50,30 @@ export default function UploadModule() { { name: string; url: string }[] >([]); + const validateFiles = (files: File[]): { validFiles: File[], invalidFiles: File[] } => { + return files.reduce((acc, file) => { + if (file.type === 'application/pdf' || file.name.toLowerCase().endsWith('.pdf')) { + acc.validFiles.push(file); + } else { + acc.invalidFiles.push(file); + } + return acc; + }, { validFiles: [] as File[], invalidFiles: [] as File[] }); + }; + const onDragOver = (e: React.DragEvent) => { e.preventDefault(); - setIsDragging(true); + // Only show the drag effect if at least one file is a PDF + const hasValidFile = Array.from(e.dataTransfer.items).some(item => + item.type === 'application/pdf' || + (item.kind === 'file' && item.type.includes('pdf')) + ); + setIsDragging(hasValidFile); + + // Add visual feedback for invalid files + if (!hasValidFile) { + e.dataTransfer.dropEffect = 'none'; + } }; const onDragLeave = (e: React.DragEvent) => { @@ -64,22 +85,92 @@ export default function UploadModule() { e.preventDefault(); setIsDragging(false); const files = Array.from(e.dataTransfer.files); - await handleFiles(files); + + const { validFiles, invalidFiles } = validateFiles(files); + + if (invalidFiles.length > 0) { + toast.error(`${invalidFiles.length} file(s) were rejected. 
Only PDF files are allowed.`);
+    }
+
+    if (validFiles.length > 0) {
+      await handleFiles(validFiles);
+    }
   };
 
   const onFileSelect = async (e: React.ChangeEvent) => {
-    if (e.target.files) {
-      const files = Array.from(e.target.files);
-      await handleFiles(files);
+    const files = Array.from(e.target.files || []);
+
+    // Validate file types - only allow PDFs
+    const invalidFiles = files.filter(file =>
+      file.type !== 'application/pdf' &&
+      !file.name.toLowerCase().endsWith('.pdf')
+    );
+
+    if (invalidFiles.length > 0) {
+      toast.error('Only PDF files are allowed');
+      e.target.value = ''; // Clear the file input
+      return;
+    }
+
+    setUploading(true);
+    setProgress(0);
+
+    try {
+      const uploadedFiles = await Promise.all(
+        files.map(async (file) => {
+          const formData = new FormData();
+          formData.append("file", file);
+
+          const response = await fetch("/api/upload", {
+            method: "POST",
+            body: formData,
+          });
+
+          if (!response.ok) throw new Error("Upload failed");
+
+          const data = await response.json();
+          return {
+            name: file.name,
+            url: data.url,
+            type: file.type,
+          };
+        })
+      );
+
+      setUploadedFiles((prev) => [...prev, ...uploadedFiles]);
+      toast.success("Files uploaded successfully");
+    }
+
+    catch (error) {
+      console.error("Upload error:", error);
+      toast.error("Error uploading files");
+    }
+
+    finally {
+      setUploading(false);
+      setProgress(0);
+      e.target.value = ''; // Clear the file input
     }
   };
 
   const handleFiles = async (files: File[]) => {
+    // Double-check validation before uploading
+    const { validFiles, invalidFiles } = validateFiles(files);
+
+    if (invalidFiles.length > 0) {
+      toast.error('Only PDF files are allowed');
+      return;
+    }
+
+    if (validFiles.length === 0) {
+      return;
+    }
+
     setUploading(true);
     setProgress(0);
     const uploadedData: { name: string; url: string }[] = [];
-    for (const file of files) {
+    for (const file of validFiles) {
       try {
         const { cdnUrl } = await client.uploadFile(file);
         uploadedData.push({ name: file.name, url: cdnUrl });
@@ -274,15 +365,11 @@ export default function UploadModule() {
-                      PDFs
+                      Lecture Slides
-                      Presentations
@@ -290,7 +377,7 @@ export default function UploadModule() {
                       Drop Any Content
-                      PDFs, slides, videos, notes - we'll make them interactive.
+                      Lecture slides, research papers, notes - we'll make them interactive.
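For reference, the /process-content round trip that this frontend copy sits on top of can be exercised directly. The endpoint path, request fields, and response shapes come from the backend hunks in this series, and the local server URL from the frontend fetch call; the sample file URL is a placeholder, not a real upload.

# Rough manual test of the /process-content contract (sketch, not part of the patch).
import requests

payload = {
    "notes": "Photosynthesis summary from today's lecture",   # optional free-text notes
    "files": ["https://ucarecdn.com/example-id/lecture.pdf"],  # placeholder Uploadcare-style PDF URL
}

resp = requests.post("http://127.0.0.1:5000/process-content", json=payload, timeout=120)
data = resp.json()

if resp.ok:
    # Success shape per the backend: {"response": <generated content>, "status": "success"}
    print(data["status"], data["response"][:200])
else:
    # Failure shape: {"error": "..."} with a 400 or 500 status code
    print(resp.status_code, data.get("error"))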

diff --git a/backend/app.py b/backend/app.py index 4c506ab..e4b5a97 100644 --- a/backend/app.py +++ b/backend/app.py @@ -173,6 +173,23 @@ def process_text2speech(): return send_file(wav_file, mimetype='audio/wav', as_attachment=False) +def is_valid_pdf(file_url): + """Check if the file is a valid PDF.""" + try: + # Check file extension + if not file_url.lower().endswith('.pdf'): + return False + + # Download and verify file content + response = requests.get(file_url, stream=True) + response.raise_for_status() + + # Check the magic numbers for PDF + magic_numbers = response.raw.read(4) + return magic_numbers.startswith(b'%PDF') + except: + return False + @app.route('/process-content', methods=['POST']) def process_content(): """Process uploaded content.""" @@ -191,12 +208,24 @@ def process_content(): # Process each file for file_url in files: + # Validate PDF + if not is_valid_pdf(file_url): + return jsonify({ + 'error': f'Invalid or unsupported file format. Only PDF files are allowed.' + }), 400 + local_file = download_file(file_url) if local_file: processed_files.append(local_file) - text = extract_text_from_pdf(local_file) - if text: - all_text.append(text) + try: + text = extract_text_from_pdf(local_file) + if text: + all_text.append(text) + except Exception as e: + print(f"Error extracting text from PDF: {e}") + return jsonify({ + 'error': 'Could not extract text from PDF. Please ensure it is a valid PDF file with extractable text.' + }), 400 # If no content was processed, return error if not all_text: From 42b6f112c2a16834230345ce96507698915d4d4f Mon Sep 17 00:00:00 2001 From: sid <35936587+siddharth-shringarpure@users.noreply.github.com> Date: Sun, 23 Feb 2025 06:15:59 +0000 Subject: [PATCH 3/3] =?UTF-8?q?=F0=9F=92=A1=20Debug=20help?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/learn/page.tsx | 21 ++++++++++++++++----- backend/app.py | 30 ++++++++++++++++++++++++------ 2 files changed, 40 insertions(+), 11 deletions(-) diff --git a/app/learn/page.tsx b/app/learn/page.tsx index 5bf81ff..3e76d4f 100644 --- a/app/learn/page.tsx +++ b/app/learn/page.tsx @@ -251,6 +251,9 @@ export default function UploadModule() { files: uploadedFiles.map((file) => file.url), }; + // Debug log + console.log("Sending payload:", payload); + const response = await fetch("http://127.0.0.1:5000/process-content", { method: "POST", headers: { @@ -259,9 +262,17 @@ export default function UploadModule() { body: JSON.stringify(payload), }); - if (!response.ok) throw new Error("Failed to process content"); - const data = await response.json(); + // Debug log + console.log("Response data:", data); + + if (!response.ok) { + // Get the error message from the backend response + const errorMessage = data.error || "Failed to process content"; + console.error("Backend error:", errorMessage); // Debug log + throw new Error(errorMessage); + } + console.log("Processed Data:", data); localStorage.setItem("chatResponse", JSON.stringify(data)); @@ -272,8 +283,8 @@ export default function UploadModule() { setNotes(""); router.push("/learn/chat"); } catch (error) { - console.error(error); - toast.error("There was an error processing your content"); + console.error("Error details:", error); // Debug log + toast.error(error instanceof Error ? error.message : "There was an error processing your content"); } setUploading(false); @@ -357,7 +368,7 @@ export default function UploadModule() { Drop your files here

-                    or click to select files
+                    or click to select PDFs

diff --git a/backend/app.py b/backend/app.py index e4b5a97..65e587d 100644 --- a/backend/app.py +++ b/backend/app.py @@ -176,18 +176,24 @@ def process_text2speech(): def is_valid_pdf(file_url): """Check if the file is a valid PDF.""" try: - # Check file extension - if not file_url.lower().endswith('.pdf'): - return False + # For Uploadcare URLs, we can trust the file extension + if 'ucarecdn.com' in file_url: + return True - # Download and verify file content + # For other URLs, check the content response = requests.get(file_url, stream=True) response.raise_for_status() - # Check the magic numbers for PDF + # Check content type header first + content_type = response.headers.get('content-type', '').lower() + if 'application/pdf' in content_type: + return True + + # If no content type header, check magic numbers magic_numbers = response.raw.read(4) return magic_numbers.startswith(b'%PDF') - except: + except Exception as e: + print(f"Error validating PDF: {e}") return False @app.route('/process-content', methods=['POST']) @@ -195,6 +201,11 @@ def process_content(): """Process uploaded content.""" try: data = request.json + if not data: + return jsonify({'error': 'No data provided'}), 400 + + print("Received data:", data) # Debug log + notes = data.get('notes', '') files = data.get('files', []) @@ -208,8 +219,15 @@ def process_content(): # Process each file for file_url in files: + print(f"Processing file URL: {file_url}") # Debug log + + # Skip empty URLs + if not file_url: + continue + # Validate PDF if not is_valid_pdf(file_url): + print(f"Invalid PDF URL: {file_url}") # Debug log return jsonify({ 'error': f'Invalid or unsupported file format. Only PDF files are allowed.' }), 400
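The backend hunks above call download_file, extract_text_from_pdf and process_with_gemini, which live elsewhere in backend/app.py and are not shown in this series. A minimal sketch of what they are assumed to do follows; only the function names and call sites come from the diffs, while pypdf, google-generativeai, and the model name are assumptions about the actual implementation.

# Hypothetical stand-ins for the helpers referenced by /process-content (sketch only).
import os
import tempfile

import requests
from pypdf import PdfReader
import google.generativeai as genai

genai.configure(api_key=os.environ["GEMINI_API_KEY"])  # assumed configuration


def download_file(file_url):
    """Download a remote file to a temporary path; return None on failure."""
    try:
        response = requests.get(file_url, timeout=30)
        response.raise_for_status()
        fd, path = tempfile.mkstemp(suffix=".pdf")
        with os.fdopen(fd, "wb") as f:
            f.write(response.content)
        return path
    except Exception as e:
        print(f"Error downloading file: {e}")
        return None


def extract_text_from_pdf(path):
    """Concatenate the extractable text of every page; empty string if none."""
    reader = PdfReader(path)
    return "\n".join(page.extract_text() or "" for page in reader.pages)


def process_with_gemini(text):
    """Turn raw notes/slide text into study material via Gemini."""
    model = genai.GenerativeModel("gemini-1.5-flash")  # model choice is an assumption
    prompt = (
        "Turn the following course material into a structured explanation "
        "a student can learn from:\n\n" + text
    )
    result = model.generate_content(prompt)
    return result.text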