diff --git a/backend/backend/__init__.py b/backend/backend/__init__.py index e69de29bb..5b3a4dc85 100644 --- a/backend/backend/__init__.py +++ b/backend/backend/__init__.py @@ -0,0 +1,2 @@ +from .celery import app as celery_app +__all__ = ('celery_app',) diff --git a/backend/backend/celery.py b/backend/backend/celery.py new file mode 100644 index 000000000..7182019cd --- /dev/null +++ b/backend/backend/celery.py @@ -0,0 +1,8 @@ +import os +from celery import Celery + +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings') + +app = Celery('backend') +app.config_from_object('django.conf:settings', namespace='CELERY') +app.autodiscover_tasks() diff --git a/backend/backend/settings.py b/backend/backend/settings.py index 9de4f024a..c1d5a51fc 100644 --- a/backend/backend/settings.py +++ b/backend/backend/settings.py @@ -12,7 +12,6 @@ import os from pathlib import Path - from dotenv import load_dotenv load_dotenv() @@ -47,8 +46,23 @@ "authentication", "chat", "gpt", + "django_crontab", + "django_celery_beat", +] + +# Crontab Configuration +CRONJOBS = [ + ('0 0 * * *', 'django.core.management.call_command', ['cleanup_conversations']), ] +INSTALLED_APPS += ['rest_framework.authtoken'] + +REST_FRAMEWORK = { + 'DEFAULT_AUTHENTICATION_CLASSES': [ + 'rest_framework.authentication.TokenAuthentication', + ], +} + MIDDLEWARE = [ "django.middleware.security.SecurityMiddleware", "django.contrib.sessions.middleware.SessionMiddleware", @@ -84,10 +98,15 @@ # Database # https://docs.djangoproject.com/en/4.2/ref/settings/#databases + DATABASES = { - "default": { - "ENGINE": "django.db.backends.sqlite3", - "NAME": BASE_DIR / "db.sqlite3", + 'default': { + 'ENGINE': 'django.db.backends.postgresql', + 'NAME': 'DjangoDB', + 'USER': 'postgres', + 'PASSWORD': 'vyshu@123', + 'HOST': 'localhost', + 'PORT': '5432', } } @@ -131,6 +150,7 @@ STATIC_ROOT = BASE_DIR / "static" STATIC_URL = "/static/" +OPENAI_API_KEY = os.environ.get('OPENAI_API_KEY', '') # Default primary key field type # 
https://docs.djangoproject.com/en/4.2/ref/settings/#default-auto-field @@ -149,3 +169,14 @@ SESSION_COOKIE_SECURE = True CSRF_COOKIE_SECURE = True CSRF_COOKIE_SAMESITE = "None" +# Media files configuration +MEDIA_URL = '/media/' +MEDIA_ROOT = os.path.join(BASE_DIR, 'media') + +# File upload settings +FILE_UPLOAD_MAX_MEMORY_SIZE = 10485760 # 10MB +DATA_UPLOAD_MAX_MEMORY_SIZE = 10485760 # 10MB + +CELERY_BROKER_URL = 'redis://localhost:6379/0' +CELERY_ACCEPT_CONTENT = ['json'] +CELERY_TASK_SERIALIZER = 'json' diff --git a/backend/chat/admin.py b/backend/chat/admin.py index a4e7d15fc..5a9731f82 100644 --- a/backend/chat/admin.py +++ b/backend/chat/admin.py @@ -2,8 +2,8 @@ from django.utils import timezone from nested_admin.nested import NestedModelAdmin, NestedStackedInline, NestedTabularInline -from chat.models import Conversation, Message, Role, Version - +from chat.models import Conversation, Message, Role, Version,UploadedFile +from django.utils.html import format_html class RoleAdmin(NestedModelAdmin): list_display = ["id", "name"] @@ -51,9 +51,20 @@ def queryset(self, request, queryset): class ConversationAdmin(NestedModelAdmin): actions = ["undelete_selected", "soft_delete_selected"] inlines = [VersionInline] - list_display = ("title", "id", "created_at", "modified_at", "deleted_at", "version_count", "is_deleted", "user") + list_display = ("title", "id", "created_at", "modified_at","short_summary", "deleted_at", "version_count", "is_deleted", "user") list_filter = (DeletedListFilter,) ordering = ("-modified_at",) + readonly_fields = ("summary",) # prevent manual editing + fields = ( + "title", + "summary", # SHOW SUMMARY + "user", + "deleted_at", + ) + def short_summary(self, obj): + return (obj.summary[:50] + "...") if obj.summary else "— No summary generated " + short_summary.short_description = "Summary" + def undelete_selected(self, request, queryset): queryset.update(deleted_at=None) @@ -86,7 +97,50 @@ class VersionAdmin(NestedModelAdmin): list_display = 
("id", "conversation", "parent_version", "root_message") + +# NEW: UploadedFile Admin +class UploadedFileAdmin(admin.ModelAdmin): + list_display = ('original_filename', 'user', 'file_size_display', 'file_type', 'uploaded_at', 'file_link') + list_filter = ('file_type', 'uploaded_at') + search_fields = ('original_filename', 'user__username', 'user__email', 'file_hash') + readonly_fields = ('id', 'file_hash', 'file_size', 'file_type', 'uploaded_at', 'file_preview') + + fieldsets = ( + ('File Information', { + 'fields': ('id', 'file', 'file_preview', 'original_filename', 'file_type') + }), + ('Metadata', { + 'fields': ('file_size', 'file_hash', 'uploaded_at', 'user') + }), + ) + + @admin.display(description="File Size") + def file_size_display(self, obj): + """Display file size in human-readable format""" + size = obj.file_size + for unit in ['B', 'KB', 'MB', 'GB']: + if size < 1024.0: + return f"{size:.2f} {unit}" + size /= 1024.0 + return f"{size:.2f} TB" + + @admin.display(description="File Link") + def file_link(self, obj): + """Display download link for file""" + if obj.file: + return format_html('<a href="{}" target="_blank">Download</a>', obj.file.url) + return "No file" + + @admin.display(description="Preview") + def file_preview(self, obj): + """Display image preview if file is an image""" + if obj.file and obj.file_type.startswith('image/'): + return format_html('<img src="{}" style="max-height: 200px; max-width: 200px;" />', obj.file.url) + return "No preview available" + + admin.site.register(Role, RoleAdmin) admin.site.register(Message, MessageAdmin) admin.site.register(Conversation, ConversationAdmin) admin.site.register(Version, VersionAdmin) +admin.site.register(UploadedFile, UploadedFileAdmin) diff --git a/backend/chat/management/commands/cleanup_conversations.py b/backend/chat/management/commands/cleanup_conversations.py new file mode 100644 index 000000000..dd0b84d22 --- /dev/null +++ b/backend/chat/management/commands/cleanup_conversations.py @@ -0,0 +1,23 @@ +from django.core.management.base import BaseCommand +from django.utils import 
timezone +from datetime import timedelta + +from chat.models import Conversation + +class Command(BaseCommand): + help = "Delete conversations older than 30 days" + + def handle(self, *args, **kwargs): + cutoff_date = timezone.now() - timedelta(days=30) + old_conversations = Conversation.objects.filter( + created_at__lt=cutoff_date + ) + + count = old_conversations.count() + old_conversations.delete() + + self.stdout.write( + self.style.SUCCESS( + f"Deleted {count} old conversations" + ) + ) diff --git a/backend/chat/migrations/0002_conversation_summary.py b/backend/chat/migrations/0002_conversation_summary.py new file mode 100644 index 000000000..fc0eeb660 --- /dev/null +++ b/backend/chat/migrations/0002_conversation_summary.py @@ -0,0 +1,18 @@ +# Generated by Django 6.0.1 on 2026-01-30 16:04 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('chat', '0001_initial'), + ] + + operations = [ + migrations.AddField( + model_name='conversation', + name='summary', + field=models.TextField(blank=True, help_text='Auto-generated conversation summary', null=True), + ), + ] diff --git a/backend/chat/migrations/0003_alter_conversation_summary.py b/backend/chat/migrations/0003_alter_conversation_summary.py new file mode 100644 index 000000000..0c984f15f --- /dev/null +++ b/backend/chat/migrations/0003_alter_conversation_summary.py @@ -0,0 +1,18 @@ +# Generated by Django 6.0.1 on 2026-01-30 18:33 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('chat', '0002_conversation_summary'), + ] + + operations = [ + migrations.AlterField( + model_name='conversation', + name='summary', + field=models.TextField(blank=True, null=True), + ), + ] diff --git a/backend/chat/migrations/0004_uploadedfile.py b/backend/chat/migrations/0004_uploadedfile.py new file mode 100644 index 000000000..a01a5b2ca --- /dev/null +++ b/backend/chat/migrations/0004_uploadedfile.py @@ -0,0 
+1,34 @@ +# Generated by Django 6.0.1 on 2026-01-30 20:02 + +import django.db.models.deletion +import uuid +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('chat', '0003_alter_conversation_summary'), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.CreateModel( + name='UploadedFile', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('file', models.FileField(upload_to='uploads/%Y/%m/%d/')), + ('original_filename', models.CharField(max_length=255)), + ('file_size', models.BigIntegerField(help_text='File size in bytes')), + ('file_type', models.CharField(max_length=100)), + ('file_hash', models.CharField(help_text='SHA-256 hash for duplicate detection', max_length=64, unique=True)), + ('uploaded_at', models.DateTimeField(auto_now_add=True)), + ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='uploaded_files', to=settings.AUTH_USER_MODEL)), + ], + options={ + 'ordering': ['-uploaded_at'], + 'indexes': [models.Index(fields=['file_hash'], name='chat_upload_file_ha_43c6d5_idx'), models.Index(fields=['user', '-uploaded_at'], name='chat_upload_user_id_618957_idx')], + }, + ), + ] diff --git a/backend/chat/models.py b/backend/chat/models.py index 242788f14..4fbc7dde3 100644 --- a/backend/chat/models.py +++ b/backend/chat/models.py @@ -1,8 +1,10 @@ import uuid - +import os from django.db import models - +import hashlib +import mimetypes from authentication.models import CustomUser +from django.core.exceptions import ValidationError class Role(models.Model): @@ -15,6 +17,7 @@ def __str__(self): class Conversation(models.Model): id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) title = models.CharField(max_length=100, blank=False, null=False, default="Mock title") + summary = models.TextField(blank=True, null=True) 
# Added NEW FIELD SUMMARY created_at = models.DateTimeField(auto_now_add=True) modified_at = models.DateTimeField(auto_now=True) active_version = models.ForeignKey( @@ -58,8 +61,137 @@ class Meta: ordering = ["created_at"] def save(self, *args, **kwargs): - self.version.conversation.save() + print(" Message.save() called") + + # Save the message first super().save(*args, **kwargs) + conversation = self.version.conversation + print("Conversation ID:", conversation.id) + + # Ensure active_version is set + if not conversation.active_version: + conversation.active_version = self.version + print(" active_version set") + + # Get all messages from the active version + messages = conversation.active_version.messages.all() + message_texts = [f"{m.role.name}: {m.content}" for m in messages] + + print("Messages count:", len(message_texts)) + + # Generate summary if there are messages + if message_texts: + try: + from chat.utils.openai_summary import generate_conversation_summary + summary = generate_conversation_summary(message_texts) + print(" Summary generated:", summary) + + conversation.summary = summary + conversation.save(update_fields=["summary", "active_version"]) + print(" Summary saved") + + except Exception as e: + print(" OpenAI error:", e) + else: + print(" No messages found") + def __str__(self): return f"{self.role}: {self.content[:20]}..." 
+ +# NEW MODEL FOR FILE UPLOADS +class UploadedFile(models.Model): + id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) + user = models.ForeignKey(CustomUser, on_delete=models.CASCADE, related_name="uploaded_files") + file = models.FileField(upload_to='uploads/%Y/%m/%d/') + original_filename = models.CharField(max_length=255) + file_size = models.BigIntegerField(help_text="File size in bytes") + file_type = models.CharField(max_length=100) + file_hash = models.CharField(max_length=64, unique=True, help_text="SHA-256 hash for duplicate detection") + uploaded_at = models.DateTimeField(auto_now_add=True) + + class Meta: + ordering = ['-uploaded_at'] + indexes = [ + models.Index(fields=['file_hash']), + models.Index(fields=['user', '-uploaded_at']), + ] + + def __str__(self): + return f"{self.original_filename} - {self.user.get_username()}" + + # @staticmethod + # def calculate_file_hash(file): + # """Calculate SHA-256 hash of file content""" + # sha256_hash = hashlib.sha256() + + # # Reset file pointer to beginning + # file.seek(0) + + # # Read file in chunks to handle large files + # for chunk in file.chunks(): + # sha256_hash.update(chunk) + + # # Reset file pointer again + # file.seek(0) + + # return sha256_hash.hexdigest() + + # def save(self, *args, **kwargs): + # """Override save to calculate file hash before saving""" + # if not self.file_hash and self.file: + # self.file_hash = self.calculate_file_hash(self.file) + + # if not self.file_size and self.file: + # self.file_size = self.file.size + + # if not self.file_type and self.file: + # mime_type, _ = mimetypes.guess_type(self.file.name) + # self.file_type = mime_type or "application/octet-stream" + + # if not self.original_filename and self.file: + # self.original_filename = self.file.name + + # super().save(*args, **kwargs) + + + @staticmethod + def calculate_file_hash(file): + sha256_hash = hashlib.sha256() + file.seek(0) + for chunk in file.chunks(): + sha256_hash.update(chunk) + 
file.seek(0) + return sha256_hash.hexdigest() + + def clean(self): + if self.file: + file_hash = self.calculate_file_hash(self.file) + if UploadedFile.objects.filter(file_hash=file_hash).exists(): + raise ValidationError( + {"file": "This file already exists (duplicate upload)."} + ) + + def save(self, *args, **kwargs): + if self.file and not self.file_hash: + self.file_hash = self.calculate_file_hash(self.file) + + if self.file and not self.file_size: + self.file_size = self.file.size + + if self.file and not self.file_type: + mime_type, _ = mimetypes.guess_type(self.file.name) + self.file_type = mime_type or "application/octet-stream" + + if self.file and not self.original_filename: + self.original_filename = self.file.name + + super().save(*args, **kwargs) + + def delete(self, *args, **kwargs): + """Override delete to also delete the physical file""" + # Delete the physical file + if self.file and os.path.isfile(self.file.path): + os.remove(self.file.path) + + super().delete(*args, **kwargs) diff --git a/backend/chat/serializers.py b/backend/chat/serializers.py index 0c721c061..cd8f54d19 100644 --- a/backend/chat/serializers.py +++ b/backend/chat/serializers.py @@ -1,8 +1,8 @@ from django.core.exceptions import ValidationError from django.utils import timezone from rest_framework import serializers - -from chat.models import Conversation, Message, Role, Version +from rest_framework.pagination import PageNumberPagination +from chat.models import Conversation, Message, Role, Version,UploadedFile def should_serialize(validated_data, field_name) -> bool: @@ -150,3 +150,94 @@ def update(self, instance, validated_data): version_serializer.save(conversation=instance) return instance + + +class ConversationSummarySerializer(serializers.ModelSerializer): + """Serializer specifically for conversation summaries endpoint""" + version_count = serializers.IntegerField(read_only=True) + message_count = serializers.SerializerMethodField() + + class Meta: + model = Conversation 
+ fields = ['id', 'title', 'summary', 'created_at', 'modified_at', 'version_count', 'message_count', 'user'] + read_only_fields = ['id', 'created_at', 'modified_at', 'summary'] + + def get_message_count(self, obj): + """Get total message count in active version""" + if obj.active_version: + return obj.active_version.messages.count() + return 0 + +class TitleSerializer(serializers.Serializer): + title = serializers.CharField(max_length=100, required=True) + + +class UploadedFileSerializer(serializers.ModelSerializer): + file_url = serializers.SerializerMethodField() + uploaded_by = serializers.CharField(source='user.username', read_only=True) + + class Meta: + model = UploadedFile + fields = [ + 'id', + 'file', + 'file_url', + 'original_filename', + 'file_size', + 'file_type', + 'file_hash', + 'uploaded_at', + 'uploaded_by', + 'user' + ] + read_only_fields = ['id', 'file_hash', 'file_size', 'file_type', 'uploaded_at', 'file_url', 'uploaded_by'] + + def get_file_url(self, obj): + """Get the full URL of the uploaded file""" + request = self.context.get('request') + if obj.file and request: + return request.build_absolute_uri(obj.file.url) + return None + + def validate_file(self, value): + """Validate file upload""" + # Check file size (max 10MB) + max_size = 10 * 1024 * 1024 # 10MB + if value.size > max_size: + raise serializers.ValidationError(f"File size exceeds maximum allowed size of {max_size / (1024*1024)}MB") + + return value + + def create(self, validated_data): + """Override create to handle duplicate file check""" + file = validated_data.get('file') + user = validated_data.get('user') + + # Calculate file hash + file_hash = UploadedFile.calculate_file_hash(file) + + # Check if file with same hash already exists for this user + existing_file = UploadedFile.objects.filter(file_hash=file_hash, user=user).first() + + if existing_file: + raise serializers.ValidationError({ + 'file': 'This file has already been uploaded.', + 'existing_file_id': 
str(existing_file.id), + 'existing_file_name': existing_file.original_filename, + 'uploaded_at': existing_file.uploaded_at + }) + + # Set file metadata + validated_data['file_hash'] = file_hash + validated_data['file_size'] = file.size + validated_data['file_type'] = file.content_type or 'application/octet-stream' + validated_data['original_filename'] = file.name + + return super().create(validated_data) + + +# Pagination class for summaries +class SummaryPagination(PageNumberPagination): + page_size = 10 + page_size_query_param = 'page_size' + max_page_size = 100 \ No newline at end of file diff --git a/backend/chat/signals.py b/backend/chat/signals.py new file mode 100644 index 000000000..4f496b33c --- /dev/null +++ b/backend/chat/signals.py @@ -0,0 +1,32 @@ +from django.db.models.signals import post_save +from django.dispatch import receiver +from .models import Message + + +def generate_summary(text, max_length=200): + if not text: + return "" + return text[:max_length] + ("..." if len(text) > max_length else "") + + +@receiver(post_save, sender=Message) +def create_conversation_summary(sender, instance, created, **kwargs): + conversation = instance.version.conversation + + # Do nothing if summary already exists + if conversation.summary: + return + + # Get messages of this conversation (ordered by created_at) + messages = Message.objects.filter( + version__conversation=conversation + ).order_by("created_at") + + if not messages.exists(): + return + + # Combine message contents (you can limit to first N messages) + combined_text = " ".join(m.content for m in messages[:3]) + + conversation.summary = generate_summary(combined_text) + conversation.save(update_fields=["summary"]) diff --git a/backend/chat/tasks.py b/backend/chat/tasks.py new file mode 100644 index 000000000..1712ce609 --- /dev/null +++ b/backend/chat/tasks.py @@ -0,0 +1,6 @@ +from celery import shared_task +from django.core.management import call_command + +@shared_task +def 
cleanup_conversations_task(): + call_command('cleanup_conversations') diff --git a/backend/chat/urls.py b/backend/chat/urls.py index bd8ceadc0..9ed130e38 100644 --- a/backend/chat/urls.py +++ b/backend/chat/urls.py @@ -1,4 +1,4 @@ -from django.urls import path +from django.urls import path,include from chat import views @@ -19,4 +19,16 @@ ), path("conversations//delete/", views.conversation_soft_delete, name="conversation_delete"), path("versions//add_message/", views.version_add_message, name="version_add_message"), + # NEW: Task 3 endpoints + # Endpoint 8: Conversation summaries with pagination and filtering + path("summaries/", views.get_conversation_summaries, name="get_conversation_summaries"), + # Endpoint 9: File upload with duplicate detection + path("files/upload/", views.upload_file, name="upload_file"), + # Endpoint 10: List uploaded files with metadata + path("files/", views.list_uploaded_files, name="list_uploaded_files"), + # Get specific file details + path("files//", views.get_uploaded_file, name="get_uploaded_file"), + # Endpoint 11: Delete uploaded file + path("files//delete/", views.delete_uploaded_file, name="delete_uploaded_file"), + ] diff --git a/backend/chat/utils/openai_summary.py b/backend/chat/utils/openai_summary.py new file mode 100644 index 000000000..a061542c8 --- /dev/null +++ b/backend/chat/utils/openai_summary.py @@ -0,0 +1,28 @@ +import os +from openai import OpenAI + +client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY")) + + +def generate_conversation_summary(messages: list[str]) -> str: + """ + Generates a short summary from conversation messages + """ + if not messages: + return "" + + prompt = ( + "Summarize the following conversation briefly:\n\n" + + "\n".join(messages) + ) + + response = client.chat.completions.create( + model="gpt-4o-mini", + messages=[ + {"role": "system", "content": "You are a helpful assistant."}, + {"role": "user", "content": prompt}, + ], + max_tokens=150, + ) + print("response ======= ",response) 
+ return response.choices[0].message.content.strip() diff --git a/backend/chat/views.py b/backend/chat/views.py index 0d18f7a69..f6c26c98b 100644 --- a/backend/chat/views.py +++ b/backend/chat/views.py @@ -3,10 +3,12 @@ from rest_framework import status from rest_framework.decorators import api_view from rest_framework.response import Response - -from chat.models import Conversation, Message, Version -from chat.serializers import ConversationSerializer, MessageSerializer, TitleSerializer, VersionSerializer +from django.db.models import Count,Q +from chat.models import Conversation, Message, Version , UploadedFile +from chat.serializers import ConversationSerializer, MessageSerializer, TitleSerializer, VersionSerializer,ConversationSummarySerializer,UploadedFileSerializer,SummaryPagination from chat.utils.branching import make_branched_conversation +from rest_framework.decorators import parser_classes +from rest_framework.parsers import MultiPartParser, FormParser @api_view(["GET"]) @@ -230,3 +232,193 @@ def version_add_message(request, pk): status=status.HTTP_201_CREATED, ) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + +@login_required +@api_view(["GET"]) +def get_conversation_summaries(request): + """ + API endpoint to retrieve conversation summaries with pagination and filtering. 
+ + Query Parameters: + - page: Page number (default: 1) + - page_size: Number of items per page (default: 10, max: 100) + - search: Search in title and summary + - created_after: Filter by creation date (YYYY-MM-DD) + - created_before: Filter by creation date (YYYY-MM-DD) + - has_summary: Filter conversations with/without summaries (true/false) + """ + + # Get queryset + queryset = Conversation.objects.filter( + user=request.user, + deleted_at__isnull=True + ).annotate( + version_count=Count('versions') + ).select_related('user', 'active_version').order_by('-modified_at') + + # Apply filters + search = request.GET.get('search', None) + if search: + queryset = queryset.filter( + Q(title__icontains=search) | Q(summary__icontains=search) + ) + + created_after = request.GET.get('created_after', None) + if created_after: + queryset = queryset.filter(created_at__gte=created_after) + + created_before = request.GET.get('created_before', None) + if created_before: + queryset = queryset.filter(created_at__lte=created_before) + + has_summary = request.GET.get('has_summary', None) + if has_summary is not None: + if has_summary.lower() == 'true': + queryset = queryset.filter(summary__isnull=False).exclude(summary='') + elif has_summary.lower() == 'false': + queryset = queryset.filter(Q(summary__isnull=True) | Q(summary='')) + + # Paginate + paginator = SummaryPagination() + paginated_queryset = paginator.paginate_queryset(queryset, request) + + # Serialize + serializer = ConversationSummarySerializer(paginated_queryset, many=True, context={'request': request}) + + return paginator.get_paginated_response(serializer.data) + + +@login_required +@api_view(["POST"]) +@parser_classes([MultiPartParser, FormParser]) +def upload_file(request): + """ + API endpoint to upload a file with duplicate detection. 
+ + Request: + - file: File to upload (multipart/form-data) + + Returns: + - 201: File uploaded successfully + - 400: Validation error or duplicate file + """ + + if 'file' not in request.FILES: + return Response( + {"detail": "No file provided"}, + status=status.HTTP_400_BAD_REQUEST + ) + + file = request.FILES['file'] + + # Create serializer with file and user + serializer = UploadedFileSerializer( + data={'file': file, 'user': request.user.id}, + context={'request': request} + ) + + try: + if serializer.is_valid(): + serializer.save(user=request.user) + return Response(serializer.data, status=status.HTTP_201_CREATED) + else: + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + except Exception as e: + return Response( + {"detail": str(e)}, + status=status.HTTP_400_BAD_REQUEST + ) + + +@login_required +@api_view(["GET"]) +def list_uploaded_files(request): + """ + API endpoint to list uploaded files with metadata and filtering. + + Query Parameters: + - file_type: Filter by file type (e.g., 'image/png') + - uploaded_after: Filter by upload date (YYYY-MM-DD) + - uploaded_before: Filter by upload date (YYYY-MM-DD) + - search: Search in filename + - page: Page number + - page_size: Items per page + """ + + # Get queryset + queryset = UploadedFile.objects.filter(user=request.user).order_by('-uploaded_at') + + # Apply filters + file_type = request.GET.get('file_type', None) + if file_type: + queryset = queryset.filter(file_type__icontains=file_type) + + uploaded_after = request.GET.get('uploaded_after', None) + if uploaded_after: + queryset = queryset.filter(uploaded_at__gte=uploaded_after) + + uploaded_before = request.GET.get('uploaded_before', None) + if uploaded_before: + queryset = queryset.filter(uploaded_at__lte=uploaded_before) + + search = request.GET.get('search', None) + if search: + queryset = queryset.filter(original_filename__icontains=search) + + # Paginate + paginator = SummaryPagination() + paginated_queryset = 
paginator.paginate_queryset(queryset, request) + + # Serialize + serializer = UploadedFileSerializer( + paginated_queryset, + many=True, + context={'request': request} + ) + + return paginator.get_paginated_response(serializer.data) + + +@login_required +@api_view(["GET"]) +def get_uploaded_file(request, pk): + """ + API endpoint to get details of a specific uploaded file. + """ + try: + uploaded_file = UploadedFile.objects.get(pk=pk, user=request.user) + except UploadedFile.DoesNotExist: + return Response( + {"detail": "File not found"}, + status=status.HTTP_404_NOT_FOUND + ) + + serializer = UploadedFileSerializer(uploaded_file, context={'request': request}) + return Response(serializer.data, status=status.HTTP_200_OK) + + +@login_required +@api_view(["DELETE"]) +def delete_uploaded_file(request, pk): + """ + API endpoint to delete an uploaded file. + + Deletes both the database record and the physical file. + """ + try: + uploaded_file = UploadedFile.objects.get(pk=pk, user=request.user) + except UploadedFile.DoesNotExist: + return Response( + {"detail": "File not found"}, + status=status.HTTP_404_NOT_FOUND + ) + + # Delete the file (this also deletes the physical file due to model override) + filename = uploaded_file.original_filename + uploaded_file.delete() + + return Response( + {"detail": f"File '{filename}' deleted successfully"}, + status=status.HTTP_200_OK + ) diff --git a/backend/src/utils/gpt.py b/backend/src/utils/gpt.py index f8a4aa023..b5b246dc1 100644 --- a/backend/src/utils/gpt.py +++ b/backend/src/utils/gpt.py @@ -74,4 +74,4 @@ def get_conversation_answer(conversation: list[dict[str, str]], model: str, stre continue chunk = choices.pop()["delta"].get("content") if chunk: - yield chunk + yield chunk \ No newline at end of file