diff --git a/example/app/settings.py b/example/app/settings.py index af26c18fb1..668f0aa414 100644 --- a/example/app/settings.py +++ b/example/app/settings.py @@ -24,6 +24,7 @@ "plain.toolbar", "plain.redirection", "plain.observer", + "plain.s3", "app.users", ] @@ -56,3 +57,9 @@ AUTH_LOGIN_URL = "login" AUTH_USER_MODEL = "users.User" + +# S3 connection settings (configure for your storage provider) +S3_ACCESS_KEY_ID = "" +S3_SECRET_ACCESS_KEY = "" +S3_REGION = "" +S3_BUCKET = "" diff --git a/example/pyproject.toml b/example/pyproject.toml index d3de24fb16..ce47c37422 100644 --- a/example/pyproject.toml +++ b/example/pyproject.toml @@ -33,6 +33,7 @@ dependencies = [ "plain-tunnel", "plain-vendor", "plain-observer", + "plain-s3", ] [tool.plain.tailwind] diff --git a/plain-s3/LICENSE b/plain-s3/LICENSE new file mode 100644 index 0000000000..4a29315c05 --- /dev/null +++ b/plain-s3/LICENSE @@ -0,0 +1,28 @@ +BSD 3-Clause License + +Copyright (c) 2025, Dropseed, LLC + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/plain-s3/README.md b/plain-s3/README.md new file mode 120000 index 0000000000..406db9dd45 --- /dev/null +++ b/plain-s3/README.md @@ -0,0 +1 @@ +plain/s3/README.md \ No newline at end of file diff --git a/plain-s3/plain/s3/README.md b/plain-s3/plain/s3/README.md new file mode 100644 index 0000000000..ab432b231e --- /dev/null +++ b/plain-s3/plain/s3/README.md @@ -0,0 +1,271 @@ +# plain.s3 + +**S3-compatible file storage for Plain models.** + +Store files in S3, Cloudflare R2, DigitalOcean Spaces, MinIO, or any S3-compatible storage. 
+ +- [Overview](#overview) +- [Uploading files](#uploading-files) +- [Presigned uploads](#presigned-uploads) +- [Downloading files](#downloading-files) +- [Settings](#settings) +- [Installation](#installation) + +## Overview + +Add file uploads to your models with `S3FileField`: + +```python +from plain import models +from plain.models import types +from plain.s3.fields import S3FileField +from plain.s3.models import S3File + + +@models.register_model +class Document(models.Model): + title: str = types.CharField(max_length=200) + file: S3File | None = S3FileField() # Uses S3_BUCKET setting +``` + +Override the bucket or add path prefixes per-field: + +```python +@models.register_model +class User(models.Model): + name: str = types.CharField(max_length=100) + + # Public avatars with custom path prefix + avatar: S3File | None = S3FileField( + key_prefix="avatars/", + acl="public-read", + ) + + # Private documents in a different bucket + id_document: S3File | None = S3FileField( + bucket="private-docs", + key_prefix="id-verification/", + ) +``` + +Access file properties and generate download URLs: + +```python +doc = Document.query.get(id=some_id) + +doc.file.filename # "report.pdf" +doc.file.content_type # "application/pdf" +doc.file.byte_size # 1048576 +doc.file.size_display # "1.0 MB" +doc.file.presigned_download_url() # Presigned S3 URL +``` + +## Uploading files + +### Using a form + +Use `S3FileField` in your form to handle file uploads: + +```python +# app/documents/forms.py +from plain import forms +from plain.s3.forms import S3FileField + + +class DocumentForm(forms.Form): + title = forms.CharField() + file = S3FileField() # Uses S3_BUCKET setting +``` + +```python +# app/documents/views.py +from plain.views import FormView + +from .forms import DocumentForm +from .models import Document + + +class DocumentCreateView(FormView): + form_class = DocumentForm + template_name = "documents/create.html" + + def form_valid(self, form): + doc = Document.query.create( 
+ title=form.cleaned_data["title"], + file=form.cleaned_data["file"], # S3File instance + ) + return redirect("documents:detail", doc.id) +``` + +### Direct upload in a view + +Upload files directly using the model field's `upload` method: + +```python +from plain.views import View + +from .models import Document + + +class DocumentUploadView(View): + def post(self): + uploaded_file = self.request.files["file"] + + # Get the field and use its configuration + file_field = Document._meta.get_field("file") + s3_file = file_field.upload(uploaded_file) + + doc = Document.query.create( + title=self.request.POST["title"], + file=s3_file, + ) + return {"id": doc.id} +``` + +Or upload directly via `S3File.upload()`: + +```python +from plain.s3.models import S3File + + +s3_file = S3File.upload(file=uploaded_file) +``` + +## Presigned uploads + +For large files, upload directly from the browser to S3 to avoid server load. + +**1. Create a view that returns presigned upload data:** + +```python +# app/documents/views.py +import json + +from plain.views import View +from plain.s3.models import S3File + +from .models import Document + + +class PresignUploadView(View): + def post(self): + data = json.loads(self.request.body) + + file_field = Document._meta.get_field("file") + return file_field.create_presigned_upload( + filename=data["filename"], + byte_size=data["byte_size"], + ) + # Returns: { + # "key": "abc123.pdf", + # "upload_url": "https://bucket.s3...", + # } + + +class DocumentCreateView(View): + def post(self): + data = json.loads(self.request.body) + file = S3File.query.get(key=data["key"]) + doc = Document.query.create( + title=data["title"], + file=file, + ) + return {"id": str(doc.id)} +``` + +**2. 
Upload from the browser:** + +```javascript +// Get presigned URL +const presign = await fetch('/documents/presign/', { + method: 'POST', + body: JSON.stringify({ + filename: file.name, + byte_size: file.size, + }), +}).then(r => r.json()); + +// Upload directly to S3 +await fetch(presign.upload_url, { + method: 'PUT', + body: file, + headers: { 'Content-Type': file.type }, +}); + +// Now attach to your record +await fetch('/documents/', { + method: 'POST', + body: JSON.stringify({ + title: 'My Document', + key: presign.key, + }), +}); +``` + +## Downloading files + +Generate presigned download URLs: + +```python +# Default expiration (1 hour), triggers download +url = doc.file.presigned_download_url() + +# Custom expiration (5 minutes) +url = doc.file.presigned_download_url(expires_in=300) + +# Display in browser instead of downloading (for images, PDFs, etc.) +url = doc.file.presigned_download_url(inline=True) +``` + +## Settings + +Configure your S3 connection in settings: + +```python +S3_ACCESS_KEY_ID = "..." +S3_SECRET_ACCESS_KEY = "..." +S3_BUCKET = "my-bucket" +S3_REGION = "us-east-1" +S3_ENDPOINT_URL = "" # For R2, MinIO, DigitalOcean Spaces, etc. +``` + +### Cloudflare R2 + +```python +S3_ACCESS_KEY_ID = "..." +S3_SECRET_ACCESS_KEY = "..." +S3_BUCKET = "my-bucket" +S3_REGION = "auto" +S3_ENDPOINT_URL = "https://ACCOUNT_ID.r2.cloudflarestorage.com" +``` + +### DigitalOcean Spaces + +```python +S3_ACCESS_KEY_ID = "..." +S3_SECRET_ACCESS_KEY = "..." +S3_BUCKET = "my-bucket" +S3_REGION = "nyc3" # Your Spaces region +S3_ENDPOINT_URL = "https://nyc3.digitaloceanspaces.com" +``` + +## Installation + +1. Add `plain.s3` to your `INSTALLED_PACKAGES`: + +```python +INSTALLED_PACKAGES = [ + # ... + "plain.s3", +] +``` + +2. Configure your S3 settings (see Settings above). + +3. Run migrations: + +```bash +plain migrate +``` + +4. Add `S3FileField` to your models. 
diff --git a/plain-s3/plain/s3/__init__.py b/plain-s3/plain/s3/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/plain-s3/plain/s3/admin.py b/plain-s3/plain/s3/admin.py new file mode 100644 index 0000000000..d40a1c6b18 --- /dev/null +++ b/plain-s3/plain/s3/admin.py @@ -0,0 +1,40 @@ +from __future__ import annotations + +from plain.admin.views import ( + AdminModelDetailView, + AdminModelListView, + AdminViewset, + register_viewset, +) + +from .models import S3File + + +@register_viewset +class S3FileViewset(AdminViewset): + class ListView(AdminModelListView): + nav_section = "S3" + model = S3File + title = "Files" + fields = [ + "id", + "filename", + "content_type", + "size_display", + "created_at", + ] + search_fields = [ + "filename", + "key", + ] + queryset_order = ["-created_at"] + actions = ["Delete"] + + def perform_action(self, action: str, target_ids: list) -> None: + if action == "Delete": + for file in S3File.query.filter(id__in=target_ids): + file.delete() # This also deletes from S3 + + class DetailView(AdminModelDetailView): + model = S3File + title = "File" diff --git a/plain-s3/plain/s3/config.py b/plain-s3/plain/s3/config.py new file mode 100644 index 0000000000..de00258bcb --- /dev/null +++ b/plain-s3/plain/s3/config.py @@ -0,0 +1,6 @@ +from plain.packages import PackageConfig, register_config + + +@register_config +class Config(PackageConfig): + package_label = "plains3" diff --git a/plain-s3/plain/s3/default_settings.py b/plain-s3/plain/s3/default_settings.py new file mode 100644 index 0000000000..f07237f508 --- /dev/null +++ b/plain-s3/plain/s3/default_settings.py @@ -0,0 +1,8 @@ +# S3 connection settings (global) +S3_ACCESS_KEY_ID: str +S3_SECRET_ACCESS_KEY: str +S3_REGION: str +S3_ENDPOINT_URL: str = "" # For R2, MinIO, DigitalOcean Spaces, etc. 
+ +# Default upload settings +S3_BUCKET: str diff --git a/plain-s3/plain/s3/fields.py b/plain-s3/plain/s3/fields.py new file mode 100644 index 0000000000..6bfa1e5c58 --- /dev/null +++ b/plain-s3/plain/s3/fields.py @@ -0,0 +1,114 @@ +from __future__ import annotations + +from typing import Any + +from plain import models +from plain.models.fields.related import ForeignKeyField + +from .models import PresignedUpload + + +class S3FileField(ForeignKeyField): + """ + A ForeignKey field that links to an S3File with S3 configuration. + + Usage: + class Document(models.Model): + # Uses S3_BUCKET setting: + file: S3File | None = S3FileField() + + # With optional configuration: + avatar: S3File | None = S3FileField( + bucket="avatars-bucket", + key_prefix="users/", + acl="public-read", + ) + + By default, the field is optional (allow_null=True) and uses SET_NULL + on delete to avoid cascading deletes of your records when files are removed. + """ + + def __init__( + self, + bucket: str = "", + *, + key_prefix: str = "", + acl: str = "", + on_delete: Any = None, + **kwargs: Any, + ): + # Import here to avoid circular imports + from .models import S3File + + # Store S3 configuration + self.bucket = bucket + self.key_prefix = key_prefix + self.acl = acl + + # Set FK defaults + if on_delete is None: + on_delete = models.SET_NULL + kwargs.setdefault("allow_null", True) + kwargs.setdefault("required", False) + + super().__init__(S3File, on_delete=on_delete, **kwargs) + + def upload(self, file: Any) -> Any: + """ + Upload a file using this field's configuration. + + Returns the created S3File instance. 
+ """ + from .models import S3File + + return S3File.upload( + bucket=self.bucket, + file=file, + key_prefix=self.key_prefix, + acl=self.acl, + ) + + def create_presigned_upload( + self, + *, + filename: str, + byte_size: int, + content_type: str | None = None, + ) -> PresignedUpload: + """Create a presigned upload using this field's configuration.""" + from .models import S3File + + return S3File.create_presigned_upload( + bucket=self.bucket, + filename=filename, + byte_size=byte_size, + content_type=content_type, + key_prefix=self.key_prefix, + acl=self.acl, + ) + + def formfield(self, **kwargs: Any) -> Any: + """Return an S3FileField form field for use in ModelForms.""" + from .forms import S3FileField as S3FileFormField + + return S3FileFormField( + bucket=self.bucket, + key_prefix=self.key_prefix, + acl=self.acl, + required=kwargs.pop("required", not self.allow_null), + **kwargs, + ) + + def deconstruct(self) -> tuple: + """Support migrations by including S3 configuration.""" + name, path, args, kwargs = super().deconstruct() + # Add our custom attributes + if self.bucket: + kwargs["bucket"] = self.bucket + if self.key_prefix: + kwargs["key_prefix"] = self.key_prefix + if self.acl: + kwargs["acl"] = self.acl + # Remove the 'to' argument since we always point to S3File + args = () + return name, path, args, kwargs diff --git a/plain-s3/plain/s3/forms.py b/plain-s3/plain/s3/forms.py new file mode 100644 index 0000000000..6b656b43d0 --- /dev/null +++ b/plain-s3/plain/s3/forms.py @@ -0,0 +1,47 @@ +from __future__ import annotations + +from typing import Any + +from plain.forms.fields import FileField + + +class S3FileField(FileField): + """ + A form field that uploads files to S3. + + Usage in a form: + class DocumentForm(Form): + title = fields.CharField() + file = S3FileField() # Uses S3_BUCKET setting + + The cleaned value is an S3File instance (or None if no file uploaded). 
+ """ + + def __init__( + self, + bucket: str = "", + *, + key_prefix: str = "", + acl: str = "", + **kwargs: Any, + ): + self.bucket = bucket + self.key_prefix = key_prefix + self.acl = acl + super().__init__(**kwargs) + + def clean(self, data: Any, initial: Any = None) -> Any: # type: ignore[override] + file = super().clean(data, initial) + + if file is None or file is False: + return file + + # Upload to S3 and return S3File instance + from .models import S3File + + return S3File.upload( + bucket=self.bucket, + file=file, + key_prefix=self.key_prefix, + acl=self.acl, + ) diff --git a/plain-s3/plain/s3/migrations/0001_initial.py b/plain-s3/plain/s3/migrations/0001_initial.py new file mode 100644 index 0000000000..1f92b3b1ca --- /dev/null +++ b/plain-s3/plain/s3/migrations/0001_initial.py @@ -0,0 +1,37 @@ +# Generated by Plain 0.94.0 on 2025-12-15 03:55 + +from plain import models +from plain.models import migrations + + +class Migration(migrations.Migration): + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="S3File", + fields=[ + ("id", models.PrimaryKeyField()), + ("bucket", models.CharField(max_length=255)), + ("key", models.CharField(max_length=500)), + ("filename", models.CharField(max_length=255)), + ("content_type", models.CharField(max_length=100)), + ("byte_size", models.PositiveBigIntegerField()), + ("created_at", models.DateTimeField(auto_now_add=True)), + ], + ), + migrations.AddIndex( + model_name="s3file", + index=models.Index( + fields=["created_at"], name="plains3_s3f_created_76240a_idx" + ), + ), + migrations.AddConstraint( + model_name="s3file", + constraint=models.UniqueConstraint( + fields=("key",), name="plains3_s3file_unique_key" + ), + ), + ] diff --git a/plain-s3/plain/s3/migrations/__init__.py b/plain-s3/plain/s3/migrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/plain-s3/plain/s3/models.py b/plain-s3/plain/s3/models.py new file mode 100644 index 
0000000000..0cdbc5d058 --- /dev/null +++ b/plain-s3/plain/s3/models.py @@ -0,0 +1,231 @@ +from __future__ import annotations + +import mimetypes +from datetime import datetime +from typing import TYPE_CHECKING, Any, TypedDict +from uuid import uuid4 + +import boto3 + +from plain import models +from plain.models import types +from plain.runtime import settings + +if TYPE_CHECKING: + from types_boto3_s3 import S3Client + + +class PresignedUpload(TypedDict): + """Return type for create_presigned_upload.""" + + key: str + upload_url: str + + +@models.register_model +class S3File(models.Model): + """ + Represents a file stored in S3-compatible storage. + + This model stores metadata about files. The actual file content + is stored in S3. Link to this model using S3FileField() on your models. + """ + + query: models.QuerySet[S3File] = models.QuerySet() + + # S3 storage location + bucket: str = types.CharField(max_length=255) + key: str = types.CharField(max_length=500) + + # File metadata + filename: str = types.CharField(max_length=255) + content_type: str = types.CharField(max_length=100) + byte_size: int = types.PositiveBigIntegerField() + + created_at: datetime = types.DateTimeField(auto_now_add=True) + + model_options = models.Options( + indexes=[ + models.Index(fields=["created_at"]), + ], + constraints=[ + models.UniqueConstraint(fields=["key"], name="plains3_s3file_unique_key"), + ], + ) + + def __str__(self) -> str: + return self.filename + + @classmethod + def get_s3_client(cls) -> S3Client: + """Create an S3 client using settings.""" + kwargs: dict[str, Any] = { + "aws_access_key_id": settings.S3_ACCESS_KEY_ID, + "aws_secret_access_key": settings.S3_SECRET_ACCESS_KEY, + "region_name": settings.S3_REGION, + } + if settings.S3_ENDPOINT_URL: + kwargs["endpoint_url"] = settings.S3_ENDPOINT_URL + return boto3.client("s3", **kwargs) + + @classmethod + def _generate_key(cls, filename: str, *, key_prefix: str = "") -> str: + """Generate a unique S3 key for a new 
file.""" + ext = "" + if "." in filename: + ext = "." + filename.rsplit(".", 1)[-1].lower() + return f"{key_prefix}{uuid4()}{ext}" + + @classmethod + def upload( + cls, + *, + file: Any, + bucket: str = "", + key_prefix: str = "", + acl: str = "", + ) -> S3File: + """ + Upload a file to S3 and create the S3File record. + """ + bucket = bucket or settings.S3_BUCKET + filename = file.name + content_type = getattr(file, "content_type", None) + if content_type is None: + content_type, _ = mimetypes.guess_type(filename) + content_type = content_type or "application/octet-stream" + + key = cls._generate_key(filename, key_prefix=key_prefix) + body = file.read() + byte_size = len(body) + + # Upload to S3 + client = cls.get_s3_client() + put_kwargs = { + "Bucket": bucket, + "Key": key, + "Body": body, + "ContentType": content_type, + } + if acl: + put_kwargs["ACL"] = acl + client.put_object(**put_kwargs) + + # Create the database record + return cls.query.create( + bucket=bucket, + key=key, + filename=filename, + content_type=content_type, + byte_size=byte_size, + ) + + @classmethod + def create_presigned_upload( + cls, + *, + filename: str, + byte_size: int, + bucket: str = "", + content_type: str | None = None, + key_prefix: str = "", + acl: str = "", + ) -> PresignedUpload: + """ + Create a new S3File record and return presigned upload data. + + The file record is created immediately but the file isn't uploaded yet. + After the client uploads directly to S3, the file will be available. 
+ """ + bucket = bucket or settings.S3_BUCKET + + if content_type is None: + content_type, _ = mimetypes.guess_type(filename) + content_type = content_type or "application/octet-stream" + + key = cls._generate_key(filename, key_prefix=key_prefix) + + # Create the file record + cls.query.create( + bucket=bucket, + key=key, + filename=filename, + content_type=content_type, + byte_size=byte_size, + ) + + client = cls.get_s3_client() + + params: dict[str, Any] = { + "Bucket": bucket, + "Key": key, + "ContentType": content_type, + } + if acl: + params["ACL"] = acl + + upload_url = client.generate_presigned_url( + "put_object", + Params=params, + ExpiresIn=3600, + ) + + return PresignedUpload( + key=key, + upload_url=upload_url, + ) + + def presigned_download_url( + self, *, expires_in: int = 3600, inline: bool = False + ) -> str: + """Generate a presigned URL for downloading this file. + + Use inline=True to display in browser (for images, PDFs, etc.) + instead of triggering a download. + """ + client = self.get_s3_client() + disposition = "inline" if inline else "attachment" + params = { + "Bucket": self.bucket, + "Key": self.key, + "ResponseContentDisposition": f'{disposition}; filename="{self.filename}"', + } + return client.generate_presigned_url( + "get_object", + Params=params, + ExpiresIn=expires_in, + ) + + def exists_in_storage(self) -> bool: + """Check if the file actually exists in S3.""" + client = self.get_s3_client() + try: + client.head_object(Bucket=self.bucket, Key=self.key) + return True + except client.exceptions.ClientError as e: + if e.response["Error"]["Code"] == "404": + return False + raise + + def delete(self) -> tuple[int, dict[str, int]]: + """Delete the file from S3 and the database record.""" + client = self.get_s3_client() + client.delete_object(Bucket=self.bucket, Key=self.key) + return super().delete() + + @property + def extension(self) -> str: + """Get the file extension (lowercase, without dot).""" + if "." 
in self.filename: + return self.filename.rsplit(".", 1)[-1].lower() + return "" + + @property + def size_display(self) -> str: + """Human-readable file size.""" + size = self.byte_size + for unit in ["B", "KB", "MB", "GB", "TB"]: + if size < 1024: + return f"{size:.1f} {unit}" if unit != "B" else f"{size} {unit}" + size /= 1024 + return f"{size:.1f} PB" diff --git a/plain-s3/pyproject.toml b/plain-s3/pyproject.toml new file mode 100644 index 0000000000..55d9215f53 --- /dev/null +++ b/plain-s3/pyproject.toml @@ -0,0 +1,26 @@ +[project] +name = "plain.s3" +version = "0.1.0" +description = "S3-compatible file storage for Plain models." +authors = [{name = "Dave Gaeddert", email = "dave.gaeddert@dropseed.dev"}] +readme = "README.md" +license = "BSD-3-Clause" +requires-python = ">=3.13" +dependencies = [ + "plain<1.0.0", + "plain.models<1.0.0", + "boto3>=1.35.0", +] + +[dependency-groups] +dev = [ + "plain.pytest<1.0.0", + "types-boto3[s3]>=1.35.0", +] + +[tool.hatch.build.targets.wheel] +packages = ["plain"] + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" diff --git a/plain-s3/tests/app/settings.py b/plain-s3/tests/app/settings.py new file mode 100644 index 0000000000..a43f59ad78 --- /dev/null +++ b/plain-s3/tests/app/settings.py @@ -0,0 +1,13 @@ +SECRET_KEY = "test" +URLS_ROUTER = "app.urls.AppRouter" +INSTALLED_PACKAGES = [ + "plain.models", + "plain.s3", +] + +# S3 settings (will be mocked in tests) +S3_ACCESS_KEY_ID = "test-key" +S3_SECRET_ACCESS_KEY = "test-secret" +S3_REGION = "us-east-1" +S3_BUCKET = "test-bucket" +S3_ENDPOINT_URL = "" diff --git a/plain-s3/tests/app/urls.py b/plain-s3/tests/app/urls.py new file mode 100644 index 0000000000..49af284112 --- /dev/null +++ b/plain-s3/tests/app/urls.py @@ -0,0 +1,6 @@ +from plain.urls import Router + + +class AppRouter(Router): + namespace = "" + urls = [] diff --git a/plain-s3/tests/test_s3.py b/plain-s3/tests/test_s3.py new file mode 100644 index 0000000000..f7030f03e0 --- /dev/null 
+++ b/plain-s3/tests/test_s3.py @@ -0,0 +1,234 @@ +from io import BytesIO +from unittest.mock import MagicMock, patch + +import pytest + +from plain.s3.models import S3File + + +@pytest.fixture +def mock_s3_client(): + """Mock the boto3 S3 client.""" + with patch("plain.s3.models.boto3.client") as mock_client: + client = MagicMock() + mock_client.return_value = client + yield client + + +@pytest.fixture +def s3_file(db): + """Create a test S3File record.""" + return S3File.query.create( + bucket="test-bucket", + key="abc123.pdf", + filename="document.pdf", + content_type="application/pdf", + byte_size=1024, + ) + + +class TestGenerateKey: + def test_generates_unique_keys(self): + key1 = S3File._generate_key("test.pdf") + key2 = S3File._generate_key("test.pdf") + assert key1 != key2 + + def test_preserves_extension(self): + key = S3File._generate_key("document.pdf") + assert key.endswith(".pdf") + + def test_lowercases_extension(self): + key = S3File._generate_key("document.PDF") + assert key.endswith(".pdf") + + def test_handles_no_extension(self): + key = S3File._generate_key("README") + assert "." 
not in key + + def test_applies_key_prefix(self): + key = S3File._generate_key("test.pdf", key_prefix="uploads/") + assert key.startswith("uploads/") + assert key.endswith(".pdf") + + +class TestUpload: + def test_uploads_file_to_s3(self, db, mock_s3_client): + file = BytesIO(b"test content") + file.name = "test.txt" + + S3File.upload(file=file) + + mock_s3_client.put_object.assert_called_once() + call_kwargs = mock_s3_client.put_object.call_args.kwargs + assert call_kwargs["Bucket"] == "test-bucket" + assert call_kwargs["Body"] == b"test content" + assert call_kwargs["ContentType"] == "text/plain" + + def test_creates_database_record(self, db, mock_s3_client): + file = BytesIO(b"test content") + file.name = "test.txt" + + s3_file = S3File.upload(file=file) + + assert s3_file.id is not None + assert s3_file.filename == "test.txt" + assert s3_file.byte_size == 12 + assert s3_file.content_type == "text/plain" + + def test_uses_custom_bucket(self, db, mock_s3_client): + file = BytesIO(b"test") + file.name = "test.txt" + + s3_file = S3File.upload(file=file, bucket="custom-bucket") + + call_kwargs = mock_s3_client.put_object.call_args.kwargs + assert call_kwargs["Bucket"] == "custom-bucket" + assert s3_file.bucket == "custom-bucket" + + def test_applies_acl(self, db, mock_s3_client): + file = BytesIO(b"test") + file.name = "test.txt" + + S3File.upload(file=file, acl="public-read") + + call_kwargs = mock_s3_client.put_object.call_args.kwargs + assert call_kwargs["ACL"] == "public-read" + + +class TestCreatePresignedUpload: + def test_creates_presigned_url(self, db, mock_s3_client): + mock_s3_client.generate_presigned_url.return_value = ( + "https://s3.example.com/presigned-url" + ) + + result = S3File.create_presigned_upload( + filename="document.pdf", + byte_size=1024, + ) + + assert "key" in result + assert result["upload_url"] == "https://s3.example.com/presigned-url" + mock_s3_client.generate_presigned_url.assert_called_once() + + def 
test_creates_database_record(self, db, mock_s3_client): + mock_s3_client.generate_presigned_url.return_value = ( + "https://s3.example.com/presigned-url" + ) + + result = S3File.create_presigned_upload( + filename="document.pdf", + byte_size=1024, + ) + + s3_file = S3File.query.get(key=result["key"]) + assert s3_file.filename == "document.pdf" + assert s3_file.byte_size == 1024 + + +class TestPresignedDownloadUrl: + def test_generates_download_url(self, s3_file, mock_s3_client): + mock_s3_client.generate_presigned_url.return_value = "https://signed-url" + + url = s3_file.presigned_download_url() + + assert url == "https://signed-url" + call_args = mock_s3_client.generate_presigned_url.call_args + params = call_args.kwargs["Params"] + assert ( + 'attachment; filename="document.pdf"' + in params["ResponseContentDisposition"] + ) + + def test_inline_disposition(self, s3_file, mock_s3_client): + mock_s3_client.generate_presigned_url.return_value = "https://signed-url" + + s3_file.presigned_download_url(inline=True) + + call_args = mock_s3_client.generate_presigned_url.call_args + params = call_args.kwargs["Params"] + assert 'inline; filename="document.pdf"' in params["ResponseContentDisposition"] + + def test_custom_expiration(self, s3_file, mock_s3_client): + mock_s3_client.generate_presigned_url.return_value = "https://signed-url" + + s3_file.presigned_download_url(expires_in=7200) + + call_args = mock_s3_client.generate_presigned_url.call_args + assert call_args.kwargs["ExpiresIn"] == 7200 + + +class TestExistsInStorage: + def test_returns_true_when_exists(self, s3_file, mock_s3_client): + mock_s3_client.head_object.return_value = {} + + assert s3_file.exists_in_storage() is True + + def test_returns_false_when_not_exists(self, s3_file, mock_s3_client): + error = MagicMock() + error.response = {"Error": {"Code": "404"}} + mock_s3_client.head_object.side_effect = mock_s3_client.exceptions.ClientError( + error.response, "HeadObject" + ) + 
mock_s3_client.exceptions.ClientError = type( + "ClientError", + (Exception,), + {"response": property(lambda self: error.response)}, + ) + + # Re-raise the mock exception + mock_s3_client.head_object.side_effect = mock_s3_client.exceptions.ClientError( + error.response, "HeadObject" + ) + + assert s3_file.exists_in_storage() is False + + +class TestDelete: + def test_deletes_from_s3_and_database(self, s3_file, mock_s3_client): + file_id = s3_file.id + + s3_file.delete() + + mock_s3_client.delete_object.assert_called_once_with( + Bucket="test-bucket", + Key="abc123.pdf", + ) + assert S3File.query.filter(id=file_id).count() == 0 + + +class TestProperties: + def test_extension(self, s3_file): + assert s3_file.extension == "pdf" + + def test_extension_no_dot(self, db): + file = S3File.query.create( + bucket="test", + key="abc", + filename="README", + content_type="text/plain", + byte_size=100, + ) + assert file.extension == "" + + def test_size_display_bytes(self, db): + file = S3File.query.create( + bucket="test", + key="abc", + filename="test", + content_type="text/plain", + byte_size=500, + ) + assert file.size_display == "500 B" + + def test_size_display_kb(self, db): + file = S3File.query.create( + bucket="test", + key="abc", + filename="test", + content_type="text/plain", + byte_size=2048, + ) + assert file.size_display == "2.0 KB" + + def test_str(self, s3_file): + assert str(s3_file) == "document.pdf" diff --git a/pyproject.toml b/pyproject.toml index 476713bc22..d121d1f272 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,6 +34,7 @@ dev = [ "plain-toolbar", "plain-tunnel", "plain-vendor", + "plain-s3", # Type checking and better dev experience "ty>=0.0.1a33", "psycopg[binary]>=3.2.12", @@ -70,6 +71,7 @@ dev = [ "plain-toolbar" = { workspace = true } "plain-tunnel" = { workspace = true } "plain-vendor" = { workspace = true } +"plain-s3" = { workspace = true } [tool.uv.workspace] members = [ diff --git a/scripts/test b/scripts/test index 
2c80c93d40..412c7ad2f4 100755 --- a/scripts/test +++ b/scripts/test @@ -22,6 +22,7 @@ plain-auth plain-api plain-elements plain-htmx +plain-s3 EOF )" diff --git a/scripts/type-validate b/scripts/type-validate index adbff32f5c..88d49c1365 100755 --- a/scripts/type-validate +++ b/scripts/type-validate @@ -45,6 +45,7 @@ FULLY_TYPED_PATHS = [ "plain-passwords", "plain-pytest", "plain-redirection", + "plain-s3", "plain-sessions", "plain-observer", "plain-support", diff --git a/uv.lock b/uv.lock index a1f0eb889a..a4bc85c891 100644 --- a/uv.lock +++ b/uv.lock @@ -27,6 +27,7 @@ members = [ "plain-passwords", "plain-pytest", "plain-redirection", + "plain-s3", "plain-scan", "plain-sessions", "plain-start", @@ -51,6 +52,46 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916, upload-time = "2025-03-17T00:02:52.713Z" }, ] +[[package]] +name = "boto3" +version = "1.42.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, + { name = "jmespath" }, + { name = "s3transfer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8c/07/dfa651dbd57bfc34d952a101280928bab08ed6186f009c660a36c211ccff/boto3-1.42.9.tar.gz", hash = "sha256:cdd4cc3e5bb08ed8a0c5cc77eca78f98f0239521de0991f14e44b788b0c639b2", size = 112827, upload-time = "2025-12-12T20:33:20.236Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/eb/97fdf6fbc8066fb1475b8ef260c1a58798b2b4f1e8839b501550de5d5ba1/boto3-1.42.9-py3-none-any.whl", hash = "sha256:d21d22af9aeb1bad8e9b670a221d6534c0120f7e7baf523dafaca83f1f5c3f90", size = 140561, upload-time = "2025-12-12T20:33:18.035Z" }, +] + +[[package]] +name = "botocore" +version = "1.42.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jmespath" }, + { name = "python-dateutil" }, + 
{ name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fd/f3/2d2cfb500e2dc00b0e33e3c8743306e6330f3cf219d19e9260dab2f3d6c2/botocore-1.42.9.tar.gz", hash = "sha256:74f69bfd116cc7c8215481284957eecdb48580e071dd50cb8c64356a866abd8c", size = 14861916, upload-time = "2025-12-12T20:33:08.017Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/2a/e9275f40042f7a09915c4be86b092cb02dc4bd74e77ab8864f485d998af1/botocore-1.42.9-py3-none-any.whl", hash = "sha256:f99ba2ca34e24c4ebec150376c815646970753c032eb84f230874b2975a185a8", size = 14537810, upload-time = "2025-12-12T20:33:04.069Z" }, +] + +[[package]] +name = "botocore-stubs" +version = "1.42.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "types-awscrt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9e/98/4ce007813d13ff107b8c5a39a85916f53bd58923dbb358882a6031d8f53a/botocore_stubs-1.42.9.tar.gz", hash = "sha256:92fdd2a1d911355166da3e30e9bb9b1803f7e2caec0d913f5fad3a920352ce6d", size = 42413, upload-time = "2025-12-12T21:24:37.054Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/9f/fbed79fa17af56cea7543b70d3f69c11426575a38b3421666411becefeef/botocore_stubs-1.42.9-py3-none-any.whl", hash = "sha256:9f8b652549d4f727aa69e09d462d18e54a1bd10f3dbb593da56d5d0aafe9756e", size = 66748, upload-time = "2025-12-12T21:24:35.832Z" }, +] + [[package]] name = "certifi" version = "2025.6.15" @@ -187,6 +228,7 @@ dependencies = [ { name = "plain-passwords" }, { name = "plain-pytest" }, { name = "plain-redirection" }, + { name = "plain-s3" }, { name = "plain-scan" }, { name = "plain-sessions" }, { name = "plain-support" }, @@ -220,6 +262,7 @@ requires-dist = [ { name = "plain-passwords", editable = "plain-passwords" }, { name = "plain-pytest", editable = "plain-pytest" }, { name = "plain-redirection", editable = "plain-redirection" }, + { name = "plain-s3", editable = "plain-s3" }, { name = "plain-scan", editable = "plain-scan" }, { 
name = "plain-sessions", editable = "plain-sessions" }, { name = "plain-support", editable = "plain-support" }, @@ -280,6 +323,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, ] +[[package]] +name = "jmespath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843, upload-time = "2022-06-17T18:00:12.224Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256, upload-time = "2022-06-17T18:00:10.251Z" }, +] + [[package]] name = "markdown-it-py" version = "3.0.0" @@ -837,6 +889,35 @@ requires-dist = [ { name = "plain-models", editable = "plain-models" }, ] +[[package]] +name = "plain-s3" +version = "0.1.0" +source = { editable = "plain-s3" } +dependencies = [ + { name = "boto3" }, + { name = "plain" }, + { name = "plain-models" }, +] + +[package.dev-dependencies] +dev = [ + { name = "plain-pytest" }, + { name = "types-boto3", extra = ["s3"] }, +] + +[package.metadata] +requires-dist = [ + { name = "boto3", specifier = ">=1.35.0" }, + { name = "plain", editable = "plain" }, + { name = "plain-models", editable = "plain-models" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "plain-pytest", editable = "plain-pytest" }, + { name = "types-boto3", extras = ["s3"], specifier = ">=1.35.0" }, +] + [[package]] name = "plain-scan" version = "0.5.0" @@ 
-1005,6 +1086,7 @@ dev = [ { name = "plain-passwords" }, { name = "plain-pytest" }, { name = "plain-redirection" }, + { name = "plain-s3" }, { name = "plain-scan" }, { name = "plain-sessions" }, { name = "plain-support" }, @@ -1043,6 +1125,7 @@ dev = [ { name = "plain-passwords", editable = "plain-passwords" }, { name = "plain-pytest", editable = "plain-pytest" }, { name = "plain-redirection", editable = "plain-redirection" }, + { name = "plain-s3", editable = "plain-s3" }, { name = "plain-scan", editable = "plain-scan" }, { name = "plain-sessions", editable = "plain-sessions" }, { name = "plain-support", editable = "plain-support" }, @@ -1139,6 +1222,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/29/16/c8a903f4c4dffe7a12843191437d7cd8e32751d5de349d45d3fe69544e87/pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7", size = 365474, upload-time = "2025-06-18T05:48:03.955Z" }, ] +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, +] + [[package]] name = "python-dotenv" version = "1.0.1" @@ -1230,6 +1325,27 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d0/33/4d3e79e4a84533d6cd526bfb42c020a23256ae5e4265d858bd1287831f7d/ruff-0.12.0-py3-none-win_arm64.whl", hash = 
"sha256:8cd24580405ad8c1cc64d61725bca091d6b6da7eb3d36f72cc605467069d7e8b", size = 10724946, upload-time = "2025-06-17T15:19:23.952Z" }, ] +[[package]] +name = "s3transfer" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/05/04/74127fc843314818edfa81b5540e26dd537353b123a4edc563109d8f17dd/s3transfer-0.16.0.tar.gz", hash = "sha256:8e990f13268025792229cd52fa10cb7163744bf56e719e0b9cb925ab79abf920", size = 153827, upload-time = "2025-12-01T02:30:59.114Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fc/51/727abb13f44c1fcf6d145979e1535a35794db0f6e450a0cb46aa24732fe2/s3transfer-0.16.0-py3-none-any.whl", hash = "sha256:18e25d66fed509e3868dc1572b3f427ff947dd2c56f844a5bf09481ad3f3b2fe", size = 86830, upload-time = "2025-12-01T02:30:57.729Z" }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + [[package]] name = "sniffio" version = "1.3.1" @@ -1282,6 +1398,51 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2f/fc/1825f1f8c77d4d8fe75543882d9ad5934e568aa807e1a4cb7e999f701750/ty-0.0.1a33-py3-none-win_arm64.whl", hash = "sha256:d9937e9ddc7b383c6b1ab3065982fb2b8d0a2884ae5bd7b542e4208a807e326e", size = 9471473, upload-time = "2025-12-09T22:35:12.105Z" }, ] +[[package]] +name = "types-awscrt" 
+version = "0.30.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/30/1f/febd2df22e24f77b759db0dd9ecdd7f07f055e6a4dbbb699c5eb34b617ef/types_awscrt-0.30.0.tar.gz", hash = "sha256:362fd8f5eaebcfcd922cb9fd8274fb375df550319f78031ee3779eac0b9ecc79", size = 17761, upload-time = "2025-12-12T01:55:59.626Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5b/5f/15999051fca2949a67562c3f80fae2dd5d3404a3f97b326b614533843281/types_awscrt-0.30.0-py3-none-any.whl", hash = "sha256:8204126e01a00eaa4a746e7a0076538ca0e4e3f52408adec0ab9b471bb0bb64b", size = 42392, upload-time = "2025-12-12T01:55:58.194Z" }, +] + +[[package]] +name = "types-boto3" +version = "1.42.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore-stubs" }, + { name = "types-s3transfer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0c/8f/f079336d4058e28f124f0b0d7aacbc4a9d78cc69f6284395c525630007ef/types_boto3-1.42.9.tar.gz", hash = "sha256:48714b22b411eec0996be0836370732605ac5d78085afe79c3a0123ea9cd30f7", size = 101245, upload-time = "2025-12-12T20:36:35.721Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b0/fb/41f0649754f3916a5959dbec5ff3559b95a4f8f9c830569635945164f144/types_boto3-1.42.9-py3-none-any.whl", hash = "sha256:057595465ccc6275a1f1ddd8f2c031f490229f0f8c9d65c11fb49dfbac85b35f", size = 69666, upload-time = "2025-12-12T20:36:29.96Z" }, +] + +[package.optional-dependencies] +s3 = [ + { name = "types-boto3-s3" }, +] + +[[package]] +name = "types-boto3-s3" +version = "1.42.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/78/20/f436a130ec7c959abebea7164a4dc4780819dfb80b2e451511d2007885e5/types_boto3_s3-1.42.3.tar.gz", hash = "sha256:9c7810300b9944d4c00cb99bfb8685c300c93d75c00273ecb340c91bd6f426b4", size = 75905, upload-time = "2025-12-04T21:11:18.675Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/d7/99/3aff9dc31c8264a693433a7229f892d2f8c5d27ae8d9d1dccedfb25cce4d/types_boto3_s3-1.42.3-py3-none-any.whl", hash = "sha256:a685a9808d158ca1a3642898062133532b0be064e372c27926d1a73a5533f47e", size = 82963, upload-time = "2025-12-04T21:11:16.51Z" }, +] + +[[package]] +name = "types-s3transfer" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/64/42689150509eb3e6e82b33ee3d89045de1592488842ddf23c56957786d05/types_s3transfer-0.16.0.tar.gz", hash = "sha256:b4636472024c5e2b62278c5b759661efeb52a81851cde5f092f24100b1ecb443", size = 13557, upload-time = "2025-12-08T08:13:09.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/27/e88220fe6274eccd3bdf95d9382918716d312f6f6cef6a46332d1ee2feff/types_s3transfer-0.16.0-py3-none-any.whl", hash = "sha256:1c0cd111ecf6e21437cb410f5cddb631bfb2263b77ad973e79b9c6d0cb24e0ef", size = 19247, upload-time = "2025-12-08T08:13:08.426Z" }, +] + [[package]] name = "typing-extensions" version = "4.14.0"