From 5690fe89b80f177ac024676304f64a0c263667f4 Mon Sep 17 00:00:00 2001 From: Claude Date: Sat, 13 Dec 2025 23:49:29 +0000 Subject: [PATCH 01/11] Add plain-s3 package for S3-compatible file storage Initial implementation of plain.s3 with: - S3File model for storing file metadata (key, filename, content_type, byte_size) - S3FileField for linking files to user models via ForeignKey - Presigned URL generation for direct browser-to-S3 uploads - Presigned download URLs with configurable expiration - boto3-based storage client supporting S3, Cloudflare R2, MinIO - Admin viewset for managing files - Settings for bucket, credentials, endpoint URL, ACL, and upload prefix --- example/app/settings.py | 6 + example/pyproject.toml | 1 + plain-s3/README.md | 1 + plain-s3/plain/s3/README.md | 169 +++++++++++++++++++ plain-s3/plain/s3/__init__.py | 3 + plain-s3/plain/s3/admin.py | 41 +++++ plain-s3/plain/s3/config.py | 6 + plain-s3/plain/s3/default_settings.py | 8 + plain-s3/plain/s3/fields.py | 25 +++ plain-s3/plain/s3/migrations/0001_initial.py | 48 ++++++ plain-s3/plain/s3/migrations/__init__.py | 0 plain-s3/plain/s3/models.py | 157 +++++++++++++++++ plain-s3/plain/s3/storage.py | 116 +++++++++++++ plain-s3/pyproject.toml | 25 +++ pyproject.toml | 2 + uv.lock | 100 +++++++++++ 16 files changed, 708 insertions(+) create mode 120000 plain-s3/README.md create mode 100644 plain-s3/plain/s3/README.md create mode 100644 plain-s3/plain/s3/__init__.py create mode 100644 plain-s3/plain/s3/admin.py create mode 100644 plain-s3/plain/s3/config.py create mode 100644 plain-s3/plain/s3/default_settings.py create mode 100644 plain-s3/plain/s3/fields.py create mode 100644 plain-s3/plain/s3/migrations/0001_initial.py create mode 100644 plain-s3/plain/s3/migrations/__init__.py create mode 100644 plain-s3/plain/s3/models.py create mode 100644 plain-s3/plain/s3/storage.py create mode 100644 plain-s3/pyproject.toml diff --git a/example/app/settings.py b/example/app/settings.py index af26c18fb1..2e94357c79 100644 --- a/example/app/settings.py +++ b/example/app/settings.py @@ -24,6 +24,7 @@ "plain.toolbar", "plain.redirection", "plain.observer", + "plain.s3", "app.users", ] @@ -56,3 +57,8 @@ AUTH_LOGIN_URL = "login" AUTH_USER_MODEL = "users.User" + +# S3 settings (configure for your storage provider) +S3_BUCKET = "" +S3_ACCESS_KEY_ID = "" +S3_SECRET_ACCESS_KEY = "" diff --git a/example/pyproject.toml b/example/pyproject.toml index d3de24fb16..ce47c37422 100644 --- a/example/pyproject.toml +++ b/example/pyproject.toml @@ -33,6 +33,7 @@ dependencies = [ "plain-tunnel", "plain-vendor", "plain-observer", + "plain-s3", ] [tool.plain.tailwind] diff --git a/plain-s3/README.md b/plain-s3/README.md new file mode 120000 index 0000000000..406db9dd45 --- /dev/null +++ b/plain-s3/README.md @@ -0,0 +1 @@ +plain/s3/README.md \ No newline at end of file diff --git a/plain-s3/plain/s3/README.md b/plain-s3/plain/s3/README.md new file mode 100644 index 0000000000..b6ad0a87fd --- /dev/null +++ b/plain-s3/plain/s3/README.md @@ -0,0 +1,169 @@ +# plain.s3 + +**S3-compatible file storage for Plain models.** + +Store files in S3, Cloudflare R2, MinIO, or any S3-compatible storage service. Designed for direct browser uploads using presigned URLs. 
+ +- [Overview](#overview) +- [Direct uploads](#direct-uploads) +- [Downloading files](#downloading-files) +- [Settings](#settings) +- [Installation](#installation) + +## Overview + +Add file uploads to your models with `S3FileField`: + +```python +from plain import models +from plain.models import types +from plain.s3.fields import S3FileField +from plain.s3.models import S3File + + +@models.register_model +class Document(models.Model): + title: str = types.CharField(max_length=200) + file: S3File | None = S3FileField() +``` + +Access file properties and generate download URLs: + +```python +doc = Document.query.get(id=some_id) + +doc.file.filename # "report.pdf" +doc.file.content_type # "application/pdf" +doc.file.byte_size # 1048576 +doc.file.size_display # "1.0 MB" +doc.file.download_url() # presigned S3 URL +``` + +## Direct uploads + +For large files, upload directly from the browser to S3 to avoid tying up your server. + +**1. Create a presigned upload from your backend:** + +```python +from plain.api import api +from plain.s3.models import S3File + + +@api.route("/uploads/presign", method="POST") +def create_presign(request): + data = S3File.create_presigned_upload( + filename=request.data["filename"], + content_type=request.data["content_type"], + byte_size=request.data["byte_size"], + ) + return data + # Returns: { + # "file_id": "uuid...", + # "upload_url": "https://bucket.s3...", + # "upload_fields": {"key": "...", "policy": "...", ...}, + # } +``` + +**2. Upload from the browser:** + +```javascript +// Get presigned URL +const presign = await fetch('/uploads/presign', { + method: 'POST', + body: JSON.stringify({ + filename: file.name, + content_type: file.type, + byte_size: file.size, + }), +}).then(r => r.json()); + +// Upload directly to S3 +const formData = new FormData(); +Object.entries(presign.upload_fields).forEach(([k, v]) => formData.append(k, v)); +formData.append('file', file); + +await fetch(presign.upload_url, { method: 'POST', body: formData }); + +// Now attach to your record +await fetch('/documents', { + method: 'POST', + body: JSON.stringify({ + title: 'My Document', + file_id: presign.file_id, + }), +}); +``` + +**3. Link the file to your record:** + +```python +@api.route("/documents", method="POST") +def create_document(request): + file = S3File.query.get(uuid=request.data["file_id"]) + doc = Document.query.create( + title=request.data["title"], + file=file, + ) + return {"id": str(doc.id)} +``` + +## Downloading files + +Generate presigned download URLs: + +```python +# Default expiration (1 hour) +url = doc.file.download_url() + +# Custom expiration +url = doc.file.download_url(expires_in=300) # 5 minutes +``` + +## Settings + +Configure your S3 connection in settings: + +```python +# Required +S3_BUCKET = "my-bucket" +S3_ACCESS_KEY_ID = "..." +S3_SECRET_ACCESS_KEY = "..." + +# Optional +S3_REGION = "us-east-1" +S3_ENDPOINT_URL = "https://..." # For R2, MinIO, etc. +S3_DEFAULT_ACL = "private" # or "public-read" +S3_PRESIGNED_URL_EXPIRATION = 3600 # seconds +S3_UPLOAD_KEY_PREFIX = "uploads/" +``` + +**Cloudflare R2 example:** + +```python +S3_BUCKET = "my-bucket" +S3_ACCESS_KEY_ID = "..." +S3_SECRET_ACCESS_KEY = "..." +S3_ENDPOINT_URL = "https://ACCOUNT_ID.r2.cloudflarestorage.com" +``` + +## Installation + +1. Add `plain.s3` to your `INSTALLED_PACKAGES`: + +```python +INSTALLED_PACKAGES = [ + # ... + "plain.s3", +] +``` + +2. Configure your S3 settings (see above). + +3. Run migrations: + +```bash +plain migrate +``` + +4. 
Add `S3FileField` to your models where needed. diff --git a/plain-s3/plain/s3/__init__.py b/plain-s3/plain/s3/__init__.py new file mode 100644 index 0000000000..8fe9e05456 --- /dev/null +++ b/plain-s3/plain/s3/__init__.py @@ -0,0 +1,3 @@ +# Models and fields should be imported from submodules: +# from plain.s3.models import S3File +# from plain.s3.fields import S3FileField diff --git a/plain-s3/plain/s3/admin.py b/plain-s3/plain/s3/admin.py new file mode 100644 index 0000000000..979b62bc6a --- /dev/null +++ b/plain-s3/plain/s3/admin.py @@ -0,0 +1,41 @@ +from __future__ import annotations + +from plain.admin.views import ( + AdminModelDetailView, + AdminModelListView, + AdminViewset, + register_viewset, +) + +from .models import S3File + + +@register_viewset +class S3FileViewset(AdminViewset): + class ListView(AdminModelListView): + nav_section = "S3" + model = S3File + title = "Files" + fields = [ + "id", + "filename", + "content_type", + "size_display", + "created_at", + ] + search_fields = [ + "uuid", + "filename", + "key", + ] + queryset_order = ["-created_at"] + actions = ["Delete"] + + def perform_action(self, action: str, target_ids: list) -> None: + if action == "Delete": + for file in S3File.query.filter(id__in=target_ids): + file.delete() # This also deletes from S3 + + class DetailView(AdminModelDetailView): + model = S3File + title = "File" diff --git a/plain-s3/plain/s3/config.py b/plain-s3/plain/s3/config.py new file mode 100644 index 0000000000..de00258bcb --- /dev/null +++ b/plain-s3/plain/s3/config.py @@ -0,0 +1,6 @@ +from plain.packages import PackageConfig, register_config + + +@register_config +class Config(PackageConfig): + package_label = "plains3" diff --git a/plain-s3/plain/s3/default_settings.py b/plain-s3/plain/s3/default_settings.py new file mode 100644 index 0000000000..68bf400632 --- /dev/null +++ b/plain-s3/plain/s3/default_settings.py @@ -0,0 +1,8 @@ +S3_BUCKET: str = "" +S3_REGION: str = "" +S3_ACCESS_KEY_ID: str = "" +S3_SECRET_ACCESS_KEY: str = "" +S3_ENDPOINT_URL: str = "" # For R2, MinIO, etc. +S3_DEFAULT_ACL: str = "" # e.g., "private" or "public-read" +S3_PRESIGNED_URL_EXPIRATION: int = 3600 # 1 hour +S3_UPLOAD_KEY_PREFIX: str = "uploads/" diff --git a/plain-s3/plain/s3/fields.py b/plain-s3/plain/s3/fields.py new file mode 100644 index 0000000000..6978f4f3ee --- /dev/null +++ b/plain-s3/plain/s3/fields.py @@ -0,0 +1,25 @@ +from __future__ import annotations + +from plain import models +from plain.models import types + + +def S3FileField(**kwargs) -> types.ForeignKeyField: + """ + A ForeignKey field that links to an S3File. + + Usage: + class Document(models.Model): + file: S3File | None = S3FileField() + + By default, the field is optional (allow_null=True) and uses SET_NULL + on delete to avoid cascading deletes of your records when files are removed. 
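    These defaults are applied with ``setdefault``, so any of them can be
    overridden by passing explicit ``on_delete``, ``allow_null``, or
    ``required`` arguments.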
+ """ + # Import here to avoid circular imports + from .models import S3File + + kwargs.setdefault("on_delete", models.SET_NULL) + kwargs.setdefault("allow_null", True) + kwargs.setdefault("required", False) + + return types.ForeignKeyField(S3File, **kwargs) diff --git a/plain-s3/plain/s3/migrations/0001_initial.py b/plain-s3/plain/s3/migrations/0001_initial.py new file mode 100644 index 0000000000..e1211618ea --- /dev/null +++ b/plain-s3/plain/s3/migrations/0001_initial.py @@ -0,0 +1,48 @@ +# Generated by Plain 0.94.0 on 2025-12-13 23:47 + +import uuid +from plain import models +from plain.models import migrations + + +class Migration(migrations.Migration): + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="S3File", + fields=[ + ("id", models.PrimaryKeyField()), + ("uuid", models.UUIDField(default=uuid.uuid4)), + ("key", models.CharField(max_length=500)), + ("filename", models.CharField(max_length=255)), + ("content_type", models.CharField(max_length=100)), + ("byte_size", models.PositiveBigIntegerField()), + ("checksum", models.CharField(max_length=64, required=False)), + ("metadata", models.JSONField(default=dict)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ], + ), + migrations.AddIndex( + model_name="s3file", + index=models.Index(fields=["uuid"], name="plains3_s3f_uuid_f52e97_idx"), + ), + migrations.AddIndex( + model_name="s3file", + index=models.Index(fields=["key"], name="plains3_s3f_key_a488eb_idx"), + ), + migrations.AddIndex( + model_name="s3file", + index=models.Index( + fields=["created_at"], name="plains3_s3f_created_76240a_idx" + ), + ), + migrations.AddConstraint( + model_name="s3file", + constraint=models.UniqueConstraint( + fields=("uuid",), name="plains3_s3file_unique_uuid" + ), + ), + ] diff --git a/plain-s3/plain/s3/migrations/__init__.py b/plain-s3/plain/s3/migrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/plain-s3/plain/s3/models.py b/plain-s3/plain/s3/models.py new file mode 100644 index 0000000000..f519294415 --- /dev/null +++ b/plain-s3/plain/s3/models.py @@ -0,0 +1,157 @@ +from __future__ import annotations + +import mimetypes +from datetime import datetime +from uuid import UUID, uuid4 + +from plain import models +from plain.models import types +from plain.runtime import settings + +from . import storage + + +@models.register_model +class S3File(models.Model): + """ + Represents a file stored in S3-compatible storage. + + This model stores metadata about files. The actual file content + is stored in S3. Link to this model using S3FileField() on your models. + """ + + query: models.QuerySet[S3File] = models.QuerySet() + + uuid: UUID = types.UUIDField(default=uuid4) + + # S3 storage location + key: str = types.CharField(max_length=500) + + # File metadata + filename: str = types.CharField(max_length=255) + content_type: str = types.CharField(max_length=100) + byte_size: int = types.PositiveBigIntegerField() + checksum: str = types.CharField(max_length=64, required=False) + + # Extensible metadata (dimensions, duration, etc.) 
+ metadata: dict = types.JSONField(default=dict) + + created_at: datetime = types.DateTimeField(auto_now_add=True) + + model_options = models.Options( + indexes=[ + models.Index(fields=["uuid"]), + models.Index(fields=["key"]), + models.Index(fields=["created_at"]), + ], + constraints=[ + models.UniqueConstraint(fields=["uuid"], name="plains3_s3file_unique_uuid"), + ], + ) + + def __str__(self) -> str: + return self.filename + + @classmethod + def generate_key(cls, filename: str) -> str: + """Generate a unique S3 key for a new file.""" + ext = "" + if "." in filename: + ext = "." + filename.rsplit(".", 1)[-1].lower() + return f"{settings.S3_UPLOAD_KEY_PREFIX}{uuid4()}{ext}" + + @classmethod + def create_presigned_upload( + cls, + *, + filename: str, + content_type: str | None = None, + byte_size: int, + ) -> dict: + """ + Create a new S3File record and return presigned upload data. + + The file record is created immediately but the file isn't uploaded yet. + After the client uploads directly to S3, call verify_upload() to confirm. + + Returns: + { + "file_id": UUID, + "key": str, + "upload_url": str, + "upload_fields": dict, + } + """ + if content_type is None: + content_type, _ = mimetypes.guess_type(filename) + content_type = content_type or "application/octet-stream" + + key = cls.generate_key(filename) + + # Create the file record + file = cls.query.create( + key=key, + filename=filename, + content_type=content_type, + byte_size=byte_size, + ) + + # Generate presigned upload URL + presign = storage.generate_presigned_upload_url(key, content_type) + + return { + "file_id": str(file.uuid), + "key": key, + "upload_url": presign["url"], + "upload_fields": presign["fields"], + } + + def download_url(self, *, expires_in: int | None = None) -> str: + """Generate a presigned URL for downloading this file.""" + return storage.generate_presigned_download_url( + self.key, + expires_in=expires_in, + filename=self.filename, + ) + + def exists_in_storage(self) -> bool: + """Check if the file actually exists in S3.""" + return storage.head_object(self.key) is not None + + def delete(self) -> None: + """Delete the file from S3 and the database record.""" + storage.delete_object(self.key) + super().delete() + + @property + def extension(self) -> str: + """Get the file extension (lowercase, without dot).""" + if "." 
in self.filename: + return self.filename.rsplit(".", 1)[-1].lower() + return "" + + def is_image(self) -> bool: + """Check if this file is an image based on content type.""" + return self.content_type.startswith("image/") + + def is_video(self) -> bool: + """Check if this file is a video based on content type.""" + return self.content_type.startswith("video/") + + def is_audio(self) -> bool: + """Check if this file is audio based on content type.""" + return self.content_type.startswith("audio/") + + def is_pdf(self) -> bool: + """Check if this file is a PDF.""" + return self.content_type == "application/pdf" + + @property + def size_display(self) -> str: + """Human-readable file size.""" + size = self.byte_size + for unit in ["B", "KB", "MB", "GB", "TB"]: + if size < 1024: + return f"{size:.1f} {unit}" if unit != "B" else f"{size} {unit}" + size /= 1024 + return f"{size:.1f} PB" diff --git a/plain-s3/plain/s3/storage.py b/plain-s3/plain/s3/storage.py new file mode 100644 index 0000000000..b186ca5a43 --- /dev/null +++ b/plain-s3/plain/s3/storage.py @@ -0,0 +1,116 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +import boto3 + +from plain.runtime import settings + +if TYPE_CHECKING: + from mypy_boto3_s3 import S3Client + + +_client: S3Client | None = None + + +def get_client() -> S3Client: + """Get or create the S3 client singleton.""" + global _client + if _client is None: + kwargs = { + "aws_access_key_id": settings.S3_ACCESS_KEY_ID, + "aws_secret_access_key": settings.S3_SECRET_ACCESS_KEY, + } + if settings.S3_REGION: + kwargs["region_name"] = settings.S3_REGION + if settings.S3_ENDPOINT_URL: + kwargs["endpoint_url"] = settings.S3_ENDPOINT_URL + _client = boto3.client("s3", **kwargs) + return _client + + +def generate_presigned_upload_url( + key: str, + content_type: str, + *, + expires_in: int | None = None, +) -> dict: + """ + Generate a presigned URL for uploading a file directly to S3. + + Returns a dict with 'url' and 'fields' for form-based uploads, + or just 'url' for PUT-based uploads. + """ + if expires_in is None: + expires_in = settings.S3_PRESIGNED_URL_EXPIRATION + + client = get_client() + + # Use presigned POST for browser uploads (more flexible) + conditions = [ + {"Content-Type": content_type}, + ] + fields = { + "Content-Type": content_type, + } + + if settings.S3_DEFAULT_ACL: + conditions.append({"acl": settings.S3_DEFAULT_ACL}) + fields["acl"] = settings.S3_DEFAULT_ACL + + response = client.generate_presigned_post( + Bucket=settings.S3_BUCKET, + Key=key, + Fields=fields, + Conditions=conditions, + ExpiresIn=expires_in, + ) + return response + + +def generate_presigned_download_url( + key: str, + *, + expires_in: int | None = None, + filename: str | None = None, +) -> str: + """Generate a presigned URL for downloading a file from S3.""" + if expires_in is None: + expires_in = settings.S3_PRESIGNED_URL_EXPIRATION + + client = get_client() + + params = { + "Bucket": settings.S3_BUCKET, + "Key": key, + } + + if filename: + params["ResponseContentDisposition"] = f'attachment; filename="{filename}"' + + return client.generate_presigned_url( + "get_object", + Params=params, + ExpiresIn=expires_in, + ) + + +def delete_object(key: str) -> None: + """Delete an object from S3.""" + client = get_client() + client.delete_object(Bucket=settings.S3_BUCKET, Key=key) + + +def head_object(key: str) -> dict | None: + """ + Get object metadata from S3. + + Returns None if the object doesn't exist. 
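    Example (illustrative; the key is a placeholder):

        if (meta := head_object("uploads/abc.png")) is not None:
            print(meta["ContentLength"])  # object size in bytes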
+ """ + client = get_client() + try: + return client.head_object(Bucket=settings.S3_BUCKET, Key=key) + except client.exceptions.ClientError as e: + if e.response["Error"]["Code"] == "404": + return None + raise diff --git a/plain-s3/pyproject.toml b/plain-s3/pyproject.toml new file mode 100644 index 0000000000..7d8e5fc35f --- /dev/null +++ b/plain-s3/pyproject.toml @@ -0,0 +1,25 @@ +[project] +name = "plain.s3" +version = "0.1.0" +description = "S3-compatible file storage for Plain models." +authors = [{name = "Dave Gaeddert", email = "dave.gaeddert@dropseed.dev"}] +readme = "README.md" +license = "BSD-3-Clause" +requires-python = ">=3.13" +dependencies = [ + "plain<1.0.0", + "plain.models<1.0.0", + "boto3>=1.35.0", +] + +[dependency-groups] +dev = [ + "pytest>=8.0.0", +] + +[tool.hatch.build.targets.wheel] +packages = ["plain"] + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" diff --git a/pyproject.toml b/pyproject.toml index 476713bc22..d121d1f272 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,6 +34,7 @@ dev = [ "plain-toolbar", "plain-tunnel", "plain-vendor", + "plain-s3", # Type checking and better dev experience "ty>=0.0.1a33", "psycopg[binary]>=3.2.12", @@ -70,6 +71,7 @@ dev = [ "plain-toolbar" = { workspace = true } "plain-tunnel" = { workspace = true } "plain-vendor" = { workspace = true } +"plain-s3" = { workspace = true } [tool.uv.workspace] members = [ diff --git a/uv.lock b/uv.lock index a1f0eb889a..378369ff60 100644 --- a/uv.lock +++ b/uv.lock @@ -27,6 +27,7 @@ members = [ "plain-passwords", "plain-pytest", "plain-redirection", + "plain-s3", "plain-scan", "plain-sessions", "plain-start", @@ -51,6 +52,34 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916, upload-time = "2025-03-17T00:02:52.713Z" }, ] +[[package]] +name = "boto3" +version = "1.42.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, + { name = "jmespath" }, + { name = "s3transfer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8c/07/dfa651dbd57bfc34d952a101280928bab08ed6186f009c660a36c211ccff/boto3-1.42.9.tar.gz", hash = "sha256:cdd4cc3e5bb08ed8a0c5cc77eca78f98f0239521de0991f14e44b788b0c639b2", size = 112827, upload-time = "2025-12-12T20:33:20.236Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/eb/97fdf6fbc8066fb1475b8ef260c1a58798b2b4f1e8839b501550de5d5ba1/boto3-1.42.9-py3-none-any.whl", hash = "sha256:d21d22af9aeb1bad8e9b670a221d6534c0120f7e7baf523dafaca83f1f5c3f90", size = 140561, upload-time = "2025-12-12T20:33:18.035Z" }, +] + +[[package]] +name = "botocore" +version = "1.42.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jmespath" }, + { name = "python-dateutil" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fd/f3/2d2cfb500e2dc00b0e33e3c8743306e6330f3cf219d19e9260dab2f3d6c2/botocore-1.42.9.tar.gz", hash = "sha256:74f69bfd116cc7c8215481284957eecdb48580e071dd50cb8c64356a866abd8c", size = 14861916, upload-time = "2025-12-12T20:33:08.017Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/2a/e9275f40042f7a09915c4be86b092cb02dc4bd74e77ab8864f485d998af1/botocore-1.42.9-py3-none-any.whl", hash = "sha256:f99ba2ca34e24c4ebec150376c815646970753c032eb84f230874b2975a185a8", size = 14537810, upload-time = 
"2025-12-12T20:33:04.069Z" }, +] + [[package]] name = "certifi" version = "2025.6.15" @@ -187,6 +216,7 @@ dependencies = [ { name = "plain-passwords" }, { name = "plain-pytest" }, { name = "plain-redirection" }, + { name = "plain-s3" }, { name = "plain-scan" }, { name = "plain-sessions" }, { name = "plain-support" }, @@ -220,6 +250,7 @@ requires-dist = [ { name = "plain-passwords", editable = "plain-passwords" }, { name = "plain-pytest", editable = "plain-pytest" }, { name = "plain-redirection", editable = "plain-redirection" }, + { name = "plain-s3", editable = "plain-s3" }, { name = "plain-scan", editable = "plain-scan" }, { name = "plain-sessions", editable = "plain-sessions" }, { name = "plain-support", editable = "plain-support" }, @@ -280,6 +311,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, ] +[[package]] +name = "jmespath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843, upload-time = "2022-06-17T18:00:12.224Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256, upload-time = "2022-06-17T18:00:10.251Z" }, +] + [[package]] name = "markdown-it-py" version = "3.0.0" @@ -837,6 +877,31 @@ requires-dist = [ { name = "plain-models", editable = "plain-models" }, ] +[[package]] +name = "plain-s3" +version = "0.1.0" +source = { editable = "plain-s3" } +dependencies = [ + { name = "boto3" }, + { name = "plain" }, + { name = "plain-models" }, +] + +[package.dev-dependencies] +dev = [ + { name = "pytest" }, +] + +[package.metadata] +requires-dist = [ + { name = "boto3", specifier = ">=1.35.0" }, + { name = "plain", editable = "plain" }, + { name = "plain-models", editable = "plain-models" }, +] + +[package.metadata.requires-dev] +dev = [{ name = "pytest", specifier = ">=8.0.0" }] + [[package]] name = "plain-scan" version = "0.5.0" @@ -1005,6 +1070,7 @@ dev = [ { name = "plain-passwords" }, { name = "plain-pytest" }, { name = "plain-redirection" }, + { name = "plain-s3" }, { name = "plain-scan" }, { name = "plain-sessions" }, { name = "plain-support" }, @@ -1043,6 +1109,7 @@ dev = [ { name = "plain-passwords", editable = "plain-passwords" }, { name = "plain-pytest", editable = "plain-pytest" }, { name = "plain-redirection", editable = "plain-redirection" }, + { name = "plain-s3", editable = "plain-s3" }, { name = "plain-scan", editable = "plain-scan" }, { name = "plain-sessions", editable = "plain-sessions" }, { name = "plain-support", editable = "plain-support" }, @@ -1139,6 +1206,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/29/16/c8a903f4c4dffe7a12843191437d7cd8e32751d5de349d45d3fe69544e87/pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7", size = 365474, upload-time = "2025-06-18T05:48:03.955Z" }, ] +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, +] + [[package]] name = "python-dotenv" version = "1.0.1" @@ -1230,6 +1309,27 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d0/33/4d3e79e4a84533d6cd526bfb42c020a23256ae5e4265d858bd1287831f7d/ruff-0.12.0-py3-none-win_arm64.whl", hash = "sha256:8cd24580405ad8c1cc64d61725bca091d6b6da7eb3d36f72cc605467069d7e8b", size = 10724946, upload-time = "2025-06-17T15:19:23.952Z" }, ] +[[package]] +name = "s3transfer" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/05/04/74127fc843314818edfa81b5540e26dd537353b123a4edc563109d8f17dd/s3transfer-0.16.0.tar.gz", hash = "sha256:8e990f13268025792229cd52fa10cb7163744bf56e719e0b9cb925ab79abf920", size = 153827, upload-time = "2025-12-01T02:30:59.114Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fc/51/727abb13f44c1fcf6d145979e1535a35794db0f6e450a0cb46aa24732fe2/s3transfer-0.16.0-py3-none-any.whl", hash = "sha256:18e25d66fed509e3868dc1572b3f427ff947dd2c56f844a5bf09481ad3f3b2fe", size = 86830, upload-time = "2025-12-01T02:30:57.729Z" }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + [[package]] name = "sniffio" version = "1.3.1" From 2e0d973d2edfaf8b4d50b37cf6900426d92ea4a3 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 14 Dec 2025 00:40:15 +0000 Subject: [PATCH 02/11] Refactor plain-s3 to use per-field configuration instead of global settings - S3FileField now requires bucket parameter and accepts key_prefix and acl - Move bucket, key_prefix, acl from global settings to per-field configuration - Global settings now only contain connection credentials (S3_ACCESS_KEY_ID, S3_SECRET_ACCESS_KEY, S3_REGION, S3_ENDPOINT_URL) - Add create_presigned_upload() method to S3FileField for convenient uploads - Store bucket on S3File model for download_url() and delete() operations - Update README with per-field configuration examples --- example/app/settings.py | 3 +- plain-s3/plain/s3/README.md | 52 +++++++---- plain-s3/plain/s3/default_settings.py | 7 +- plain-s3/plain/s3/fields.py | 93 +++++++++++++++++--- plain-s3/plain/s3/migrations/0001_initial.py | 7 +- plain-s3/plain/s3/models.py | 32 ++++--- 
plain-s3/plain/s3/storage.py | 39 ++++---- 7 files changed, 167 insertions(+), 66 deletions(-) diff --git a/example/app/settings.py b/example/app/settings.py index 2e94357c79..668f0aa414 100644 --- a/example/app/settings.py +++ b/example/app/settings.py @@ -58,7 +58,6 @@ AUTH_LOGIN_URL = "login" AUTH_USER_MODEL = "users.User" -# S3 settings (configure for your storage provider) -S3_BUCKET = "" +# S3 connection settings (configure for your storage provider) S3_ACCESS_KEY_ID = "" S3_SECRET_ACCESS_KEY = "" diff --git a/plain-s3/plain/s3/README.md b/plain-s3/plain/s3/README.md index b6ad0a87fd..5392b75542 100644 --- a/plain-s3/plain/s3/README.md +++ b/plain-s3/plain/s3/README.md @@ -12,7 +12,7 @@ Store files in S3, Cloudflare R2, MinIO, or any S3-compatible storage service. D ## Overview -Add file uploads to your models with `S3FileField`: +Add file uploads to your models with `S3FileField`. Each field specifies which bucket to use: ```python from plain import models @@ -24,7 +24,28 @@ from plain.s3.models import S3File @models.register_model class Document(models.Model): title: str = types.CharField(max_length=200) - file: S3File | None = S3FileField() + file: S3File | None = S3FileField(bucket="my-bucket") +``` + +Configure per-field storage options: + +```python +@models.register_model +class User(models.Model): + name: str = types.CharField(max_length=100) + + # Public avatars with custom path prefix + avatar: S3File | None = S3FileField( + bucket="public-assets", + key_prefix="avatars/", + acl="public-read", + ) + + # Private documents in a different bucket + id_document: S3File | None = S3FileField( + bucket="private-docs", + key_prefix="id-verification/", + ) ``` Access file properties and generate download URLs: @@ -47,14 +68,18 @@ For large files, upload directly from the browser to S3 to avoid tying up your s ```python from plain.api import api -from plain.s3.models import S3File + +from app.documents.models import Document @api.route("/uploads/presign", method="POST") def create_presign(request): - data = S3File.create_presigned_upload( + # Get the field configuration + file_field = Document._meta.get_field("file") + + # Create presigned upload using field's bucket/prefix/acl + data = file_field.create_presigned_upload( filename=request.data["filename"], - content_type=request.data["content_type"], byte_size=request.data["byte_size"], ) return data @@ -73,7 +98,6 @@ const presign = await fetch('/uploads/presign', { method: 'POST', body: JSON.stringify({ filename: file.name, - content_type: file.type, byte_size: file.size, }), }).then(r => r.json()); @@ -98,6 +122,9 @@ await fetch('/documents', { **3. Link the file to your record:** ```python +from plain.s3.models import S3File + + @api.route("/documents", method="POST") def create_document(request): file = S3File.query.get(uuid=request.data["file_id"]) @@ -122,26 +149,21 @@ url = doc.file.download_url(expires_in=300) # 5 minutes ## Settings -Configure your S3 connection in settings: +Configure your S3 connection credentials in settings. Bucket and path configuration is per-field (see Overview above). ```python -# Required -S3_BUCKET = "my-bucket" +# Required - connection credentials S3_ACCESS_KEY_ID = "..." S3_SECRET_ACCESS_KEY = "..." # Optional S3_REGION = "us-east-1" S3_ENDPOINT_URL = "https://..." # For R2, MinIO, etc. 
-S3_DEFAULT_ACL = "private" # or "public-read" -S3_PRESIGNED_URL_EXPIRATION = 3600 # seconds -S3_UPLOAD_KEY_PREFIX = "uploads/" ``` **Cloudflare R2 example:** ```python -S3_BUCKET = "my-bucket" S3_ACCESS_KEY_ID = "..." S3_SECRET_ACCESS_KEY = "..." S3_ENDPOINT_URL = "https://ACCOUNT_ID.r2.cloudflarestorage.com" @@ -158,7 +180,7 @@ INSTALLED_PACKAGES = [ ] ``` -2. Configure your S3 settings (see above). +2. Configure your S3 connection credentials (see Settings above). 3. Run migrations: @@ -166,4 +188,4 @@ INSTALLED_PACKAGES = [ plain migrate ``` -4. Add `S3FileField` to your models where needed. +4. Add `S3FileField` to your models, specifying the bucket for each field. diff --git a/plain-s3/plain/s3/default_settings.py b/plain-s3/plain/s3/default_settings.py index 68bf400632..eea109fe97 100644 --- a/plain-s3/plain/s3/default_settings.py +++ b/plain-s3/plain/s3/default_settings.py @@ -1,8 +1,5 @@ -S3_BUCKET: str = "" -S3_REGION: str = "" +# S3 connection settings (global) S3_ACCESS_KEY_ID: str = "" S3_SECRET_ACCESS_KEY: str = "" +S3_REGION: str = "" S3_ENDPOINT_URL: str = "" # For R2, MinIO, etc. -S3_DEFAULT_ACL: str = "" # e.g., "private" or "public-read" -S3_PRESIGNED_URL_EXPIRATION: int = 3600 # 1 hour -S3_UPLOAD_KEY_PREFIX: str = "uploads/" diff --git a/plain-s3/plain/s3/fields.py b/plain-s3/plain/s3/fields.py index 6978f4f3ee..2a2de95d88 100644 --- a/plain-s3/plain/s3/fields.py +++ b/plain-s3/plain/s3/fields.py @@ -1,25 +1,98 @@ from __future__ import annotations +from typing import TYPE_CHECKING, Any + from plain import models -from plain.models import types +from plain.models.fields.related import ForeignKeyField + +if TYPE_CHECKING: + pass -def S3FileField(**kwargs) -> types.ForeignKeyField: +class S3FileField(ForeignKeyField): """ - A ForeignKey field that links to an S3File. + A ForeignKey field that links to an S3File with S3 configuration. Usage: class Document(models.Model): - file: S3File | None = S3FileField() + file: S3File | None = S3FileField(bucket="my-bucket") + + # With optional configuration: + avatar: S3File | None = S3FileField( + bucket="avatars-bucket", + key_prefix="users/", + acl="public-read", + ) + + The bucket is required. key_prefix and acl are optional. By default, the field is optional (allow_null=True) and uses SET_NULL on delete to avoid cascading deletes of your records when files are removed. """ - # Import here to avoid circular imports - from .models import S3File - kwargs.setdefault("on_delete", models.SET_NULL) - kwargs.setdefault("allow_null", True) - kwargs.setdefault("required", False) + def __init__( + self, + bucket: str, + *, + key_prefix: str = "", + acl: str = "", + on_delete: Any = None, + **kwargs: Any, + ): + # Import here to avoid circular imports + from .models import S3File + + # Store S3 configuration + self.bucket = bucket + self.key_prefix = key_prefix + self.acl = acl + + # Set FK defaults + if on_delete is None: + on_delete = models.SET_NULL + kwargs.setdefault("allow_null", True) + kwargs.setdefault("required", False) + + super().__init__(S3File, on_delete=on_delete, **kwargs) + + def create_presigned_upload( + self, + *, + filename: str, + byte_size: int, + content_type: str | None = None, + ) -> dict: + """ + Create a presigned upload using this field's configuration. 
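        This is a thin wrapper around ``S3File.create_presigned_upload`` using
        this field's bucket, key_prefix, and acl. For example (the ``Document``
        model and its ``file`` field here are hypothetical):

            field = Document._meta.get_field("file")
            data = field.create_presigned_upload(filename="report.pdf", byte_size=1024)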
+ + Returns: + { + "file_id": str (UUID), + "key": str, + "upload_url": str, + "upload_fields": dict, + } + """ + from .models import S3File + + return S3File.create_presigned_upload( + bucket=self.bucket, + filename=filename, + byte_size=byte_size, + content_type=content_type, + key_prefix=self.key_prefix, + acl=self.acl, + ) - return types.ForeignKeyField(S3File, **kwargs) + def deconstruct(self) -> tuple: + """Support migrations by including S3 configuration.""" + name, path, args, kwargs = super().deconstruct() + # Add our custom attributes + kwargs["bucket"] = self.bucket + if self.key_prefix: + kwargs["key_prefix"] = self.key_prefix + if self.acl: + kwargs["acl"] = self.acl + # Remove the 'to' argument since we always point to S3File + args = () + return name, path, args, kwargs diff --git a/plain-s3/plain/s3/migrations/0001_initial.py b/plain-s3/plain/s3/migrations/0001_initial.py index e1211618ea..33fbaaf577 100644 --- a/plain-s3/plain/s3/migrations/0001_initial.py +++ b/plain-s3/plain/s3/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Plain 0.94.0 on 2025-12-13 23:47 +# Generated by Plain 0.94.0 on 2025-12-14 00:39 import uuid from plain import models @@ -16,6 +16,7 @@ class Migration(migrations.Migration): fields=[ ("id", models.PrimaryKeyField()), ("uuid", models.UUIDField(default=uuid.uuid4)), + ("bucket", models.CharField(max_length=255)), ("key", models.CharField(max_length=500)), ("filename", models.CharField(max_length=255)), ("content_type", models.CharField(max_length=100)), @@ -31,7 +32,9 @@ class Migration(migrations.Migration): ), migrations.AddIndex( model_name="s3file", - index=models.Index(fields=["key"], name="plains3_s3f_key_a488eb_idx"), + index=models.Index( + fields=["bucket", "key"], name="plains3_s3f_bucket_fbc61f_idx" + ), ), migrations.AddIndex( model_name="s3file", diff --git a/plain-s3/plain/s3/models.py b/plain-s3/plain/s3/models.py index f519294415..c7cdab85e8 100644 --- a/plain-s3/plain/s3/models.py +++ b/plain-s3/plain/s3/models.py @@ -6,7 +6,6 @@ from plain import models from plain.models import types -from plain.runtime import settings from . import storage @@ -25,6 +24,7 @@ class S3File(models.Model): uuid: UUID = types.UUIDField(default=uuid4) # S3 storage location + bucket: str = types.CharField(max_length=255) key: str = types.CharField(max_length=500) # File metadata @@ -41,7 +41,7 @@ class S3File(models.Model): model_options = models.Options( indexes=[ models.Index(fields=["uuid"]), - models.Index(fields=["key"]), + models.Index(fields=["bucket", "key"]), models.Index(fields=["created_at"]), ], constraints=[ @@ -53,20 +53,23 @@ def __str__(self) -> str: return self.filename @classmethod - def generate_key(cls, filename: str) -> str: + def generate_key(cls, filename: str, *, key_prefix: str = "") -> str: """Generate a unique S3 key for a new file.""" ext = "" if "." in filename: ext = "." + filename.rsplit(".", 1)[-1].lower() - return f"{settings.S3_UPLOAD_KEY_PREFIX}{uuid4()}{ext}" + return f"{key_prefix}{uuid4()}{ext}" @classmethod def create_presigned_upload( cls, *, + bucket: str, filename: str, - content_type: str | None = None, byte_size: int, + content_type: str | None = None, + key_prefix: str = "", + acl: str = "", ) -> dict: """ Create a new S3File record and return presigned upload data. 
@@ -76,7 +79,7 @@ def create_presigned_upload( Returns: { - "file_id": UUID, + "file_id": str (UUID), "key": str, "upload_url": str, "upload_fields": dict, @@ -86,10 +89,11 @@ def create_presigned_upload( content_type, _ = mimetypes.guess_type(filename) content_type = content_type or "application/octet-stream" - key = cls.generate_key(filename) + key = cls.generate_key(filename, key_prefix=key_prefix) # Create the file record file = cls.query.create( + bucket=bucket, key=key, filename=filename, content_type=content_type, @@ -97,7 +101,9 @@ def create_presigned_upload( ) # Generate presigned upload URL - presign = storage.generate_presigned_upload_url(key, content_type) + presign = storage.generate_presigned_upload_url( + bucket, key, content_type, acl=acl + ) return { "file_id": str(file.uuid), @@ -108,19 +114,23 @@ def create_presigned_upload( def download_url(self, *, expires_in: int | None = None) -> str: """Generate a presigned URL for downloading this file.""" + kwargs = {} + if expires_in is not None: + kwargs["expires_in"] = expires_in return storage.generate_presigned_download_url( + self.bucket, self.key, - expires_in=expires_in, filename=self.filename, + **kwargs, ) def exists_in_storage(self) -> bool: """Check if the file actually exists in S3.""" - return storage.head_object(self.key) is not None + return storage.head_object(self.bucket, self.key) is not None def delete(self) -> None: """Delete the file from S3 and the database record.""" - storage.delete_object(self.key) + storage.delete_object(self.bucket, self.key) super().delete() @property diff --git a/plain-s3/plain/s3/storage.py b/plain-s3/plain/s3/storage.py index b186ca5a43..ab4490d6c8 100644 --- a/plain-s3/plain/s3/storage.py +++ b/plain-s3/plain/s3/storage.py @@ -12,6 +12,8 @@ _client: S3Client | None = None +DEFAULT_PRESIGNED_URL_EXPIRATION = 3600 # 1 hour + def get_client() -> S3Client: """Get or create the S3 client singleton.""" @@ -30,36 +32,33 @@ def get_client() -> S3Client: def generate_presigned_upload_url( + bucket: str, key: str, content_type: str, *, - expires_in: int | None = None, + acl: str = "", + expires_in: int = DEFAULT_PRESIGNED_URL_EXPIRATION, ) -> dict: """ Generate a presigned URL for uploading a file directly to S3. - Returns a dict with 'url' and 'fields' for form-based uploads, - or just 'url' for PUT-based uploads. + Returns a dict with 'url' and 'fields' for form-based uploads. 
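    Example (sketch; bucket, key, and content type are placeholders):

        presign = generate_presigned_upload_url("my-bucket", "uploads/abc.png", "image/png")
        # POST a multipart form to presign["url"] with presign["fields"] included,
        # plus the file content itself appended as the "file" form field.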
""" - if expires_in is None: - expires_in = settings.S3_PRESIGNED_URL_EXPIRATION - client = get_client() - # Use presigned POST for browser uploads (more flexible) - conditions = [ + conditions: list = [ {"Content-Type": content_type}, ] fields = { "Content-Type": content_type, } - if settings.S3_DEFAULT_ACL: - conditions.append({"acl": settings.S3_DEFAULT_ACL}) - fields["acl"] = settings.S3_DEFAULT_ACL + if acl: + conditions.append({"acl": acl}) + fields["acl"] = acl response = client.generate_presigned_post( - Bucket=settings.S3_BUCKET, + Bucket=bucket, Key=key, Fields=fields, Conditions=conditions, @@ -69,19 +68,17 @@ def generate_presigned_upload_url( def generate_presigned_download_url( + bucket: str, key: str, *, - expires_in: int | None = None, + expires_in: int = DEFAULT_PRESIGNED_URL_EXPIRATION, filename: str | None = None, ) -> str: """Generate a presigned URL for downloading a file from S3.""" - if expires_in is None: - expires_in = settings.S3_PRESIGNED_URL_EXPIRATION - client = get_client() params = { - "Bucket": settings.S3_BUCKET, + "Bucket": bucket, "Key": key, } @@ -95,13 +92,13 @@ def generate_presigned_download_url( ) -def delete_object(key: str) -> None: +def delete_object(bucket: str, key: str) -> None: """Delete an object from S3.""" client = get_client() - client.delete_object(Bucket=settings.S3_BUCKET, Key=key) + client.delete_object(Bucket=bucket, Key=key) -def head_object(key: str) -> dict | None: +def head_object(bucket: str, key: str) -> dict | None: """ Get object metadata from S3. @@ -109,7 +106,7 @@ def head_object(key: str) -> dict | None: """ client = get_client() try: - return client.head_object(Bucket=settings.S3_BUCKET, Key=key) + return client.head_object(Bucket=bucket, Key=key) except client.exceptions.ClientError as e: if e.response["Error"]["Code"] == "404": return None From d87351c0a188bd9b52d3a1220869457792b195fa Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 14 Dec 2025 01:55:03 +0000 Subject: [PATCH 03/11] Clean up plain-s3 package - Empty __init__.py (no comments needed) - Remove unused mypy_boto3_s3 type stub import from storage.py - Fix README to use class-based APIView instead of @api.route decorator - Remove unused TYPE_CHECKING import from fields.py --- plain-s3/plain/s3/README.md | 80 ++++++++++++++++++++--------------- plain-s3/plain/s3/__init__.py | 3 -- plain-s3/plain/s3/fields.py | 5 +-- plain-s3/plain/s3/storage.py | 9 +--- 4 files changed, 48 insertions(+), 49 deletions(-) diff --git a/plain-s3/plain/s3/README.md b/plain-s3/plain/s3/README.md index 5392b75542..cc55c4c5db 100644 --- a/plain-s3/plain/s3/README.md +++ b/plain-s3/plain/s3/README.md @@ -64,37 +64,63 @@ doc.file.download_url() # presigned S3 URL For large files, upload directly from the browser to S3 to avoid tying up your server. -**1. Create a presigned upload from your backend:** +**1. 
Create a presigned upload view:** ```python -from plain.api import api +# app/api/views.py +from plain.api.views import APIView +from plain.s3.models import S3File from app.documents.models import Document -@api.route("/uploads/presign", method="POST") -def create_presign(request): - # Get the field configuration - file_field = Document._meta.get_field("file") +class PresignUploadView(APIView): + def post(self): + # Get the field configuration + file_field = Document._meta.get_field("file") + + # Create presigned upload using field's bucket/prefix/acl + return file_field.create_presigned_upload( + filename=self.data["filename"], + byte_size=self.data["byte_size"], + ) + # Returns: { + # "file_id": "uuid...", + # "upload_url": "https://bucket.s3...", + # "upload_fields": {"key": "...", "policy": "...", ...}, + # } + + +class DocumentView(APIView): + def post(self): + file = S3File.query.get(uuid=self.data["file_id"]) + doc = Document.query.create( + title=self.data["title"], + file=file, + ) + return {"id": str(doc.id)} +``` - # Create presigned upload using field's bucket/prefix/acl - data = file_field.create_presigned_upload( - filename=request.data["filename"], - byte_size=request.data["byte_size"], - ) - return data - # Returns: { - # "file_id": "uuid...", - # "upload_url": "https://bucket.s3...", - # "upload_fields": {"key": "...", "policy": "...", ...}, - # } +```python +# app/api/urls.py +from plain.urls import Router, path + +from . import views + + +class APIRouter(Router): + namespace = "api" + urls = [ + path("uploads/presign/", views.PresignUploadView), + path("documents/", views.DocumentView), + ] ``` **2. Upload from the browser:** ```javascript // Get presigned URL -const presign = await fetch('/uploads/presign', { +const presign = await fetch('/api/uploads/presign/', { method: 'POST', body: JSON.stringify({ filename: file.name, @@ -110,7 +136,7 @@ formData.append('file', file); await fetch(presign.upload_url, { method: 'POST', body: formData }); // Now attach to your record -await fetch('/documents', { +await fetch('/api/documents/', { method: 'POST', body: JSON.stringify({ title: 'My Document', @@ -119,22 +145,6 @@ await fetch('/documents', { }); ``` -**3. 
Link the file to your record:** - -```python -from plain.s3.models import S3File - - -@api.route("/documents", method="POST") -def create_document(request): - file = S3File.query.get(uuid=request.data["file_id"]) - doc = Document.query.create( - title=request.data["title"], - file=file, - ) - return {"id": str(doc.id)} -``` - ## Downloading files Generate presigned download URLs: diff --git a/plain-s3/plain/s3/__init__.py b/plain-s3/plain/s3/__init__.py index 8fe9e05456..e69de29bb2 100644 --- a/plain-s3/plain/s3/__init__.py +++ b/plain-s3/plain/s3/__init__.py @@ -1,3 +0,0 @@ -# Models and fields should be imported from submodules: -# from plain.s3.models import S3File -# from plain.s3.fields import S3FileField diff --git a/plain-s3/plain/s3/fields.py b/plain-s3/plain/s3/fields.py index 2a2de95d88..69ef07dac0 100644 --- a/plain-s3/plain/s3/fields.py +++ b/plain-s3/plain/s3/fields.py @@ -1,13 +1,10 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any +from typing import Any from plain import models from plain.models.fields.related import ForeignKeyField -if TYPE_CHECKING: - pass - class S3FileField(ForeignKeyField): """ diff --git a/plain-s3/plain/s3/storage.py b/plain-s3/plain/s3/storage.py index ab4490d6c8..0542c26153 100644 --- a/plain-s3/plain/s3/storage.py +++ b/plain-s3/plain/s3/storage.py @@ -1,21 +1,16 @@ from __future__ import annotations -from typing import TYPE_CHECKING - import boto3 from plain.runtime import settings -if TYPE_CHECKING: - from mypy_boto3_s3 import S3Client - -_client: S3Client | None = None +_client = None DEFAULT_PRESIGNED_URL_EXPIRATION = 3600 # 1 hour -def get_client() -> S3Client: +def get_client(): """Get or create the S3 client singleton.""" global _client if _client is None: From 18b9aa35d5993aa0356102190a422e19976d3b4b Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 14 Dec 2025 01:56:23 +0000 Subject: [PATCH 04/11] Use plain.views.View instead of APIView in README examples --- plain-s3/plain/s3/README.md | 44 ++++++++++++++----------------------- 1 file changed, 17 insertions(+), 27 deletions(-) diff --git a/plain-s3/plain/s3/README.md b/plain-s3/plain/s3/README.md index cc55c4c5db..91b2eca09e 100644 --- a/plain-s3/plain/s3/README.md +++ b/plain-s3/plain/s3/README.md @@ -64,25 +64,29 @@ doc.file.download_url() # presigned S3 URL For large files, upload directly from the browser to S3 to avoid tying up your server. -**1. Create a presigned upload view:** +**1. 
Create views for presigned uploads:** ```python -# app/api/views.py -from plain.api.views import APIView +# app/documents/views.py +import json + +from plain.views import View from plain.s3.models import S3File -from app.documents.models import Document +from .models import Document -class PresignUploadView(APIView): +class PresignUploadView(View): def post(self): + data = json.loads(self.request.body) + # Get the field configuration file_field = Document._meta.get_field("file") # Create presigned upload using field's bucket/prefix/acl return file_field.create_presigned_upload( - filename=self.data["filename"], - byte_size=self.data["byte_size"], + filename=data["filename"], + byte_size=data["byte_size"], ) # Returns: { # "file_id": "uuid...", @@ -91,36 +95,22 @@ class PresignUploadView(APIView): # } -class DocumentView(APIView): +class DocumentCreateView(View): def post(self): - file = S3File.query.get(uuid=self.data["file_id"]) + data = json.loads(self.request.body) + file = S3File.query.get(uuid=data["file_id"]) doc = Document.query.create( - title=self.data["title"], + title=data["title"], file=file, ) return {"id": str(doc.id)} ``` -```python -# app/api/urls.py -from plain.urls import Router, path - -from . import views - - -class APIRouter(Router): - namespace = "api" - urls = [ - path("uploads/presign/", views.PresignUploadView), - path("documents/", views.DocumentView), - ] -``` - **2. Upload from the browser:** ```javascript // Get presigned URL -const presign = await fetch('/api/uploads/presign/', { +const presign = await fetch('/documents/presign/', { method: 'POST', body: JSON.stringify({ filename: file.name, @@ -136,7 +126,7 @@ formData.append('file', file); await fetch(presign.upload_url, { method: 'POST', body: formData }); // Now attach to your record -await fetch('/api/documents/', { +await fetch('/documents/', { method: 'POST', body: JSON.stringify({ title: 'My Document', From fdddda264d1bb746df23cd065a8f540759aa4396 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 14 Dec 2025 02:00:45 +0000 Subject: [PATCH 05/11] Default S3_REGION to 'auto' for R2 compatibility --- plain-s3/plain/s3/README.md | 6 ++---- plain-s3/plain/s3/default_settings.py | 2 +- plain-s3/plain/s3/storage.py | 3 +-- 3 files changed, 4 insertions(+), 7 deletions(-) diff --git a/plain-s3/plain/s3/README.md b/plain-s3/plain/s3/README.md index 91b2eca09e..d153bb8b16 100644 --- a/plain-s3/plain/s3/README.md +++ b/plain-s3/plain/s3/README.md @@ -152,13 +152,10 @@ url = doc.file.download_url(expires_in=300) # 5 minutes Configure your S3 connection credentials in settings. Bucket and path configuration is per-field (see Overview above). ```python -# Required - connection credentials S3_ACCESS_KEY_ID = "..." S3_SECRET_ACCESS_KEY = "..." - -# Optional -S3_REGION = "us-east-1" S3_ENDPOINT_URL = "https://..." # For R2, MinIO, etc. +S3_REGION = "auto" # Default; set to actual region for AWS (e.g., "us-east-1") ``` **Cloudflare R2 example:** @@ -167,6 +164,7 @@ S3_ENDPOINT_URL = "https://..." # For R2, MinIO, etc. S3_ACCESS_KEY_ID = "..." S3_SECRET_ACCESS_KEY = "..." 
S3_ENDPOINT_URL = "https://ACCOUNT_ID.r2.cloudflarestorage.com" +# S3_REGION defaults to "auto", which works for R2 ``` ## Installation diff --git a/plain-s3/plain/s3/default_settings.py b/plain-s3/plain/s3/default_settings.py index eea109fe97..f118c08b25 100644 --- a/plain-s3/plain/s3/default_settings.py +++ b/plain-s3/plain/s3/default_settings.py @@ -1,5 +1,5 @@ # S3 connection settings (global) S3_ACCESS_KEY_ID: str = "" S3_SECRET_ACCESS_KEY: str = "" -S3_REGION: str = "" S3_ENDPOINT_URL: str = "" # For R2, MinIO, etc. +S3_REGION: str = "auto" # "auto" works for R2, set actual region for AWS diff --git a/plain-s3/plain/s3/storage.py b/plain-s3/plain/s3/storage.py index 0542c26153..1aa088b13d 100644 --- a/plain-s3/plain/s3/storage.py +++ b/plain-s3/plain/s3/storage.py @@ -17,9 +17,8 @@ def get_client(): kwargs = { "aws_access_key_id": settings.S3_ACCESS_KEY_ID, "aws_secret_access_key": settings.S3_SECRET_ACCESS_KEY, + "region_name": settings.S3_REGION, } - if settings.S3_REGION: - kwargs["region_name"] = settings.S3_REGION if settings.S3_ENDPOINT_URL: kwargs["endpoint_url"] = settings.S3_ENDPOINT_URL _client = boto3.client("s3", **kwargs) From e4a78fcced72a7b859bcf4e2c1a2bc3d59a2d302 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 14 Dec 2025 02:01:29 +0000 Subject: [PATCH 06/11] Remove default values from required S3 settings --- plain-s3/plain/s3/default_settings.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plain-s3/plain/s3/default_settings.py b/plain-s3/plain/s3/default_settings.py index f118c08b25..ff3245189a 100644 --- a/plain-s3/plain/s3/default_settings.py +++ b/plain-s3/plain/s3/default_settings.py @@ -1,5 +1,5 @@ # S3 connection settings (global) -S3_ACCESS_KEY_ID: str = "" -S3_SECRET_ACCESS_KEY: str = "" +S3_ACCESS_KEY_ID: str +S3_SECRET_ACCESS_KEY: str S3_ENDPOINT_URL: str = "" # For R2, MinIO, etc. S3_REGION: str = "auto" # "auto" works for R2, set actual region for AWS From 28cf2b04c91bf3aeb9de0e357bbfb711c1c9b517 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 14 Dec 2025 02:06:56 +0000 Subject: [PATCH 07/11] Add standard upload flow and form field - Add S3File.upload() for server-side file uploads - Add S3FileField.upload() method on model field - Add upload_object() to storage module - Add S3FileField form field in forms.py for use with Plain forms - Update README with both standard and presigned upload flows --- plain-s3/plain/s3/README.md | 76 ++++++++++++++++++++++++++++++++---- plain-s3/plain/s3/fields.py | 19 +++++++++ plain-s3/plain/s3/forms.py | 45 +++++++++++++++++++++ plain-s3/plain/s3/models.py | 43 ++++++++++++++++++++ plain-s3/plain/s3/storage.py | 21 ++++++++++ 5 files changed, 196 insertions(+), 8 deletions(-) create mode 100644 plain-s3/plain/s3/forms.py diff --git a/plain-s3/plain/s3/README.md b/plain-s3/plain/s3/README.md index d153bb8b16..9428640362 100644 --- a/plain-s3/plain/s3/README.md +++ b/plain-s3/plain/s3/README.md @@ -2,10 +2,11 @@ **S3-compatible file storage for Plain models.** -Store files in S3, Cloudflare R2, MinIO, or any S3-compatible storage service. Designed for direct browser uploads using presigned URLs. +Store files in S3, Cloudflare R2, MinIO, or any S3-compatible storage service. 
- [Overview](#overview) -- [Direct uploads](#direct-uploads) +- [Uploading files](#uploading-files) +- [Presigned uploads](#presigned-uploads) - [Downloading files](#downloading-files) - [Settings](#settings) - [Installation](#installation) @@ -60,11 +61,73 @@ doc.file.size_display # "1.0 MB" doc.file.download_url() # presigned S3 URL ``` -## Direct uploads +## Uploading files -For large files, upload directly from the browser to S3 to avoid tying up your server. +### Using a form -**1. Create views for presigned uploads:** +Use `S3FileField` in your form to handle file uploads: + +```python +# app/documents/forms.py +from plain import forms +from plain.s3.forms import S3FileField + + +class DocumentForm(forms.Form): + title = forms.CharField() + file = S3FileField(bucket="my-bucket") +``` + +```python +# app/documents/views.py +from plain.views import FormView + +from .forms import DocumentForm +from .models import Document + + +class DocumentCreateView(FormView): + form_class = DocumentForm + template_name = "documents/create.html" + + def form_valid(self, form): + doc = Document.query.create( + title=form.cleaned_data["title"], + file=form.cleaned_data["file"], # S3File instance + ) + return redirect("documents:detail", doc.id) +``` + +### Direct upload in a view + +Upload files directly using the model field's `upload` method: + +```python +from plain.views import View + +from .models import Document + + +class DocumentUploadView(View): + def post(self): + uploaded_file = self.request.files["file"] + + # Get the field and use its configuration + file_field = Document._meta.get_field("file") + s3_file = file_field.upload(uploaded_file) + + doc = Document.query.create( + title=self.request.POST["title"], + file=s3_file, + ) + return {"id": doc.id} +``` + +## Presigned uploads + +For large files, upload directly from the browser to S3 to avoid server load. + +**1. Create a view that returns presigned upload data:** ```python # app/documents/views.py @@ -80,10 +143,7 @@ class PresignUploadView(View): def post(self): data = json.loads(self.request.body) - # Get the field configuration file_field = Document._meta.get_field("file") - - # Create presigned upload using field's bucket/prefix/acl return file_field.create_presigned_upload( filename=data["filename"], byte_size=data["byte_size"], diff --git a/plain-s3/plain/s3/fields.py b/plain-s3/plain/s3/fields.py index 69ef07dac0..b8ab04b20c 100644 --- a/plain-s3/plain/s3/fields.py +++ b/plain-s3/plain/s3/fields.py @@ -52,6 +52,25 @@ def __init__( super().__init__(S3File, on_delete=on_delete, **kwargs) + def upload(self, file): + """ + Upload a file using this field's configuration. + + Args: + file: An uploaded file object with name, size, content_type, and read() method + + Returns: + The created S3File instance + """ + from .models import S3File + + return S3File.upload( + bucket=self.bucket, + file=file, + key_prefix=self.key_prefix, + acl=self.acl, + ) + def create_presigned_upload( self, *, diff --git a/plain-s3/plain/s3/forms.py b/plain-s3/plain/s3/forms.py new file mode 100644 index 0000000000..ecb5c5b1f7 --- /dev/null +++ b/plain-s3/plain/s3/forms.py @@ -0,0 +1,45 @@ +from __future__ import annotations + +from plain.forms.fields import FileField + + +class S3FileField(FileField): + """ + A form field that uploads files to S3. + + Usage in a form: + class DocumentForm(Form): + title = fields.CharField() + file = S3FileField(bucket="my-bucket") + + The cleaned value is an S3File instance (or None if no file uploaded). 
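    Note that the upload to S3 happens inside ``clean()``, so once the form
    has validated, the file has already been stored and the S3File row created.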
+ """ + + def __init__( + self, + bucket: str, + *, + key_prefix: str = "", + acl: str = "", + **kwargs, + ): + self.bucket = bucket + self.key_prefix = key_prefix + self.acl = acl + super().__init__(**kwargs) + + def clean(self, data, initial=None): + file = super().clean(data, initial) + + if file is None or file is False: + return file + + # Upload to S3 and return S3File instance + from .models import S3File + + return S3File.upload( + bucket=self.bucket, + file=file, + key_prefix=self.key_prefix, + acl=self.acl, + ) diff --git a/plain-s3/plain/s3/models.py b/plain-s3/plain/s3/models.py index c7cdab85e8..947ca06f48 100644 --- a/plain-s3/plain/s3/models.py +++ b/plain-s3/plain/s3/models.py @@ -60,6 +60,49 @@ def generate_key(cls, filename: str, *, key_prefix: str = "") -> str: ext = "." + filename.rsplit(".", 1)[-1].lower() return f"{key_prefix}{uuid4()}{ext}" + @classmethod + def upload( + cls, + *, + bucket: str, + file, + key_prefix: str = "", + acl: str = "", + ) -> "S3File": + """ + Upload a file to S3 and create the S3File record. + + Args: + bucket: S3 bucket name + file: An uploaded file object with name, size, content_type, and read() method + key_prefix: Optional prefix for the S3 key + acl: Optional ACL (e.g., "public-read") + + Returns: + The created S3File instance + """ + filename = file.name + content_type = getattr(file, "content_type", None) + if content_type is None: + content_type, _ = mimetypes.guess_type(filename) + content_type = content_type or "application/octet-stream" + + key = cls.generate_key(filename, key_prefix=key_prefix) + body = file.read() + byte_size = len(body) + + # Upload to S3 + storage.upload_object(bucket, key, body, content_type, acl=acl) + + # Create the database record + return cls.query.create( + bucket=bucket, + key=key, + filename=filename, + content_type=content_type, + byte_size=byte_size, + ) + @classmethod def create_presigned_upload( cls, diff --git a/plain-s3/plain/s3/storage.py b/plain-s3/plain/s3/storage.py index 1aa088b13d..e508990ef2 100644 --- a/plain-s3/plain/s3/storage.py +++ b/plain-s3/plain/s3/storage.py @@ -86,6 +86,27 @@ def generate_presigned_download_url( ) +def upload_object( + bucket: str, + key: str, + body: bytes, + content_type: str, + *, + acl: str = "", +) -> None: + """Upload file content to S3.""" + client = get_client() + kwargs = { + "Bucket": bucket, + "Key": key, + "Body": body, + "ContentType": content_type, + } + if acl: + kwargs["ACL"] = acl + client.put_object(**kwargs) + + def delete_object(bucket: str, key: str) -> None: """Delete an object from S3.""" client = get_client() From 0fd419dbc5a609a4bf6e54d916343052842c8476 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 14 Dec 2025 02:09:04 +0000 Subject: [PATCH 08/11] Consolidate storage utilities into S3File model Removed standalone storage.py and moved S3 client initialization directly into models.py as a private function. This simplifies the package structure since storage utilities aren't needed outside the model context. 
--- plain-s3/plain/s3/models.py | 88 ++++++++++++++++++------ plain-s3/plain/s3/storage.py | 128 ----------------------------------- 2 files changed, 69 insertions(+), 147 deletions(-) delete mode 100644 plain-s3/plain/s3/storage.py diff --git a/plain-s3/plain/s3/models.py b/plain-s3/plain/s3/models.py index 947ca06f48..e1d7439fc7 100644 --- a/plain-s3/plain/s3/models.py +++ b/plain-s3/plain/s3/models.py @@ -4,10 +4,30 @@ from datetime import datetime from uuid import UUID, uuid4 +import boto3 + from plain import models from plain.models import types +from plain.runtime import settings + -from . import storage +_client = None +_DEFAULT_PRESIGNED_EXPIRATION = 3600 # 1 hour + + +def _get_client(): + """Get or create the S3 client singleton.""" + global _client + if _client is None: + kwargs = { + "aws_access_key_id": settings.S3_ACCESS_KEY_ID, + "aws_secret_access_key": settings.S3_SECRET_ACCESS_KEY, + "region_name": settings.S3_REGION, + } + if settings.S3_ENDPOINT_URL: + kwargs["endpoint_url"] = settings.S3_ENDPOINT_URL + _client = boto3.client("s3", **kwargs) + return _client @models.register_model @@ -53,7 +73,7 @@ def __str__(self) -> str: return self.filename @classmethod - def generate_key(cls, filename: str, *, key_prefix: str = "") -> str: + def _generate_key(cls, filename: str, *, key_prefix: str = "") -> str: """Generate a unique S3 key for a new file.""" ext = "" if "." in filename: @@ -87,12 +107,21 @@ def upload( content_type, _ = mimetypes.guess_type(filename) content_type = content_type or "application/octet-stream" - key = cls.generate_key(filename, key_prefix=key_prefix) + key = cls._generate_key(filename, key_prefix=key_prefix) body = file.read() byte_size = len(body) # Upload to S3 - storage.upload_object(bucket, key, body, content_type, acl=acl) + client = _get_client() + put_kwargs = { + "Bucket": bucket, + "Key": key, + "Body": body, + "ContentType": content_type, + } + if acl: + put_kwargs["ACL"] = acl + client.put_object(**put_kwargs) # Create the database record return cls.query.create( @@ -118,7 +147,7 @@ def create_presigned_upload( Create a new S3File record and return presigned upload data. The file record is created immediately but the file isn't uploaded yet. - After the client uploads directly to S3, call verify_upload() to confirm. + After the client uploads directly to S3, the file will be available. 
Returns: { @@ -132,7 +161,7 @@ def create_presigned_upload( content_type, _ = mimetypes.guess_type(filename) content_type = content_type or "application/octet-stream" - key = cls.generate_key(filename, key_prefix=key_prefix) + key = cls._generate_key(filename, key_prefix=key_prefix) # Create the file record file = cls.query.create( @@ -144,8 +173,19 @@ def create_presigned_upload( ) # Generate presigned upload URL - presign = storage.generate_presigned_upload_url( - bucket, key, content_type, acl=acl + client = _get_client() + conditions: list = [{"Content-Type": content_type}] + fields = {"Content-Type": content_type} + if acl: + conditions.append({"acl": acl}) + fields["acl"] = acl + + presign = client.generate_presigned_post( + Bucket=bucket, + Key=key, + Fields=fields, + Conditions=conditions, + ExpiresIn=_DEFAULT_PRESIGNED_EXPIRATION, ) return { @@ -155,25 +195,35 @@ def create_presigned_upload( "upload_fields": presign["fields"], } - def download_url(self, *, expires_in: int | None = None) -> str: + def download_url(self, *, expires_in: int = _DEFAULT_PRESIGNED_EXPIRATION) -> str: """Generate a presigned URL for downloading this file.""" - kwargs = {} - if expires_in is not None: - kwargs["expires_in"] = expires_in - return storage.generate_presigned_download_url( - self.bucket, - self.key, - filename=self.filename, - **kwargs, + client = _get_client() + params = { + "Bucket": self.bucket, + "Key": self.key, + "ResponseContentDisposition": f'attachment; filename="{self.filename}"', + } + return client.generate_presigned_url( + "get_object", + Params=params, + ExpiresIn=expires_in, ) def exists_in_storage(self) -> bool: """Check if the file actually exists in S3.""" - return storage.head_object(self.bucket, self.key) is not None + client = _get_client() + try: + client.head_object(Bucket=self.bucket, Key=self.key) + return True + except client.exceptions.ClientError as e: + if e.response["Error"]["Code"] == "404": + return False + raise def delete(self) -> None: """Delete the file from S3 and the database record.""" - storage.delete_object(self.bucket, self.key) + client = _get_client() + client.delete_object(Bucket=self.bucket, Key=self.key) super().delete() @property diff --git a/plain-s3/plain/s3/storage.py b/plain-s3/plain/s3/storage.py deleted file mode 100644 index e508990ef2..0000000000 --- a/plain-s3/plain/s3/storage.py +++ /dev/null @@ -1,128 +0,0 @@ -from __future__ import annotations - -import boto3 - -from plain.runtime import settings - - -_client = None - -DEFAULT_PRESIGNED_URL_EXPIRATION = 3600 # 1 hour - - -def get_client(): - """Get or create the S3 client singleton.""" - global _client - if _client is None: - kwargs = { - "aws_access_key_id": settings.S3_ACCESS_KEY_ID, - "aws_secret_access_key": settings.S3_SECRET_ACCESS_KEY, - "region_name": settings.S3_REGION, - } - if settings.S3_ENDPOINT_URL: - kwargs["endpoint_url"] = settings.S3_ENDPOINT_URL - _client = boto3.client("s3", **kwargs) - return _client - - -def generate_presigned_upload_url( - bucket: str, - key: str, - content_type: str, - *, - acl: str = "", - expires_in: int = DEFAULT_PRESIGNED_URL_EXPIRATION, -) -> dict: - """ - Generate a presigned URL for uploading a file directly to S3. - - Returns a dict with 'url' and 'fields' for form-based uploads. 
- """ - client = get_client() - - conditions: list = [ - {"Content-Type": content_type}, - ] - fields = { - "Content-Type": content_type, - } - - if acl: - conditions.append({"acl": acl}) - fields["acl"] = acl - - response = client.generate_presigned_post( - Bucket=bucket, - Key=key, - Fields=fields, - Conditions=conditions, - ExpiresIn=expires_in, - ) - return response - - -def generate_presigned_download_url( - bucket: str, - key: str, - *, - expires_in: int = DEFAULT_PRESIGNED_URL_EXPIRATION, - filename: str | None = None, -) -> str: - """Generate a presigned URL for downloading a file from S3.""" - client = get_client() - - params = { - "Bucket": bucket, - "Key": key, - } - - if filename: - params["ResponseContentDisposition"] = f'attachment; filename="{filename}"' - - return client.generate_presigned_url( - "get_object", - Params=params, - ExpiresIn=expires_in, - ) - - -def upload_object( - bucket: str, - key: str, - body: bytes, - content_type: str, - *, - acl: str = "", -) -> None: - """Upload file content to S3.""" - client = get_client() - kwargs = { - "Bucket": bucket, - "Key": key, - "Body": body, - "ContentType": content_type, - } - if acl: - kwargs["ACL"] = acl - client.put_object(**kwargs) - - -def delete_object(bucket: str, key: str) -> None: - """Delete an object from S3.""" - client = get_client() - client.delete_object(Bucket=bucket, Key=key) - - -def head_object(bucket: str, key: str) -> dict | None: - """ - Get object metadata from S3. - - Returns None if the object doesn't exist. - """ - client = get_client() - try: - return client.head_object(Bucket=bucket, Key=key) - except client.exceptions.ClientError as e: - if e.response["Error"]["Code"] == "404": - return None - raise From d6580116dc6d840e7f47d11f6abe70b76667bff7 Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 14 Dec 2025 02:14:18 +0000 Subject: [PATCH 09/11] Add formfield() method for ModelForm support The S3FileField model field now returns the appropriate S3FileField form field in ModelForms, automatically passing through the bucket, key_prefix, and acl configuration. 
--- plain-s3/plain/s3/fields.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/plain-s3/plain/s3/fields.py b/plain-s3/plain/s3/fields.py index b8ab04b20c..7b0017b0b5 100644 --- a/plain-s3/plain/s3/fields.py +++ b/plain-s3/plain/s3/fields.py @@ -100,6 +100,19 @@ def create_presigned_upload( acl=self.acl, ) + def formfield(self, **kwargs): + """Return an S3FileField form field for use in ModelForms.""" + from .forms import S3FileField as S3FileFormField + + defaults = { + "bucket": self.bucket, + "key_prefix": self.key_prefix, + "acl": self.acl, + "required": not self.allow_null, + } + defaults.update(kwargs) + return S3FileFormField(**defaults) + def deconstruct(self) -> tuple: """Support migrations by including S3 configuration.""" name, path, args, kwargs = super().deconstruct() From 24ed14ba31676e36bf1eb17967caa07cff0e3e4d Mon Sep 17 00:00:00 2001 From: Claude Date: Sun, 14 Dec 2025 02:18:06 +0000 Subject: [PATCH 10/11] Fix type errors in plain-s3 - formfield() now passes arguments directly instead of via dict - clean() override matches parent FileField signature with type ignore - delete() returns proper tuple type from parent --- plain-s3/plain/s3/fields.py | 17 ++++++++--------- plain-s3/plain/s3/forms.py | 4 +++- plain-s3/plain/s3/models.py | 4 ++-- 3 files changed, 13 insertions(+), 12 deletions(-) diff --git a/plain-s3/plain/s3/fields.py b/plain-s3/plain/s3/fields.py index 7b0017b0b5..03ec465a3e 100644 --- a/plain-s3/plain/s3/fields.py +++ b/plain-s3/plain/s3/fields.py @@ -100,18 +100,17 @@ def create_presigned_upload( acl=self.acl, ) - def formfield(self, **kwargs): + def formfield(self, **kwargs: Any) -> Any: """Return an S3FileField form field for use in ModelForms.""" from .forms import S3FileField as S3FileFormField - defaults = { - "bucket": self.bucket, - "key_prefix": self.key_prefix, - "acl": self.acl, - "required": not self.allow_null, - } - defaults.update(kwargs) - return S3FileFormField(**defaults) + return S3FileFormField( + bucket=self.bucket, + key_prefix=self.key_prefix, + acl=self.acl, + required=kwargs.pop("required", not self.allow_null), + **kwargs, + ) def deconstruct(self) -> tuple: """Support migrations by including S3 configuration.""" diff --git a/plain-s3/plain/s3/forms.py b/plain-s3/plain/s3/forms.py index ecb5c5b1f7..9c2ded10b1 100644 --- a/plain-s3/plain/s3/forms.py +++ b/plain-s3/plain/s3/forms.py @@ -1,5 +1,7 @@ from __future__ import annotations +from typing import Any + from plain.forms.fields import FileField @@ -28,7 +30,7 @@ def __init__( self.acl = acl super().__init__(**kwargs) - def clean(self, data, initial=None): + def clean(self, data: Any, initial: Any = None) -> Any: # type: ignore[override] file = super().clean(data, initial) if file is None or file is False: diff --git a/plain-s3/plain/s3/models.py b/plain-s3/plain/s3/models.py index e1d7439fc7..5f1546ed47 100644 --- a/plain-s3/plain/s3/models.py +++ b/plain-s3/plain/s3/models.py @@ -220,11 +220,11 @@ def exists_in_storage(self) -> bool: return False raise - def delete(self) -> None: + def delete(self) -> tuple[int, dict[str, int]]: """Delete the file from S3 and the database record.""" client = _get_client() client.delete_object(Bucket=self.bucket, Key=self.key) - super().delete() + return super().delete() @property def extension(self) -> str: From 596179ca5dfce4c9181a38a89e7b491aec3d52e2 Mon Sep 17 00:00:00 2001 From: Dave Gaeddert Date: Mon, 15 Dec 2025 14:26:00 -0600 Subject: [PATCH 11/11] save --- plain-s3/LICENSE | 28 +++ plain-s3/plain/s3/README.md | 84 ++++--- 
plain-s3/plain/s3/admin.py | 1 - plain-s3/plain/s3/default_settings.py | 7 +- plain-s3/plain/s3/fields.py | 34 +-- plain-s3/plain/s3/forms.py | 6 +- plain-s3/plain/s3/migrations/0001_initial.py | 18 +- plain-s3/plain/s3/models.py | 151 +++++------- plain-s3/pyproject.toml | 3 +- plain-s3/tests/app/settings.py | 13 ++ plain-s3/tests/app/urls.py | 6 + plain-s3/tests/test_s3.py | 234 +++++++++++++++++++ scripts/test | 1 + scripts/type-validate | 1 + uv.lock | 65 +++++- 15 files changed, 483 insertions(+), 169 deletions(-) create mode 100644 plain-s3/LICENSE create mode 100644 plain-s3/tests/app/settings.py create mode 100644 plain-s3/tests/app/urls.py create mode 100644 plain-s3/tests/test_s3.py diff --git a/plain-s3/LICENSE b/plain-s3/LICENSE new file mode 100644 index 0000000000..4a29315c05 --- /dev/null +++ b/plain-s3/LICENSE @@ -0,0 +1,28 @@ +BSD 3-Clause License + +Copyright (c) 2025, Dropseed, LLC + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/plain-s3/plain/s3/README.md b/plain-s3/plain/s3/README.md index 9428640362..ab432b231e 100644 --- a/plain-s3/plain/s3/README.md +++ b/plain-s3/plain/s3/README.md @@ -2,7 +2,7 @@ **S3-compatible file storage for Plain models.** -Store files in S3, Cloudflare R2, MinIO, or any S3-compatible storage service. +Store files in S3, Cloudflare R2, DigitalOcean Spaces, MinIO, or any S3-compatible storage. - [Overview](#overview) - [Uploading files](#uploading-files) @@ -13,7 +13,7 @@ Store files in S3, Cloudflare R2, MinIO, or any S3-compatible storage service. ## Overview -Add file uploads to your models with `S3FileField`. 
Each field specifies which bucket to use: +Add file uploads to your models with `S3FileField`: ```python from plain import models @@ -25,10 +25,10 @@ from plain.s3.models import S3File @models.register_model class Document(models.Model): title: str = types.CharField(max_length=200) - file: S3File | None = S3FileField(bucket="my-bucket") + file: S3File | None = S3FileField() # Uses S3_BUCKET setting ``` -Configure per-field storage options: +Override the bucket or add path prefixes per-field: ```python @models.register_model @@ -37,7 +37,6 @@ class User(models.Model): # Public avatars with custom path prefix avatar: S3File | None = S3FileField( - bucket="public-assets", key_prefix="avatars/", acl="public-read", ) @@ -54,11 +53,11 @@ Access file properties and generate download URLs: ```python doc = Document.query.get(id=some_id) -doc.file.filename # "report.pdf" -doc.file.content_type # "application/pdf" -doc.file.byte_size # 1048576 -doc.file.size_display # "1.0 MB" -doc.file.download_url() # presigned S3 URL +doc.file.filename # "report.pdf" +doc.file.content_type # "application/pdf" +doc.file.byte_size # 1048576 +doc.file.size_display # "1.0 MB" +doc.file.presigned_download_url() # Presigned S3 URL ``` ## Uploading files @@ -75,7 +74,7 @@ from plain.s3.forms import S3FileField class DocumentForm(forms.Form): title = forms.CharField() - file = S3FileField(bucket="my-bucket") + file = S3FileField() # Uses S3_BUCKET setting ``` ```python @@ -123,6 +122,15 @@ class DocumentUploadView(View): return {"id": doc.id} ``` +Or upload directly via `S3File.upload()`: + +```python +from plain.s3.models import S3File + + +s3_file = S3File.upload(file=uploaded_file) +``` + ## Presigned uploads For large files, upload directly from the browser to S3 to avoid server load. @@ -149,16 +157,15 @@ class PresignUploadView(View): byte_size=data["byte_size"], ) # Returns: { - # "file_id": "uuid...", + # "key": "abc123.pdf", # "upload_url": "https://bucket.s3...", - # "upload_fields": {"key": "...", "policy": "...", ...}, # } class DocumentCreateView(View): def post(self): data = json.loads(self.request.body) - file = S3File.query.get(uuid=data["file_id"]) + file = S3File.query.get(key=data["key"]) doc = Document.query.create( title=data["title"], file=file, @@ -179,18 +186,18 @@ const presign = await fetch('/documents/presign/', { }).then(r => r.json()); // Upload directly to S3 -const formData = new FormData(); -Object.entries(presign.upload_fields).forEach(([k, v]) => formData.append(k, v)); -formData.append('file', file); - -await fetch(presign.upload_url, { method: 'POST', body: formData }); +await fetch(presign.upload_url, { + method: 'PUT', + body: file, + headers: { 'Content-Type': file.type }, +}); // Now attach to your record await fetch('/documents/', { method: 'POST', body: JSON.stringify({ title: 'My Document', - file_id: presign.file_id, + key: presign.key, }), }); ``` @@ -200,31 +207,46 @@ await fetch('/documents/', { Generate presigned download URLs: ```python -# Default expiration (1 hour) -url = doc.file.download_url() +# Default expiration (1 hour), triggers download +url = doc.file.presigned_download_url() + +# Custom expiration (5 minutes) +url = doc.file.presigned_download_url(expires_in=300) -# Custom expiration -url = doc.file.download_url(expires_in=300) # 5 minutes +# Display in browser instead of downloading (for images, PDFs, etc.) +url = doc.file.presigned_download_url(inline=True) ``` ## Settings -Configure your S3 connection credentials in settings. 
Bucket and path configuration is per-field (see Overview above). +Configure your S3 connection in settings: ```python S3_ACCESS_KEY_ID = "..." S3_SECRET_ACCESS_KEY = "..." -S3_ENDPOINT_URL = "https://..." # For R2, MinIO, etc. -S3_REGION = "auto" # Default; set to actual region for AWS (e.g., "us-east-1") +S3_BUCKET = "my-bucket" +S3_REGION = "us-east-1" +S3_ENDPOINT_URL = "" # For R2, MinIO, DigitalOcean Spaces, etc. ``` -**Cloudflare R2 example:** +### Cloudflare R2 ```python S3_ACCESS_KEY_ID = "..." S3_SECRET_ACCESS_KEY = "..." +S3_BUCKET = "my-bucket" +S3_REGION = "auto" S3_ENDPOINT_URL = "https://ACCOUNT_ID.r2.cloudflarestorage.com" -# S3_REGION defaults to "auto", which works for R2 +``` + +### DigitalOcean Spaces + +```python +S3_ACCESS_KEY_ID = "..." +S3_SECRET_ACCESS_KEY = "..." +S3_BUCKET = "my-bucket" +S3_REGION = "nyc3" # Your Spaces region +S3_ENDPOINT_URL = "https://nyc3.digitaloceanspaces.com" ``` ## Installation @@ -238,7 +260,7 @@ INSTALLED_PACKAGES = [ ] ``` -2. Configure your S3 connection credentials (see Settings above). +2. Configure your S3 settings (see Settings above). 3. Run migrations: @@ -246,4 +268,4 @@ INSTALLED_PACKAGES = [ plain migrate ``` -4. Add `S3FileField` to your models, specifying the bucket for each field. +4. Add `S3FileField` to your models. diff --git a/plain-s3/plain/s3/admin.py b/plain-s3/plain/s3/admin.py index 979b62bc6a..d40a1c6b18 100644 --- a/plain-s3/plain/s3/admin.py +++ b/plain-s3/plain/s3/admin.py @@ -24,7 +24,6 @@ class ListView(AdminModelListView): "created_at", ] search_fields = [ - "uuid", "filename", "key", ] diff --git a/plain-s3/plain/s3/default_settings.py b/plain-s3/plain/s3/default_settings.py index ff3245189a..f07237f508 100644 --- a/plain-s3/plain/s3/default_settings.py +++ b/plain-s3/plain/s3/default_settings.py @@ -1,5 +1,8 @@ # S3 connection settings (global) S3_ACCESS_KEY_ID: str S3_SECRET_ACCESS_KEY: str -S3_ENDPOINT_URL: str = "" # For R2, MinIO, etc. -S3_REGION: str = "auto" # "auto" works for R2, set actual region for AWS +S3_REGION: str +S3_ENDPOINT_URL: str = "" # For R2, MinIO, DigitalOcean Spaces, etc. + +# Default upload settings +S3_BUCKET: str diff --git a/plain-s3/plain/s3/fields.py b/plain-s3/plain/s3/fields.py index 03ec465a3e..6bfa1e5c58 100644 --- a/plain-s3/plain/s3/fields.py +++ b/plain-s3/plain/s3/fields.py @@ -5,6 +5,8 @@ from plain import models from plain.models.fields.related import ForeignKeyField +from .models import PresignedUpload + class S3FileField(ForeignKeyField): """ @@ -12,7 +14,8 @@ class S3FileField(ForeignKeyField): Usage: class Document(models.Model): - file: S3File | None = S3FileField(bucket="my-bucket") + # Uses S3_BUCKET setting: + file: S3File | None = S3FileField() # With optional configuration: avatar: S3File | None = S3FileField( @@ -21,15 +24,13 @@ class Document(models.Model): acl="public-read", ) - The bucket is required. key_prefix and acl are optional. - By default, the field is optional (allow_null=True) and uses SET_NULL on delete to avoid cascading deletes of your records when files are removed. """ def __init__( self, - bucket: str, + bucket: str = "", *, key_prefix: str = "", acl: str = "", @@ -52,15 +53,11 @@ def __init__( super().__init__(S3File, on_delete=on_delete, **kwargs) - def upload(self, file): + def upload(self, file: Any) -> Any: """ Upload a file using this field's configuration. - Args: - file: An uploaded file object with name, size, content_type, and read() method - - Returns: - The created S3File instance + Returns the created S3File instance. 
""" from .models import S3File @@ -77,18 +74,8 @@ def create_presigned_upload( filename: str, byte_size: int, content_type: str | None = None, - ) -> dict: - """ - Create a presigned upload using this field's configuration. - - Returns: - { - "file_id": str (UUID), - "key": str, - "upload_url": str, - "upload_fields": dict, - } - """ + ) -> PresignedUpload: + """Create a presigned upload using this field's configuration.""" from .models import S3File return S3File.create_presigned_upload( @@ -116,7 +103,8 @@ def deconstruct(self) -> tuple: """Support migrations by including S3 configuration.""" name, path, args, kwargs = super().deconstruct() # Add our custom attributes - kwargs["bucket"] = self.bucket + if self.bucket: + kwargs["bucket"] = self.bucket if self.key_prefix: kwargs["key_prefix"] = self.key_prefix if self.acl: diff --git a/plain-s3/plain/s3/forms.py b/plain-s3/plain/s3/forms.py index 9c2ded10b1..6b656b43d0 100644 --- a/plain-s3/plain/s3/forms.py +++ b/plain-s3/plain/s3/forms.py @@ -12,18 +12,18 @@ class S3FileField(FileField): Usage in a form: class DocumentForm(Form): title = fields.CharField() - file = S3FileField(bucket="my-bucket") + file = S3FileField() # Uses S3_BUCKET setting The cleaned value is an S3File instance (or None if no file uploaded). """ def __init__( self, - bucket: str, + bucket: str = "", *, key_prefix: str = "", acl: str = "", - **kwargs, + **kwargs: Any, ): self.bucket = bucket self.key_prefix = key_prefix diff --git a/plain-s3/plain/s3/migrations/0001_initial.py b/plain-s3/plain/s3/migrations/0001_initial.py index 33fbaaf577..1f92b3b1ca 100644 --- a/plain-s3/plain/s3/migrations/0001_initial.py +++ b/plain-s3/plain/s3/migrations/0001_initial.py @@ -1,6 +1,5 @@ -# Generated by Plain 0.94.0 on 2025-12-14 00:39 +# Generated by Plain 0.94.0 on 2025-12-15 03:55 -import uuid from plain import models from plain.models import migrations @@ -15,27 +14,14 @@ class Migration(migrations.Migration): name="S3File", fields=[ ("id", models.PrimaryKeyField()), - ("uuid", models.UUIDField(default=uuid.uuid4)), ("bucket", models.CharField(max_length=255)), ("key", models.CharField(max_length=500)), ("filename", models.CharField(max_length=255)), ("content_type", models.CharField(max_length=100)), ("byte_size", models.PositiveBigIntegerField()), - ("checksum", models.CharField(max_length=64, required=False)), - ("metadata", models.JSONField(default=dict)), ("created_at", models.DateTimeField(auto_now_add=True)), ], ), - migrations.AddIndex( - model_name="s3file", - index=models.Index(fields=["uuid"], name="plains3_s3f_uuid_f52e97_idx"), - ), - migrations.AddIndex( - model_name="s3file", - index=models.Index( - fields=["bucket", "key"], name="plains3_s3f_bucket_fbc61f_idx" - ), - ), migrations.AddIndex( model_name="s3file", index=models.Index( @@ -45,7 +31,7 @@ class Migration(migrations.Migration): migrations.AddConstraint( model_name="s3file", constraint=models.UniqueConstraint( - fields=("uuid",), name="plains3_s3file_unique_uuid" + fields=("key",), name="plains3_s3file_unique_key" ), ), ] diff --git a/plain-s3/plain/s3/models.py b/plain-s3/plain/s3/models.py index 5f1546ed47..0cdbc5d058 100644 --- a/plain-s3/plain/s3/models.py +++ b/plain-s3/plain/s3/models.py @@ -2,7 +2,8 @@ import mimetypes from datetime import datetime -from uuid import UUID, uuid4 +from typing import TYPE_CHECKING, Any, TypedDict +from uuid import uuid4 import boto3 @@ -10,24 +11,15 @@ from plain.models import types from plain.runtime import settings +if TYPE_CHECKING: + from types_boto3_s3 import 
S3Client -_client = None -_DEFAULT_PRESIGNED_EXPIRATION = 3600 # 1 hour +class PresignedUpload(TypedDict): + """Return type for create_presigned_upload.""" -def _get_client(): - """Get or create the S3 client singleton.""" - global _client - if _client is None: - kwargs = { - "aws_access_key_id": settings.S3_ACCESS_KEY_ID, - "aws_secret_access_key": settings.S3_SECRET_ACCESS_KEY, - "region_name": settings.S3_REGION, - } - if settings.S3_ENDPOINT_URL: - kwargs["endpoint_url"] = settings.S3_ENDPOINT_URL - _client = boto3.client("s3", **kwargs) - return _client + key: str + upload_url: str @models.register_model @@ -41,8 +33,6 @@ class S3File(models.Model): query: models.QuerySet[S3File] = models.QuerySet() - uuid: UUID = types.UUIDField(default=uuid4) - # S3 storage location bucket: str = types.CharField(max_length=255) key: str = types.CharField(max_length=500) @@ -51,27 +41,33 @@ class S3File(models.Model): filename: str = types.CharField(max_length=255) content_type: str = types.CharField(max_length=100) byte_size: int = types.PositiveBigIntegerField() - checksum: str = types.CharField(max_length=64, required=False) - - # Extensible metadata (dimensions, duration, etc.) - metadata: dict = types.JSONField(default=dict) created_at: datetime = types.DateTimeField(auto_now_add=True) model_options = models.Options( indexes=[ - models.Index(fields=["uuid"]), - models.Index(fields=["bucket", "key"]), models.Index(fields=["created_at"]), ], constraints=[ - models.UniqueConstraint(fields=["uuid"], name="plains3_s3file_unique_uuid"), + models.UniqueConstraint(fields=["key"], name="plains3_s3file_unique_key"), ], ) def __str__(self) -> str: return self.filename + @classmethod + def get_s3_client(cls) -> S3Client: + """Create an S3 client using settings.""" + kwargs: dict[str, Any] = { + "aws_access_key_id": settings.S3_ACCESS_KEY_ID, + "aws_secret_access_key": settings.S3_SECRET_ACCESS_KEY, + "region_name": settings.S3_REGION, + } + if settings.S3_ENDPOINT_URL: + kwargs["endpoint_url"] = settings.S3_ENDPOINT_URL + return boto3.client("s3", **kwargs) + @classmethod def _generate_key(cls, filename: str, *, key_prefix: str = "") -> str: """Generate a unique S3 key for a new file.""" @@ -84,23 +80,15 @@ def _generate_key(cls, filename: str, *, key_prefix: str = "") -> str: def upload( cls, *, - bucket: str, - file, + file: Any, + bucket: str = "", key_prefix: str = "", acl: str = "", - ) -> "S3File": + ) -> S3File: """ Upload a file to S3 and create the S3File record. - - Args: - bucket: S3 bucket name - file: An uploaded file object with name, size, content_type, and read() method - key_prefix: Optional prefix for the S3 key - acl: Optional ACL (e.g., "public-read") - - Returns: - The created S3File instance """ + bucket = bucket or settings.S3_BUCKET filename = file.name content_type = getattr(file, "content_type", None) if content_type is None: @@ -112,7 +100,7 @@ def upload( byte_size = len(body) # Upload to S3 - client = _get_client() + client = cls.get_s3_client() put_kwargs = { "Bucket": bucket, "Key": key, @@ -136,27 +124,21 @@ def upload( def create_presigned_upload( cls, *, - bucket: str, filename: str, byte_size: int, + bucket: str = "", content_type: str | None = None, key_prefix: str = "", acl: str = "", - ) -> dict: + ) -> PresignedUpload: """ Create a new S3File record and return presigned upload data. The file record is created immediately but the file isn't uploaded yet. After the client uploads directly to S3, the file will be available. 
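+
+        Returns a PresignedUpload dict with "key" and "upload_url".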
- - Returns: - { - "file_id": str (UUID), - "key": str, - "upload_url": str, - "upload_fields": dict, - } """ + bucket = bucket or settings.S3_BUCKET + if content_type is None: content_type, _ = mimetypes.guess_type(filename) content_type = content_type or "application/octet-stream" @@ -164,7 +146,7 @@ def create_presigned_upload( key = cls._generate_key(filename, key_prefix=key_prefix) # Create the file record - file = cls.query.create( + cls.query.create( bucket=bucket, key=key, filename=filename, @@ -172,36 +154,41 @@ def create_presigned_upload( byte_size=byte_size, ) - # Generate presigned upload URL - client = _get_client() - conditions: list = [{"Content-Type": content_type}] - fields = {"Content-Type": content_type} + client = cls.get_s3_client() + + params: dict[str, Any] = { + "Bucket": bucket, + "Key": key, + "ContentType": content_type, + } if acl: - conditions.append({"acl": acl}) - fields["acl"] = acl - - presign = client.generate_presigned_post( - Bucket=bucket, - Key=key, - Fields=fields, - Conditions=conditions, - ExpiresIn=_DEFAULT_PRESIGNED_EXPIRATION, + params["ACL"] = acl + + upload_url = client.generate_presigned_url( + "put_object", + Params=params, + ExpiresIn=3600, ) - return { - "file_id": str(file.uuid), - "key": key, - "upload_url": presign["url"], - "upload_fields": presign["fields"], - } + return PresignedUpload( + key=key, + upload_url=upload_url, + ) - def download_url(self, *, expires_in: int = _DEFAULT_PRESIGNED_EXPIRATION) -> str: - """Generate a presigned URL for downloading this file.""" - client = _get_client() + def presigned_download_url( + self, *, expires_in: int = 3600, inline: bool = False + ) -> str: + """Generate a presigned URL for downloading this file. + + Use inline=True to display in browser (for images, PDFs, etc.) + instead of triggering a download. 
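+
+        Example:
+
+            url = s3_file.presigned_download_url(expires_in=300, inline=True)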
+ """ + client = self.get_s3_client() + disposition = "inline" if inline else "attachment" params = { "Bucket": self.bucket, "Key": self.key, - "ResponseContentDisposition": f'attachment; filename="{self.filename}"', + "ResponseContentDisposition": f'{disposition}; filename="{self.filename}"', } return client.generate_presigned_url( "get_object", @@ -211,7 +198,7 @@ def download_url(self, *, expires_in: int = _DEFAULT_PRESIGNED_EXPIRATION) -> st def exists_in_storage(self) -> bool: """Check if the file actually exists in S3.""" - client = _get_client() + client = self.get_s3_client() try: client.head_object(Bucket=self.bucket, Key=self.key) return True @@ -222,7 +209,7 @@ def exists_in_storage(self) -> bool: def delete(self) -> tuple[int, dict[str, int]]: """Delete the file from S3 and the database record.""" - client = _get_client() + client = self.get_s3_client() client.delete_object(Bucket=self.bucket, Key=self.key) return super().delete() @@ -233,22 +220,6 @@ def extension(self) -> str: return self.filename.rsplit(".", 1)[-1].lower() return "" - def is_image(self) -> bool: - """Check if this file is an image based on content type.""" - return self.content_type.startswith("image/") - - def is_video(self) -> bool: - """Check if this file is a video based on content type.""" - return self.content_type.startswith("video/") - - def is_audio(self) -> bool: - """Check if this file is audio based on content type.""" - return self.content_type.startswith("audio/") - - def is_pdf(self) -> bool: - """Check if this file is a PDF.""" - return self.content_type == "application/pdf" - @property def size_display(self) -> str: """Human-readable file size.""" diff --git a/plain-s3/pyproject.toml b/plain-s3/pyproject.toml index 7d8e5fc35f..55d9215f53 100644 --- a/plain-s3/pyproject.toml +++ b/plain-s3/pyproject.toml @@ -14,7 +14,8 @@ dependencies = [ [dependency-groups] dev = [ - "pytest>=8.0.0", + "plain.pytest<1.0.0", + "types-boto3[s3]>=1.35.0", ] [tool.hatch.build.targets.wheel] diff --git a/plain-s3/tests/app/settings.py b/plain-s3/tests/app/settings.py new file mode 100644 index 0000000000..a43f59ad78 --- /dev/null +++ b/plain-s3/tests/app/settings.py @@ -0,0 +1,13 @@ +SECRET_KEY = "test" +URLS_ROUTER = "app.urls.AppRouter" +INSTALLED_PACKAGES = [ + "plain.models", + "plain.s3", +] + +# S3 settings (will be mocked in tests) +S3_ACCESS_KEY_ID = "test-key" +S3_SECRET_ACCESS_KEY = "test-secret" +S3_REGION = "us-east-1" +S3_BUCKET = "test-bucket" +S3_ENDPOINT_URL = "" diff --git a/plain-s3/tests/app/urls.py b/plain-s3/tests/app/urls.py new file mode 100644 index 0000000000..49af284112 --- /dev/null +++ b/plain-s3/tests/app/urls.py @@ -0,0 +1,6 @@ +from plain.urls import Router + + +class AppRouter(Router): + namespace = "" + urls = [] diff --git a/plain-s3/tests/test_s3.py b/plain-s3/tests/test_s3.py new file mode 100644 index 0000000000..f7030f03e0 --- /dev/null +++ b/plain-s3/tests/test_s3.py @@ -0,0 +1,234 @@ +from io import BytesIO +from unittest.mock import MagicMock, patch + +import pytest + +from plain.s3.models import S3File + + +@pytest.fixture +def mock_s3_client(): + """Mock the boto3 S3 client.""" + with patch("plain.s3.models.boto3.client") as mock_client: + client = MagicMock() + mock_client.return_value = client + yield client + + +@pytest.fixture +def s3_file(db): + """Create a test S3File record.""" + return S3File.query.create( + bucket="test-bucket", + key="abc123.pdf", + filename="document.pdf", + content_type="application/pdf", + byte_size=1024, + ) + + +class TestGenerateKey: + 
def test_generates_unique_keys(self): + key1 = S3File._generate_key("test.pdf") + key2 = S3File._generate_key("test.pdf") + assert key1 != key2 + + def test_preserves_extension(self): + key = S3File._generate_key("document.pdf") + assert key.endswith(".pdf") + + def test_lowercases_extension(self): + key = S3File._generate_key("document.PDF") + assert key.endswith(".pdf") + + def test_handles_no_extension(self): + key = S3File._generate_key("README") + assert "." not in key + + def test_applies_key_prefix(self): + key = S3File._generate_key("test.pdf", key_prefix="uploads/") + assert key.startswith("uploads/") + assert key.endswith(".pdf") + + +class TestUpload: + def test_uploads_file_to_s3(self, db, mock_s3_client): + file = BytesIO(b"test content") + file.name = "test.txt" + + S3File.upload(file=file) + + mock_s3_client.put_object.assert_called_once() + call_kwargs = mock_s3_client.put_object.call_args.kwargs + assert call_kwargs["Bucket"] == "test-bucket" + assert call_kwargs["Body"] == b"test content" + assert call_kwargs["ContentType"] == "text/plain" + + def test_creates_database_record(self, db, mock_s3_client): + file = BytesIO(b"test content") + file.name = "test.txt" + + s3_file = S3File.upload(file=file) + + assert s3_file.id is not None + assert s3_file.filename == "test.txt" + assert s3_file.byte_size == 12 + assert s3_file.content_type == "text/plain" + + def test_uses_custom_bucket(self, db, mock_s3_client): + file = BytesIO(b"test") + file.name = "test.txt" + + s3_file = S3File.upload(file=file, bucket="custom-bucket") + + call_kwargs = mock_s3_client.put_object.call_args.kwargs + assert call_kwargs["Bucket"] == "custom-bucket" + assert s3_file.bucket == "custom-bucket" + + def test_applies_acl(self, db, mock_s3_client): + file = BytesIO(b"test") + file.name = "test.txt" + + S3File.upload(file=file, acl="public-read") + + call_kwargs = mock_s3_client.put_object.call_args.kwargs + assert call_kwargs["ACL"] == "public-read" + + +class TestCreatePresignedUpload: + def test_creates_presigned_url(self, db, mock_s3_client): + mock_s3_client.generate_presigned_url.return_value = ( + "https://s3.example.com/presigned-url" + ) + + result = S3File.create_presigned_upload( + filename="document.pdf", + byte_size=1024, + ) + + assert "key" in result + assert result["upload_url"] == "https://s3.example.com/presigned-url" + mock_s3_client.generate_presigned_url.assert_called_once() + + def test_creates_database_record(self, db, mock_s3_client): + mock_s3_client.generate_presigned_url.return_value = ( + "https://s3.example.com/presigned-url" + ) + + result = S3File.create_presigned_upload( + filename="document.pdf", + byte_size=1024, + ) + + s3_file = S3File.query.get(key=result["key"]) + assert s3_file.filename == "document.pdf" + assert s3_file.byte_size == 1024 + + +class TestPresignedDownloadUrl: + def test_generates_download_url(self, s3_file, mock_s3_client): + mock_s3_client.generate_presigned_url.return_value = "https://signed-url" + + url = s3_file.presigned_download_url() + + assert url == "https://signed-url" + call_args = mock_s3_client.generate_presigned_url.call_args + params = call_args.kwargs["Params"] + assert ( + 'attachment; filename="document.pdf"' + in params["ResponseContentDisposition"] + ) + + def test_inline_disposition(self, s3_file, mock_s3_client): + mock_s3_client.generate_presigned_url.return_value = "https://signed-url" + + s3_file.presigned_download_url(inline=True) + + call_args = mock_s3_client.generate_presigned_url.call_args + params = 
call_args.kwargs["Params"] + assert 'inline; filename="document.pdf"' in params["ResponseContentDisposition"] + + def test_custom_expiration(self, s3_file, mock_s3_client): + mock_s3_client.generate_presigned_url.return_value = "https://signed-url" + + s3_file.presigned_download_url(expires_in=7200) + + call_args = mock_s3_client.generate_presigned_url.call_args + assert call_args.kwargs["ExpiresIn"] == 7200 + + +class TestExistsInStorage: + def test_returns_true_when_exists(self, s3_file, mock_s3_client): + mock_s3_client.head_object.return_value = {} + + assert s3_file.exists_in_storage() is True + + def test_returns_false_when_not_exists(self, s3_file, mock_s3_client): + error = MagicMock() + error.response = {"Error": {"Code": "404"}} + mock_s3_client.head_object.side_effect = mock_s3_client.exceptions.ClientError( + error.response, "HeadObject" + ) + mock_s3_client.exceptions.ClientError = type( + "ClientError", + (Exception,), + {"response": property(lambda self: error.response)}, + ) + + # Re-raise the mock exception + mock_s3_client.head_object.side_effect = mock_s3_client.exceptions.ClientError( + error.response, "HeadObject" + ) + + assert s3_file.exists_in_storage() is False + + +class TestDelete: + def test_deletes_from_s3_and_database(self, s3_file, mock_s3_client): + file_id = s3_file.id + + s3_file.delete() + + mock_s3_client.delete_object.assert_called_once_with( + Bucket="test-bucket", + Key="abc123.pdf", + ) + assert S3File.query.filter(id=file_id).count() == 0 + + +class TestProperties: + def test_extension(self, s3_file): + assert s3_file.extension == "pdf" + + def test_extension_no_dot(self, db): + file = S3File.query.create( + bucket="test", + key="abc", + filename="README", + content_type="text/plain", + byte_size=100, + ) + assert file.extension == "" + + def test_size_display_bytes(self, db): + file = S3File.query.create( + bucket="test", + key="abc", + filename="test", + content_type="text/plain", + byte_size=500, + ) + assert file.size_display == "500 B" + + def test_size_display_kb(self, db): + file = S3File.query.create( + bucket="test", + key="abc", + filename="test", + content_type="text/plain", + byte_size=2048, + ) + assert file.size_display == "2.0 KB" + + def test_str(self, s3_file): + assert str(s3_file) == "document.pdf" diff --git a/scripts/test b/scripts/test index 2c80c93d40..412c7ad2f4 100755 --- a/scripts/test +++ b/scripts/test @@ -22,6 +22,7 @@ plain-auth plain-api plain-elements plain-htmx +plain-s3 EOF )" diff --git a/scripts/type-validate b/scripts/type-validate index adbff32f5c..88d49c1365 100755 --- a/scripts/type-validate +++ b/scripts/type-validate @@ -45,6 +45,7 @@ FULLY_TYPED_PATHS = [ "plain-passwords", "plain-pytest", "plain-redirection", + "plain-s3", "plain-sessions", "plain-observer", "plain-support", diff --git a/uv.lock b/uv.lock index 378369ff60..a4bc85c891 100644 --- a/uv.lock +++ b/uv.lock @@ -80,6 +80,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1f/2a/e9275f40042f7a09915c4be86b092cb02dc4bd74e77ab8864f485d998af1/botocore-1.42.9-py3-none-any.whl", hash = "sha256:f99ba2ca34e24c4ebec150376c815646970753c032eb84f230874b2975a185a8", size = 14537810, upload-time = "2025-12-12T20:33:04.069Z" }, ] +[[package]] +name = "botocore-stubs" +version = "1.42.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "types-awscrt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9e/98/4ce007813d13ff107b8c5a39a85916f53bd58923dbb358882a6031d8f53a/botocore_stubs-1.42.9.tar.gz", hash = 
"sha256:92fdd2a1d911355166da3e30e9bb9b1803f7e2caec0d913f5fad3a920352ce6d", size = 42413, upload-time = "2025-12-12T21:24:37.054Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/9f/fbed79fa17af56cea7543b70d3f69c11426575a38b3421666411becefeef/botocore_stubs-1.42.9-py3-none-any.whl", hash = "sha256:9f8b652549d4f727aa69e09d462d18e54a1bd10f3dbb593da56d5d0aafe9756e", size = 66748, upload-time = "2025-12-12T21:24:35.832Z" }, +] + [[package]] name = "certifi" version = "2025.6.15" @@ -889,7 +901,8 @@ dependencies = [ [package.dev-dependencies] dev = [ - { name = "pytest" }, + { name = "plain-pytest" }, + { name = "types-boto3", extra = ["s3"] }, ] [package.metadata] @@ -900,7 +913,10 @@ requires-dist = [ ] [package.metadata.requires-dev] -dev = [{ name = "pytest", specifier = ">=8.0.0" }] +dev = [ + { name = "plain-pytest", editable = "plain-pytest" }, + { name = "types-boto3", extras = ["s3"], specifier = ">=1.35.0" }, +] [[package]] name = "plain-scan" @@ -1382,6 +1398,51 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2f/fc/1825f1f8c77d4d8fe75543882d9ad5934e568aa807e1a4cb7e999f701750/ty-0.0.1a33-py3-none-win_arm64.whl", hash = "sha256:d9937e9ddc7b383c6b1ab3065982fb2b8d0a2884ae5bd7b542e4208a807e326e", size = 9471473, upload-time = "2025-12-09T22:35:12.105Z" }, ] +[[package]] +name = "types-awscrt" +version = "0.30.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/30/1f/febd2df22e24f77b759db0dd9ecdd7f07f055e6a4dbbb699c5eb34b617ef/types_awscrt-0.30.0.tar.gz", hash = "sha256:362fd8f5eaebcfcd922cb9fd8274fb375df550319f78031ee3779eac0b9ecc79", size = 17761, upload-time = "2025-12-12T01:55:59.626Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5b/5f/15999051fca2949a67562c3f80fae2dd5d3404a3f97b326b614533843281/types_awscrt-0.30.0-py3-none-any.whl", hash = "sha256:8204126e01a00eaa4a746e7a0076538ca0e4e3f52408adec0ab9b471bb0bb64b", size = 42392, upload-time = "2025-12-12T01:55:58.194Z" }, +] + +[[package]] +name = "types-boto3" +version = "1.42.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore-stubs" }, + { name = "types-s3transfer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0c/8f/f079336d4058e28f124f0b0d7aacbc4a9d78cc69f6284395c525630007ef/types_boto3-1.42.9.tar.gz", hash = "sha256:48714b22b411eec0996be0836370732605ac5d78085afe79c3a0123ea9cd30f7", size = 101245, upload-time = "2025-12-12T20:36:35.721Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b0/fb/41f0649754f3916a5959dbec5ff3559b95a4f8f9c830569635945164f144/types_boto3-1.42.9-py3-none-any.whl", hash = "sha256:057595465ccc6275a1f1ddd8f2c031f490229f0f8c9d65c11fb49dfbac85b35f", size = 69666, upload-time = "2025-12-12T20:36:29.96Z" }, +] + +[package.optional-dependencies] +s3 = [ + { name = "types-boto3-s3" }, +] + +[[package]] +name = "types-boto3-s3" +version = "1.42.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/78/20/f436a130ec7c959abebea7164a4dc4780819dfb80b2e451511d2007885e5/types_boto3_s3-1.42.3.tar.gz", hash = "sha256:9c7810300b9944d4c00cb99bfb8685c300c93d75c00273ecb340c91bd6f426b4", size = 75905, upload-time = "2025-12-04T21:11:18.675Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/99/3aff9dc31c8264a693433a7229f892d2f8c5d27ae8d9d1dccedfb25cce4d/types_boto3_s3-1.42.3-py3-none-any.whl", hash = 
"sha256:a685a9808d158ca1a3642898062133532b0be064e372c27926d1a73a5533f47e", size = 82963, upload-time = "2025-12-04T21:11:16.51Z" }, +] + +[[package]] +name = "types-s3transfer" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/64/42689150509eb3e6e82b33ee3d89045de1592488842ddf23c56957786d05/types_s3transfer-0.16.0.tar.gz", hash = "sha256:b4636472024c5e2b62278c5b759661efeb52a81851cde5f092f24100b1ecb443", size = 13557, upload-time = "2025-12-08T08:13:09.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/27/e88220fe6274eccd3bdf95d9382918716d312f6f6cef6a46332d1ee2feff/types_s3transfer-0.16.0-py3-none-any.whl", hash = "sha256:1c0cd111ecf6e21437cb410f5cddb631bfb2263b77ad973e79b9c6d0cb24e0ef", size = 19247, upload-time = "2025-12-08T08:13:08.426Z" }, +] + [[package]] name = "typing-extensions" version = "4.14.0"