diff --git a/lib/callbacks.py b/lib/callbacks.py
index e07621b..ee601fe 100644
--- a/lib/callbacks.py
+++ b/lib/callbacks.py
@@ -24,8 +24,8 @@ def raise_http_exception(request: Request) -> Callable[[Exception | str], Awaitable[None]]:
     """Callback to raise an HTTPException with a specific status code."""
     async def _raise_http_exception(error: Exception | str) -> None:
-        message = str(error) if isinstance(error, Exception) else error
-        code = error.status_code if isinstance(error, HTTPException) else 400
-        raise StreamTerminated(f"{code}: {message}") from error
+        message = f"{type(error).__name__}: {error}" if isinstance(error, Exception) else str(error)
+        code = error.status_code if isinstance(error, HTTPException) else 502
+        raise StreamTerminated(f"{code} - {message}") from error
 
     return _raise_http_exception
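For reference, a minimal sketch (not part of the diff) of the message the updated callback now builds when the error is not an HTTPException, with 502 as the new fallback status code:

    # Illustrative only: mirrors the new error formatting for a non-HTTP exception.
    error = ValueError("bad chunk")
    message = f"{type(error).__name__}: {error}" if isinstance(error, Exception) else str(error)
    print(f"502 - {message}")  # prints: 502 - ValueError: bad chunk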
diff --git a/lib/metadata.py b/lib/metadata.py
index e829597..ba3dca5 100644
--- a/lib/metadata.py
+++ b/lib/metadata.py
@@ -1,10 +1,11 @@
 from starlette.datastructures import Headers
-from pydantic import BaseModel, Field, field_validator, ByteSize, StrictStr, ConfigDict, AliasChoices
-from typing import Optional, Self, Annotated
+from pydantic import BaseModel, ByteSize, ConfigDict, Field, field_validator, StrictStr
+from typing import Annotated, Optional, Self
+import re
 
 
 class FileMetadata(BaseModel):
-    name: StrictStr = Field(description="File name", min_length=2, max_length=255)
+    name: StrictStr = Field(description="File name", min_length=1, max_length=255)
     size: ByteSize = Field(description="Size in bytes", gt=0)
     type: StrictStr = Field(description="MIME type", default='application/octet-stream')
 
@@ -13,8 +14,19 @@ class FileMetadata(BaseModel):
     @field_validator('name')
     @classmethod
     def validate_name(cls, v: str) -> str:
-        safe_filename = str(v).translate(str.maketrans(':;|*@/\\', ' ')).strip()
-        return safe_filename.encode('latin-1', 'ignore').decode('utf-8', 'ignore')
+        if not v or not v.strip():
+            raise ValueError("Filename cannot be empty")
+
+        safe_filename = re.sub(r'[<>:"/\\|?*\x00-\x1f]', ' ', str(v)).strip()
+        if not safe_filename:
+            raise ValueError("Filename contains only invalid characters")
+
+        try:
+            safe_filename = safe_filename.encode('utf-8').decode('utf-8')
+        except UnicodeError:
+            safe_filename = safe_filename.encode('utf-8', 'ignore').decode('utf-8', 'ignore')
+
+        return safe_filename
 
     @classmethod
     def from_json(cls, data: str) -> Self:
@@ -29,7 +41,7 @@ def get_from_http_headers(cls, headers: Headers, filename: str) -> Self:
         return cls(
             name=filename,
             size=headers.get('content-length', '0'),
-            type=headers.get('content-type', '') or None
+            type=headers.get('content-type', '')  # Must be a string
         )
 
     @classmethod
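A short usage sketch (not part of the diff) of how the reworked validator is expected to behave, assuming FileMetadata is importable from lib/metadata.py; the file names below are hypothetical:

    # Illustrative only.
    from pydantic import ValidationError

    from lib.metadata import FileMetadata

    meta = FileMetadata(name='report:2024|final.pdf', size=1024)
    print(meta.name)   # 'report 2024 final.pdf': reserved characters replaced with spaces
    print(meta.type)   # 'application/octet-stream' (default MIME type)

    try:
        FileMetadata(name='***', size=1024)   # sanitizes down to an empty string
    except ValidationError as exc:
        print(exc)                            # wraps the ValueError raised by validate_name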
diff --git a/lib/store.py b/lib/store.py
index a62d7c4..53ffbe9 100644
--- a/lib/store.py
+++ b/lib/store.py
@@ -19,10 +19,11 @@ def __init__(self, transfer_id: str):
         self.transfer_id = transfer_id
         self.redis = self.get_redis()
 
-        self._k_queue = self.key('queue')
-        self._k_meta = self.key('metadata')
-        self._k_cleanup = f'cleanup:{transfer_id}'
-        self._k_receiver_connected = self.key('receiver_connected')
+        self._k_stream = self.key('stream')
+        self._k_metadata = self.key('metadata')
+        self._k_position = self.key('position')
+        self._k_progress = self.key('progress')
+        self._k_receiver_active = self.key('receiver_active')
 
     @classmethod
     def get_redis(cls) -> redis.Redis:
@@ -36,26 +37,25 @@ def key(self, name: str) -> str:
         """Get the Redis key for this transfer with the provided name."""
         return f'transfer:{self.transfer_id}:{name}'
 
-    ## Queue operations ##
+    async def add_chunk(self, data: bytes) -> None:
+        """Add chunk to stream."""
+        await self.redis.xadd(self._k_stream, {'data': data})
 
-    async def _wait_for_queue_space(self, maxsize: int) -> None:
-        while await self.redis.llen(self._k_queue) >= maxsize:
-            await anyio.sleep(0.5)
+    async def stream_chunks(self, read_timeout: float = 20.0):
+        """Stream chunks from last position."""
+        position = await self.redis.get(self._k_position)
+        last_id = position.decode() if position else '0'
 
-    async def put_in_queue(self, data: bytes, maxsize: int = 16, timeout: float = 20.0) -> None:
-        """Add data to the transfer queue with backpressure control."""
-        with anyio.fail_after(timeout):
-            await self._wait_for_queue_space(maxsize)
-            await self.redis.lpush(self._k_queue, data)
-
-    async def get_from_queue(self, timeout: float = 20.0) -> bytes:
-        """Get data from the transfer queue with timeout."""
-        result = await self.redis.brpop([self._k_queue], timeout=timeout)
-        if not result:
-            raise TimeoutError("Timeout waiting for data")
+        while True:
+            result = await self.redis.xread({self._k_stream: last_id}, block=int(read_timeout*1000))
+            if not result:
+                raise TimeoutError("Stream read timeout")
 
-        _, data = result
-        return data
+            _, messages = result[0]
+            for message_id, fields in messages:
+                last_id = message_id
+                await self.redis.set(self._k_position, last_id, ex=300)
+                yield fields[b'data']
 
     ## Event operations ##
 
@@ -99,80 +99,43 @@ async def wait_for_event(self, event_name: str, timeout: float = 300.0) -> None:
         await pubsub.unsubscribe(event_key)
         await pubsub.aclose()
 
-    ## Metadata operations ##
-
     async def set_metadata(self, metadata: str) -> None:
         """Store transfer metadata."""
-        challenge = random.randbytes(8)
-        await self.redis.set(self._k_meta, challenge, nx=True)
-        if await self.redis.get(self._k_meta) == challenge:
-            await self.redis.set(self._k_meta, metadata, ex=300)
-        else:
-            raise KeyError("Metadata already set for this transfer.")
+        if not await self.redis.set(self._k_metadata, metadata, nx=True, ex=300):
+            raise KeyError("Transfer already exists")
 
     async def get_metadata(self) -> str | None:
-        """Retrieve transfer metadata."""
-        return await self.redis.get(self._k_meta)
-
-    ## Transfer state operations ##
-
-    async def set_receiver_connected(self) -> bool:
-        """
-        Mark that a receiver has connected for this transfer.
-        Returns True if the flag was set, False if it was already created.
-        """
-        return bool(await self.redis.set(self._k_receiver_connected, '1', ex=300, nx=True))
-
-    async def is_receiver_connected(self) -> bool:
-        """Check if a receiver has already connected."""
-        return await self.redis.exists(self._k_receiver_connected) > 0
-
-    async def set_completed(self) -> None:
-        """Mark the transfer as completed."""
-        await self.redis.set(f'completed:{self.transfer_id}', '1', ex=300, nx=True)
-
-    async def is_completed(self) -> bool:
-        """Check if the transfer is marked as completed."""
-        return await self.redis.exists(f'completed:{self.transfer_id}') > 0
-
-    async def set_interrupted(self) -> None:
-        """Mark the transfer as interrupted."""
-        await self.redis.set(f'interrupt:{self.transfer_id}', '1', ex=300, nx=True)
-        await self.redis.ltrim(self._k_queue, 0, 0)
-
-    async def is_interrupted(self) -> bool:
-        """Check if the transfer was interrupted."""
-        return await self.redis.exists(f'interrupt:{self.transfer_id}') > 0
-
-    ## Cleanup operations ##
-
-    async def cleanup_started(self) -> bool:
-        """
-        Check if cleanup has already been initiated for this transfer.
-        This uses a set/get pattern with challenge to avoid race conditions.
-        """
-        challenge = random.randbytes(8)
-        await self.redis.set(self._k_cleanup, challenge, ex=60, nx=True)
-        if await self.redis.get(self._k_cleanup) == challenge:
-            return False
-        return True
-
-    async def cleanup(self) -> int:
-        """Remove all keys related to this transfer."""
-        if await self.cleanup_started():
-            return 0
+        """Get transfer metadata."""
+        return await self.redis.get(self._k_metadata)
 
-        pattern = self.key('*')
-        keys_to_delete = set()
+    async def save_progress(self, bytes_downloaded: int) -> None:
+        """Save download progress."""
+        await self.redis.set(self._k_progress, str(bytes_downloaded), ex=300)
+
+    async def get_progress(self) -> int:
+        """Get download progress."""
+        progress = await self.redis.get(self._k_progress)
+        return int(progress) if progress else 0
 
+    async def set_receiver_active(self) -> None:
+        """Mark receiver as actively downloading with TTL."""
+        await self.redis.set(self._k_receiver_active, '1', ex=5)
+
+    async def is_receiver_active(self) -> bool:
+        """Check if receiver is actively downloading."""
+        return bool(await self.redis.exists(self._k_receiver_active))
+
+    async def cleanup(self) -> None:
+        """Delete all transfer data."""
+        pattern = self.key('*')
         cursor = 0
+        keys = []
+
         while True:
-            cursor, keys = await self.redis.scan(cursor, match=pattern)
-            keys_to_delete |= set(keys)
+            cursor, batch = await self.redis.scan(cursor, match=pattern)
+            keys.extend(batch)
             if cursor == 0:
                 break
 
-        if keys_to_delete:
-            self.debug(f"- Cleaning up {len(keys_to_delete)} keys")
-            return await self.redis.delete(*keys_to_delete)
-        return 0
+        if keys:
+            await self.redis.delete(*keys)
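A rough, self-contained sketch of the stream-based protocol these Store methods implement (not part of the diff; the key names and chunk values are hypothetical, and a local Redis with redis-py's asyncio client is assumed). The sender appends chunks with XADD, the receiver reads them with XREAD starting from the last checkpointed entry ID, and that checkpoint is what lets a reconnecting receiver resume instead of starting over:

    # Illustrative only.
    import asyncio
    import redis.asyncio as redis

    async def main() -> None:
        r = redis.Redis()
        stream_key, position_key = 'transfer:demo:stream', 'transfer:demo:position'

        # Sender side: append chunks in order.
        for chunk in (b'first chunk', b'second chunk'):
            await r.xadd(stream_key, {'data': chunk})

        # Receiver side: resume from the last checkpointed entry ID ('0' means from the start).
        position = await r.get(position_key)
        last_id = position.decode() if position else '0'
        result = await r.xread({stream_key: last_id}, block=1000)
        for _stream, messages in result:
            for message_id, fields in messages:
                await r.set(position_key, message_id, ex=300)  # checkpoint for a later resume
                print(message_id, fields[b'data'])

        await r.aclose()

    asyncio.run(main())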
diff --git a/lib/transfer.py b/lib/transfer.py
index d8af9d8..e6db758 100644
--- a/lib/transfer.py
+++ b/lib/transfer.py
@@ -37,12 +37,14 @@ def __init__(self, uid: str, file: FileMetadata):
 
     @classmethod
     async def create(cls, uid: str, file: FileMetadata):
+        """Create a new transfer using the provided identifier and file metadata."""
         transfer = cls(uid, file)
        await transfer.store.set_metadata(file.to_json())
         return transfer
 
     @classmethod
     async def get(cls, uid: str):
+        """Fetch a transfer from the store using the provided identifier."""
         store = Store(uid)
         metadata_json = await store.get_metadata()
         if not metadata_json:
@@ -58,122 +60,89 @@ def _format_uid(uid: str):
 
     def get_file_info(self):
         return self.file.name, self.file.size, self.file.type
 
-    async def wait_for_event(self, event_name: str, timeout: float = 300.0):
-        await self.store.wait_for_event(event_name, timeout)
-
-    async def set_client_connected(self):
-        self.debug(f"▼ Notifying sender that receiver is connected...")
-        await self.store.set_event('client_connected')
-
-    async def wait_for_client_connected(self):
-        self.info(f"△ Waiting for client to connect...")
-        await self.wait_for_event('client_connected')
-        self.debug(f"△ Received client connected notification.")
-
-    async def is_receiver_connected(self) -> bool:
-        return await self.store.is_receiver_connected()
-
-    async def set_receiver_connected(self) -> bool:
-        return await self.store.set_receiver_connected()
-
-    async def is_interrupted(self) -> bool:
-        return await self.store.is_interrupted()
-
-    async def set_interrupted(self):
-        await self.store.set_interrupted()
-
-    async def is_completed(self) -> bool:
-        return await self.store.is_completed()
-
-    async def set_completed(self):
-        await self.store.set_completed()
-
-    async def collect_upload(self, stream: AsyncIterator[bytes], on_error: Callable[[Exception | str], Awaitable[None]]) -> None:
+    @property
+    async def receiver_connected(self) -> bool:
+        """Check if a receiver is actively downloading."""
+        return await self.store.is_receiver_active()
+
+    async def notify_receiver_connected(self):
+        """Notify sender that receiver connected."""
+        await self.store.set_event('receiver_connected')
+
+    async def wait_for_receiver(self):
+        """Wait for receiver to connect."""
+        self.info(f"△ Waiting for receiver...")
+        await self.store.wait_for_event('receiver_connected')
+        self.debug(f"△ Receiver connected")
+
+    async def consume_upload(self, stream: AsyncIterator[bytes], on_error: Callable[[Exception | str], Awaitable[None]]) -> None:
+        """Consume upload stream and add chunks to Redis stream."""
         self.bytes_uploaded = 0
         try:
             async for chunk in stream:
                 if not chunk:
-                    self.debug(f"△ Empty chunk received, ending upload.")
                     break
 
-                if await self.is_interrupted():
-                    raise TransferError("Transfer was interrupted by the receiver.", propagate=False)
-
-                await self.store.put_in_queue(chunk)
+                await self.store.add_chunk(chunk)
                 self.bytes_uploaded += len(chunk)
 
             if self.bytes_uploaded < self.file.size:
-                raise TransferError("Received less data than expected.", propagate=True)
+                raise TransferError("Incomplete upload", propagate=True)
 
-            self.debug(f"△ End of upload, sending done marker.")
-            await self.store.put_in_queue(self.DONE_FLAG)
+            await self.store.add_chunk(self.DONE_FLAG)
+            self.debug(f"△ All data chunks uploaded: {self.bytes_uploaded} bytes")
 
-        except (ClientDisconnect, WebSocketDisconnect) as e:
-            self.error(f"△ Unexpected upload error: {e}")
-            await self.store.put_in_queue(self.DEAD_FLAG)
+        except (ClientDisconnect, WebSocketDisconnect):
+            self.error(f"△ Sender disconnected")
+            await self.store.add_chunk(self.DEAD_FLAG)
 
-        except TimeoutError as e:
-            self.warning(f"△ Timeout during upload.", exc_info=True)
-            await on_error("Timeout during upload.")
+        except TimeoutError:
+            self.warning(f"△ Upload timeout")
+            await on_error("Upload timeout")
 
         except TransferError as e:
-            self.warning(f"△ Upload error: {e}")
             if e.propagate:
-                await self.store.put_in_queue(self.DEAD_FLAG)
-            else:
-                await on_error(e)
+                await self.store.add_chunk(self.DEAD_FLAG)
+            await on_error(e)
 
-        finally:
-            await anyio.sleep(1.0)
+    async def produce_download(self, on_error: Callable[[Exception | str], Awaitable[None]]) -> AsyncIterator[bytes]:
+        """Produce download stream from Redis stream."""
+        self.bytes_downloaded = await self.store.get_progress()
 
-    async def supply_download(self, on_error: Callable[[Exception | str], Awaitable[None]]) -> AsyncIterator[bytes]:
-        self.bytes_downloaded = 0
+        if self.bytes_downloaded > 0:
+            self.info(f"▼ Resuming from byte {self.bytes_downloaded}")
 
         try:
-            while True:
-                chunk = await self.store.get_from_queue()
+            await self.store.set_receiver_active()
+            async for chunk in self.store.stream_chunks():
                 if chunk == self.DEAD_FLAG:
-                    raise TransferError("Sender disconnected.")
+                    raise TransferError("Sender disconnected")
 
-                if chunk == self.DONE_FLAG and self.bytes_downloaded < self.file.size:
-                    raise TransferError("Received less data than expected.")
-
-                elif chunk == self.DONE_FLAG:
-                    self.debug(f"▼ Done marker received, ending download.")
+                if chunk == self.DONE_FLAG:
+                    if self.bytes_downloaded >= self.file.size:
+                        self.debug(f"▼ All data chunks downloaded: {self.bytes_downloaded} bytes")
                     break
 
                 self.bytes_downloaded += len(chunk)
+                await self.store.save_progress(self.bytes_downloaded)
+                await self.store.set_receiver_active()
                 yield chunk
 
-        except Exception as e:
-            self.error(f"▼ Unexpected download error!", exc_info=True)
-            self.debug("Debug info:", stack_info=True)
-            await on_error(e)
-
         except TransferError as e:
-            self.warning(f"▼ Download error")
+            await on_error(e)
+
+        except Exception as e:
+            self.error(f"▼ Download error", exc_info=True)
             await on_error(e)
 
     async def cleanup(self):
-        try:
-            with anyio.fail_after(30.0):
-                await self.store.cleanup()
-        except TimeoutError:
-            self.warning(f"- Cleanup timed out.")
-            pass
+        """Clean up transfer data."""
+        await self.store.cleanup()
 
     async def finalize_download(self):
-        # self.debug("▼ Finalizing download...")
-        if self.bytes_downloaded < self.file.size and not await self.is_interrupted():
-            self.warning("▼ Client disconnected before download was complete.")
-            await self.set_interrupted()
-
-        await self.cleanup()
-        # self.debug("▼ Finalizing download...")
-        if self.bytes_downloaded < self.file.size and not await self.is_interrupted():
-            self.warning("▼ Client disconnected before download was complete.")
-            await self.set_interrupted()
-
-        await self.cleanup()
+        """Finalize download and cleanup if complete."""
+        if self.bytes_downloaded < self.file.size:
+            self.info(f"▼ Download paused at {self.bytes_downloaded}/{self.file.size} bytes")
+        else:
+            await self.cleanup()
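A condensed sketch of how the new download side is presumably wired into a receiving endpoint (not from this PR; it assumes the class shown above is named Transfer and that on_error is the callback built in lib/callbacks.py). The per-chunk save_progress and set_receiver_active calls are what let a dropped connection resume from the stored offset rather than restart:

    # Hypothetical receiver-side wiring; only the Transfer methods come from lib/transfer.py.
    from lib.transfer import Transfer

    async def stream_file(uid: str, on_error):
        transfer = await Transfer.get(uid)           # loads the stored metadata for this transfer
        await transfer.notify_receiver_connected()   # unblocks the sender waiting in wait_for_receiver()
        try:
            async for chunk in transfer.produce_download(on_error):
                yield chunk                          # e.g. fed into a streaming HTTP response
        finally:
            await transfer.finalize_download()       # cleans up only once the whole file was delivered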
diff --git a/static/css/style.css b/static/css/style.css
index 08ab353..1664621 100644
--- a/static/css/style.css
+++ b/static/css/style.css
@@ -430,13 +430,54 @@ code.inline-highlight {
 }
 
 /* Responsive Design */
+
+/* Tablet and small desktop */
+@media (max-width: 768px) {
+    .container {
+        max-width: 100%;
+        padding: var(--space-md);
+    }
+
+    .header {
+        padding: var(--space-xl) 0;
+    }
+
+    .code-section {
+        padding: var(--space-md);
+    }
+
+    .info-list {
+        padding: var(--space-md);
+    }
+}
+
+/* Mobile devices */
 @media (max-width: 600px) {
+    :root {
+        --space-xs: 0.25rem;
+        --space-sm: 0.375rem;
+        --space-md: 0.75rem;
+        --space-lg: 1rem;
+        --space-xl: 1.5rem;
+        --space-2xl: 2rem;
+    }
+
     .container {
         padding: var(--space-sm);
     }
 
+    .header {
+        padding: var(--space-lg) 0;
+        margin-bottom: var(--space-lg);
+    }
+
     .header h1 {
-        font-size: 2rem;
+        font-size: 1.75rem;
+        margin-bottom: var(--space-xs);
+    }
+
+    .header p {
+        font-size: 0.95rem;
     }
 
     .beta-badge {
@@ -444,13 +485,223 @@ code.inline-highlight {
         display: inline-block;
         margin-left: var(--space-sm);
         margin-top: var(--space-xs);
+        font-size: 0.65rem;
+        padding: 1px 4px;
+    }
+
+    .beta-warning {
+        padding: var(--space-sm);
+        margin-top: var(--space-md);
+        font-size: 0.85rem;
+    }
+
+    .section {
+        margin-bottom: var(--space-xl);
+    }
+
+    .section h2 {
+        font-size: 1.1rem;
+        margin-bottom: var(--space-sm);
+    }
+
+    .section p {
+        font-size: 0.95rem;
+        margin-bottom: var(--space-sm);
     }
 
+    /* Mobile-optimized transfer area */
     .transfer-container {
-        min-height: 160px;
+        min-height: 180px;
+        margin-bottom: var(--space-lg);
+    }
+
+    .drop-area {
+        border-radius: var(--radius-md);
+        padding: var(--space-lg);
+        min-height: 180px;
+        touch-action: none;
+    }
+
+    .drop-area p {
+        font-size: 1rem;
+        padding: var(--space-md);
+        line-height: 1.4;
+    }
+
+    /* Make file input area larger for mobile touch */
+    .drop-area::after {
+        content: '';
+        position: absolute;
+        top: -10px;
+        left: -10px;
+        right: -10px;
+        bottom: -10px;
+        z-index: -1;
+    }
+
+    .share-link {
+        padding: var(--space-md);
+    }
+
+    .share-link label {
+        font-size: 0.95rem;
+    }
+
+    .share-link input {
+        padding: var(--space-md);
+        font-size: 0.85rem;
+        max-width: 100%;
+    }
+
+    /* Progress bar mobile optimization */
+    .upload-progress {
+        margin-top: var(--space-md);
+    }
+
+    .progress-info {
+        margin-bottom: var(--space-xs);
+    }
+
+    .status-text,
+    .progress-text {
+        font-size: 0.85rem;
+    }
+
+    .progress-bar {
+        height: 10px;
+    }
+
+    /* Download page mobile */
+    .download-container {
+        padding: var(--space-md);
+    }
+
+    .file-info p {
+        font-size: 0.95rem;
+        margin-bottom: var(--space-xs);
+    }
+
+    .button-download {
+        padding: var(--space-md) var(--space-lg);
+        font-size: 1rem;
+        width: 100%;
+        display: block;
+        text-align: center;
+        touch-action: manipulation;
+    }
+
+    /* Code sections mobile - hide cURL section */
+    .code-section {
+        display: none;
+    }
+
+    .code-section h3 {
+        font-size: 1rem;
+        margin-bottom: var(--space-sm);
+    }
+
+    .code-section p {
+        font-size: 0.9rem;
     }
 
     .code-block {
-        font-size: 0.8rem;
+        padding: var(--space-sm);
+        font-size: 0.75rem;
+        overflow-x: auto;
+        -webkit-overflow-scrolling: touch;
+    }
+
+    .code-block code {
+        font-size: 0.75rem;
+    }
+
+    code.inline-highlight {
+        font-size: 0.85rem;
+        padding: 0.05rem 0.3rem;
+    }
+
+    /* Info list mobile */
+    .info-list {
+        padding: var(--space-md);
+    }
+
+    .info-list h3 {
+        font-size: 1rem;
+        margin-bottom: var(--space-sm);
+    }
+
+    .info-list li {
+        padding-left: var(--space-md);
+        margin-bottom: var(--space-sm);
+        font-size: 0.9rem;
+    }
+
+    /* Footer mobile */
+    .footer {
+        padding: var(--space-lg) 0;
+        font-size: 0.85rem;
+    }
+}
+
+/* Extra small mobile devices */
+@media (max-width: 375px) {
+    .header h1 {
+        font-size: 1.5rem;
+    }
+
+    .beta-badge {
+        display: block;
+        margin-left: 0;
+        margin-top: var(--space-sm);
+        width: fit-content;
+        margin-inline: auto;
+    }
+
+    .code-block {
+        font-size: 0.7rem;
+    }
+}
+
+/* Mobile landscape orientation */
+@media (max-width: 900px) and (orientation: landscape) {
+    .header {
+        padding: var(--space-md) 0;
+    }
+
+    .header h1 {
+        font-size: 1.5rem;
+    }
+
+    .transfer-container {
+        min-height: 140px;
+    }
+
+    .drop-area {
+        min-height: 140px;
+    }
+}
+
+/* Touch device optimizations */
+@media (pointer: coarse) {
+    .drop-area {
+        cursor: default;
+    }
+
+    .button-download,
+    .drop-area {
+        -webkit-tap-highlight-color: transparent;
+        user-select: none;
+    }
+
+    a {
+        -webkit-tap-highlight-color: rgba(31, 111, 235, 0.2);
+    }
+}
+
+/* High DPI screens */
+@media (-webkit-min-device-pixel-ratio: 2), (min-resolution: 192dpi) {
+    .progress-bar {
+        transform: translateZ(0);
+        will-change: width;
     }
 }
diff --git a/static/index.html b/static/index.html
index 929c6fd..68f4837 100644
--- a/static/index.html
+++ b/static/index.html
@@ -19,7 +19,7 @@
     Direct file transfer without intermediate storage
     Drag and drop or select a file to generate a download link.
-    Drag and drop your file here, or click to select a file
+    Drag and drop your file here, or tap to select a file
@@ -95,6 +95,6 @@ Important Information
-    You can use the curl command to transfer from your terminal. 1 GiB maximum.
+    You can use the curl command to transfer from your terminal. 100 MiB maximum.
     # Send