From cdc0908a324bdd411fd7f1edda944af29f49deca Mon Sep 17 00:00:00 2001 From: Jvillegasd Date: Wed, 4 Mar 2026 09:50:46 -0500 Subject: [PATCH 01/27] feat(options): add About section with version and author info MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Adds an About page to the options sidebar showing the extension icon, dynamic version (from chrome.runtime.getManifest()), description, and a "Made with ♥ by jvillegasd" link to the GitHub profile. Co-Authored-By: Claude Sonnet 4.6 --- src/options/options.html | 30 ++++++++++++++++++++++++++++++ src/options/options.ts | 12 +++++++++++- 2 files changed, 41 insertions(+), 1 deletion(-) diff --git a/src/options/options.html b/src/options/options.html index 808d231..d160742 100644 --- a/src/options/options.html +++ b/src/options/options.html @@ -909,6 +909,15 @@ Advanced + + + +
+
+

About

+
+
+ Media Bridge +
+

Media Bridge

+

Version

+
+

+ Download HLS, DASH, and direct media streams directly from your browser. +

+

+ Made with ♥ by + jvillegasd +

+
+
+ diff --git a/src/options/options.ts b/src/options/options.ts index 8751b16..b33cda5 100644 --- a/src/options/options.ts +++ b/src/options/options.ts @@ -76,7 +76,7 @@ function init(): void { // Check URL hash to navigate directly to a view (e.g. opened via history button) const hash = location.hash.slice(1); - const validViews = new Set(["history", "cloud-providers", "recording", "notifications", "advanced"]); + const validViews = new Set(["history", "cloud-providers", "recording", "notifications", "advanced", "about"]); switchView(validViews.has(hash) ? hash : "download-settings"); } @@ -120,6 +120,16 @@ function switchView(viewId: string): void { if (viewId === "recording") loadRecordingSettings(); if (viewId === "notifications") loadNotificationSettings(); if (viewId === "advanced") loadAdvancedSettings(); + if (viewId === "about") loadAboutSection(); +} + +// ───────────────────────────────────────────── +// Section: About +// ───────────────────────────────────────────── + +function loadAboutSection(): void { + const el = document.getElementById("about-version"); + if (el) el.textContent = chrome.runtime.getManifest().version; } // ───────────────────────────────────────────── From 1298940db1d994b772fd70f2f5e53b930d9b3df2 Mon Sep 17 00:00:00 2001 From: Jvillegasd Date: Wed, 4 Mar 2026 11:48:22 -0500 Subject: [PATCH 02/27] feat(cloud): implement S3 and Google Drive upload support - Add S3Client with SigV4 signing (single-part and multipart upload) - Rewrite UploadManager to support both Google Drive and S3 concurrently - Wire onBlobReady hook through download handlers to upload before blob revocation - Add UPLOADING stage, cloud action buttons, and deferred upload via file picker in popup - Add autoUpload toggles, provider presets, test connection, and CORS helper in S3 options - Fix S3 enable checkbox show/hide to match Google Drive behaviour - Add tabs to Advanced options view (Retry & Reliability, Detection Caches, Performance) Co-Authored-By: Claude 
Sonnet 4.6 --- src/core/cloud/s3-client.ts | 415 ++++++++++++++++++ src/core/cloud/upload-manager.ts | 188 +++++--- src/core/downloader/base-playlist-handler.ts | 6 + src/core/downloader/base-recording-handler.ts | 1 + .../downloader/dash/dash-download-handler.ts | 1 + src/core/downloader/download-manager.ts | 5 + .../downloader/hls/hls-download-handler.ts | 1 + .../downloader/m3u8/m3u8-download-handler.ts | 1 + src/core/storage/settings.ts | 4 + src/core/types/index.ts | 9 + src/core/utils/blob-utils.ts | 27 +- src/options/options.html | 254 ++++++----- src/options/options.ts | 162 ++++++- src/popup/download-actions.ts | 66 ++- src/popup/popup.html | 56 +++ src/popup/popup.ts | 22 +- src/popup/render-downloads.ts | 62 ++- src/service-worker.ts | 172 ++++++++ 18 files changed, 1254 insertions(+), 198 deletions(-) create mode 100644 src/core/cloud/s3-client.ts diff --git a/src/core/cloud/s3-client.ts b/src/core/cloud/s3-client.ts new file mode 100644 index 0000000..d0b8813 --- /dev/null +++ b/src/core/cloud/s3-client.ts @@ -0,0 +1,415 @@ +/** + * S3-compatible upload client with SigV4 request signing. + * Works with AWS S3, Cloudflare R2, Backblaze B2, Wasabi, MinIO, and any + * S3-compatible provider that accepts path-style or virtual-hosted-style URLs. + * + * Uses Web Crypto API — no external dependencies. + */ + +import { UploadError } from "../utils/errors"; +import { logger } from "../utils/logger"; + +export interface S3Config { + bucket: string; + region: string; + accessKeyId: string; + secretAccessKey: string; + /** Custom endpoint for S3-compatible providers. Defaults to AWS S3 virtual-hosted URL. */ + endpoint?: string; + /** Key prefix prepended to all uploaded object names. */ + prefix?: string; +} + +export interface S3UploadResult { + /** The public URL of the uploaded object (path-style). 
*/
+  url: string;
+  key: string;
+}
+
+// Multipart threshold: 100 MB
+const MULTIPART_THRESHOLD = 100 * 1024 * 1024;
+// Part size for multipart: 10 MB
+const PART_SIZE = 10 * 1024 * 1024;
+
+export class S3Client {
+  private readonly config: S3Config;
+
+  constructor(config: S3Config) {
+    this.config = config;
+  }
+
+  async uploadBlob(
+    blob: Blob,
+    filename: string,
+    onProgress?: (uploadedBytes: number, totalBytes: number) => void,
+  ): Promise<S3UploadResult> {
+    const key = this.config.prefix ? `${this.config.prefix.replace(/\/$/, "")}/${filename}` : filename;
+
+    if (blob.size >= MULTIPART_THRESHOLD) {
+      return this.multipartUpload(blob, key, onProgress);
+    }
+    return this.putUpload(blob, key, onProgress);
+  }
+
+  /** Single-part PUT upload for files < 100 MB */
+  private async putUpload(
+    blob: Blob,
+    key: string,
+    onProgress?: (uploaded: number, total: number) => void,
+  ): Promise<S3UploadResult> {
+    const url = this.objectUrl(key);
+    const buffer = await blob.arrayBuffer();
+    const payloadHash = await sha256hex(buffer);
+
+    const now = new Date();
+    const datetime = isoDatetime(now);
+    const date = datetime.slice(0, 8);
+
+    const headers: Record<string, string> = {
+      "Content-Type": blob.type || "video/mp4",
+      "Content-Length": String(blob.size),
+      "x-amz-content-sha256": payloadHash,
+      "x-amz-date": datetime,
+      "Host": new URL(url).host,
+    };
+
+    const authorization = await this.buildAuthorization(
+      "PUT", new URL(url), headers, payloadHash, datetime, date,
+    );
+    headers["Authorization"] = authorization;
+    delete headers["Host"]; // fetch adds it automatically
+
+    const response = await fetch(url, {
+      method: "PUT",
+      headers,
+      body: buffer,
+    });
+
+    if (!response.ok) {
+      const text = await response.text().catch(() => response.statusText);
+      throw new UploadError(`S3 PUT failed (${response.status}): ${text}`, response.status);
+    }
+
+    onProgress?.(blob.size, blob.size);
+    logger.info(`S3 upload complete: ${key}`);
+    return { url, key };
+  }
+
+  /** Multipart upload for files >= 100 MB */
+  
private async multipartUpload( + blob: Blob, + key: string, + onProgress?: (uploaded: number, total: number) => void, + ): Promise { + // 1. Initiate + const uploadId = await this.initiateMultipart(key); + const parts: Array<{ PartNumber: number; ETag: string }> = []; + let uploadedBytes = 0; + + try { + const totalParts = Math.ceil(blob.size / PART_SIZE); + + for (let i = 0; i < totalParts; i++) { + const start = i * PART_SIZE; + const end = Math.min(start + PART_SIZE, blob.size); + const partBlob = blob.slice(start, end); + const partNumber = i + 1; + + const etag = await this.uploadPart(key, uploadId, partNumber, partBlob); + parts.push({ PartNumber: partNumber, ETag: etag }); + + uploadedBytes += partBlob.size; + onProgress?.(uploadedBytes, blob.size); + } + + // 2. Complete + await this.completeMultipart(key, uploadId, parts); + } catch (err) { + // Abort on failure to avoid orphaned multipart uploads + await this.abortMultipart(key, uploadId).catch((e) => + logger.warn("Failed to abort multipart upload:", e), + ); + throw err; + } + + const url = this.objectUrl(key); + logger.info(`S3 multipart upload complete: ${key}`); + return { url, key }; + } + + private async initiateMultipart(key: string): Promise { + const url = `${this.objectUrl(key)}?uploads`; + const now = new Date(); + const datetime = isoDatetime(now); + const date = datetime.slice(0, 8); + const payloadHash = await sha256hex(""); + + const headers: Record = { + "Content-Type": "video/mp4", + "x-amz-content-sha256": payloadHash, + "x-amz-date": datetime, + "Host": new URL(url).host, + }; + const authorization = await this.buildAuthorization( + "POST", new URL(url), headers, payloadHash, datetime, date, + ); + headers["Authorization"] = authorization; + delete headers["Host"]; + + const response = await fetch(url, { method: "POST", headers }); + if (!response.ok) { + throw new UploadError(`Failed to initiate multipart upload: ${response.statusText}`, response.status); + } + + const xml = await 
response.text(); + const match = xml.match(/(.+?)<\/UploadId>/); + if (!match?.[1]) throw new UploadError("No UploadId in response"); + return match[1]; + } + + private async uploadPart( + key: string, + uploadId: string, + partNumber: number, + blob: Blob, + ): Promise { + const baseUrl = this.objectUrl(key); + const url = `${baseUrl}?partNumber=${partNumber}&uploadId=${encodeURIComponent(uploadId)}`; + const buffer = await blob.arrayBuffer(); + const payloadHash = await sha256hex(buffer); + const now = new Date(); + const datetime = isoDatetime(now); + const date = datetime.slice(0, 8); + + const headers: Record = { + "Content-Length": String(blob.size), + "x-amz-content-sha256": payloadHash, + "x-amz-date": datetime, + "Host": new URL(url).host, + }; + const authorization = await this.buildAuthorization( + "PUT", new URL(url), headers, payloadHash, datetime, date, + ); + headers["Authorization"] = authorization; + delete headers["Host"]; + + const response = await fetch(url, { method: "PUT", headers, body: buffer }); + if (!response.ok) { + throw new UploadError(`Part ${partNumber} upload failed: ${response.statusText}`, response.status); + } + + const etag = response.headers.get("ETag") ?? 
""; + return etag.replace(/"/g, ""); + } + + private async completeMultipart( + key: string, + uploadId: string, + parts: Array<{ PartNumber: number; ETag: string }>, + ): Promise { + const url = `${this.objectUrl(key)}?uploadId=${encodeURIComponent(uploadId)}`; + const body = [ + "", + ...parts.map( + (p) => `${p.PartNumber}${p.ETag}`, + ), + "", + ].join(""); + + const now = new Date(); + const datetime = isoDatetime(now); + const date = datetime.slice(0, 8); + const payloadHash = await sha256hex(body); + + const headers: Record = { + "Content-Type": "application/xml", + "x-amz-content-sha256": payloadHash, + "x-amz-date": datetime, + "Host": new URL(url).host, + }; + const authorization = await this.buildAuthorization( + "POST", new URL(url), headers, payloadHash, datetime, date, + ); + headers["Authorization"] = authorization; + delete headers["Host"]; + + const response = await fetch(url, { method: "POST", headers, body }); + if (!response.ok) { + const text = await response.text().catch(() => response.statusText); + throw new UploadError(`Failed to complete multipart upload: ${text}`, response.status); + } + } + + private async abortMultipart(key: string, uploadId: string): Promise { + const url = `${this.objectUrl(key)}?uploadId=${encodeURIComponent(uploadId)}`; + const now = new Date(); + const datetime = isoDatetime(now); + const date = datetime.slice(0, 8); + const payloadHash = await sha256hex(""); + + const headers: Record = { + "x-amz-content-sha256": payloadHash, + "x-amz-date": datetime, + "Host": new URL(url).host, + }; + const authorization = await this.buildAuthorization( + "DELETE", new URL(url), headers, payloadHash, datetime, date, + ); + headers["Authorization"] = authorization; + delete headers["Host"]; + + await fetch(url, { method: "DELETE", headers }); + } + + /** Build SigV4 Authorization header value */ + private async buildAuthorization( + method: string, + url: URL, + headers: Record, + payloadHash: string, + datetime: string, + date: 
string, + ): Promise { + const region = this.config.region; + const service = "s3"; + + // Sorted canonical headers (lowercase names) + const signedHeaderNames = Object.keys(headers) + .map((k) => k.toLowerCase()) + .sort(); + + const canonicalHeaders = signedHeaderNames + .map((name) => { + const value = headers[Object.keys(headers).find((k) => k.toLowerCase() === name)!]; + return `${name}:${value.trim()}`; + }) + .join("\n") + "\n"; + + const signedHeaders = signedHeaderNames.join(";"); + + // Canonical query string (sorted) + const queryParams = Array.from(url.searchParams.entries()) + .sort(([a], [b]) => a.localeCompare(b)) + .map(([k, v]) => `${encodeURIComponent(k)}=${encodeURIComponent(v)}`) + .join("&"); + + const canonicalPath = url.pathname; + + const canonicalRequest = [ + method, + canonicalPath, + queryParams, + canonicalHeaders, + signedHeaders, + payloadHash, + ].join("\n"); + + const credentialScope = `${date}/${region}/${service}/aws4_request`; + const stringToSign = [ + "AWS4-HMAC-SHA256", + datetime, + credentialScope, + await sha256hex(canonicalRequest), + ].join("\n"); + + const signingKey = await deriveSigningKey(this.config.secretAccessKey, date, region, service); + const signature = buf2hex(await hmacSha256(signingKey, stringToSign)); + + return ( + `AWS4-HMAC-SHA256 ` + + `Credential=${this.config.accessKeyId}/${credentialScope}, ` + + `SignedHeaders=${signedHeaders}, ` + + `Signature=${signature}` + ); + } + + private objectUrl(key: string): string { + if (this.config.endpoint) { + // Path-style: endpoint/bucket/key + const base = this.config.endpoint.replace(/\/$/, ""); + return `${base}/${this.config.bucket}/${key}`; + } + // AWS virtual-hosted style + return `https://${this.config.bucket}.s3.${this.config.region}.amazonaws.com/${key}`; + } + + /** Verify credentials and bucket access (lightweight HEAD on the bucket) */ + async testConnection(): Promise<{ ok: boolean; error?: string }> { + try { + const url = this.bucketUrl(); + const 
now = new Date(); + const datetime = isoDatetime(now); + const date = datetime.slice(0, 8); + const payloadHash = await sha256hex(""); + + const headers: Record = { + "x-amz-content-sha256": payloadHash, + "x-amz-date": datetime, + "Host": new URL(url).host, + }; + const authorization = await this.buildAuthorization( + "HEAD", new URL(url), headers, payloadHash, datetime, date, + ); + headers["Authorization"] = authorization; + delete headers["Host"]; + + const response = await fetch(url, { method: "HEAD", headers }); + if (response.ok || response.status === 403) { + // 403 = bucket exists but no ListBucket permission — credentials are valid + return { ok: true }; + } + return { ok: false, error: `HTTP ${response.status}: ${response.statusText}` }; + } catch (err) { + return { ok: false, error: err instanceof Error ? err.message : String(err) }; + } + } + + private bucketUrl(): string { + if (this.config.endpoint) { + const base = this.config.endpoint.replace(/\/$/, ""); + return `${base}/${this.config.bucket}`; + } + return `https://${this.config.bucket}.s3.${this.config.region}.amazonaws.com`; + } +} + +// ---- Crypto helpers ---- + +function buf2hex(buffer: ArrayBuffer): string { + return Array.from(new Uint8Array(buffer)) + .map((b) => b.toString(16).padStart(2, "0")) + .join(""); +} + +async function sha256hex(data: string | ArrayBuffer): Promise { + const buf: BufferSource = + typeof data === "string" ? 
new TextEncoder().encode(data) : data; + return buf2hex(await crypto.subtle.digest("SHA-256", buf)); +} + +async function hmacSha256(key: BufferSource, data: string): Promise { + const cryptoKey = await crypto.subtle.importKey( + "raw", + key, + { name: "HMAC", hash: "SHA-256" }, + false, + ["sign"], + ); + return crypto.subtle.sign("HMAC", cryptoKey, new TextEncoder().encode(data)); +} + +async function deriveSigningKey( + secretKey: string, + date: string, + region: string, + service: string, +): Promise { + const kDate = await hmacSha256(new TextEncoder().encode(`AWS4${secretKey}`), date); + const kRegion = await hmacSha256(kDate, region); + const kService = await hmacSha256(kRegion, service); + return hmacSha256(kService, "aws4_request"); +} + +function isoDatetime(d: Date): string { + return d.toISOString().replace(/[-:]/g, "").slice(0, 15) + "Z"; +} diff --git a/src/core/cloud/upload-manager.ts b/src/core/cloud/upload-manager.ts index e168b13..564b3e5 100644 --- a/src/core/cloud/upload-manager.ts +++ b/src/core/cloud/upload-manager.ts @@ -1,86 +1,162 @@ /** - * Cloud upload orchestration + * Cloud upload orchestration — Google Drive + S3. + * + * Designed to be called from the service worker BEFORE blob URL revocation + * (i.e. inside the onBlobReady callback passed to saveBlobUrlToFile). 
*/ -import { GoogleDriveClient, UploadResult } from "./google-drive"; -import { DownloadState, DownloadStage } from "../types"; +import { GoogleDriveClient } from "./google-drive"; +import { S3Client } from "./s3-client"; +import { DownloadState, DownloadStage, StorageConfig } from "../types"; import { UploadError } from "../utils/errors"; import { logger } from "../utils/logger"; -import { StorageConfig } from "../types"; + +export interface CloudLinks { + googleDrive?: string; // webViewLink + s3?: string; // object URL +} export interface UploadManagerOptions { - config?: StorageConfig; - onProgress?: (state: DownloadState) => void; + config: StorageConfig; + onProgress?: (uploadedBytes: number, totalBytes: number) => void; + onStateUpdate?: (state: DownloadState) => Promise; } export class UploadManager { - private googleDrive?: GoogleDriveClient; - private onProgress?: (state: DownloadState) => void; + private readonly googleDrive?: GoogleDriveClient; + private readonly s3?: S3Client; + private readonly onProgress?: (uploaded: number, total: number) => void; + private readonly onStateUpdate?: (state: DownloadState) => Promise; + + constructor(options: UploadManagerOptions) { + const { config } = options; + this.onProgress = options.onProgress; + this.onStateUpdate = options.onStateUpdate; - constructor(options: UploadManagerOptions = {}) { - if (options.config?.googleDrive?.enabled) { + if (config.googleDrive?.enabled) { this.googleDrive = new GoogleDriveClient({ - targetFolderId: options.config.googleDrive.targetFolderId, - createFolderIfNotExists: - options.config.googleDrive.createFolderIfNotExists, - folderName: options.config.googleDrive.folderName, + targetFolderId: config.googleDrive.targetFolderId, + createFolderIfNotExists: config.googleDrive.createFolderIfNotExists, + folderName: config.googleDrive.folderName, }); } - this.onProgress = options.onProgress; + if ( + config.s3?.enabled && + config.s3.bucket && + config.s3.region && + 
config.s3.accessKeyId && + config.s3.secretAccessKey + ) { + this.s3 = new S3Client({ + bucket: config.s3.bucket, + region: config.s3.region, + accessKeyId: config.s3.accessKeyId, + secretAccessKey: config.s3.secretAccessKey, + endpoint: config.s3.endpoint, + prefix: config.s3.prefix, + }); + } } /** - * Upload file to configured cloud storage + * Fetch the blob from a blob URL and upload to all configured providers. + * This must be called BEFORE the blob URL is revoked. + * + * Updates downloadState.progress.stage to UPLOADING and writes progress + * via onStateUpdate during the upload. */ - async uploadFile( - blob: Blob, + async uploadFromBlobUrl( + blobUrl: string, filename: string, - downloadState?: DownloadState, - ): Promise { - if (!this.googleDrive) { - logger.warn("Google Drive not configured"); - return null; + downloadState: DownloadState, + ): Promise { + if (!this.isConfigured()) { + return {}; } - try { - if (downloadState) { - downloadState.progress.stage = DownloadStage.UPLOADING; - downloadState.progress.message = "Uploading to Google Drive..."; - this.onProgress?.(downloadState); - } - - const result = await this.googleDrive.uploadFile(blob, filename); - - if (downloadState) { - downloadState.cloudId = result.fileId; - downloadState.progress.stage = DownloadStage.COMPLETED; - downloadState.progress.message = "Upload completed"; - this.onProgress?.(downloadState); - } - - logger.info(`File uploaded successfully: ${result.fileId}`); - return result; - } catch (error) { - logger.error("Upload failed:", error); - - if (downloadState) { - downloadState.progress.stage = DownloadStage.FAILED; - downloadState.progress.error = - error instanceof Error ? error.message : String(error); - this.onProgress?.(downloadState); - } - - throw error instanceof UploadError - ? 
error - : new UploadError(`Upload failed: ${error}`); + // Fetch the blob while it's still alive + const response = await fetch(blobUrl); + if (!response.ok) { + throw new UploadError(`Failed to read blob for upload: ${response.statusText}`); } + const blob = await response.blob(); + + return this.uploadBlob(blob, filename, downloadState); } /** - * Check if upload is configured + * Upload an already-fetched blob to all configured providers. + * Both Drive and S3 are attempted independently — one failing does not + * prevent the other. */ + async uploadBlob( + blob: Blob, + filename: string, + downloadState: DownloadState, + ): Promise { + const links: CloudLinks = {}; + + // Notify UPLOADING stage + downloadState.progress.stage = DownloadStage.UPLOADING; + downloadState.progress.message = "Uploading to cloud..."; + downloadState.progress.percentage = 0; + await this.onStateUpdate?.(downloadState); + + const totalBytes = blob.size; + // Track combined progress from both providers (simple: use whichever fires) + const onProgress = (uploaded: number, total: number) => { + this.onProgress?.(uploaded, total); + }; + + // Run both providers concurrently; failures are independent + const [driveResult, s3Result] = await Promise.allSettled([ + this.googleDrive + ? this.uploadToDrive(blob, filename, onProgress) + : Promise.resolve(null), + this.s3 + ? this.s3.uploadBlob(blob, filename, onProgress) + : Promise.resolve(null), + ]); + + if (driveResult.status === "fulfilled" && driveResult.value) { + links.googleDrive = driveResult.value.webViewLink ?? 
driveResult.value.fileId; + logger.info(`Drive upload complete: ${links.googleDrive}`); + } else if (driveResult.status === "rejected") { + logger.error("Drive upload failed:", driveResult.reason); + } + + if (s3Result.status === "fulfilled" && s3Result.value) { + links.s3 = s3Result.value.url; + logger.info(`S3 upload complete: ${links.s3}`); + } else if (s3Result.status === "rejected") { + logger.error("S3 upload failed:", s3Result.reason); + } + + const bothFailed = + driveResult.status === "rejected" && s3Result.status === "rejected"; + if (bothFailed) { + const driveErr = + driveResult.status === "rejected" ? String(driveResult.reason) : ""; + const s3Err = s3Result.status === "rejected" ? String(s3Result.reason) : ""; + throw new UploadError(`All uploads failed. Drive: ${driveErr} S3: ${s3Err}`); + } + + return links; + } + + private async uploadToDrive( + blob: Blob, + filename: string, + onProgress: (uploaded: number, total: number) => void, + ) { + if (!this.googleDrive) return null; + const result = await this.googleDrive.uploadFile(blob, filename); + onProgress(blob.size, blob.size); + return result; + } + isConfigured(): boolean { - return this.googleDrive !== undefined; + return this.googleDrive !== undefined || this.s3 !== undefined; } } diff --git a/src/core/downloader/base-playlist-handler.ts b/src/core/downloader/base-playlist-handler.ts index 94d8393..6fc9076 100644 --- a/src/core/downloader/base-playlist-handler.ts +++ b/src/core/downloader/base-playlist-handler.ts @@ -48,6 +48,9 @@ export interface BasePlaylistHandlerOptions { minPollIntervalMs?: number; maxPollIntervalMs?: number; pollFraction?: number; + /** Called after Chrome saves the file to disk but BEFORE the blob URL is revoked. + * Receives the live blob URL and the download state ID. 
*/ + onBlobReady?: (blobUrl: string, stateId: string) => Promise; } export abstract class BasePlaylistHandler { @@ -63,6 +66,7 @@ export abstract class BasePlaylistHandler { protected readonly minPollIntervalMs: number; protected readonly maxPollIntervalMs: number; protected readonly pollFraction: number; + protected readonly onBlobReady?: (blobUrl: string, stateId: string) => Promise; protected downloadId: string = ""; protected bytesDownloaded: number = 0; @@ -90,6 +94,7 @@ export abstract class BasePlaylistHandler { this.minPollIntervalMs = options.minPollIntervalMs ?? DEFAULT_MIN_POLL_MS; this.maxPollIntervalMs = options.maxPollIntervalMs ?? DEFAULT_MAX_POLL_MS; this.pollFraction = options.pollFraction ?? DEFAULT_POLL_FRACTION; + this.onBlobReady = options.onBlobReady; } // ---- Shared utility methods ---- @@ -504,6 +509,7 @@ export abstract class BasePlaylistHandler { blobUrl, `${baseFileName}.mp4`, stateId, + this.onBlobReady ? (url) => this.onBlobReady!(url, stateId) : undefined, ); const completionMessage = warning diff --git a/src/core/downloader/base-recording-handler.ts b/src/core/downloader/base-recording-handler.ts index 8e5dea9..6d00a8a 100644 --- a/src/core/downloader/base-recording-handler.ts +++ b/src/core/downloader/base-recording-handler.ts @@ -86,6 +86,7 @@ export abstract class BaseRecordingHandler extends BasePlaylistHandler { blobUrl, `${baseFileName}.mp4`, stateId, + this.onBlobReady ? (url) => this.onBlobReady!(url, stateId) : undefined, ); const completionMessage = warning diff --git a/src/core/downloader/dash/dash-download-handler.ts b/src/core/downloader/dash/dash-download-handler.ts index edebe83..128106b 100644 --- a/src/core/downloader/dash/dash-download-handler.ts +++ b/src/core/downloader/dash/dash-download-handler.ts @@ -148,6 +148,7 @@ export class DashDownloadHandler extends BasePlaylistHandler { blobUrl, `${baseFileName}.mp4`, stateId, + this.onBlobReady ? 
(url) => this.onBlobReady!(url, stateId) : undefined, ); const completionMessage = warning diff --git a/src/core/downloader/download-manager.ts b/src/core/downloader/download-manager.ts index 2d8dca0..e369519 100644 --- a/src/core/downloader/download-manager.ts +++ b/src/core/downloader/download-manager.ts @@ -61,6 +61,10 @@ export interface DownloadManagerOptions { /** Fraction of #EXT-X-TARGETDURATION used to compute HLS poll cadence (default: 0.5) */ pollFraction?: number; + + /** Called after Chrome saves the file but BEFORE the blob URL is revoked. + * Use to upload the blob to cloud storage while it's still alive. */ + onBlobReady?: (blobUrl: string, stateId: string) => Promise; } /** @@ -99,6 +103,7 @@ export class DownloadManager { minPollIntervalMs: options.minPollIntervalMs, maxPollIntervalMs: options.maxPollIntervalMs, pollFraction: options.pollFraction, + onBlobReady: options.onBlobReady, }; // Initialize direct download handler diff --git a/src/core/downloader/hls/hls-download-handler.ts b/src/core/downloader/hls/hls-download-handler.ts index 36dd958..ce0e425 100644 --- a/src/core/downloader/hls/hls-download-handler.ts +++ b/src/core/downloader/hls/hls-download-handler.ts @@ -230,6 +230,7 @@ export class HlsDownloadHandler extends BasePlaylistHandler { blobUrl, finalFilename, stateId, + this.onBlobReady ? (url) => this.onBlobReady!(url, stateId) : undefined, ); const completionMessage = warning diff --git a/src/core/downloader/m3u8/m3u8-download-handler.ts b/src/core/downloader/m3u8/m3u8-download-handler.ts index 8bf6bbe..2afe217 100644 --- a/src/core/downloader/m3u8/m3u8-download-handler.ts +++ b/src/core/downloader/m3u8/m3u8-download-handler.ts @@ -99,6 +99,7 @@ export class M3u8DownloadHandler extends BasePlaylistHandler { blobUrl, `${baseFileName}.mp4`, stateId, + this.onBlobReady ? 
(url) => this.onBlobReady!(url, stateId) : undefined, ); const completionMessage = warning diff --git a/src/core/storage/settings.ts b/src/core/storage/settings.ts index 0d25cc3..a035812 100644 --- a/src/core/storage/settings.ts +++ b/src/core/storage/settings.ts @@ -32,6 +32,7 @@ export interface AppSettings { googleDrive: { enabled: boolean; + autoUpload: boolean; targetFolderId?: string; createFolderIfNotExists: boolean; folderName: string; @@ -39,6 +40,7 @@ export interface AppSettings { s3: { enabled: boolean; + autoUpload: boolean; bucket?: string; region?: string; endpoint?: string; @@ -79,6 +81,7 @@ export async function loadSettings(): Promise { googleDrive: { enabled: raw?.googleDrive?.enabled ?? false, + autoUpload: raw?.googleDrive?.autoUpload ?? false, targetFolderId: raw?.googleDrive?.targetFolderId, createFolderIfNotExists: raw?.googleDrive?.createFolderIfNotExists ?? false, folderName: raw?.googleDrive?.folderName ?? DEFAULT_GOOGLE_DRIVE_FOLDER_NAME, @@ -86,6 +89,7 @@ export async function loadSettings(): Promise { s3: { enabled: raw?.s3?.enabled ?? false, + autoUpload: raw?.s3?.autoUpload ?? 
false, bucket: raw?.s3?.bucket, region: raw?.s3?.region, endpoint: raw?.s3?.endpoint, diff --git a/src/core/types/index.ts b/src/core/types/index.ts index 60e3645..0d75bc7 100644 --- a/src/core/types/index.ts +++ b/src/core/types/index.ts @@ -74,6 +74,13 @@ export interface DownloadState { progress: DownloadProgress; localPath?: string; cloudId?: string; + cloudLinks?: { + googleDrive?: string; // webViewLink + s3?: string; // public URL or s3:// URI + }; + uploadToDrive?: boolean; // per-download override (defaults to AppSettings.googleDrive.autoUpload) + uploadToS3?: boolean; // per-download override (defaults to AppSettings.s3.autoUpload) + uploadError?: string; // last upload failure message isManual?: boolean; // Indicates if download was started from manual/manifest tab chromeDownloadId?: number; // Chrome downloads API ID for reliable cancellation (only set when Chrome API is used) createdAt: number; @@ -83,6 +90,7 @@ export interface DownloadState { export interface StorageConfig { googleDrive?: { enabled: boolean; + autoUpload?: boolean; // Auto-upload every completed download to Google Drive targetFolderId?: string; createFolderIfNotExists?: boolean; folderName?: string; @@ -92,6 +100,7 @@ export interface StorageConfig { historyEnabled?: boolean; // Whether to persist completed/failed/cancelled downloads (default: true) s3?: { enabled: boolean; + autoUpload?: boolean; // Auto-upload every completed download to S3 bucket?: string; region?: string; endpoint?: string; // For S3-compatible providers (Cloudflare R2, Backblaze, etc.) diff --git a/src/core/utils/blob-utils.ts b/src/core/utils/blob-utils.ts index e35a9ff..2ceea3d 100644 --- a/src/core/utils/blob-utils.ts +++ b/src/core/utils/blob-utils.ts @@ -32,6 +32,9 @@ export async function saveBlobUrlToFile( blobUrl: string, filename: string, stateId: string, + /** Called after Chrome saves the file to disk but BEFORE the blob URL is revoked. 
+ * Use this hook to upload the blob to cloud storage while it's still live. */ + onBlobReady?: (blobUrl: string) => Promise, ): Promise { try { return await new Promise((resolve, reject) => { @@ -74,12 +77,24 @@ export async function saveBlobUrlToFile( if (delta.state.current === "complete") { clearTimeout(timeoutId); chrome.downloads.onChanged.removeListener(onChange); - revokeBlobUrl(blobUrl); - // Retrieve filename from the completed download - chrome.downloads.search({ id: downloadId }, (results) => { - const item = results?.[0]; - resolve(item?.filename || filename); - }); + + // Upload to cloud (if configured) before revoking the blob URL + const finish = async () => { + if (onBlobReady) { + try { + await onBlobReady(blobUrl); + } catch (uploadErr) { + // Upload failure is non-fatal — the local file was saved successfully + logger.warn("onBlobReady callback failed:", uploadErr); + } + } + revokeBlobUrl(blobUrl); + chrome.downloads.search({ id: downloadId }, (results) => { + const item = results?.[0]; + resolve(item?.filename || filename); + }); + }; + finish(); } else if (delta.state.current === "interrupted") { clearTimeout(timeoutId); chrome.downloads.onChanged.removeListener(onChange); diff --git a/src/options/options.html b/src/options/options.html index d160742..f1d9bc0 100644 --- a/src/options/options.html +++ b/src/options/options.html @@ -437,6 +437,33 @@ display: block; } + .advanced-tab { + padding: 6px 14px; + border: none; + border-radius: calc(var(--radius-sm) - 2px); + font-family: inherit; + font-size: 13px; + font-weight: 500; + cursor: pointer; + background: transparent; + color: var(--text-secondary); + transition: all 0.15s; + } + + .advanced-tab.active { + background: var(--surface-1); + color: var(--text-primary); + box-shadow: 0 1px 3px rgba(0, 0, 0, 0.2); + } + + .advanced-panel { + display: none; + } + + .advanced-panel.active { + display: block; + } + .coming-soon-note { display: flex; align-items: flex-start; @@ -1007,10 +1034,18 @@

Cloud Providers

Enable Google Drive uploads -
Automatically upload downloaded videos to Google Drive.
+
Connect Google Drive to upload downloaded videos.