diff --git a/src/content/__tests__/buildRenderFfmpegArgs.test.ts b/src/content/__tests__/buildRenderFfmpegArgs.test.ts new file mode 100644 index 0000000..9d4df02 --- /dev/null +++ b/src/content/__tests__/buildRenderFfmpegArgs.test.ts @@ -0,0 +1,133 @@ +import { describe, it, expect } from "vitest"; +import { buildRenderFfmpegArgs } from "../buildRenderFfmpegArgs"; + +describe("buildRenderFfmpegArgs", () => { + it("builds trim args with -ss and -t", () => { + const args = buildRenderFfmpegArgs("in.mp4", "out.mp4", [ + { type: "trim", start: 5, duration: 10 }, + ]); + expect(args).toContain("-ss"); + expect(args).toContain("5"); + expect(args).toContain("-t"); + expect(args).toContain("10"); + }); + + it("builds crop filter for aspect ratio", () => { + const args = buildRenderFfmpegArgs("in.mp4", "out.mp4", [ + { type: "crop", aspect: "9:16" }, + ]); + const vfIndex = args.indexOf("-vf"); + expect(vfIndex).toBeGreaterThan(-1); + expect(args[vfIndex + 1]).toContain("crop="); + }); + + it("builds crop 9:16 as portrait crop (narrows width from source)", () => { + const args = buildRenderFfmpegArgs("in.mp4", "out.mp4", [ + { type: "crop", aspect: "9:16" }, + ]); + const vf = args[args.indexOf("-vf") + 1]; + expect(vf).toContain("crop=ih*9/16:ih"); + }); + + it("builds crop 16:9 as landscape crop (narrows height from source)", () => { + const args = buildRenderFfmpegArgs("in.mp4", "out.mp4", [ + { type: "crop", aspect: "16:9" }, + ]); + const vf = args[args.indexOf("-vf") + 1]; + expect(vf).toContain("crop=iw:iw*9/16"); + }); + + it("skips crop with malformed aspect string", () => { + const args = buildRenderFfmpegArgs("in.mp4", "out.mp4", [ + { type: "crop", aspect: "invalid" }, + ]); + expect(args).not.toContain("-vf"); + }); + + it("builds resize filter with scale", () => { + const args = buildRenderFfmpegArgs("in.mp4", "out.mp4", [ + { type: "resize", width: 1080, height: 1920 }, + ]); + const vf = args[args.indexOf("-vf") + 1]; + 
expect(vf).toContain("scale=1080:1920"); + }); + + it("builds overlay_text with drawtext", () => { + const args = buildRenderFfmpegArgs("in.mp4", "out.mp4", [ + { + type: "overlay_text", + content: "hello world", + color: "white", + stroke_color: "black", + max_font_size: 42, + position: "bottom" as const, + }, + ]); + const vf = args[args.indexOf("-vf") + 1]; + expect(vf).toContain("drawtext="); + expect(vf).toContain("fontsize=42"); + expect(vf).toContain("fontcolor=white"); + expect(vf).toContain("bordercolor=black"); + expect(vf).toContain("y=h-th-120"); + }); + + it("positions overlay_text at top", () => { + const args = buildRenderFfmpegArgs("in.mp4", "out.mp4", [ + { type: "overlay_text", content: "top text", color: "white", stroke_color: "black", max_font_size: 42, position: "top" as const }, + ]); + const vf = args[args.indexOf("-vf") + 1]; + expect(vf).toContain("y=180"); + }); + + it("positions overlay_text at center", () => { + const args = buildRenderFfmpegArgs("in.mp4", "out.mp4", [ + { type: "overlay_text", content: "center text", color: "white", stroke_color: "black", max_font_size: 42, position: "center" as const }, + ]); + const vf = args[args.indexOf("-vf") + 1]; + expect(vf).toContain("y=(h-th)/2"); + }); + + it("strips emoji from overlay_text content", () => { + const args = buildRenderFfmpegArgs("in.mp4", "out.mp4", [ + { type: "overlay_text", content: "hello 🔥 world", color: "white", stroke_color: "black", max_font_size: 42, position: "bottom" as const }, + ]); + const vf = args[args.indexOf("-vf") + 1]; + expect(vf).not.toContain("🔥"); + }); + + it("skips overlay_text when content is missing (template mode)", () => { + const args = buildRenderFfmpegArgs("in.mp4", "out.mp4", [ + { type: "overlay_text", color: "white", stroke_color: "black", max_font_size: 42, position: "bottom" as const }, + ]); + expect(args).not.toContain("-vf"); + }); + + it("chains multiple video operations in order", () => { + const args = buildRenderFfmpegArgs("in.mp4", 
"out.mp4", [ + { type: "crop", aspect: "9:16" }, + { type: "overlay_text", content: "caption", color: "white", stroke_color: "black", max_font_size: 42, position: "bottom" as const }, + ]); + const vf = args[args.indexOf("-vf") + 1]; + expect(vf).toContain("crop="); + expect(vf).toContain(","); + expect(vf).toContain("drawtext="); + }); + + it("only accepts 3 arguments (no audioOnly or fallback params)", () => { + // TypeScript compile check — function should work with exactly 3 args + expect(buildRenderFfmpegArgs.length).toBe(3); + }); + + it("always includes video output encoding flags", () => { + const args = buildRenderFfmpegArgs("in.mp4", "out.mp4", [ + { type: "trim", start: 0, duration: 5 }, + ]); + expect(args).toContain("-c:v"); + expect(args).toContain("libx264"); + expect(args).toContain("-c:a"); + expect(args).toContain("aac"); + expect(args).toContain("-pix_fmt"); + expect(args).toContain("yuv420p"); + expect(args[args.length - 1]).toBe("out.mp4"); + }); +}); diff --git a/src/content/__tests__/runFfmpeg.test.ts b/src/content/__tests__/runFfmpeg.test.ts new file mode 100644 index 0000000..f402688 --- /dev/null +++ b/src/content/__tests__/runFfmpeg.test.ts @@ -0,0 +1,25 @@ +import { describe, it, expect, vi } from "vitest"; + +vi.mock("node:child_process", () => ({ + execFile: vi.fn((_cmd, _args, options, cb) => { + // Capture the options passed to execFile + if (typeof options === "function") { + cb = options; + options = {}; + } + // Store options for assertion + (globalThis as Record).__lastExecFileOptions = options; + cb(null, "", ""); + return {}; + }), +})); + +describe("runFfmpeg", () => { + it("sets maxBuffer to at least 10MB to handle ffmpeg stderr", async () => { + const { runFfmpeg } = await import("../runFfmpeg"); + await runFfmpeg(["-version"]); + + const options = (globalThis as Record).__lastExecFileOptions as { maxBuffer?: number }; + expect(options?.maxBuffer).toBeGreaterThanOrEqual(10 * 1024 * 1024); + }); +}); diff --git 
a/src/content/buildCropFilter.ts b/src/content/buildCropFilter.ts new file mode 100644 index 0000000..218108e --- /dev/null +++ b/src/content/buildCropFilter.ts @@ -0,0 +1,23 @@ +/** + * Build the ffmpeg crop= filter from aspect ratio or explicit dimensions. + * + * For aspect ratio: calculates which dimension to constrain. + * - 9:16 (portrait): keep full height, narrow width → crop=ih*9/16:ih + * - 16:9 (landscape): keep full width, narrow height → crop=iw:iw*9/16 + * + * @param op - Crop operation with aspect, width, or height. + * @returns The ffmpeg crop filter string, or null if the input is invalid. + */ +export function buildCropFilter(op: { aspect?: string; width?: number; height?: number }): string | null { + if (op.aspect) { + const parts = op.aspect.split(":"); + if (parts.length !== 2) return null; + const [w, h] = parts.map(Number); + if (!w || !h || isNaN(w) || isNaN(h)) return null; + return w >= h ? `crop=iw:iw*${h}/${w}` : `crop=ih*${w}/${h}:ih`; + } + if (op.width || op.height) { + return `crop=${op.width ?? -1}:${op.height ?? -1}`; + } + return null; +} diff --git a/src/content/buildOverlayTextFilter.ts b/src/content/buildOverlayTextFilter.ts new file mode 100644 index 0000000..971d689 --- /dev/null +++ b/src/content/buildOverlayTextFilter.ts @@ -0,0 +1,36 @@ +import { escapeDrawtext } from "./escapeDrawtext"; +import { stripEmoji } from "./stripEmoji"; + +/** + * Build the ffmpeg drawtext= filter for text overlay. + * + * @param op - Overlay text operation with content, color, position, etc. + * @returns The ffmpeg drawtext filter string. 
+ */ +export function buildOverlayTextFilter(op: { + content: string; + color: string; + stroke_color: string; + max_font_size: number; + position: "top" | "center" | "bottom"; +}): string { + const cleanText = stripEmoji(op.content); + const escaped = escapeDrawtext(cleanText); + const safeColor = op.color.replace(/:/g, "\\\\:"); + const safeStrokeColor = op.stroke_color.replace(/:/g, "\\\\:"); + const borderWidth = Math.max(2, Math.round(op.max_font_size / 14)); + const yExpr = + op.position === "top" ? "y=180" : + op.position === "center" ? "y=(h-th)/2" : + "y=h-th-120"; + + return [ + `drawtext=text='${escaped}'`, + `fontsize=${op.max_font_size}`, + `fontcolor=${safeColor}`, + `borderw=${borderWidth}`, + `bordercolor=${safeStrokeColor}`, + "x=(w-tw)/2", + yExpr, + ].join(":"); +} diff --git a/src/content/buildRenderFfmpegArgs.ts b/src/content/buildRenderFfmpegArgs.ts new file mode 100644 index 0000000..0009851 --- /dev/null +++ b/src/content/buildRenderFfmpegArgs.ts @@ -0,0 +1,62 @@ +import type { FfmpegEditPayload } from "../schemas/ffmpegEditSchema"; +import { buildCropFilter } from "./buildCropFilter"; +import { buildOverlayTextFilter } from "./buildOverlayTextFilter"; + +type Operations = FfmpegEditPayload["operations"]; + +/** + * Builds ffmpeg arguments from a list of video edit operations. + * + * Each operation maps to ffmpeg flags: + * - trim → -ss / -t + * - crop → crop= filter + * - resize → scale= filter + * - overlay_text → drawtext= filter + * + * @param inputPath - Path to the input video file. + * @param outputPath - Path for the output file. + * @param operations - Array of edit operations to apply in order. + * @returns Array of ffmpeg CLI arguments. 
+ */
+export function buildRenderFfmpegArgs(
+  inputPath: string,
+  outputPath: string,
+  operations: Operations,
+): string[] {
+  const args = ["-y", "-i", inputPath];
+  const videoFilters: string[] = [];
+
+  for (const op of operations) {
+    switch (op.type) {
+      case "trim":
+        args.splice(1, 0, "-ss", String(op.start), "-t", String(op.duration));
+        break;
+      case "crop": {
+        const filter = buildCropFilter(op);
+        if (filter) videoFilters.push(filter);
+        break;
+      }
+      case "resize":
+        videoFilters.push(`scale=${op.width ?? -1}:${op.height ?? -1}`);
+        break;
+      case "overlay_text":
+        if (op.content) videoFilters.push(buildOverlayTextFilter(op as Parameters<typeof buildOverlayTextFilter>[0]));
+        break;
+    }
+  }
+
+  if (videoFilters.length > 0) {
+    args.push("-vf", videoFilters.join(","));
+  }
+
+  args.push(
+    "-c:v", "libx264",
+    "-c:a", "aac",
+    "-pix_fmt", "yuv420p",
+    "-movflags", "+faststart",
+    "-shortest",
+    outputPath,
+  );
+
+  return args;
+}
diff --git a/src/content/downloadMediaToFile.ts b/src/content/downloadMediaToFile.ts
new file mode 100644
index 0000000..a246353
--- /dev/null
+++ b/src/content/downloadMediaToFile.ts
@@ -0,0 +1,14 @@
+import { writeFile } from "node:fs/promises";
+import { downloadImageBuffer } from "./downloadImageBuffer";
+
+/**
+ * Download media from a URL and write it to a local file.
+ * Reuses downloadImageBuffer for the fetch + error handling.
+ *
+ * @param url - Public URL of the media to download.
+ * @param filePath - Local path to write the downloaded file.
+ */
+export async function downloadMediaToFile(url: string, filePath: string): Promise<void> {
+  const { buffer } = await downloadImageBuffer(url);
+  await writeFile(filePath, buffer);
+}
diff --git a/src/content/falServer.ts b/src/content/falServer.ts
new file mode 100644
index 0000000..73174af
--- /dev/null
+++ b/src/content/falServer.ts
@@ -0,0 +1,13 @@
+import { fal as falClient } from "@fal-ai/client";
+
+const FAL_KEY = process.env.FAL_KEY as string;
+
+if (!FAL_KEY) {
+  throw new Error("FAL_KEY must be set");
+}
+
+falClient.config({ credentials: FAL_KEY });
+
+const fal = falClient;
+
+export default fal;
diff --git a/src/content/renderFinalVideo.ts b/src/content/renderFinalVideo.ts
index 79a7464..063ad47 100644
--- a/src/content/renderFinalVideo.ts
+++ b/src/content/renderFinalVideo.ts
@@ -1,17 +1,15 @@
-import { execFile } from "node:child_process";
 import { randomUUID } from "node:crypto";
-import { readFile, writeFile, unlink, mkdir } from "node:fs/promises";
+import { writeFile, unlink, mkdir } from "node:fs/promises";
 import { tmpdir } from "node:os";
 import { join } from "node:path";
-import { promisify } from "node:util";
 import { logStep } from "../sandboxes/logStep";
-import { fal } from "@fal-ai/client";
 import { buildFfmpegArgs } from "./buildFfmpegArgs";
 import { calculateCaptionLayout } from "./calculateCaptionLayout";
 import { stripEmoji } from "./stripEmoji";
 import { downloadOverlayImages } from "./downloadOverlayImages";
-
-const execFileAsync = promisify(execFile);
+import { downloadMediaToFile } from "./downloadMediaToFile";
+import { runFfmpeg } from "./runFfmpeg";
+import { uploadToFalStorage } from "./uploadToFalStorage";
 
 export interface RenderFinalVideoInput {
   videoUrl: string;
@@ -46,11 +44,7 @@ export async function renderFinalVideo(
   try {
     logStep("Downloading video for final render");
 
-    const videoResponse = await fetch(input.videoUrl);
-    if (!videoResponse.ok) {
-      throw new Error(`Failed to download video: ${videoResponse.status}`);
- } - await writeFile(videoPath, Buffer.from(await videoResponse.arrayBuffer())); + await downloadMediaToFile(input.videoUrl, videoPath); await writeFile(audioPath, input.songBuffer); overlayPaths = await downloadOverlayImages(input.overlayImageUrls ?? [], tempDir); @@ -74,17 +68,13 @@ export async function renderFinalVideo( overlayCount: overlayPaths.length, }); - await execFileAsync("ffmpeg", ffmpegArgs); - - const finalBuffer = await readFile(outputPath); - const sizeBytes = finalBuffer.length; - logStep("Final video rendered, uploading to fal.ai storage", true, { sizeBytes }); + await runFfmpeg(ffmpegArgs); - const videoFile = new File([finalBuffer], "final-video.mp4", { type: "video/mp4" }); - const videoUrl = await fal.storage.upload(videoFile); - logStep("Final video uploaded to fal.ai storage", false, { videoUrl, sizeBytes }); + logStep("Final video rendered, uploading to fal.ai storage"); + const result = await uploadToFalStorage(outputPath, "final-video.mp4", "video/mp4"); + logStep("Final video uploaded to fal.ai storage", false, { videoUrl: result.url, sizeBytes: result.sizeBytes }); - return { videoUrl, mimeType: "video/mp4", sizeBytes }; + return { videoUrl: result.url, mimeType: result.mimeType, sizeBytes: result.sizeBytes }; } finally { const cleanupPaths = [videoPath, audioPath, outputPath, ...overlayPaths]; await Promise.all(cleanupPaths.map((p) => unlink(p).catch(() => undefined))); diff --git a/src/content/runFfmpeg.ts b/src/content/runFfmpeg.ts new file mode 100644 index 0000000..0da73aa --- /dev/null +++ b/src/content/runFfmpeg.ts @@ -0,0 +1,14 @@ +import { execFile } from "node:child_process"; +import { promisify } from "node:util"; + +const execFileAsync = promisify(execFile); + +/** + * Execute ffmpeg with the given arguments. + * + * @param args - Array of ffmpeg CLI arguments. + * @throws Error if ffmpeg exits with a non-zero code. 
+ */
+export async function runFfmpeg(args: string[]): Promise<void> {
+  await execFileAsync("ffmpeg", args, { maxBuffer: 10 * 1024 * 1024 });
+}
diff --git a/src/content/uploadToFalStorage.ts b/src/content/uploadToFalStorage.ts
new file mode 100644
index 0000000..4b59293
--- /dev/null
+++ b/src/content/uploadToFalStorage.ts
@@ -0,0 +1,27 @@
+import { readFile } from "node:fs/promises";
+import fal from "./falServer";
+
+export interface UploadResult {
+  url: string;
+  mimeType: string;
+  sizeBytes: number;
+}
+
+/**
+ * Read a local file and upload it to fal.ai storage.
+ *
+ * @param filePath - Local path of the file to upload.
+ * @param filename - Name for the uploaded file.
+ * @param mimeType - MIME type of the file.
+ * @returns Object with the uploaded URL, MIME type, and file size.
+ */
+export async function uploadToFalStorage(
+  filePath: string,
+  filename: string,
+  mimeType: string,
+): Promise<UploadResult> {
+  const buffer = await readFile(filePath);
+  const file = new File([buffer], filename, { type: mimeType });
+  const url = await fal.storage.upload(file);
+  return { url, mimeType, sizeBytes: buffer.length };
+}
diff --git a/src/schemas/ffmpegEditSchema.ts b/src/schemas/ffmpegEditSchema.ts
new file mode 100644
index 0000000..4cbcb36
--- /dev/null
+++ b/src/schemas/ffmpegEditSchema.ts
@@ -0,0 +1,49 @@
+import { z } from "zod";
+
+const cssColorRegex = /^[a-zA-Z]+$|^#([0-9a-fA-F]{3}|[0-9a-fA-F]{4}|[0-9a-fA-F]{6}|[0-9a-fA-F]{8})$/;
+
+export const editOperationSchema = z.union([
+  z.object({
+    type: z.literal("trim"),
+    start: z.number().nonnegative(),
+    duration: z.number().positive(),
+  }),
+  z.object({
+    type: z.literal("crop"),
+    aspect: z.string().optional(),
+    width: z.number().int().positive().optional(),
+    height: z.number().int().positive().optional(),
+  }).refine(data => data.aspect || data.width || data.height, {
+    message: "crop requires at least one of: aspect, width, height",
+  }),
+  z.object({
+    type: z.literal("resize"),
+    width:
z.number().int().positive().optional(),
+    height: z.number().int().positive().optional(),
+  }).refine(data => data.width || data.height, {
+    message: "resize requires at least one of: width, height",
+  }),
+  z.object({
+    type: z.literal("overlay_text"),
+    content: z.string().optional(),
+    font: z.string().optional(),
+    color: z.string().regex(cssColorRegex, "color must be a CSS color name or hex value").optional().default("white"),
+    stroke_color: z.string().regex(cssColorRegex, "stroke_color must be a CSS color name or hex value").optional().default("black"),
+    max_font_size: z.number().positive().optional().default(42),
+    position: z.enum(["top", "center", "bottom"]).optional().default("bottom"),
+  }),
+]);
+
+export const ffmpegEditPayloadSchema = z.object({
+  accountId: z.string().min(1, "accountId is required"),
+  video_url: z.string().url(),
+  operations: z.array(editOperationSchema),
+  output_format: z.enum(["mp4", "webm", "mov"]).default("mp4"),
+});
+
+export type FfmpegEditPayload = z.infer<typeof ffmpegEditPayloadSchema>;
+
+/** @deprecated Use ffmpegEditPayloadSchema */
+export const createRenderPayloadSchema = ffmpegEditPayloadSchema;
+/** @deprecated Use FfmpegEditPayload */
+export type CreateRenderPayload = FfmpegEditPayload;
diff --git a/src/tasks/__tests__/ffmpegEditTask.test.ts b/src/tasks/__tests__/ffmpegEditTask.test.ts
new file mode 100644
index 0000000..0f5e20c
--- /dev/null
+++ b/src/tasks/__tests__/ffmpegEditTask.test.ts
@@ -0,0 +1,196 @@
+import { describe, it, expect, vi, beforeEach } from "vitest";
+import { ffmpegEditPayloadSchema as createRenderPayloadSchema } from "../../schemas/ffmpegEditSchema";
+
+// Mock fal.ai server config
+vi.mock("../../content/falServer", () => ({
+  default: {
+    config: vi.fn(),
+    storage: { upload: vi.fn() },
+  },
+}));
+
+// Mock trigger.dev
+vi.mock("@trigger.dev/sdk/v3", () => ({
+  schemaTask: vi.fn((config) => config),
+  tags: { add: vi.fn() },
+}));
+
+// Mock logStep
+vi.mock("../../sandboxes/logStep", () => ({
+  logStep: vi.fn(),
+})); + +describe("createRenderPayloadSchema", () => { + it("requires video_url", () => { + const result = createRenderPayloadSchema.safeParse({ + accountId: "acc-123", + operations: [{ type: "trim", start: 0, duration: 5 }], + }); + expect(result.success).toBe(false); + }); + + it("validates a payload with video_url and trim operation", () => { + const result = createRenderPayloadSchema.safeParse({ + accountId: "acc-123", + video_url: "https://example.com/video.mp4", + operations: [{ type: "trim", start: 0, duration: 5 }], + }); + expect(result.success).toBe(true); + }); + + it("validates a payload with crop operation", () => { + const result = createRenderPayloadSchema.safeParse({ + accountId: "acc-123", + video_url: "https://example.com/video.mp4", + operations: [{ type: "crop", aspect: "9:16" }], + }); + expect(result.success).toBe(true); + }); + + it("validates a payload with overlay_text operation and defaults", () => { + const result = createRenderPayloadSchema.safeParse({ + accountId: "acc-123", + video_url: "https://example.com/video.mp4", + operations: [{ type: "overlay_text", content: "hello world" }], + }); + expect(result.success).toBe(true); + if (result.success) { + const op = result.data.operations[0]; + if (op.type === "overlay_text") { + expect(op.color).toBe("white"); + expect(op.stroke_color).toBe("black"); + expect(op.max_font_size).toBe(42); + expect(op.position).toBe("bottom"); + } + } + }); + + it("validates multiple video operations", () => { + const result = createRenderPayloadSchema.safeParse({ + accountId: "acc-123", + video_url: "https://example.com/video.mp4", + operations: [ + { type: "crop", aspect: "9:16" }, + { type: "overlay_text", content: "caption text" }, + ], + }); + expect(result.success).toBe(true); + if (result.success) { + expect(result.data.operations).toHaveLength(2); + } + }); + + it("defaults output_format to mp4", () => { + const result = createRenderPayloadSchema.safeParse({ + accountId: "acc-123", + video_url: 
"https://example.com/video.mp4", + operations: [{ type: "trim", start: 0, duration: 5 }], + }); + expect(result.success).toBe(true); + if (result.success) { + expect(result.data.output_format).toBe("mp4"); + } + }); + + it("rejects missing accountId", () => { + const result = createRenderPayloadSchema.safeParse({ + video_url: "https://example.com/video.mp4", + operations: [{ type: "trim", start: 0, duration: 5 }], + }); + expect(result.success).toBe(false); + }); + + it("rejects mux_audio operation (removed)", () => { + const result = createRenderPayloadSchema.safeParse({ + accountId: "acc-123", + video_url: "https://example.com/video.mp4", + operations: [{ type: "mux_audio", audio_url: "https://example.com/a.mp3" }], + }); + expect(result.success).toBe(false); + }); + + it("does not accept audio_url param", () => { + const result = createRenderPayloadSchema.safeParse({ + accountId: "acc-123", + video_url: "https://example.com/video.mp4", + audio_url: "https://example.com/audio.mp3", + operations: [{ type: "trim", start: 0, duration: 5 }], + }); + if (result.success) { + expect(result.data).not.toHaveProperty("audio_url"); + } + }); + + it("rejects crop with no dimensions or aspect", () => { + const result = createRenderPayloadSchema.safeParse({ + accountId: "acc-123", + video_url: "https://example.com/video.mp4", + operations: [{ type: "crop" }], + }); + expect(result.success).toBe(false); + }); + + it("rejects resize with no dimensions", () => { + const result = createRenderPayloadSchema.safeParse({ + accountId: "acc-123", + video_url: "https://example.com/video.mp4", + operations: [{ type: "resize" }], + }); + expect(result.success).toBe(false); + }); + + it("rejects color values with special characters", () => { + const result = createRenderPayloadSchema.safeParse({ + accountId: "acc-123", + video_url: "https://example.com/video.mp4", + operations: [{ type: "overlay_text", content: "test", color: "white:enable=0" }], + }); + expect(result.success).toBe(false); 
+ }); + + // Fix #4: color regex should reject 5/7-digit hex + it("rejects invalid 5-digit hex color", () => { + const result = createRenderPayloadSchema.safeParse({ + accountId: "acc-123", + video_url: "https://example.com/video.mp4", + operations: [{ type: "overlay_text", content: "test", color: "#12345" }], + }); + expect(result.success).toBe(false); + }); + + it("accepts valid 6-digit hex color", () => { + const result = createRenderPayloadSchema.safeParse({ + accountId: "acc-123", + video_url: "https://example.com/video.mp4", + operations: [{ type: "overlay_text", content: "test", color: "#FF5500" }], + }); + expect(result.success).toBe(true); + }); + + it("rejects invalid operation type", () => { + const result = createRenderPayloadSchema.safeParse({ + accountId: "acc-123", + video_url: "https://example.com/video.mp4", + operations: [{ type: "invalid_op" }], + }); + expect(result.success).toBe(false); + }); +}); + +describe("ffmpegEditTask", () => { + beforeEach(() => { + vi.clearAllMocks(); + process.env.FAL_KEY = "test-key"; + }); + + it("exports a task with id ffmpeg-edit", async () => { + const { ffmpegEditTask } = await import("../ffmpegEditTask"); + expect(ffmpegEditTask.id).toBe("ffmpeg-edit"); + }); + + it("has medium-1x machine and 10 min max duration", async () => { + const { ffmpegEditTask } = await import("../ffmpegEditTask"); + expect(ffmpegEditTask.machine).toBe("medium-1x"); + expect(ffmpegEditTask.maxDuration).toBe(600); + }); +}); diff --git a/src/tasks/ffmpegEditTask.ts b/src/tasks/ffmpegEditTask.ts new file mode 100644 index 0000000..31ef1c1 --- /dev/null +++ b/src/tasks/ffmpegEditTask.ts @@ -0,0 +1,68 @@ +import { randomUUID } from "node:crypto"; +import { unlink, mkdir } from "node:fs/promises"; +import { tmpdir } from "node:os"; +import { join } from "node:path"; +import { schemaTask, tags } from "@trigger.dev/sdk/v3"; +import { ffmpegEditPayloadSchema } from "../schemas/ffmpegEditSchema"; +import { logStep } from "../sandboxes/logStep"; 
+import { downloadMediaToFile } from "../content/downloadMediaToFile"; +import { runFfmpeg } from "../content/runFfmpeg"; +import { uploadToFalStorage } from "../content/uploadToFalStorage"; +import { buildRenderFfmpegArgs } from "../content/buildRenderFfmpegArgs"; + +/** + * FFmpeg edit task — applies video edit operations via ffmpeg. + * + * Triggered by PATCH /api/content. Accepts a video URL and runs + * operations (trim, crop, resize, overlay_text) in order using ffmpeg. + * Uploads the result to fal.ai storage. + */ +export const ffmpegEditTask = schemaTask({ + id: "ffmpeg-edit", + schema: ffmpegEditPayloadSchema, + maxDuration: 600, + machine: "medium-1x", + retry: { + maxAttempts: 0, + }, + run: async (payload) => { + await tags.add(`account:${payload.accountId}`); + logStep("ffmpeg-edit task started", true, { + accountId: payload.accountId, + operationCount: payload.operations.length, + outputFormat: payload.output_format, + }); + + const tempDir = join(tmpdir(), `render-${randomUUID()}`); + await mkdir(tempDir, { recursive: true }); + + const inputPath = join(tempDir, "input.mp4"); + const outputPath = join(tempDir, `output.${payload.output_format}`); + + try { + logStep("Downloading input video"); + await downloadMediaToFile(payload.video_url, inputPath); + + const ffmpegArgs = buildRenderFfmpegArgs(inputPath, outputPath, payload.operations); + + logStep("Running ffmpeg", true, { args: ffmpegArgs.join(" ") }); + await runFfmpeg(ffmpegArgs); + + logStep("Uploading rendered output"); + const result = await uploadToFalStorage(outputPath, `rendered.${payload.output_format}`, `video/${payload.output_format}`); + + logStep("Render complete", true, { url: result.url, sizeBytes: result.sizeBytes }); + + return { + status: "completed", + url: result.url, + mimeType: result.mimeType, + sizeBytes: result.sizeBytes, + }; + } finally { + await Promise.all( + [inputPath, outputPath].map((p) => unlink(p).catch(() => undefined)), + ); + } + }, +});