diff --git a/app/api/content/analyze/route.ts b/app/api/content/analyze/route.ts new file mode 100644 index 00000000..2679338b --- /dev/null +++ b/app/api/content/analyze/route.ts @@ -0,0 +1,23 @@ +import { NextRequest, NextResponse } from "next/server"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { createAnalyzeHandler } from "@/lib/content/analyze/createAnalyzeHandler"; + +/** + * OPTIONS handler for CORS preflight requests. + */ +export async function OPTIONS() { + return new NextResponse(null, { status: 204, headers: getCorsHeaders() }); +} + +/** + * POST /api/content/analyze + * + * Analyze a video with AI — describe scenes, check quality, evaluate content. + */ +export async function POST(request: NextRequest): Promise { + return createAnalyzeHandler(request); +} + +export const dynamic = "force-dynamic"; +export const fetchCache = "force-no-store"; +export const revalidate = 0; diff --git a/app/api/content/caption/route.ts b/app/api/content/caption/route.ts new file mode 100644 index 00000000..59b1a9ae --- /dev/null +++ b/app/api/content/caption/route.ts @@ -0,0 +1,23 @@ +import { NextRequest, NextResponse } from "next/server"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { createTextHandler } from "@/lib/content/caption/createTextHandler"; + +/** + * OPTIONS handler for CORS preflight requests. + */ +export async function OPTIONS() { + return new NextResponse(null, { status: 204, headers: getCorsHeaders() }); +} + +/** + * POST /api/content/caption + * + * Generate on-screen caption text for a social video. 
+ */ +export async function POST(request: NextRequest): Promise { + return createTextHandler(request); +} + +export const dynamic = "force-dynamic"; +export const fetchCache = "force-no-store"; +export const revalidate = 0; diff --git a/app/api/content/image/route.ts b/app/api/content/image/route.ts new file mode 100644 index 00000000..06c7bc9f --- /dev/null +++ b/app/api/content/image/route.ts @@ -0,0 +1,23 @@ +import { NextRequest, NextResponse } from "next/server"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { createImageHandler } from "@/lib/content/image/createImageHandler"; + +/** + * OPTIONS handler for CORS preflight requests. + */ +export async function OPTIONS() { + return new NextResponse(null, { status: 204, headers: getCorsHeaders() }); +} + +/** + * POST /api/content/image + * + * Generate an image from a prompt and optional reference image. + */ +export async function POST(request: NextRequest): Promise { + return createImageHandler(request); +} + +export const dynamic = "force-dynamic"; +export const fetchCache = "force-no-store"; +export const revalidate = 0; diff --git a/app/api/content/route.ts b/app/api/content/route.ts new file mode 100644 index 00000000..f5703b37 --- /dev/null +++ b/app/api/content/route.ts @@ -0,0 +1,23 @@ +import { NextRequest, NextResponse } from "next/server"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { editHandler } from "@/lib/content/edit/editHandler"; + +/** + * OPTIONS handler for CORS preflight requests. + */ +export async function OPTIONS() { + return new NextResponse(null, { status: 204, headers: getCorsHeaders() }); +} + +/** + * PATCH /api/content + * + * Edit media with operations or a template preset. 
+ */ +export async function PATCH(request: NextRequest): Promise { + return editHandler(request); +} + +export const dynamic = "force-dynamic"; +export const fetchCache = "force-no-store"; +export const revalidate = 0; diff --git a/app/api/content/templates/[id]/route.ts b/app/api/content/templates/[id]/route.ts new file mode 100644 index 00000000..e4c27251 --- /dev/null +++ b/app/api/content/templates/[id]/route.ts @@ -0,0 +1,26 @@ +import { NextRequest, NextResponse } from "next/server"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { getContentTemplateDetailHandler } from "@/lib/content/getContentTemplateDetailHandler"; + +/** + * OPTIONS handler for CORS preflight requests. + */ +export async function OPTIONS() { + return new NextResponse(null, { status: 204, headers: getCorsHeaders() }); +} + +/** + * GET /api/content/templates/[id] + * + * Returns the full template configuration for a given template id. + */ +export async function GET( + request: NextRequest, + context: { params: Promise<{ id: string }> }, +): Promise { + return getContentTemplateDetailHandler(request, context); +} + +export const dynamic = "force-dynamic"; +export const fetchCache = "force-no-store"; +export const revalidate = 0; diff --git a/app/api/content/transcribe/route.ts b/app/api/content/transcribe/route.ts new file mode 100644 index 00000000..75f7be63 --- /dev/null +++ b/app/api/content/transcribe/route.ts @@ -0,0 +1,23 @@ +import { NextRequest, NextResponse } from "next/server"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { createAudioHandler } from "@/lib/content/transcribe/createAudioHandler"; + +/** + * OPTIONS handler for CORS preflight requests. + */ +export async function OPTIONS() { + return new NextResponse(null, { status: 204, headers: getCorsHeaders() }); +} + +/** + * POST /api/content/transcribe + * + * Transcribe audio into text with word-level timestamps. 
+ */ +export async function POST(request: NextRequest): Promise { + return createAudioHandler(request); +} + +export const dynamic = "force-dynamic"; +export const fetchCache = "force-no-store"; +export const revalidate = 0; diff --git a/app/api/content/upscale/route.ts b/app/api/content/upscale/route.ts new file mode 100644 index 00000000..63739f5d --- /dev/null +++ b/app/api/content/upscale/route.ts @@ -0,0 +1,23 @@ +import { NextRequest, NextResponse } from "next/server"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { createUpscaleHandler } from "@/lib/content/upscale/createUpscaleHandler"; + +/** + * OPTIONS handler for CORS preflight requests. + */ +export async function OPTIONS() { + return new NextResponse(null, { status: 204, headers: getCorsHeaders() }); +} + +/** + * POST /api/content/upscale + * + * Upscale an image or video to higher resolution. + */ +export async function POST(request: NextRequest): Promise { + return createUpscaleHandler(request); +} + +export const dynamic = "force-dynamic"; +export const fetchCache = "force-no-store"; +export const revalidate = 0; diff --git a/app/api/content/video/route.ts b/app/api/content/video/route.ts new file mode 100644 index 00000000..fde60b30 --- /dev/null +++ b/app/api/content/video/route.ts @@ -0,0 +1,23 @@ +import { NextRequest, NextResponse } from "next/server"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { createVideoHandler } from "@/lib/content/video/createVideoHandler"; + +/** + * OPTIONS handler for CORS preflight requests. + */ +export async function OPTIONS() { + return new NextResponse(null, { status: 204, headers: getCorsHeaders() }); +} + +/** + * POST /api/content/video + * + * Generate a video from a prompt, image, or existing video. 
+ */ +export async function POST(request: NextRequest): Promise { + return createVideoHandler(request); +} + +export const dynamic = "force-dynamic"; +export const fetchCache = "force-no-store"; +export const revalidate = 0; diff --git a/lib/content/__tests__/getContentTemplateDetailHandler.test.ts b/lib/content/__tests__/getContentTemplateDetailHandler.test.ts new file mode 100644 index 00000000..8820f81e --- /dev/null +++ b/lib/content/__tests__/getContentTemplateDetailHandler.test.ts @@ -0,0 +1,94 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { NextRequest, NextResponse } from "next/server"; +import { getContentTemplateDetailHandler } from "@/lib/content/getContentTemplateDetailHandler"; +import { validateAuthContext } from "@/lib/auth/validateAuthContext"; +import { loadTemplate } from "@/lib/content/templates"; + +vi.mock("@/lib/networking/getCorsHeaders", () => ({ + getCorsHeaders: vi.fn(() => ({ "Access-Control-Allow-Origin": "*" })), +})); + +vi.mock("@/lib/auth/validateAuthContext", () => ({ + validateAuthContext: vi.fn(), +})); + +vi.mock("@/lib/content/templates", () => ({ + loadTemplate: vi.fn(), +})); + +describe("getContentTemplateDetailHandler", () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + it("returns 401 when not authenticated", async () => { + vi.mocked(validateAuthContext).mockResolvedValue( + NextResponse.json({ status: "error", error: "Unauthorized" }, { status: 401 }), + ); + const request = new NextRequest("http://localhost/api/content/templates/bedroom", { + method: "GET", + }); + + const result = await getContentTemplateDetailHandler(request, { + params: Promise.resolve({ id: "bedroom" }), + }); + + expect(result.status).toBe(401); + }); + + it("returns 404 for unknown template", async () => { + vi.mocked(validateAuthContext).mockResolvedValue({ + accountId: "acc_123", + orgId: null, + authToken: "test-key", + }); + vi.mocked(loadTemplate).mockReturnValue(null); + + const request = new 
NextRequest("http://localhost/api/content/templates/nonexistent", { + method: "GET", + }); + + const result = await getContentTemplateDetailHandler(request, { + params: Promise.resolve({ id: "nonexistent" }), + }); + const body = await result.json(); + + expect(result.status).toBe(404); + expect(body.error).toBe("Template not found"); + }); + + it("returns full template for valid id", async () => { + vi.mocked(validateAuthContext).mockResolvedValue({ + accountId: "acc_123", + orgId: null, + authToken: "test-key", + }); + const mockTemplate = { + id: "artist-caption-bedroom", + description: "Moody purple bedroom setting", + image: { prompt: "test", reference_images: [], style_rules: {} }, + video: { moods: ["calm"], movements: ["slow pan"] }, + caption: { guide: { tone: "dreamy", rules: [], formats: [] }, examples: [] }, + edit: { operations: [] }, + }; + vi.mocked(loadTemplate).mockReturnValue(mockTemplate); + + const request = new NextRequest( + "http://localhost/api/content/templates/artist-caption-bedroom", + { method: "GET" }, + ); + + const result = await getContentTemplateDetailHandler(request, { + params: Promise.resolve({ id: "artist-caption-bedroom" }), + }); + const body = await result.json(); + + expect(result.status).toBe(200); + expect(body.id).toBe("artist-caption-bedroom"); + expect(body.description).toBe("Moody purple bedroom setting"); + expect(body.image).toBeDefined(); + expect(body.video).toBeDefined(); + expect(body.caption).toBeDefined(); + expect(body.edit).toBeDefined(); + }); +}); diff --git a/lib/content/__tests__/schemas.test.ts b/lib/content/__tests__/schemas.test.ts new file mode 100644 index 00000000..645e6570 --- /dev/null +++ b/lib/content/__tests__/schemas.test.ts @@ -0,0 +1,327 @@ +import { describe, it, expect, vi } from "vitest"; + +vi.mock("@/lib/supabase/serverClient", () => ({ default: {} })); +vi.mock("@/lib/auth/validateAuthContext", () => ({ + validateAuthContext: vi.fn(), +})); + +const { createImageBodySchema } = await 
import("../image/validateCreateImageBody"); +const { createVideoBodySchema } = await import("../video/validateCreateVideoBody"); +const { createTextBodySchema } = await import("../caption/validateCreateCaptionBody"); +const { createAudioBodySchema } = await import("../transcribe/validateTranscribeAudioBody"); +const { editBodySchema } = await import("../edit/validateEditContentBody"); +const { createUpscaleBodySchema } = await import("../upscale/validateUpscaleBody"); +const { createAnalyzeBodySchema } = await import("../analyze/validateAnalyzeVideoBody"); + +describe("createImageBodySchema", () => { + it("parses valid payload with prompt only", () => { + expect( + createImageBodySchema.safeParse({ + prompt: "a moody portrait", + }).success, + ).toBe(true); + }); + + it("parses valid payload with reference image", () => { + expect( + createImageBodySchema.safeParse({ + prompt: "portrait photo", + reference_image_url: "https://example.com/ref.png", + }).success, + ).toBe(true); + }); + + it("parses empty payload (all fields optional)", () => { + expect(createImageBodySchema.safeParse({}).success).toBe(true); + }); + + it("accepts custom model", () => { + const result = createImageBodySchema.safeParse({ + prompt: "test", + model: "fal-ai/some-other-model", + }); + expect(result.success).toBe(true); + if (result.success) expect(result.data.model).toBe("fal-ai/some-other-model"); + }); +}); + +describe("createVideoBodySchema", () => { + it("parses prompt-only payload", () => { + expect( + createVideoBodySchema.safeParse({ + prompt: "a calm ocean", + }).success, + ).toBe(true); + }); + + it("parses animate mode with image", () => { + expect( + createVideoBodySchema.safeParse({ + mode: "animate", + image_url: "https://example.com/img.png", + prompt: "make it move", + }).success, + ).toBe(true); + }); + + it("parses extend mode with video", () => { + expect( + createVideoBodySchema.safeParse({ + mode: "extend", + video_url: "https://example.com/clip.mp4", + prompt: 
"continue the scene", + }).success, + ).toBe(true); + }); + + it("parses first-last mode with two images", () => { + expect( + createVideoBodySchema.safeParse({ + mode: "first-last", + image_url: "https://example.com/start.png", + end_image_url: "https://example.com/end.png", + prompt: "transition between these", + }).success, + ).toBe(true); + }); + + it("parses lipsync mode", () => { + expect( + createVideoBodySchema.safeParse({ + mode: "lipsync", + image_url: "https://example.com/face.png", + audio_url: "https://example.com/audio.mp3", + }).success, + ).toBe(true); + }); + + it("defaults duration to 8s", () => { + const result = createVideoBodySchema.safeParse({ prompt: "test" }); + expect(result.success).toBe(true); + if (result.success) expect(result.data.duration).toBe("8s"); + }); + + it("defaults generate_audio to false", () => { + const result = createVideoBodySchema.safeParse({ prompt: "test" }); + expect(result.success).toBe(true); + if (result.success) expect(result.data.generate_audio).toBe(false); + }); + + it("parses video with template", () => { + expect( + createVideoBodySchema.safeParse({ + template: "artist-caption-bedroom", + prompt: "subtle motion", + }).success, + ).toBe(true); + }); +}); + +describe("createTextBodySchema", () => { + it("parses valid payload", () => { + expect( + createTextBodySchema.safeParse({ + topic: "a rainy day in the city", + }).success, + ).toBe(true); + }); + + it("defaults length to short", () => { + const result = createTextBodySchema.safeParse({ + topic: "test", + }); + expect(result.success).toBe(true); + if (result.success) expect(result.data.length).toBe("short"); + }); + + it("rejects missing topic", () => { + expect(createTextBodySchema.safeParse({}).success).toBe(false); + }); +}); + +describe("createAudioBodySchema", () => { + it("parses valid payload with audio URLs", () => { + expect( + createAudioBodySchema.safeParse({ + audio_urls: ["https://example.com/song.mp3"], + }).success, + ).toBe(true); + }); + + 
it("rejects non-URL strings", () => { + expect( + createAudioBodySchema.safeParse({ + audio_urls: ["not-a-url"], + }).success, + ).toBe(false); + }); + + it("rejects empty array", () => { + expect( + createAudioBodySchema.safeParse({ + audio_urls: [], + }).success, + ).toBe(false); + }); + + it("accepts custom model", () => { + const result = createAudioBodySchema.safeParse({ + audio_urls: ["https://example.com/audio.mp3"], + model: "fal-ai/custom-whisper", + }); + expect(result.success).toBe(true); + if (result.success) expect(result.data.model).toBe("fal-ai/custom-whisper"); + }); +}); + +describe("editBodySchema", () => { + it("parses manual mode with operations", () => { + expect( + editBodySchema.safeParse({ + video_url: "https://example.com/v.mp4", + operations: [{ type: "trim", start: 10, duration: 15 }], + }).success, + ).toBe(true); + }); + + it("parses template mode", () => { + expect( + editBodySchema.safeParse({ + video_url: "https://example.com/v.mp4", + template: "artist-caption-bedroom", + }).success, + ).toBe(true); + }); + + it("rejects missing both template and operations", () => { + expect( + editBodySchema.safeParse({ + video_url: "https://example.com/v.mp4", + }).success, + ).toBe(false); + }); + + it("rejects missing all inputs", () => { + expect( + editBodySchema.safeParse({ + operations: [{ type: "trim", start: 0, duration: 5 }], + }).success, + ).toBe(false); + }); + + it("rejects audio_url without video_url", () => { + expect( + editBodySchema.safeParse({ + audio_url: "https://example.com/a.mp3", + operations: [{ type: "trim", start: 0, duration: 15 }], + }).success, + ).toBe(false); + }); + + it("parses overlay_text operation", () => { + expect( + editBodySchema.safeParse({ + video_url: "https://example.com/v.mp4", + operations: [{ type: "overlay_text", content: "hello world" }], + }).success, + ).toBe(true); + }); + + it("rejects mux_audio operation", () => { + expect( + editBodySchema.safeParse({ + video_url: 
"https://example.com/v.mp4", + operations: [{ type: "mux_audio", audio_url: "https://example.com/a.mp3" }], + }).success, + ).toBe(false); + }); + + it("parses crop operation", () => { + expect( + editBodySchema.safeParse({ + video_url: "https://example.com/v.mp4", + operations: [{ type: "crop", aspect: "9:16" }], + }).success, + ).toBe(true); + }); + + it("parses multiple operations", () => { + expect( + editBodySchema.safeParse({ + video_url: "https://example.com/v.mp4", + operations: [ + { type: "trim", start: 30, duration: 15 }, + { type: "crop", aspect: "9:16" }, + { type: "overlay_text", content: "caption" }, + ], + }).success, + ).toBe(true); + }); + + it("defaults output_format to mp4", () => { + const result = editBodySchema.safeParse({ + video_url: "https://example.com/v.mp4", + operations: [{ type: "trim", start: 0, duration: 5 }], + }); + expect(result.success).toBe(true); + if (result.success) expect(result.data.output_format).toBe("mp4"); + }); +}); + +describe("createUpscaleBodySchema", () => { + it("parses image upscale", () => { + expect( + createUpscaleBodySchema.safeParse({ + url: "https://example.com/img.png", + type: "image", + }).success, + ).toBe(true); + }); + + it("rejects invalid type", () => { + expect( + createUpscaleBodySchema.safeParse({ + url: "https://example.com/f", + type: "audio", + }).success, + ).toBe(false); + }); +}); + +describe("createAnalyzeBodySchema", () => { + it("parses valid payload", () => { + expect( + createAnalyzeBodySchema.safeParse({ + video_url: "https://example.com/video.mp4", + prompt: "Describe what happens in this video", + }).success, + ).toBe(true); + }); + + it("defaults temperature to 0.2", () => { + const result = createAnalyzeBodySchema.safeParse({ + video_url: "https://example.com/video.mp4", + prompt: "Describe this video", + }); + expect(result.success).toBe(true); + if (result.success) expect(result.data.temperature).toBe(0.2); + }); + + it("rejects prompt exceeding 2000 chars", () => { + expect( + 
createAnalyzeBodySchema.safeParse({ + video_url: "https://example.com/video.mp4", + prompt: "x".repeat(2001), + }).success, + ).toBe(false); + }); + + it("rejects invalid video_url", () => { + expect( + createAnalyzeBodySchema.safeParse({ + video_url: "not-a-url", + prompt: "Describe this video", + }).success, + ).toBe(false); + }); +}); diff --git a/lib/content/__tests__/validateCreateContentBody.test.ts b/lib/content/__tests__/validateCreateContentBody.test.ts index 1a71d5ae..658ef8d7 100644 --- a/lib/content/__tests__/validateCreateContentBody.test.ts +++ b/lib/content/__tests__/validateCreateContentBody.test.ts @@ -20,6 +20,10 @@ vi.mock("@/lib/content/resolveArtistSlug", () => ({ resolveArtistSlug: vi.fn().mockResolvedValue("gatsby-grace"), })); +/** + * + * @param body + */ function createRequest(body: unknown): NextRequest { return new NextRequest("http://localhost/api/content/create", { method: "POST", @@ -71,11 +75,25 @@ describe("validateCreateContentBody", () => { expect(result).not.toBeInstanceOf(NextResponse); if (!(result instanceof NextResponse)) { - expect(result.template).toBe("artist-caption-bedroom"); + expect(result.template).toBeUndefined(); expect(result.lipsync).toBe(false); } }); + it("accepts request without template", async () => { + const request = createRequest({ + artist_account_id: "550e8400-e29b-41d4-a716-446655440000", + }); + + const result = await validateCreateContentBody(request); + + expect(result).not.toBeInstanceOf(NextResponse); + if (!(result instanceof NextResponse)) { + expect(result.template).toBeUndefined(); + expect(result.artistAccountId).toBe("550e8400-e29b-41d4-a716-446655440000"); + } + }); + it("returns 400 when artist_account_id is missing", async () => { const request = createRequest({ template: "artist-caption-bedroom", diff --git a/lib/content/analyze/__tests__/createAnalyzeHandler.test.ts b/lib/content/analyze/__tests__/createAnalyzeHandler.test.ts new file mode 100644 index 00000000..c416a60b --- /dev/null 
+++ b/lib/content/analyze/__tests__/createAnalyzeHandler.test.ts @@ -0,0 +1,144 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { NextRequest, NextResponse } from "next/server"; + +vi.mock("@/lib/networking/getCorsHeaders", () => ({ + getCorsHeaders: vi.fn(() => ({ "Access-Control-Allow-Origin": "*" })), +})); + +vi.mock("@/lib/auth/validateAuthContext", () => ({ + validateAuthContext: vi.fn(), +})); + +vi.mock("@/lib/networking/safeParseJson", () => ({ + safeParseJson: vi.fn(), +})); + +const { validateAuthContext } = await import("@/lib/auth/validateAuthContext"); +const { safeParseJson } = await import("@/lib/networking/safeParseJson"); +const { createAnalyzeHandler } = await import("../createAnalyzeHandler"); + +const VALID_BODY = { + video_url: "https://example.com/video.mp4", + prompt: "Describe what happens in this video", +}; + +describe("createAnalyzeHandler", () => { + const originalEnv = process.env; + + beforeEach(() => { + vi.clearAllMocks(); + process.env = { ...originalEnv, TWELVELABS_API_KEY: "test-key" }; + vi.mocked(validateAuthContext).mockResolvedValue({ + accountId: "acc_123", + orgId: null, + authToken: "tok", + }); + vi.mocked(safeParseJson).mockResolvedValue(VALID_BODY); + }); + + afterEach(() => { + process.env = originalEnv; + }); + + it("returns auth error when auth fails", async () => { + const authError = NextResponse.json({ error: "Unauthorized" }, { status: 401 }); + vi.mocked(validateAuthContext).mockResolvedValue(authError); + + const request = new NextRequest("http://localhost/api/content/analyze", { + method: "POST", + }); + const result = await createAnalyzeHandler(request); + + expect(result.status).toBe(401); + }); + + it("returns 500 when TWELVELABS_API_KEY is missing", async () => { + delete process.env.TWELVELABS_API_KEY; + + const request = new NextRequest("http://localhost/api/content/analyze", { + method: "POST", + }); + const result = await createAnalyzeHandler(request); + + 
expect(result.status).toBe(500); + const body = await result.json(); + expect(body.error).toContain("TWELVELABS_API_KEY"); + }); + + it("returns analysis text on success", async () => { + vi.spyOn(global, "fetch").mockResolvedValueOnce( + new Response( + JSON.stringify({ + data: "This video shows a cat playing piano.", + finish_reason: "stop", + usage: { output_tokens: 42 }, + }), + { status: 200 }, + ), + ); + + const request = new NextRequest("http://localhost/api/content/analyze", { + method: "POST", + }); + const result = await createAnalyzeHandler(request); + + expect(result.status).toBe(200); + const body = await result.json(); + expect(body.text).toBe("This video shows a cat playing piano."); + expect(body.finish_reason).toBe("stop"); + expect(body.usage).toEqual({ output_tokens: 42 }); + }); + + it("returns 502 when Twelve Labs returns an error", async () => { + vi.spyOn(global, "fetch").mockResolvedValueOnce(new Response("Bad Request", { status: 400 })); + + const request = new NextRequest("http://localhost/api/content/analyze", { + method: "POST", + }); + const result = await createAnalyzeHandler(request); + + expect(result.status).toBe(502); + const body = await result.json(); + expect(body.error).toContain("400"); + }); + + it("returns 502 when response has no data", async () => { + vi.spyOn(global, "fetch").mockResolvedValueOnce( + new Response(JSON.stringify({}), { status: 200 }), + ); + + const request = new NextRequest("http://localhost/api/content/analyze", { + method: "POST", + }); + const result = await createAnalyzeHandler(request); + + expect(result.status).toBe(502); + const body = await result.json(); + expect(body.error).toContain("no text"); + }); + + it("sends correct body to Twelve Labs API", async () => { + const fetchSpy = vi + .spyOn(global, "fetch") + .mockResolvedValueOnce( + new Response( + JSON.stringify({ data: "result", finish_reason: "stop", usage: { output_tokens: 10 } }), + { status: 200 }, + ), + ); + + const request = new 
NextRequest("http://localhost/api/content/analyze", { + method: "POST", + }); + await createAnalyzeHandler(request); + + expect(fetchSpy).toHaveBeenCalledOnce(); + const [url, options] = fetchSpy.mock.calls[0]; + expect(url).toBe("https://api.twelvelabs.io/v1.3/analyze"); + const sentBody = JSON.parse(options?.body as string); + expect(sentBody.video).toEqual({ type: "url", url: "https://example.com/video.mp4" }); + expect(sentBody.prompt).toBe("Describe what happens in this video"); + expect(sentBody.stream).toBe(false); + expect(sentBody.temperature).toBe(0.2); + }); +}); diff --git a/lib/content/analyze/createAnalyzeHandler.ts b/lib/content/analyze/createAnalyzeHandler.ts new file mode 100644 index 00000000..9b318015 --- /dev/null +++ b/lib/content/analyze/createAnalyzeHandler.ts @@ -0,0 +1,37 @@ +import type { NextRequest } from "next/server"; +import { NextResponse } from "next/server"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { analyzeVideo } from "@/lib/twelvelabs/analyzeVideo"; +import { validateAnalyzeVideoBody } from "./validateAnalyzeVideoBody"; + +/** + * POST /api/content/analyze + * + * @param request - Incoming request with video URL and analysis prompt. + * @returns JSON with the generated analysis text. + */ +export async function createAnalyzeHandler(request: NextRequest): Promise { + const validated = await validateAnalyzeVideoBody(request); + if (validated instanceof NextResponse) return validated; + + try { + const result = await analyzeVideo(validated); + + return NextResponse.json( + { + text: result.text, + finish_reason: result.finishReason, + usage: result.usage, + }, + { status: 200, headers: getCorsHeaders() }, + ); + } catch (error) { + console.error("Video analysis error:", error); + const message = error instanceof Error ? error.message : "Video analysis failed"; + const status = message.includes("not configured") ? 
500 : 502; + return NextResponse.json( + { status: "error", error: message }, + { status, headers: getCorsHeaders() }, + ); + } +} diff --git a/lib/content/analyze/validateAnalyzeVideoBody.ts b/lib/content/analyze/validateAnalyzeVideoBody.ts new file mode 100644 index 00000000..ee8d221b --- /dev/null +++ b/lib/content/analyze/validateAnalyzeVideoBody.ts @@ -0,0 +1,40 @@ +import type { NextRequest } from "next/server"; +import { NextResponse } from "next/server"; +import { z } from "zod"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { safeParseJson } from "@/lib/networking/safeParseJson"; +import { validateAuthContext } from "@/lib/auth/validateAuthContext"; + +export const createAnalyzeBodySchema = z.object({ + video_url: z.string().url(), + prompt: z.string().min(1).max(2000), + temperature: z.number().min(0).max(1).optional().default(0.2), + max_tokens: z.number().int().min(1).max(4096).optional(), +}); + +export type ValidatedAnalyzeVideoBody = { accountId: string } & z.infer< + typeof createAnalyzeBodySchema +>; + +/** + * Validates auth and request body for POST /api/content/analyze. 
+ */ +export async function validateAnalyzeVideoBody( + request: NextRequest, +): Promise { + const authResult = await validateAuthContext(request); + if (authResult instanceof NextResponse) return authResult; + + const body = await safeParseJson(request); + const result = createAnalyzeBodySchema.safeParse(body); + + if (!result.success) { + const firstError = result.error.issues[0]; + return NextResponse.json( + { status: "error", field: firstError.path, error: firstError.message }, + { status: 400, headers: getCorsHeaders() }, + ); + } + + return { accountId: authResult.accountId, ...result.data }; +} diff --git a/lib/content/caption/composeCaptionPrompt.ts b/lib/content/caption/composeCaptionPrompt.ts new file mode 100644 index 00000000..dcd7ce17 --- /dev/null +++ b/lib/content/caption/composeCaptionPrompt.ts @@ -0,0 +1,29 @@ +import type { Template } from "@/lib/content/templates"; + +/** + * Builds the LLM prompt for caption generation, optionally with template guide. + * + * @param topic - Subject or theme for the caption. + * @param length - Desired caption length tier. + * @param tpl - Optional template with caption guide and examples. + * @returns Formatted prompt string. + */ +export function composeCaptionPrompt(topic: string, length: string, tpl: Template | null): string { + let prompt = `Generate ONE short on-screen text for a social media video. +Topic: "${topic}" +Length: ${length} +Return ONLY the text, nothing else. 
No quotes.`; + + if (tpl?.caption.guide) { + const g = tpl.caption.guide; + prompt += `\n\nStyle: ${g.tone}`; + if (g.rules.length) prompt += `\nRules:\n${g.rules.map(r => `- ${r}`).join("\n")}`; + if (g.formats.length) prompt += `\nFormats to try:\n${g.formats.map(f => `- ${f}`).join("\n")}`; + } + + if (tpl?.caption.examples.length) { + prompt += `\n\nExamples of good captions:\n${tpl.caption.examples.map(e => `- "${e}"`).join("\n")}`; + } + + return prompt; +} diff --git a/lib/content/caption/createTextHandler.ts b/lib/content/caption/createTextHandler.ts new file mode 100644 index 00000000..909cc5e3 --- /dev/null +++ b/lib/content/caption/createTextHandler.ts @@ -0,0 +1,46 @@ +import type { NextRequest } from "next/server"; +import { NextResponse } from "next/server"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import generateText from "@/lib/ai/generateText"; +import { LIGHTWEIGHT_MODEL } from "@/lib/const"; +import { loadTemplate } from "@/lib/content/templates"; +import { validateCreateCaptionBody } from "./validateCreateCaptionBody"; +import { composeCaptionPrompt } from "./composeCaptionPrompt"; + +/** + * POST /api/content/caption + * + * @param request - Incoming Next.js request with JSON body. + * @returns JSON with generated text styling fields, or an error NextResponse. + */ +export async function createTextHandler(request: NextRequest): Promise { + const validated = await validateCreateCaptionBody(request); + if (validated instanceof NextResponse) return validated; + + try { + const template = validated.template ? 
loadTemplate(validated.template) : null; + const prompt = composeCaptionPrompt(validated.topic, validated.length, template); + const result = await generateText({ prompt, model: LIGHTWEIGHT_MODEL }); + + let content = result.text.trim(); + content = content.replace(/^["']|["']$/g, "").trim(); + + if (!content) { + return NextResponse.json( + { status: "error", error: "Text generation returned empty" }, + { status: 502, headers: getCorsHeaders() }, + ); + } + + return NextResponse.json( + { content, font: null, color: "white", borderColor: "black", maxFontSize: 42 }, + { status: 200, headers: getCorsHeaders() }, + ); + } catch (error) { + console.error("Text generation error:", error); + return NextResponse.json( + { status: "error", error: "Text generation failed" }, + { status: 500, headers: getCorsHeaders() }, + ); + } +} diff --git a/lib/content/caption/validateCreateCaptionBody.ts b/lib/content/caption/validateCreateCaptionBody.ts new file mode 100644 index 00000000..9eb2faf9 --- /dev/null +++ b/lib/content/caption/validateCreateCaptionBody.ts @@ -0,0 +1,41 @@ +import type { NextRequest } from "next/server"; +import { NextResponse } from "next/server"; +import { z } from "zod"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { safeParseJson } from "@/lib/networking/safeParseJson"; +import { validateAuthContext } from "@/lib/auth/validateAuthContext"; +import { TEMPLATE_IDS } from "@/lib/content/templates"; +import { CAPTION_LENGTHS } from "@/lib/content/captionLengths"; + +export const createTextBodySchema = z.object({ + template: z.enum(TEMPLATE_IDS).optional(), + topic: z.string().min(1), + length: z.enum(CAPTION_LENGTHS).optional().default("short"), +}); + +export type ValidatedCreateCaptionBody = { accountId: string } & z.infer< + typeof createTextBodySchema +>; + +/** + * Validates auth and request body for POST /api/content/caption. 
+ */ +export async function validateCreateCaptionBody( + request: NextRequest, +): Promise<NextResponse | ValidatedCreateCaptionBody> { + const authResult = await validateAuthContext(request); + if (authResult instanceof NextResponse) return authResult; + + const body = await safeParseJson(request); + const result = createTextBodySchema.safeParse(body); + + if (!result.success) { + const firstError = result.error.issues[0]; + return NextResponse.json( + { status: "error", field: firstError.path, error: firstError.message }, + { status: 400, headers: getCorsHeaders() }, + ); + } + + return { accountId: authResult.accountId, ...result.data }; +} diff --git a/lib/content/edit/__tests__/validateEditContentBody.test.ts b/lib/content/edit/__tests__/validateEditContentBody.test.ts new file mode 100644 index 00000000..1565a6ab --- /dev/null +++ b/lib/content/edit/__tests__/validateEditContentBody.test.ts @@ -0,0 +1,74 @@ +import { describe, it, expect, vi } from "vitest"; + +vi.mock("@/lib/supabase/serverClient", () => ({ default: {} })); +vi.mock("@/lib/auth/validateAuthContext", () => ({ validateAuthContext: vi.fn() })); + +const { editBodySchema } = await import("../validateEditContentBody"); + +describe("editBodySchema", () => { + it("requires video_url", () => { + const result = editBodySchema.safeParse({ + operations: [{ type: "crop", aspect: "9:16" }], + }); + expect(result.success).toBe(false); + }); + + it("accepts video_url with crop operation", () => { + const result = editBodySchema.safeParse({ + video_url: "https://example.com/video.mp4", + operations: [{ type: "crop", aspect: "9:16" }], + }); + expect(result.success).toBe(true); + }); + + it("rejects audio_url without video_url", () => { + const result = editBodySchema.safeParse({ + audio_url: "https://example.com/audio.mp3", + operations: [{ type: "crop", aspect: "9:16" }], + }); + expect(result.success).toBe(false); + }); + + it("does not accept mux_audio operation type", () => { + const result = editBodySchema.safeParse({ + video_url: 
"https://example.com/video.mp4", + operations: [{ type: "mux_audio", audio_url: "https://example.com/audio.mp3" }], + }); + expect(result.success).toBe(false); + }); + + it("does not accept audio_url as a parameter", () => { + const result = editBodySchema.safeParse({ + video_url: "https://example.com/video.mp4", + audio_url: "https://example.com/audio.mp3", + operations: [{ type: "crop", aspect: "9:16" }], + }); + if (result.success) { + expect(result.data).not.toHaveProperty("audio_url"); + } + }); + + it("accepts trim operation", () => { + const result = editBodySchema.safeParse({ + video_url: "https://example.com/video.mp4", + operations: [{ type: "trim", start: 0, duration: 5 }], + }); + expect(result.success).toBe(true); + }); + + it("accepts overlay_text operation", () => { + const result = editBodySchema.safeParse({ + video_url: "https://example.com/video.mp4", + operations: [{ type: "overlay_text", content: "hello" }], + }); + expect(result.success).toBe(true); + }); + + it("accepts resize operation", () => { + const result = editBodySchema.safeParse({ + video_url: "https://example.com/video.mp4", + operations: [{ type: "resize", width: 720 }], + }); + expect(result.success).toBe(true); + }); +}); diff --git a/lib/content/edit/editHandler.ts b/lib/content/edit/editHandler.ts new file mode 100644 index 00000000..bfa8cfb8 --- /dev/null +++ b/lib/content/edit/editHandler.ts @@ -0,0 +1,44 @@ +import type { NextRequest } from "next/server"; +import { NextResponse } from "next/server"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { tasks } from "@trigger.dev/sdk"; +import { loadTemplate } from "@/lib/content/templates"; +import { validateEditContentBody } from "./validateEditContentBody"; + +/** + * PATCH /api/content + * + * @param request - Incoming request with video URL and edit operations. + * @returns JSON with the triggered run ID. 
+ */ +export async function editHandler(request: NextRequest): Promise<NextResponse> { + const validated = await validateEditContentBody(request); + if (validated instanceof NextResponse) return validated; + + try { + let operations = validated.operations; + + if (!operations && validated.template) { + const template = loadTemplate(validated.template); + if (template?.edit.operations) { + operations = template.edit.operations as typeof operations; + } + } + + const handle = await tasks.trigger("ffmpeg-edit", { + ...validated, + operations, + }); + + return NextResponse.json( + { runId: handle.id, status: "triggered" }, + { status: 202, headers: getCorsHeaders() }, + ); + } catch (error) { + console.error("Failed to trigger edit:", error); + return NextResponse.json( + { status: "error", error: "Failed to trigger edit task" }, + { status: 500, headers: getCorsHeaders() }, + ); + } +} diff --git a/lib/content/edit/validateEditContentBody.ts b/lib/content/edit/validateEditContentBody.ts new file mode 100644 index 00000000..b1040636 --- /dev/null +++ b/lib/content/edit/validateEditContentBody.ts @@ -0,0 +1,71 @@ +import type { NextRequest } from "next/server"; +import { NextResponse } from "next/server"; +import { z } from "zod"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { safeParseJson } from "@/lib/networking/safeParseJson"; +import { validateAuthContext } from "@/lib/auth/validateAuthContext"; +import { TEMPLATE_IDS } from "@/lib/content/templates"; + +export const editOperationSchema = z.discriminatedUnion("type", [ + z.object({ + type: z.literal("trim"), + start: z.number().nonnegative(), + duration: z.number().positive(), + }), + z.object({ + type: z.literal("crop"), + aspect: z.string().optional(), + width: z.number().int().positive().optional(), + height: z.number().int().positive().optional(), + }), + z.object({ + type: z.literal("resize"), + width: z.number().int().positive().optional(), + height: z.number().int().positive().optional(), + 
}), + z.object({ + type: z.literal("overlay_text"), + content: z.string().min(1), + font: z.string().optional(), + color: z.string().optional().default("white"), + stroke_color: z.string().optional().default("black"), + max_font_size: z.number().positive().optional().default(42), + position: z.enum(["top", "center", "bottom"]).optional().default("bottom"), + }), +]); + +export const editBodySchema = z + .object({ + video_url: z.string().url(), + template: z.enum(TEMPLATE_IDS).optional(), + operations: z.array(editOperationSchema).optional(), + output_format: z.enum(["mp4", "webm", "mov"]).optional().default("mp4"), + }) + .refine(data => data.template || (data.operations && data.operations.length > 0), { + message: "Must provide either template or operations", + }); + +export type ValidatedEditContentBody = { accountId: string } & z.infer<typeof editBodySchema>; + +/** + * Validates auth and request body for PATCH /api/content. + */ +export async function validateEditContentBody( + request: NextRequest, +): Promise<NextResponse | ValidatedEditContentBody> { + const authResult = await validateAuthContext(request); + if (authResult instanceof NextResponse) return authResult; + + const body = await safeParseJson(request); + const result = editBodySchema.safeParse(body); + + if (!result.success) { + const firstError = result.error.issues[0]; + return NextResponse.json( + { status: "error", field: firstError.path, error: firstError.message }, + { status: 400, headers: getCorsHeaders() }, + ); + } + + return { accountId: authResult.accountId, ...result.data }; +} diff --git a/lib/content/getContentTemplateDetailHandler.ts b/lib/content/getContentTemplateDetailHandler.ts new file mode 100644 index 00000000..6051b4c9 --- /dev/null +++ b/lib/content/getContentTemplateDetailHandler.ts @@ -0,0 +1,34 @@ +import type { NextRequest } from "next/server"; +import { NextResponse } from "next/server"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { validateAuthContext } from "@/lib/auth/validateAuthContext"; +import 
{ loadTemplate } from "@/lib/content/templates"; + +/** + * Handler for GET /api/content/templates/{id}. + * + * @param request - Incoming API request. + * @param params - Route params containing the template id. + * @returns The full template object, or 404 if not found. + */ +export async function getContentTemplateDetailHandler( + request: NextRequest, + { params }: { params: Promise<{ id: string }> }, +): Promise<NextResponse> { + const authResult = await validateAuthContext(request); + if (authResult instanceof NextResponse) { + return authResult; + } + + const { id } = await params; + const template = loadTemplate(id); + + if (!template) { + return NextResponse.json( + { status: "error", error: "Template not found" }, + { status: 404, headers: getCorsHeaders() }, + ); + } + + return NextResponse.json(template, { status: 200, headers: getCorsHeaders() }); +} diff --git a/lib/content/getContentTemplatesHandler.ts b/lib/content/getContentTemplatesHandler.ts index d1a65d80..2bf6552d 100644 --- a/lib/content/getContentTemplatesHandler.ts +++ b/lib/content/getContentTemplatesHandler.ts @@ -2,7 +2,7 @@ import type { NextRequest } from "next/server"; import { NextResponse } from "next/server"; import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; import { validateAuthContext } from "@/lib/auth/validateAuthContext"; -import { CONTENT_TEMPLATES } from "@/lib/content/contentTemplates"; +import { listTemplates } from "@/lib/content/templates"; /** * Handler for GET /api/content/templates. 
@@ -18,7 +18,7 @@ export async function getContentTemplatesHandler(request: NextRequest): Promise< return NextResponse.json( { status: "success", - templates: CONTENT_TEMPLATES, + templates: listTemplates(), }, { status: 200, headers: getCorsHeaders() }, ); diff --git a/lib/content/image/buildImageInput.ts b/lib/content/image/buildImageInput.ts new file mode 100644 index 00000000..3d925e76 --- /dev/null +++ b/lib/content/image/buildImageInput.ts @@ -0,0 +1,63 @@ +import type { z } from "zod"; +import type { createImageBodySchema } from "./validateCreateImageBody"; +import { loadTemplate } from "@/lib/content/templates"; + +const DEFAULT_T2I_MODEL = "fal-ai/nano-banana-2"; +const DEFAULT_EDIT_MODEL = "fal-ai/nano-banana-2/edit"; + +type ImageParams = z.infer<typeof createImageBodySchema>; + +interface ImageInput { + model: string; + input: Record<string, unknown>; +} + +/** + * Build the fal model name and input payload from validated image params. + * + * @param validated - Validated image generation parameters. + * @returns Object with model name and input payload for fal.subscribe. + */ +export function buildImageInput(validated: ImageParams): ImageInput { + const tpl = validated.template ? loadTemplate(validated.template) : null; + + const prompt = validated.prompt ?? tpl?.image.prompt ?? "portrait photo, natural lighting"; + + const refImageUrl = + validated.reference_image_url ?? + (tpl?.image.reference_images.length + ? tpl.image.reference_images[Math.floor(Math.random() * tpl.image.reference_images.length)] + : undefined); + + const hasReferenceImages = refImageUrl || (validated.images && validated.images.length > 0); + + const input: Record<string, unknown> = { + prompt: tpl?.image.style_rules + ? `${prompt}\n\nStyle rules: ${Object.entries(tpl.image.style_rules) + .map(([k, v]) => `${k}: ${Object.values(v).join(", ")}`) + .join(". 
")}` + : prompt, + num_images: validated.num_images, + aspect_ratio: validated.aspect_ratio, + resolution: validated.resolution, + output_format: "png", + safety_tolerance: "6", + enable_web_search: true, + thinking_level: "high", + limit_generations: true, + }; + + let model: string; + + if (hasReferenceImages) { + model = validated.model ?? DEFAULT_EDIT_MODEL; + const imageUrls: string[] = []; + if (refImageUrl) imageUrls.push(refImageUrl); + if (validated.images) imageUrls.push(...validated.images); + input.image_urls = imageUrls; + } else { + model = validated.model ?? DEFAULT_T2I_MODEL; + } + + return { model, input }; +} diff --git a/lib/content/image/createImageHandler.ts b/lib/content/image/createImageHandler.ts new file mode 100644 index 00000000..8c41c36d --- /dev/null +++ b/lib/content/image/createImageHandler.ts @@ -0,0 +1,45 @@ +import type { NextRequest } from "next/server"; +import { NextResponse } from "next/server"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import fal from "@/lib/fal/server"; +import { validateCreateImageBody } from "./validateCreateImageBody"; +import { buildImageInput } from "./buildImageInput"; + +/** + * POST /api/content/image + * + * @param request - Incoming request with image generation parameters. + * @returns JSON with the generated image URL. 
+ */ +export async function createImageHandler(request: NextRequest): Promise<NextResponse> { + const validated = await validateCreateImageBody(request); + if (validated instanceof NextResponse) return validated; + + try { + const { model, input } = buildImageInput(validated); + const result = await fal.subscribe(model, { input }); + + const resultData = result.data as Record<string, unknown>; + const imageList = resultData?.images as Array<Record<string, unknown>> | undefined; + + if (!imageList || imageList.length === 0) { + return NextResponse.json( + { status: "error", error: "Image generation returned no image" }, + { status: 502, headers: getCorsHeaders() }, + ); + } + + const urls = imageList.map(img => img.url as string).filter(Boolean); + + return NextResponse.json( + { imageUrl: urls[0], images: urls }, + { status: 200, headers: getCorsHeaders() }, + ); + } catch (error) { + console.error("Image generation error:", error); + return NextResponse.json( + { status: "error", error: "Image generation failed" }, + { status: 500, headers: getCorsHeaders() }, + ); + } +} diff --git a/lib/content/image/validateCreateImageBody.ts b/lib/content/image/validateCreateImageBody.ts new file mode 100644 index 00000000..837b9362 --- /dev/null +++ b/lib/content/image/validateCreateImageBody.ts @@ -0,0 +1,64 @@ +import type { NextRequest } from "next/server"; +import { NextResponse } from "next/server"; +import { z } from "zod"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { safeParseJson } from "@/lib/networking/safeParseJson"; +import { validateAuthContext } from "@/lib/auth/validateAuthContext"; +import { TEMPLATE_IDS } from "@/lib/content/templates"; + +export const createImageBodySchema = z.object({ + template: z.enum(TEMPLATE_IDS).optional(), + prompt: z.string().optional(), + reference_image_url: z.string().url().optional(), + images: z.array(z.string().url()).optional(), + num_images: z.number().int().min(1).max(4).optional().default(1), + aspect_ratio: z + .enum([ + "auto", + "21:9", + 
"16:9", + "3:2", + "4:3", + "5:4", + "1:1", + "4:5", + "3:4", + "2:3", + "9:16", + "4:1", + "1:4", + "8:1", + "1:8", + ]) + .optional() + .default("auto"), + resolution: z.enum(["0.5K", "1K", "2K", "4K"]).optional().default("1K"), + model: z.string().optional(), +}); + +export type ValidatedCreateImageBody = { accountId: string } & z.infer< + typeof createImageBodySchema +>; + +/** + * Validates auth and request body for POST /api/content/image. + */ +export async function validateCreateImageBody( + request: NextRequest, +): Promise<NextResponse | ValidatedCreateImageBody> { + const authResult = await validateAuthContext(request); + if (authResult instanceof NextResponse) return authResult; + + const body = await safeParseJson(request); + const result = createImageBodySchema.safeParse(body); + + if (!result.success) { + const firstError = result.error.issues[0]; + return NextResponse.json( + { status: "error", field: firstError.path, error: firstError.message }, + { status: 400, headers: getCorsHeaders() }, + ); + } + + return { accountId: authResult.accountId, ...result.data }; +} diff --git a/lib/content/templates/album-record-store.ts b/lib/content/templates/album-record-store.ts new file mode 100644 index 00000000..77586780 --- /dev/null +++ b/lib/content/templates/album-record-store.ts @@ -0,0 +1,132 @@ +import type { Template } from "./types"; + +const template: Template = { + id: "album-record-store", + description: + "Vinyl record on display in a NYC record store. No artist on camera — product shot of the album. Promotional captions. Vertical 9:16 video, 8 seconds. Best for: release day, album promotion, single drops. Requires: audio. No face image needed.", + image: { + prompt: + "A vinyl record spinning on a turntable inside a cramped, rundown New York City record store. The album cover art is displayed next to the turntable, propped against a stack of records. Wooden crate bins full of vinyl records fill the background. Warm tungsten overhead light, dust particles visible in the air. 
The store feels lived-in — peeling stickers on the counter, handwritten price tags, faded band posters on the walls. Phone camera, slightly warm color cast.", + reference_images: [ + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/album-record-store/ref-01.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hbGJ1bS1yZWNvcmQtc3RvcmUvcmVmLTAxLnBuZyIsImlhdCI6MTc3NTE4NTA1NywiZXhwIjoxODA2NzIxMDU3fQ.4_aouIYxW9jSZb6U9S_XOgygyVS4Nqg4uPJ0l5qNEz8", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/album-record-store/ref-02.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hbGJ1bS1yZWNvcmQtc3RvcmUvcmVmLTAyLnBuZyIsImlhdCI6MTc3NTE4NTA1NywiZXhwIjoxODA2NzIxMDU3fQ.FcKfpm79HH-cx4NIW_-EJJ7qaxM-LY-Ea72EF3U5zIU", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/album-record-store/ref-03.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hbGJ1bS1yZWNvcmQtc3RvcmUvcmVmLTAzLnBuZyIsImlhdCI6MTc3NTE4NTA1NywiZXhwIjoxODA2NzIxMDU3fQ.Dos9-VI40yCviZNSYRPcc0Owz9QJs1vHvmQ2ptFOCXs", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/album-record-store/ref-04.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hbGJ1bS1yZWNvcmQtc3RvcmUvcmVmLTA0LnBuZyIsImlhdCI6MTc3NTE4NTA1NywiZXhwIjoxODA2NzIxMDU3fQ.Dvk_unwcGS63a-VreepJf3Pm4nm4kYCL0-lThxUkL34", + 
"https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/album-record-store/ref-05.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hbGJ1bS1yZWNvcmQtc3RvcmUvcmVmLTA1LnBuZyIsImlhdCI6MTc3NTE4NTA1NywiZXhwIjoxODA2NzIxMDU3fQ.KCvBqIkjVmAKj4xoU3y5txw2mNwWl88cbj7Ln0u8v68", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/album-record-store/ref-06.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hbGJ1bS1yZWNvcmQtc3RvcmUvcmVmLTA2LnBuZyIsImlhdCI6MTc3NTE4NTA1NywiZXhwIjoxODA2NzIxMDU3fQ.BIGZ2WG15ecaodHkQ5aSprIGbFnXBjqBH62r_vdZ7Eg", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/album-record-store/ref-07.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hbGJ1bS1yZWNvcmQtc3RvcmUvcmVmLTA3LnBuZyIsImlhdCI6MTc3NTE4NTA1NywiZXhwIjoxODA2NzIxMDU3fQ.88e5hWeqa7d1vLhN4KnsGNKV1JXiU9a0zWHZtELJ9DE", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/album-record-store/ref-08.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hbGJ1bS1yZWNvcmQtc3RvcmUvcmVmLTA4LnBuZyIsImlhdCI6MTc3NTE4NTA1NywiZXhwIjoxODA2NzIxMDU3fQ.9MldLiE0pSW9smN402wQ-xewLBkNUNImn6hzoHY5zwU", + 
"https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/album-record-store/ref-09.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hbGJ1bS1yZWNvcmQtc3RvcmUvcmVmLTA5LnBuZyIsImlhdCI6MTc3NTE4NTA1NywiZXhwIjoxODA2NzIxMDU3fQ.p7iStudC3RxtBA_hZUP3sz5dOOtVAkVa9iDFB7ItwDU", + ], + style_rules: { + camera: { + type: "iPhone resting on the counter, recording a quick story", + angle: + "slightly above the turntable, looking down at an angle — like someone held their phone over the record to film it spinning", + quality: + "iPhone video quality — warm color cast from the overhead light, slight lens flare, not perfectly sharp, natural vignetting at corners", + focus: "turntable and album art in focus, background bins and shelves slightly soft", + }, + environment: { + feel: "a real independent record store in lower Manhattan or Brooklyn — cramped, cluttered, full of character", + lighting: + "warm tungsten bulbs overhead, maybe a small desk lamp near the register. Pools of warm light, deep shadows between the bins. 
Dust particles catching the light.", + backgrounds: + "wooden crate bins overflowing with vinyl, hand-lettered genre dividers, faded concert posters and stickers on every surface, a boombox or old speakers on a high shelf, maybe a cat sleeping on a stack of records", + avoid: + "clean modern stores, bright fluorescent lighting, empty shelves, corporate branding, pristine surfaces, anything that looks new or staged", + }, + subject: { + expression: "N/A — no person in the shot, the subject is the album and turntable", + pose: "N/A", + clothing: "N/A", + framing: + "turntable takes up the lower half of frame, album art visible in the upper portion or to the side, surrounded by the store environment", + }, + realism: { + priority: + "this MUST look like a real phone video taken inside an actual NYC record store, not a render or AI image", + texture: + "warm grain from the phone camera, slight dust and scratches visible on the vinyl, wood grain on the crate bins, worn edges on the record sleeves", + imperfections: + "fingerprints on the vinyl, slightly crooked album display, a price sticker on the sleeve, dust on the turntable platter, uneven stacks of records in the background", + avoid: + "clean renders, perfect symmetry, bright even lighting, glossy surfaces, anything that looks digital or AI-generated, stock-photo record stores", + }, + }, + }, + video: { + moods: [ + "warm nostalgia, like walking into a place that reminds you of being a kid", + "quiet pride, the feeling of seeing something you made exist in the real world", + "intimate, like youre showing a close friend something that matters to you", + "reverent, the way people handle vinyl carefully because it feels sacred", + "bittersweet, like the album captured a version of you that doesnt exist anymore", + "hypnotic, the kind of calm that comes from watching something spin in circles", + "peaceful solitude, alone in the store after hours", + "wistful, like remembering the sessions that made this album", + ], 
+ movements: [ + "the vinyl spins steadily, tonearm tracking the groove, dust particles drift through the warm light", + "camera slowly drifts closer to the album art, the vinyl keeps spinning in the background", + "a hand reaches into frame and gently places the needle on the record", + "the turntable spins, the overhead light flickers once, dust motes float lazily", + "someone flips through records in a crate in the background, out of focus, while the vinyl spins", + "the camera barely moves, just the vinyl spinning and the warm light shifting slightly", + "a slight camera drift to reveal more of the store — bins, posters, clutter — then settles back on the turntable", + "the tonearm rides the groove, a tiny reflection of light glints off the spinning vinyl surface", + ], + }, + caption: { + guide: { + templateStyle: + "album art on vinyl in a record store — the kind of post an artist makes when their music hits wax for the first time", + captionRole: + "the caption should feel like the artist posted this themselves. proud but not corny. announcing the vinyl, reflecting on the music, or saying something raw about what the album means.", + tone: "understated pride, like posting a photo of your album in a store and letting the moment speak for itself. 
not hype-man energy — quiet flex.", + rules: [ + "lowercase only", + "keep it under 80 characters for short, can go longer for medium/long", + "no punctuation at the end unless its a question mark", + "never sound like a press release or marketing copy", + "never say 'out now' or 'stream now' or 'link in bio'", + "dont describe whats in the image", + "can reference the album, the songs, or what they mean to you", + "can reference the physical vinyl / record store experience", + "if it sounds like a label wrote it, rewrite it until it sounds like the artist texted it to a friend", + ], + formats: [ + "a one-line reflection on the album ('i left everything in this one')", + "a quiet flex about being on vinyl ('never thought id see this in a store')", + "a nostalgic moment ('used to dig through bins like this looking for something that felt like home')", + "something the listener would screenshot ('this album is the version of me i was scared to show you')", + "a short dedication or thank you that feels real, not performative", + ], + }, + examples: [ + "i left everything in this one", + "found myself in the crates today", + "never thought id see my name on a spine in a record store", + "wrote this in my bedroom now its on wax", + "this album is the version of me i was scared to show you", + "every scratch on this vinyl is a memory", + "the songs sound different on wax. 
heavier somehow", + "somebody in new york is gonna find this in a bin one day and feel something", + ], + }, + edit: { + operations: [ + { type: "crop", aspect: "9:16" }, + { + type: "overlay_text", + color: "white", + stroke_color: "black", + position: "bottom", + max_font_size: 42, + }, + { type: "mux_audio", replace: true }, + ], + }, +}; + +export default template; diff --git a/lib/content/templates/artist-caption-bedroom.ts b/lib/content/templates/artist-caption-bedroom.ts new file mode 100644 index 00000000..67cc4b82 --- /dev/null +++ b/lib/content/templates/artist-caption-bedroom.ts @@ -0,0 +1,133 @@ +import type { Template } from "./types"; + +const template: Template = { + id: "artist-caption-bedroom", + description: + "Moody bedroom selfie. Artist on camera with deadpan expression, purple LED lighting, dark room. Short blunt captions in lowercase. Vertical 9:16 video, 8 seconds. Best for: introspective songs, vulnerable moments, daily content. Requires: face image, audio.", + image: { + prompt: + "A candid front-facing selfie INSIDE A BEDROOM. The person is sitting on an unmade bed or at a desk in their bedroom. Purple LED strip lights glow on the wall behind them. The room is dark with only the purple glow illuminating their face. Phone camera, low light, grainy. Wearing a hoodie, deadpan expression. 
The setting MUST be indoors in a real bedroom, not outside.", + reference_images: [ + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-bedroom/ref-01.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1iZWRyb29tL3JlZi0wMS5wbmciLCJpYXQiOjE3NzUxODUwNTIsImV4cCI6MTgwNjcyMTA1Mn0.LNONuOqaksZeatR8sFGLLlj3d3QWQ1bhETrANiv5VFo", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-bedroom/ref-02.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1iZWRyb29tL3JlZi0wMi5wbmciLCJpYXQiOjE3NzUxODUwNTMsImV4cCI6MTgwNjcyMTA1M30.fmcN6QprMwpHMuVEM72XQ9DZwWC49zfwwB5Hk1DT2_c", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-bedroom/ref-03.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1iZWRyb29tL3JlZi0wMy5wbmciLCJpYXQiOjE3NzUxODUwNTMsImV4cCI6MTgwNjcyMTA1M30.7kRSqn7nnhYmymnOeSf2d8fGTWNWpu87EUL56MTXkkc", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-bedroom/ref-04.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1iZWRyb29tL3JlZi0wNC5wbmciLCJpYXQiOjE3NzUxODUwNTMsImV4cCI6MTgwNjcyMTA1M30.0xTWb46WAqPSWheoRnyeSKccMiIVLglio3NZPnh3Cb0", + 
"https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-bedroom/ref-05.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1iZWRyb29tL3JlZi0wNS5wbmciLCJpYXQiOjE3NzUxODUwNTMsImV4cCI6MTgwNjcyMTA1M30.lYNYQ-NPuvt2jYxei33DRrblLRvd_ksaswH9rBgEccI", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-bedroom/ref-06.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1iZWRyb29tL3JlZi0wNi5wbmciLCJpYXQiOjE3NzUxODUwNTMsImV4cCI6MTgwNjcyMTA1M30.q3xfJzyINgd68YJyYaII55y3gFUKDb0vSr4uueNSys0", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-bedroom/ref-07.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1iZWRyb29tL3JlZi0wNy5wbmciLCJpYXQiOjE3NzUxODUwNTMsImV4cCI6MTgwNjcyMTA1M30.2sIZZARH7N5cm4PG_4Y7KOepbrNZXqTt5rdghN-7oIA", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-bedroom/ref-08.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1iZWRyb29tL3JlZi0wOC5wbmciLCJpYXQiOjE3NzUxODUwNTMsImV4cCI6MTgwNjcyMTA1M30.Rnjr7owp6zoz-RSuBsdgLVvs2xo3uzASAoCvXyn-CKc", + 
"https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-bedroom/ref-09.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1iZWRyb29tL3JlZi0wOS5wbmciLCJpYXQiOjE3NzUxODUwNTMsImV4cCI6MTgwNjcyMTA1M30.6jasZ_PBNu7p-rLM7jgzEXe2GwuTsdpNNG9_FOupgXY", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-bedroom/ref-10.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1iZWRyb29tL3JlZi0xMC5wbmciLCJpYXQiOjE3NzUxODUwNTQsImV4cCI6MTgwNjcyMTA1NH0.hjlEdopp4MstfHLpTl84T2ev54ecedUVsiYXSaV3AP4", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-bedroom/ref-11.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1iZWRyb29tL3JlZi0xMS5wbmciLCJpYXQiOjE3NzUxODUwNTQsImV4cCI6MTgwNjcyMTA1NH0.E8Sp_BSQqzVMGxx5t4SVYKiT3_CnTxPcvqRcEnRB6rU", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-bedroom/ref-12.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1iZWRyb29tL3JlZi0xMi5wbmciLCJpYXQiOjE3NzUxODUwNTQsImV4cCI6MTgwNjcyMTA1NH0.ePlhmDPm2LuK2TD7mDgnO7ta0k_cdV8mWF8kwBR3y9k", + 
"https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-bedroom/ref-13.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1iZWRyb29tL3JlZi0xMy5wbmciLCJpYXQiOjE3NzUxODUwNTQsImV4cCI6MTgwNjcyMTA1NH0.fe2N42_2A7jj8m-SD3TUel0-wvaOrWn2XiARHLmHp00", + ], + style_rules: { + camera: { + type: "front-facing phone camera", + angle: "held slightly below face, looking down at lens", + quality: "phone camera in low light, slight noise and grain, not DSLR sharp", + focus: "face in focus, background slightly soft but not artificially blurred", + }, + environment: { + feel: "real, uncontrolled, wherever they happen to be", + lighting: + "soft, dim purple glow from a desk lamp or LED strip — barely illuminating the room, heavy shadows, most of the frame is dark, only the face catches light", + backgrounds: + "real lived-in bedroom — unmade bed, plain walls, ceiling vents, clutter, nothing curated or staged", + avoid: + "clean renders, perfect symmetry, stock-photo rooms, AI-looking environments, smooth surfaces, studio backdrops", + }, + subject: { + expression: "deadpan, slightly bored, vulnerable, not smiling for the camera", + pose: "casual — hand in hair, hood up, slouched, not posed or performative", + clothing: "oversized hoodie, sweater, or dark casual top", + framing: "head and shoulders, close crop, face takes up most of the frame", + }, + realism: { + priority: "the image must look like a real phone photo, not AI-generated", + texture: "grainy, slightly noisy, imperfect skin texture visible", + imperfections: "messy hair, wrinkled fabric, uneven lighting, random objects in background", + avoid: + "smooth skin, perfect hair, symmetrical composition, clean backgrounds, hyper-sharp detail, uncanny valley", + }, + }, + }, + video: { + moods: [ + "numb, checked out, staring through the camera not at it", + 
"melancholy, like they just remembered something they were trying to forget", + "quietly amused, like they heard a joke only they understand", + "vulnerable, guard is down, too tired to pretend", + "bored but in a way thats almost peaceful", + "restless, like they want to say something but wont", + "defiant, calm anger, daring you to say something", + "lonely but pretending theyre fine", + "soft, gentle, like theyre about to whisper a secret", + "dissociating, physically present but mentally somewhere else", + ], + movements: [ + "nearly still, only natural breathing", + "the very corner of their mouth barely lifts into the faintest smirk", + "eyes slowly drift up and to the side like thinking about something", + "very slowly tilts head slightly to one side", + "trying to stay deadpan but fighting a smile, lips press together", + "slow quiet exhale through nose, shoulders drop slightly", + "glances away from camera for a moment then slowly looks back", + "jaw tightens slightly like holding something in", + "one eyebrow raises just barely, like a silent question", + "chest rises and falls in one visible sigh", + ], + }, + caption: { + guide: { + templateStyle: + "deadpan selfie with music playing — artist staring at camera, too cool to care", + captionRole: + "the caption is the hook. short, blunt, makes someone stop scrolling. inspired by the songs vibe, not a quote from it.", + tone: "deadpan, low effort, like you typed it with one thumb while bored", + rules: [ + "lowercase only", + "SHORTER IS ALWAYS BETTER. aim for 20-50 characters. never exceed 60", + "no punctuation at the end", + "no apostrophes or quotes — write whats up not what's up, write dont not don't", + "never promotional", + "never describe whats in the video", + "dont quote the lyrics directly — riff on the vibe instead", + "dont try to be clever or poetic. be blunt and simple", + "if it sounds like an AI wrote it, its too long and too try-hard. 
simplify", + "think: what would a bored teenager type as a caption in 3 seconds", + ], + formats: [ + "a blunt confession (6-10 words max)", + "a 'date idea:' or 'pov:' setup (keep it short)", + "a self-deprecating one-liner", + "a hyper-specific relatable moment in as few words as possible", + "something dumb that somehow hits hard", + ], + }, + examples: [ + "i still keep our photos in the hidden folder in my camera roll in case you come back to me", + "i'm touring 14 cities in north america this summer (i'm just looking for the girl i wrote my songs abt cause she won't text me back)", + "date idea: we erase our past and fall back in love so i can unwrite this song", + "if anyone could've saved me", + "this came out 8 months ago and caroline still hasn't texted me back", + "it's always 'imy' and never 'islfyiebinfy'", + ], + }, + edit: { + operations: [ + { type: "crop", aspect: "9:16" }, + { + type: "overlay_text", + color: "white", + stroke_color: "black", + position: "bottom", + max_font_size: 42, + }, + { type: "mux_audio", replace: true }, + ], + }, +}; + +export default template; diff --git a/lib/content/templates/artist-caption-outside.ts b/lib/content/templates/artist-caption-outside.ts new file mode 100644 index 00000000..4def1318 --- /dev/null +++ b/lib/content/templates/artist-caption-outside.ts @@ -0,0 +1,147 @@ +import type { Template } from "./types"; + +const template: Template = { + id: "artist-caption-outside", + description: + "Night street scene. Artist on camera, phone-on-ground angle, urban cinematic feel. Confident short captions. Vertical 9:16 video, 8 seconds. Best for: confident tracks, urban energy, night vibes. Requires: face image, audio.", + image: { + prompt: + "A person standing outside at night, phone propped on the ground filming them. Low angle, full body shot. Street lights and city glow. 
Real phone footage feel, slightly shaky framing.", + reference_images: [ + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-01.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0wMS5wbmciLCJpYXQiOjE3NzUxODUwNTQsImV4cCI6MTgwNjcyMTA1NH0.xV77akF4oFtZGjCkn1roI9M9vPGE96Ux_ZvT5wWgEKA", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-02.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0wMi5wbmciLCJpYXQiOjE3NzUxODUwNTQsImV4cCI6MTgwNjcyMTA1NH0.EljTa5aA6egBf4KXPFCjwsZojOZ7S9QgOEyIiH9HjKE", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-03.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0wMy5wbmciLCJpYXQiOjE3NzUxODUwNTQsImV4cCI6MTgwNjcyMTA1NH0.pi6r-0q6cxRwbYMso0h5LtacMonbcEUJYtuLoOJdWdU", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-04.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0wNC5wbmciLCJpYXQiOjE3NzUxODUwNTQsImV4cCI6MTgwNjcyMTA1NH0.uCQaIDaLv2YM7wMf-6LnfJh3r_A8pu-7i3FNjuQHRUs", + 
"https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-05.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0wNS5wbmciLCJpYXQiOjE3NzUxODUwNTQsImV4cCI6MTgwNjcyMTA1NH0.EA3lTITRof9pSUJ3KxzK9ZgYEIsWkGXPcPMSCGDVfHg", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-06.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0wNi5wbmciLCJpYXQiOjE3NzUxODUwNTQsImV4cCI6MTgwNjcyMTA1NH0.qXjexkFDzRrPvYso-_WJUH66No1PXUzNow7jdEw04cc", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-07.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0wNy5wbmciLCJpYXQiOjE3NzUxODUwNTQsImV4cCI6MTgwNjcyMTA1NH0.2oExeNxOGr7KEEo5zWThgZWaZhJnnooPWsXj6Gp_4jU", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-08.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0wOC5wbmciLCJpYXQiOjE3NzUxODUwNTUsImV4cCI6MTgwNjcyMTA1NX0.LDCXObRzgYJSPs4IoXtY9pinb1gCO1iVgb9-uX-JMv8", + 
"https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-09.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0wOS5wbmciLCJpYXQiOjE3NzUxODUwNTUsImV4cCI6MTgwNjcyMTA1NX0.WD5xCYsI3klZHS2cVsrXW6T_x7bdVku22EdD7qkazDs", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-10.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0xMC5wbmciLCJpYXQiOjE3NzUxODUwNTUsImV4cCI6MTgwNjcyMTA1NX0.VfN889NyKAPLKDT6IQVTRzLH4_cegNUGuX3P3bN4oy4", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-11.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0xMS5wbmciLCJpYXQiOjE3NzUxODUwNTUsImV4cCI6MTgwNjcyMTA1NX0.Z1IQGbIeKombxFIAO-Y2YqYF1s8MBsggx1JR1_oFshM", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-12.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0xMi5wbmciLCJpYXQiOjE3NzUxODUwNTUsImV4cCI6MTgwNjcyMTA1NX0.Ch498MgcnLZcUOAESkbwulqS30ZJn5cL0sCLknsB8es", + 
"https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-13.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0xMy5wbmciLCJpYXQiOjE3NzUxODUwNTUsImV4cCI6MTgwNjcyMTA1NX0.Euiy_gmg3dXaafDS1MCm_IGV3SDvyOmWUja13SffxqQ", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-14.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0xNC5wbmciLCJpYXQiOjE3NzUxODUwNTUsImV4cCI6MTgwNjcyMTA1NX0.RvaxLUBmArSzTjDAzOcSpF3VUfxPIBw98nmNt5f2zjU", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-15.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0xNS5wbmciLCJpYXQiOjE3NzUxODUwNTUsImV4cCI6MTgwNjcyMTA1NX0.UA30E9V-f-euLuAlWyFKt6zoR7J9BAfUdOzuz7-gNJY", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-16.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0xNi5wbmciLCJpYXQiOjE3NzUxODUwNTUsImV4cCI6MTgwNjcyMTA1NX0.s6kmLCjl87FSBGbQ25fGr41YsWndLgot-Spc01WLYxo", + ], + style_rules: { + camera: { + type: "iPhone propped on the sidewalk, recording video", + angle: + "very low, ground level, looking up at the person. 
Slightly tilted because the phone is leaning against something", + quality: + "iPhone night mode video — auto-exposure pumping, digital noise everywhere, slight purple fringing on bright lights, compressed quality", + }, + environment: { + feel: "somewhere outside at night, wherever they happen to be — doesn't matter where", + lighting: + "whatever light sources are nearby — street lamps, porch lights, car headlights, neon signs, gas station lights. Uneven, one-directional, casting harsh shadows. Not controlled.", + backgrounds: + "real places — parking lot, sidewalk, driveway, park, alley, outside a store, under a street light, by a fence. Blurry background details, messy and unplanned.", + avoid: + "daytime, even lighting, clean or curated backgrounds, professional photography, perfectly exposed, obviously staged locations", + }, + subject: { + expression: "deadpan, unbothered, too cool to care about the camera", + pose: "full body, standing naturally, weight on one leg, hands in pockets or at sides, not posing", + clothing: "dark oversized hoodie or jacket, baggy jeans or cargo pants, dark shoes", + framing: + "full body visible head to toe, person takes up about 50-60% of the frame height, space around them, ground visible at bottom", + }, + realism: { + priority: + "MUST look like a real iPhone video screenshot, not AI. 
if it looks clean or polished it has failed", + texture: + "heavy digital noise in all dark areas, visible JPEG artifacts, color banding in the sky, slight motion blur on any movement", + imperfections: + "lens flare streaking across frame from street lights, blown out highlights that are pure white, slightly warm color cast from sodium lamps, the ground has texture and cracks, shadows are noisy not smooth", + avoid: + "clean noise-free images, perfect skin, sharp focus on everything, symmetrical composition, studio quality, smooth gradients, any sign of AI generation, evenly lit scenes", + }, + }, + }, + video: { + moods: [ + "numb, checked out, staring through the camera not at it", + "melancholy, like they just remembered something they were trying to forget", + "quietly amused, like they heard a joke only they understand", + "vulnerable, guard is down, too tired to pretend", + "bored but in a way that's almost peaceful", + "restless, like they want to say something but won't", + "defiant, calm anger, daring you to say something", + "lonely but pretending they're fine", + "soft, gentle, like they're about to whisper a secret", + "dissociating, physically present but mentally somewhere else", + ], + movements: [ + "standing still with hands in pockets, staring at the camera", + "slowly turns around so their back faces the camera", + "looks down at the ground and kicks at it with their shoe", + "does a small shrug like whatever", + "blows a bubble with gum", + "slowly puts their hood up", + "does a slow lazy spin", + "waves at the camera sarcastically", + "starts to walk away, stops, looks back", + "sits down on the ground cross legged", + "leans against a wall with arms crossed", + "throws up a peace sign without changing expression", + "tosses something small in the air and catches it", + "mouths the words to the song playing", + "zones out looking up at the sky", + "pulls out phone, looks at it, puts it back", + ], + }, + caption: { + guide: { + templateStyle: 
+ "deadpan selfie with music playing — artist staring at camera, too cool to care", + captionRole: + "the caption is the hook. short, blunt, makes someone stop scrolling. inspired by the songs vibe, not a quote from it.", + tone: "deadpan, low effort, like you typed it with one thumb while bored", + rules: [ + "lowercase only", + "SHORTER IS ALWAYS BETTER. aim for 20-50 characters. never exceed 60", + "no punctuation at the end", + "no apostrophes or quotes — write whats up not what's up, write dont not don't", + "never promotional", + "never describe whats in the video", + "dont quote the lyrics directly — riff on the vibe instead", + "dont try to be clever or poetic. be blunt and simple", + "if it sounds like an AI wrote it, its too long and too try-hard. simplify", + "think: what would a bored teenager type as a caption in 3 seconds", + ], + formats: [ + "a blunt confession (6-10 words max)", + "a 'date idea:' or 'pov:' setup (keep it short)", + "a self-deprecating one-liner", + "a hyper-specific relatable moment in as few words as possible", + "something dumb that somehow hits hard", + ], + }, + examples: [ + "i still keep our photos in the hidden folder in my camera roll in case you come back to me", + "i'm touring 14 cities in north america this summer (i'm just looking for the girl i wrote my songs abt cause she won't text me back)", + "date idea: we erase our past and fall back in love so i can unwrite this song", + "if anyone could've saved me", + "this came out 8 months ago and caroline still hasn't texted me back", + "it's always 'imy' and never 'islfyiebinfy'", + ], + }, + edit: { + operations: [ + { type: "crop", aspect: "9:16" }, + { + type: "overlay_text", + color: "white", + stroke_color: "black", + position: "bottom", + max_font_size: 42, + }, + { type: "mux_audio", replace: true }, + ], + }, +}; + +export default template; diff --git a/lib/content/templates/artist-caption-stage.ts b/lib/content/templates/artist-caption-stage.ts new file mode 
100644 index 00000000..33cb01a1 --- /dev/null +++ b/lib/content/templates/artist-caption-stage.ts @@ -0,0 +1,110 @@ +import type { Template } from "./types"; + +const template: Template = { + id: "artist-caption-stage", + description: + "Small venue fan cam. Artist on camera from crowd perspective, performance energy. Hype short captions. Vertical 9:16 video, 8 seconds. Best for: upbeat songs, live feel, hype moments. Requires: face image, audio.", + image: { + prompt: + "A person performing on a small stage at a live show. Fan cam perspective — phone held up in the crowd. Stage lights, slightly blurry, not professional photography.", + reference_images: [ + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-stage/ref-01.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1zdGFnZS9yZWYtMDEucG5nIiwiaWF0IjoxNzc1MTg1MDU1LCJleHAiOjE4MDY3MjEwNTV9.Ff9Olh-7AH9hpGsnoNjm137i_z5QasP6W6fkd7UgXHs", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-stage/ref-02.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1zdGFnZS9yZWYtMDIucG5nIiwiaWF0IjoxNzc1MTg1MDU2LCJleHAiOjE4MDY3MjEwNTZ9.5h8pm3f3ns8UOpRII5klLBY6hjyNKc4eln-y2RhOoZw", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-stage/ref-03.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1zdGFnZS9yZWYtMDMucG5nIiwiaWF0IjoxNzc1MTg1MDU2LCJleHAiOjE4MDY3MjEwNTZ9.Zth40VhNl3aV-IXcRdNrVpJxfDnG9OX8d0lhd3iYUW8", + 
"https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-stage/ref-04.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1zdGFnZS9yZWYtMDQucG5nIiwiaWF0IjoxNzc1MTg1MDU2LCJleHAiOjE4MDY3MjEwNTZ9.SVMtgCM9TJ0DEJPB6mXfhu6lLI5ttjpCNNUmyntToTs", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-stage/ref-05.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1zdGFnZS9yZWYtMDUucG5nIiwiaWF0IjoxNzc1MTg1MDU2LCJleHAiOjE4MDY3MjEwNTZ9.zOthD-7e3-TrRbwygF9ydyAJnycli6ewj8sd_xpHYBs", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-stage/ref-06.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1zdGFnZS9yZWYtMDYucG5nIiwiaWF0IjoxNzc1MTg1MDU2LCJleHAiOjE4MDY3MjEwNTZ9.4NYpj1wRqwFLf5i_k_vrw8CSg6tTf_kkvaIafwbTfdw", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-stage/ref-07.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1zdGFnZS9yZWYtMDcucG5nIiwiaWF0IjoxNzc1MTg1MDU2LCJleHAiOjE4MDY3MjEwNTZ9._4ytmg9RN6SR_M6Eo0mNc_kYG5XkCPKp50ApqMg6qq4", + 
"https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-stage/ref-08.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1zdGFnZS9yZWYtMDgucG5nIiwiaWF0IjoxNzc1MTg1MDU2LCJleHAiOjE4MDY3MjEwNTZ9.QI2pPs1lDDOHN-BqeSjNm8Fu0TJJwOagcDKCXyb1AqQ", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-stage/ref-09.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1zdGFnZS9yZWYtMDkucG5nIiwiaWF0IjoxNzc1MTg1MDU2LCJleHAiOjE4MDY3MjEwNTZ9.rDvcjb4DhlC8w7ehpgvL8x7PScPfiQaUQg56vpIIy-4", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-stage/ref-10.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1zdGFnZS9yZWYtMTAucG5nIiwiaWF0IjoxNzc1MTg1MDU3LCJleHAiOjE4MDY3MjEwNTd9.oQ4VKoltTJJPSQMfJ8E0mEh1mtDXN0JigntzoIhmPo8", + ], + style_rules: { + camera: { + type: "iPhone held up in a crowd recording a concert", + angle: "slightly below stage level, looking up at performer, not perfectly centered", + quality: + "iPhone video screenshot quality — compressed, noisy, not sharp. Digital noise in dark areas. Slight purple fringing on highlights.", + }, + environment: { + feel: "cramped small venue, sweaty, dark, someone's phone screen glowing in the corner", + lighting: + "harsh stage spots from above — blown out orange and red highlights, deep black shadows, face half in darkness. 
Light spill is uneven and messy.", + backgrounds: + "out of focus crowd silhouettes, blurry stage equipment, maybe a phone screen or two glowing in the audience, exit sign in the distance", + avoid: + "even lighting, clean backgrounds, arena-sized venues, professional concert photography, perfectly exposed images, visible detail in dark areas", + }, + subject: { + expression: "mid-performance — eyes closed singing, chin up, lost in the music", + pose: "holding mic close, one hand up, or gripping mic stand, slightly blurry from movement", + clothing: "dark — black hoodie, dark jacket, nothing bright or styled", + framing: + "not perfectly framed — subject slightly off center, maybe someone's head partially blocking the bottom, cropped awkwardly like a real phone photo", + }, + realism: { + priority: + "this MUST look like a screenshot from someone's iPhone concert video, not a professional photo or AI image", + texture: + "heavy digital noise in shadows, JPEG compression artifacts, slight color banding in gradients, skin has no retouching", + imperfections: + "lens flare bleeding across frame, blown out stage light spots that are pure white, someone's hand or phone slightly visible at edge of frame, chromatic aberration on bright lights, slight motion blur on performer's hands", + avoid: + "clean noise-free images, perfect skin, sharp focus on everything, symmetrical composition, studio quality, any sign of AI generation", + }, + }, + }, + video: { + moods: [], + movements: [], + }, + caption: { + guide: { + templateStyle: + "live performance with emotional or lyric caption — the artist on stage with words that hit", + captionRole: + "the caption adds emotional weight to the image. 
it can be a lyric, a question, a confession, or a thought that makes the viewer feel something while looking at the performance", + tone: "raw, emotional, vulnerable, poetic — like the artist is speaking directly to one person in the crowd", + rules: [ + "lowercase only", + "max 100 characters (can be longer than casual template since its more emotional)", + "apostrophes are allowed (im, youre, dont all ok — but also i'm, you're, don't all ok)", + "question marks are allowed", + "never promotional", + "never describe what's in the image", + "can be a direct lyric quote from the song", + "can be a rhetorical question", + "should feel like the artist is saying it mid-performance", + ], + formats: [ + "a lyric line that hits hardest out of context", + "a rhetorical question directed at someone specific", + "a confession that feels too honest for a stage", + "a one-line gut punch", + "something that makes you screenshot and send to someone", + ], + }, + examples: [ + "how can you look at me and pretend i'm someone you've never met?", + "i wrote this song about you and you don't even know", + "every time i sing this part i think about leaving", + "this is the last song i'll ever write about you", + "i hope you hear this and it ruins your whole night", + ], + }, + edit: { + operations: [ + { type: "crop", aspect: "9:16" }, + { + type: "overlay_text", + color: "white", + stroke_color: "black", + position: "bottom", + max_font_size: 42, + }, + { type: "mux_audio", replace: true }, + ], + }, +}; + +export default template; diff --git a/lib/content/templates/index.ts b/lib/content/templates/index.ts new file mode 100644 index 00000000..b870a0cb --- /dev/null +++ b/lib/content/templates/index.ts @@ -0,0 +1,4 @@ +export type { Template, TemplateEditOperation } from "./types"; +export { TEMPLATES, TEMPLATE_IDS } from "./templates"; +export { loadTemplate } from "./loadTemplate"; +export { listTemplates } from "./listTemplates"; diff --git 
a/lib/content/templates/listTemplates.ts b/lib/content/templates/listTemplates.ts new file mode 100644 index 00000000..eff00d9f --- /dev/null +++ b/lib/content/templates/listTemplates.ts @@ -0,0 +1,13 @@ +import { TEMPLATES } from "./templates"; + +/** + * List all available templates with id and description only. + * + * @returns Array of template summaries. + */ +export function listTemplates(): { id: string; description: string }[] { + return Object.values(TEMPLATES).map(t => ({ + id: t.id, + description: t.description, + })); +} diff --git a/lib/content/templates/loadTemplate.ts b/lib/content/templates/loadTemplate.ts new file mode 100644 index 00000000..c044093a --- /dev/null +++ b/lib/content/templates/loadTemplate.ts @@ -0,0 +1,12 @@ +import type { Template } from "./types"; +import { TEMPLATES } from "./templates"; + +/** + * Load a template by ID. Returns null if not found. + * + * @param id - Template identifier. + * @returns The full template config, or null. + */ +export function loadTemplate(id: string): Template | null { + return TEMPLATES[id] ?? 
null; +} diff --git a/lib/content/templates/templates.ts b/lib/content/templates/templates.ts new file mode 100644 index 00000000..e8d68981 --- /dev/null +++ b/lib/content/templates/templates.ts @@ -0,0 +1,14 @@ +import type { Template } from "./types"; +import bedroomTemplate from "./artist-caption-bedroom"; +import outsideTemplate from "./artist-caption-outside"; +import stageTemplate from "./artist-caption-stage"; +import recordStoreTemplate from "./album-record-store"; + +export const TEMPLATES = { + "artist-caption-bedroom": bedroomTemplate, + "artist-caption-outside": outsideTemplate, + "artist-caption-stage": stageTemplate, + "album-record-store": recordStoreTemplate, +} as const satisfies Record; + +export const TEMPLATE_IDS = Object.keys(TEMPLATES) as [string, ...string[]]; diff --git a/lib/content/templates/types.ts b/lib/content/templates/types.ts new file mode 100644 index 00000000..f275b736 --- /dev/null +++ b/lib/content/templates/types.ts @@ -0,0 +1,31 @@ +export interface TemplateEditOperation { + type: string; + [key: string]: unknown; +} + +export interface Template { + id: string; + description: string; + image: { + prompt: string; + reference_images: string[]; + style_rules: Record>; + }; + video: { + moods: string[]; + movements: string[]; + }; + caption: { + guide: { + templateStyle?: string; + captionRole?: string; + tone: string; + rules: string[]; + formats: string[]; + }; + examples: string[]; + }; + edit: { + operations: TemplateEditOperation[]; + }; +} diff --git a/lib/content/transcribe/createAudioHandler.ts b/lib/content/transcribe/createAudioHandler.ts new file mode 100644 index 00000000..8805a640 --- /dev/null +++ b/lib/content/transcribe/createAudioHandler.ts @@ -0,0 +1,27 @@ +import type { NextRequest } from "next/server"; +import { NextResponse } from "next/server"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { validateTranscribeAudioBody } from "./validateTranscribeAudioBody"; +import { 
transcribeAudio } from "./transcribeAudio"; + +/** + * POST /api/content/transcribe + * + * @param request - Incoming request with audio URLs to transcribe. + * @returns JSON with transcription and timestamped segments. + */ +export async function createAudioHandler(request: NextRequest): Promise { + const validated = await validateTranscribeAudioBody(request); + if (validated instanceof NextResponse) return validated; + + try { + const result = await transcribeAudio(validated); + return NextResponse.json(result, { status: 200, headers: getCorsHeaders() }); + } catch (error) { + console.error("Audio processing error:", error); + return NextResponse.json( + { status: "error", error: "Audio processing failed" }, + { status: 500, headers: getCorsHeaders() }, + ); + } +} diff --git a/lib/content/transcribe/transcribeAudio.ts b/lib/content/transcribe/transcribeAudio.ts new file mode 100644 index 00000000..ccebcebc --- /dev/null +++ b/lib/content/transcribe/transcribeAudio.ts @@ -0,0 +1,48 @@ +import type { z } from "zod"; +import fal from "@/lib/fal/server"; +import type { createAudioBodySchema } from "./validateTranscribeAudioBody"; + +const DEFAULT_MODEL = "fal-ai/whisper"; + +type AudioParams = z.infer; + +export interface TranscribeResult { + audioUrl: string; + fullLyrics: string; + segments: Array<{ start: number; end: number; text: string }>; + segmentCount: number; +} + +/** + * Transcribe audio using the fal whisper model. + * + * @param validated - Validated audio transcription parameters. + * @returns Transcription with lyrics, segments, and segment count. + */ +export async function transcribeAudio(validated: AudioParams): Promise { + const audioUrl = validated.audio_urls[0]; + + const result = await fal.subscribe(validated.model ?? 
DEFAULT_MODEL, { + input: { + audio_url: audioUrl, + task: "transcribe", + chunk_level: validated.chunk_level, + language: validated.language, + diarize: validated.diarize, + }, + }); + + const whisperData = result.data as unknown as { + text?: string; + chunks?: Array<{ timestamp: number[]; text: string }>; + }; + + const fullLyrics = whisperData.text ?? ""; + const segments = (whisperData.chunks ?? []).map(chunk => ({ + start: chunk.timestamp[0] ?? 0, + end: chunk.timestamp[1] ?? 0, + text: chunk.text?.trim() ?? "", + })); + + return { audioUrl, fullLyrics, segments, segmentCount: segments.length }; +} diff --git a/lib/content/transcribe/validateTranscribeAudioBody.ts b/lib/content/transcribe/validateTranscribeAudioBody.ts new file mode 100644 index 00000000..df34a56c --- /dev/null +++ b/lib/content/transcribe/validateTranscribeAudioBody.ts @@ -0,0 +1,41 @@ +import type { NextRequest } from "next/server"; +import { NextResponse } from "next/server"; +import { z } from "zod"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { safeParseJson } from "@/lib/networking/safeParseJson"; +import { validateAuthContext } from "@/lib/auth/validateAuthContext"; + +export const createAudioBodySchema = z.object({ + audio_urls: z.array(z.string().url()).min(1), + language: z.string().optional().default("en"), + chunk_level: z.enum(["none", "segment", "word"]).optional().default("word"), + diarize: z.boolean().optional().default(false), + model: z.string().optional(), +}); + +export type ValidatedTranscribeAudioBody = { accountId: string } & z.infer< + typeof createAudioBodySchema +>; + +/** + * Validates auth and request body for POST /api/content/transcribe. 
+ */ +export async function validateTranscribeAudioBody( + request: NextRequest, +): Promise<NextResponse | ValidatedTranscribeAudioBody> { + const authResult = await validateAuthContext(request); + if (authResult instanceof NextResponse) return authResult; + + const body = await safeParseJson(request); + const result = createAudioBodySchema.safeParse(body); + + if (!result.success) { + const firstError = result.error.issues[0]; + return NextResponse.json( + { status: "error", field: firstError.path, error: firstError.message }, + { status: 400, headers: getCorsHeaders() }, + ); + } + + return { accountId: authResult.accountId, ...result.data }; +} diff --git a/lib/content/upscale/createUpscaleHandler.ts b/lib/content/upscale/createUpscaleHandler.ts new file mode 100644 index 00000000..b76f08b5 --- /dev/null +++ b/lib/content/upscale/createUpscaleHandler.ts @@ -0,0 +1,29 @@ +import type { NextRequest } from "next/server"; +import { NextResponse } from "next/server"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { validateUpscaleBody } from "./validateUpscaleBody"; +import { upscaleMedia } from "./upscaleMedia"; + +/** + * POST /api/content/upscale + * + * @param request - Incoming request with the URL and type to upscale. + * @returns JSON with the upscaled URL. + */ +export async function createUpscaleHandler(request: NextRequest): Promise<NextResponse> { + const validated = await validateUpscaleBody(request); + if (validated instanceof NextResponse) return validated; + + try { + const url = await upscaleMedia(validated); + return NextResponse.json({ url }, { status: 200, headers: getCorsHeaders() }); + } catch (error) { + console.error("Upscale error:", error); + const message = error instanceof Error ? error.message : "Upscale failed"; + const status = message.includes("no result") ?
502 : 500; + return NextResponse.json( + { status: "error", error: message }, + { status, headers: getCorsHeaders() }, + ); + } +} diff --git a/lib/content/upscale/upscaleMedia.ts b/lib/content/upscale/upscaleMedia.ts new file mode 100644 index 00000000..e5928b75 --- /dev/null +++ b/lib/content/upscale/upscaleMedia.ts @@ -0,0 +1,42 @@ +import type { z } from "zod"; +import fal from "@/lib/fal/server"; +import type { createUpscaleBodySchema } from "./validateUpscaleBody"; + +type UpscaleParams = z.infer<typeof createUpscaleBodySchema>; + +/** + * Upscale an image or video using the fal seedvr model. + * + * @param validated - Validated upscale parameters. + * @returns The upscaled media URL. + * @throws Error if the upscale returns no result. + */ +export async function upscaleMedia(validated: UpscaleParams): Promise<string> { + const model = + validated.type === "video" ? "fal-ai/seedvr/upscale/video" : "fal-ai/seedvr/upscale/image"; + + const inputKey = validated.type === "video" ? "video_url" : "image_url"; + + const input: Record<string, unknown> = { + [inputKey]: validated.url, + upscale_factor: validated.upscale_factor, + }; + if (validated.target_resolution) { + input.upscale_mode = "target"; + input.target_resolution = validated.target_resolution; + } + + const result = await fal.subscribe(model as string, { input }); + + const resultData = result.data as Record<string, unknown>; + const url = + validated.type === "video" + ?
((resultData?.video as Record<string, unknown>)?.url as string | undefined) + : ((resultData?.image as Record<string, unknown>)?.url as string | undefined); + + if (!url) { + throw new Error("Upscale returned no result"); + } + + return url; +} diff --git a/lib/content/upscale/validateUpscaleBody.ts b/lib/content/upscale/validateUpscaleBody.ts new file mode 100644 index 00000000..496ecf1d --- /dev/null +++ b/lib/content/upscale/validateUpscaleBody.ts @@ -0,0 +1,38 @@ +import type { NextRequest } from "next/server"; +import { NextResponse } from "next/server"; +import { z } from "zod"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { safeParseJson } from "@/lib/networking/safeParseJson"; +import { validateAuthContext } from "@/lib/auth/validateAuthContext"; + +export const createUpscaleBodySchema = z.object({ + url: z.string().url(), + type: z.enum(["image", "video"]), + upscale_factor: z.number().min(1).max(4).optional().default(2), + target_resolution: z.enum(["720p", "1080p", "1440p", "2160p"]).optional(), +}); + +export type ValidatedUpscaleBody = { accountId: string } & z.infer<typeof createUpscaleBodySchema>; + +/** + * Validates auth and request body for POST /api/content/upscale.
+ */ +export async function validateUpscaleBody( + request: NextRequest, +): Promise<NextResponse | ValidatedUpscaleBody> { + const authResult = await validateAuthContext(request); + if (authResult instanceof NextResponse) return authResult; + + const body = await safeParseJson(request); + const result = createUpscaleBodySchema.safeParse(body); + + if (!result.success) { + const firstError = result.error.issues[0]; + return NextResponse.json( + { status: "error", field: firstError.path, error: firstError.message }, + { status: 400, headers: getCorsHeaders() }, + ); + } + + return { accountId: authResult.accountId, ...result.data }; +} diff --git a/lib/content/validateCreateContentBody.ts b/lib/content/validateCreateContentBody.ts index f6dccad0..7e02a543 100644 --- a/lib/content/validateCreateContentBody.ts +++ b/lib/content/validateCreateContentBody.ts @@ -4,7 +4,6 @@ import { z } from "zod"; import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; import { safeParseJson } from "@/lib/networking/safeParseJson"; import { validateAuthContext } from "@/lib/auth/validateAuthContext"; -import { DEFAULT_CONTENT_TEMPLATE } from "@/lib/content/contentTemplates"; import { isSupportedContentTemplate } from "@/lib/content/isSupportedContentTemplate"; import { resolveArtistSlug } from "@/lib/content/resolveArtistSlug"; import { songsSchema } from "@/lib/content/songsSchema"; @@ -15,11 +14,7 @@ export const createContentBodySchema = z.object({ artist_account_id: z .string({ message: "artist_account_id is required" }) .uuid("artist_account_id must be a valid UUID"), - template: z - .string() - .min(1, "template cannot be empty") - .optional() - .default(DEFAULT_CONTENT_TEMPLATE), + template: z.string().min(1, "template cannot be empty").optional(), lipsync: z.boolean().optional().default(false), caption_length: z.enum(CAPTION_LENGTHS).optional().default("short"), upscale: z.boolean().optional().default(false), @@ -32,7 +27,7 @@ export type ValidatedCreateContentBody = { accountId: string; artistAccountId:
string; artistSlug: string; - template: string; + template?: string; lipsync: boolean; captionLength: "short" | "medium" | "long"; upscale: boolean; @@ -70,8 +65,8 @@ export async function validateCreateContentBody( return authResult; } - const template = result.data.template ?? DEFAULT_CONTENT_TEMPLATE; - if (!isSupportedContentTemplate(template)) { + const template = result.data.template; + if (template && !isSupportedContentTemplate(template)) { return NextResponse.json( { status: "error", diff --git a/lib/content/video/buildFalInput.ts b/lib/content/video/buildFalInput.ts new file mode 100644 index 00000000..8f5805f6 --- /dev/null +++ b/lib/content/video/buildFalInput.ts @@ -0,0 +1,49 @@ +/** + * Maps user-facing fields to the fal input format for each video mode. + * Different fal models expect different field names for the same concept. + * + * @param mode - The resolved video generation mode. + * @param v - Validated request body fields. + * @returns The fal input object with mode-specific field mappings. + */ +export function buildFalInput( + mode: string, + v: { + prompt?: string; + negative_prompt?: string; + image_url?: string; + end_image_url?: string; + video_url?: string; + audio_url?: string; + aspect_ratio: string; + duration: string; + resolution: string; + generate_audio: boolean; + }, +): Record<string, unknown> { + const input: Record<string, unknown> = { + prompt: v.prompt ??
"", + aspect_ratio: v.aspect_ratio, + duration: v.duration, + resolution: v.resolution, + generate_audio: v.generate_audio, + safety_tolerance: "6", + auto_fix: true, + }; + + if (v.negative_prompt) input.negative_prompt = v.negative_prompt; + + if (mode === "reference" && v.image_url) { + input.image_urls = [v.image_url]; + } else if (mode === "first-last" && v.image_url) { + input.first_frame_url = v.image_url; + if (v.end_image_url) input.last_frame_url = v.end_image_url; + } else if (v.image_url) { + input.image_url = v.image_url; + } + + if (v.video_url) input.video_url = v.video_url; + if (v.audio_url) input.audio_url = v.audio_url; + + return input; +} diff --git a/lib/content/video/createVideoHandler.ts b/lib/content/video/createVideoHandler.ts new file mode 100644 index 00000000..49720d35 --- /dev/null +++ b/lib/content/video/createVideoHandler.ts @@ -0,0 +1,29 @@ +import type { NextRequest } from "next/server"; +import { NextResponse } from "next/server"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { validateCreateVideoBody } from "./validateCreateVideoBody"; +import { generateVideo } from "./generateVideo"; + +/** + * POST /api/content/video + * + * @param request - Incoming request with video generation parameters. + * @returns JSON with the generated video URL. + */ +export async function createVideoHandler(request: NextRequest): Promise<NextResponse> { + const validated = await validateCreateVideoBody(request); + if (validated instanceof NextResponse) return validated; + + try { + const result = await generateVideo(validated); + return NextResponse.json(result, { status: 200, headers: getCorsHeaders() }); + } catch (error) { + console.error("Video generation error:", error); + const message = error instanceof Error ? error.message : "Video generation failed"; + const status = message.includes("no video") ?
502 : 500; + return NextResponse.json( + { status: "error", error: message }, + { status, headers: getCorsHeaders() }, + ); + } +} diff --git a/lib/content/video/generateVideo.ts b/lib/content/video/generateVideo.ts new file mode 100644 index 00000000..4d4f826f --- /dev/null +++ b/lib/content/video/generateVideo.ts @@ -0,0 +1,59 @@ +import type { z } from "zod"; +import fal from "@/lib/fal/server"; +import type { createVideoBodySchema } from "./validateCreateVideoBody"; +import { loadTemplate } from "@/lib/content/templates"; +import { inferMode } from "./inferMode"; +import { buildFalInput } from "./buildFalInput"; + +const MODELS: Record<string, string> = { + prompt: "fal-ai/veo3.1/fast/image-to-video", + animate: "fal-ai/veo3.1/fast/image-to-video", + reference: "fal-ai/veo3.1/fast/image-to-video", + extend: "fal-ai/veo3.1/fast/image-to-video", + "first-last": "fal-ai/veo3.1/fast/image-to-video", + lipsync: "fal-ai/ltx-2-19b/audio-to-video", +}; + +type VideoParams = z.infer<typeof createVideoBodySchema>; + +export interface GenerateVideoResult { + videoUrl: string; + mode: string; +} + +/** + * Generate a video using the fal API. + * + * @param validated - Validated video generation parameters. + * @returns Object with the video URL and resolved mode. + * @throws Error if the generation returns no video. + */ +export async function generateVideo(validated: VideoParams): Promise<GenerateVideoResult> { + const tpl = validated.template ? loadTemplate(validated.template) : null; + + let promptOverride = validated.prompt; + if (!promptOverride && tpl?.video) { + const parts: string[] = []; + if (tpl.video.movements.length) { + parts.push(tpl.video.movements[Math.floor(Math.random() * tpl.video.movements.length)]); + } + if (tpl.video.moods.length) { + parts.push(tpl.video.moods[Math.floor(Math.random() * tpl.video.moods.length)]); + } + if (parts.length) promptOverride = parts.join(". "); + } + + const mode = validated.mode ?? inferMode(validated); + const model = validated.model ?? MODELS[mode] ??
MODELS.prompt; + const input = buildFalInput(mode, { ...validated, prompt: promptOverride ?? validated.prompt }); + + const result = await fal.subscribe(model, { input }); + const resultData = result.data as Record<string, unknown>; + const videoUrl = (resultData?.video as Record<string, unknown>)?.url as string | undefined; + + if (!videoUrl) { + throw new Error("Video generation returned no video"); + } + + return { videoUrl, mode }; +} diff --git a/lib/content/video/inferMode.ts b/lib/content/video/inferMode.ts new file mode 100644 index 00000000..3b25cbdc --- /dev/null +++ b/lib/content/video/inferMode.ts @@ -0,0 +1,18 @@ +/** + * Infers the video generation mode from the inputs when the caller doesn't specify one. + * + * @param v - Object with optional media URL fields. + * @returns The inferred mode string. + */ +export function inferMode(v: { + audio_url?: string; + video_url?: string; + image_url?: string; + end_image_url?: string; +}): string { + if (v.audio_url && v.image_url) return "lipsync"; + if (v.video_url) return "extend"; + if (v.image_url && v.end_image_url) return "first-last"; + if (v.image_url) return "animate"; + return "prompt"; +} diff --git a/lib/content/video/validateCreateVideoBody.ts b/lib/content/video/validateCreateVideoBody.ts new file mode 100644 index 00000000..f84603c8 --- /dev/null +++ b/lib/content/video/validateCreateVideoBody.ts @@ -0,0 +1,50 @@ +import type { NextRequest } from "next/server"; +import { NextResponse } from "next/server"; +import { z } from "zod"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { safeParseJson } from "@/lib/networking/safeParseJson"; +import { validateAuthContext } from "@/lib/auth/validateAuthContext"; +import { TEMPLATE_IDS } from "@/lib/content/templates"; + +export const createVideoBodySchema = z.object({ + template: z.enum(TEMPLATE_IDS).optional(), + mode: z.enum(["prompt", "animate", "reference", "extend", "first-last", "lipsync"]).optional(), + prompt: z.string().optional(), + image_url:
z.string().url().optional(), + end_image_url: z.string().url().optional(), + video_url: z.string().url().optional(), + audio_url: z.string().url().optional(), + aspect_ratio: z.enum(["auto", "16:9", "9:16"]).optional().default("auto"), + duration: z.enum(["4s", "6s", "7s", "8s"]).optional().default("8s"), + resolution: z.enum(["720p", "1080p", "4k"]).optional().default("720p"), + negative_prompt: z.string().optional(), + generate_audio: z.boolean().optional().default(false), + model: z.string().optional(), +}); + +export type ValidatedCreateVideoBody = { accountId: string } & z.infer< + typeof createVideoBodySchema +>; + +/** + * Validates auth and request body for POST /api/content/video. + */ +export async function validateCreateVideoBody( + request: NextRequest, +): Promise<NextResponse | ValidatedCreateVideoBody> { + const authResult = await validateAuthContext(request); + if (authResult instanceof NextResponse) return authResult; + + const body = await safeParseJson(request); + const result = createVideoBodySchema.safeParse(body); + + if (!result.success) { + const firstError = result.error.issues[0]; + return NextResponse.json( + { status: "error", field: firstError.path, error: firstError.message }, + { status: 400, headers: getCorsHeaders() }, + ); + } + + return { accountId: authResult.accountId, ...result.data }; +} diff --git a/lib/fal/server.ts b/lib/fal/server.ts new file mode 100644 index 00000000..73174afa --- /dev/null +++ b/lib/fal/server.ts @@ -0,0 +1,13 @@ +import { fal as falClient } from "@fal-ai/client"; + +const FAL_KEY = process.env.FAL_KEY as string; + +if (!FAL_KEY) { + throw new Error("FAL_KEY must be set"); +} + +falClient.config({ credentials: FAL_KEY }); + +const fal = falClient; + +export default fal; diff --git a/lib/trigger/triggerCreateContent.ts b/lib/trigger/triggerCreateContent.ts index 9d6e5bd8..eb41fb3c 100644 --- a/lib/trigger/triggerCreateContent.ts +++ b/lib/trigger/triggerCreateContent.ts @@ -4,7 +4,7 @@ import { CREATE_CONTENT_TASK_ID } from "@/lib/const";
export interface TriggerCreateContentPayload { accountId: string; artistSlug: string; - template: string; + template?: string; lipsync: boolean; /** Controls caption length: "short", "medium", or "long". */ captionLength: "short" | "medium" | "long"; diff --git a/lib/twelvelabs/analyzeVideo.ts b/lib/twelvelabs/analyzeVideo.ts new file mode 100644 index 00000000..3b0af44e --- /dev/null +++ b/lib/twelvelabs/analyzeVideo.ts @@ -0,0 +1,63 @@ +const TWELVELABS_ANALYZE_URL = "https://api.twelvelabs.io/v1.3/analyze"; + +export interface AnalyzeVideoResult { + text: string; + finishReason: string | null; + usage: { output_tokens?: number } | null; +} + +/** + * Call the Twelve Labs video analysis API. + * + * @param validated - Validated request body with video_url, prompt, temperature, and optional max_tokens. + * @returns Analysis result with text, finish reason, and usage. + * @throws Error if TWELVELABS_API_KEY is missing or API call fails. + */ +export async function analyzeVideo(validated: { + video_url: string; + prompt: string; + temperature: number; + max_tokens?: number; +}): Promise<AnalyzeVideoResult> { + const apiKey = process.env.TWELVELABS_API_KEY; + if (!apiKey) { + throw new Error("TWELVELABS_API_KEY is not configured"); + } + + const response = await fetch(TWELVELABS_ANALYZE_URL, { + method: "POST", + headers: { + "x-api-key": apiKey, + "Content-Type": "application/json", + }, + body: JSON.stringify({ + video: { type: "url", url: validated.video_url }, + prompt: validated.prompt, + temperature: validated.temperature, + stream: false, + ...(validated.max_tokens && { max_tokens: validated.max_tokens }), + }), + }); + + if (!response.ok) { + const errorBody = await response.text(); + console.error("Twelve Labs analyze error:", response.status, errorBody); + throw new Error(`Video analysis failed: ${response.status}`); + } + + const json = (await response.json()) as { + data?: string; + finish_reason?: string; + usage?: { output_tokens?: number }; + }; + + if (!json.data) {
throw new Error("Video analysis returned no text"); + } + + return { + text: json.data, + finishReason: json.finish_reason ?? null, + usage: json.usage ?? null, + }; +} diff --git a/package.json b/package.json index 7a9df5fb..5d12b8b0 100644 --- a/package.json +++ b/package.json @@ -29,6 +29,7 @@ "@coinbase/x402": "^0.7.3", "@composio/core": "^0.3.4", "@composio/vercel": "^0.3.4", + "@fal-ai/client": "^1.9.5", "@modelcontextprotocol/sdk": "^1.24.3", "@privy-io/node": "^0.6.2", "@supabase/supabase-js": "^2.86.0", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 6d4e05a0..72b683bc 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -47,6 +47,9 @@ importers: '@composio/vercel': specifier: ^0.3.4 version: 0.3.4(@composio/core@0.3.4(ws@8.18.3(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@4.1.13))(ai@6.0.0-beta.122(zod@4.1.13)) + '@fal-ai/client': + specifier: ^1.9.5 + version: 1.9.5 '@modelcontextprotocol/sdk': specifier: ^1.24.3 version: 1.24.3(zod@4.1.13) @@ -754,6 +757,10 @@ packages: resolution: {integrity: sha512-zQ0IqbdX8FZ9aw11vP+dZkKDkS+kgIvQPHnSAXzP9pLu+Rfu3D3XEeLbicvoXJTYnhZiPmsZUxgdzXwNKxRPbA==} engines: {node: '>=14'} + '@fal-ai/client@1.9.5': + resolution: {integrity: sha512-knCMOqXapzL5Lsp4Xh/B/VfvbseKgHg2Kt//MjcxN5weF59/26En3zXTPd8pljl4QAr7b62X5EuNCT69MpyjSA==} + engines: {node: '>=18.0.0'} + '@gemini-wallet/core@0.3.2': resolution: {integrity: sha512-Z4aHi3ECFf5oWYWM3F1rW83GJfB9OvhBYPTmb5q+VyK3uvzvS48lwo+jwh2eOoCRWEuT/crpb9Vwp2QaS5JqgQ==} peerDependencies: @@ -1056,6 +1063,10 @@ packages: '@cfworker/json-schema': optional: true + '@msgpack/msgpack@3.1.3': + resolution: {integrity: sha512-47XIizs9XZXvuJgoaJUIE2lFoID8ugvc0jzSHP+Ptfk8nTbnR8g788wv48N03Kx0UkAv559HWRQ3yzOgzlRNUA==} + engines: {node: '>= 18'} + '@msgpackr-extract/msgpackr-extract-darwin-arm64@3.0.3': resolution: {integrity: sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw==} cpu: [arm64] @@ -5515,6 +5526,9 @@ packages: resolution: {integrity: 
sha512-l0OE8wL34P4nJH/H2ffoaniAokM2qSmrtXHmlpvYr5AVVX8msAyW0l8NVJFDxlSK4u3Uh/f41cQheDVdnYijwQ==} hasBin: true + robot3@0.4.1: + resolution: {integrity: sha512-hzjy826lrxzx8eRgv80idkf8ua1JAepRc9Efdtj03N3KNJuznQCPlyCJ7gnUmDFwZCLQjxy567mQVKmdv2BsXQ==} + rollup@4.55.1: resolution: {integrity: sha512-wDv/Ht1BNHB4upNbK74s9usvl7hObDnvVzknxqY/E/O3X6rW1U1rV1aENEfJ54eFZDTNo7zv1f5N4edCluH7+A==} engines: {node: '>=18.0.0', npm: '>=8.0.0'} @@ -7190,6 +7204,12 @@ snapshots: ethereum-cryptography: 2.2.1 micro-ftch: 0.3.1 + '@fal-ai/client@1.9.5': + dependencies: + '@msgpack/msgpack': 3.1.3 + eventsource-parser: 1.1.2 + robot3: 0.4.1 + '@gemini-wallet/core@0.3.2(viem@2.40.3(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@4.1.13))': dependencies: '@metamask/rpc-errors': 7.0.2 @@ -7564,6 +7584,8 @@ snapshots: transitivePeerDependencies: - supports-color + '@msgpack/msgpack@3.1.3': {} + '@msgpackr-extract/msgpackr-extract-darwin-arm64@3.0.3': optional: true @@ -13693,6 +13715,8 @@ snapshots: dependencies: glob: 10.5.0 + robot3@0.4.1: {} + rollup@4.55.1: dependencies: '@types/estree': 1.0.8