@@ -186,14 +225,20 @@ export function ProjectSettingsDialog({ open, onOpenChange }: ProjectSettingsDia
{/* Frame rate */}
-
);
diff --git a/apps/ui/src/components/editor/transform/transform-overlay.tsx b/apps/ui/src/components/editor/transform/transform-overlay.tsx
index a522eee..37c7722 100644
--- a/apps/ui/src/components/editor/transform/transform-overlay.tsx
+++ b/apps/ui/src/components/editor/transform/transform-overlay.tsx
@@ -27,7 +27,7 @@ interface TransformOverlayProps {
export function TransformOverlay({ displayWidth, displayHeight }: TransformOverlayProps) {
const settings = useVideoEditorStore((s) => s.settings);
const clips = useVideoEditorStore((s) => s.clips);
- const currentTime = useVideoEditorStore((s) => s.currentTime);
+ const currentTime = useVideoEditorStore((s) => s.currentFrame);
const selectedClipIds = useVideoEditorStore((s) => s.selectedClipIds);
const setSelectedClipIds = useVideoEditorStore((s) => s.setSelectedClipIds);
const tracks = useVideoEditorStore((s) => s.tracks);
diff --git a/apps/ui/src/components/editor/transform/use-transform-drag.ts b/apps/ui/src/components/editor/transform/use-transform-drag.ts
index ac20c06..dc2fa90 100644
--- a/apps/ui/src/components/editor/transform/use-transform-drag.ts
+++ b/apps/ui/src/components/editor/transform/use-transform-drag.ts
@@ -51,7 +51,7 @@ export function useTransformDrag({ displayScale, settings, assetMap }: UseTransf
const base = clip.transform ?? {};
if (!clip.keyframes?.tracks?.length) return base;
- const currentTime = useVideoEditorStore.getState().currentTime;
+ const currentTime = useVideoEditorStore.getState().currentFrame;
const localTime = currentTime - clip.startTime;
const evaluator = new KeyframeEvaluator(clip.keyframes);
const keyframed = evaluator.evaluateTransform(localTime);
@@ -87,7 +87,7 @@ export function useTransformDrag({ displayScale, settings, assetMap }: UseTransf
(clip: EditorClip, property: AnimatableProperty, value: number) => {
const store = useVideoEditorStore.getState();
if (hasKeyframes(clip, property)) {
- const localTime = store.currentTime - clip.startTime;
+ const localTime = store.currentFrame - clip.startTime;
store.addKeyframe(clip.id, property, localTime, value);
} else {
const field = PROPERTY_TO_TRANSFORM_FIELD[property] ?? property;
@@ -104,7 +104,7 @@ export function useTransformDrag({ displayScale, settings, assetMap }: UseTransf
(clip: EditorClip, property: AnimatableProperty, value: number) => {
const store = useVideoEditorStore.getState();
if (hasKeyframes(clip, property)) {
- const localTime = store.currentTime - clip.startTime;
+ const localTime = store.currentFrame - clip.startTime;
store.addKeyframe(clip.id, property, localTime, value);
} else {
store.updateClipLineBox(clip.id, { [property]: value });
@@ -120,7 +120,7 @@ export function useTransformDrag({ displayScale, settings, assetMap }: UseTransf
(clip: EditorClip, property: AnimatableProperty, value: number) => {
const store = useVideoEditorStore.getState();
if (hasKeyframes(clip, property)) {
- const localTime = store.currentTime - clip.startTime;
+ const localTime = store.currentFrame - clip.startTime;
store.addKeyframe(clip.id, property, localTime, value);
} else {
store.updateClipShapeBox(clip.id, { [property]: value });
@@ -156,18 +156,18 @@ export function useTransformDrag({ displayScale, settings, assetMap }: UseTransf
if (!clip || clip.type === "audio") return;
const evaluatedTransform = getEvaluatedTransform(clip);
- const bounds = getClipDisplayBounds(clip, evaluatedTransform, ctx, state.currentTime);
+ const bounds = getClipDisplayBounds(clip, evaluatedTransform, ctx, state.currentFrame);
if (!bounds) return;
// Collect snap targets
- const visibleClips = state.getVisibleClipsAtTime(state.currentTime);
+ const visibleClips = state.getVisibleClipsAtTime(state.currentFrame);
const transforms = new Map
>();
for (const c of visibleClips) {
if (c.type !== "audio") transforms.set(c.id, getEvaluatedTransform(c));
}
snapTargetsRef.current = collectSnapTargets(visibleClips, clipId, transforms, ctx);
- const { startPercentageBox, startLineBox } = captureStartBoxes(clip, state.currentTime);
+ const { startPercentageBox, startLineBox } = captureStartBoxes(clip, state.currentFrame);
dragStateRef.current = {
dragType: "move",
@@ -195,10 +195,10 @@ export function useTransformDrag({ displayScale, settings, assetMap }: UseTransf
if (!clip || clip.type === "audio") return;
const evaluatedTransform = getEvaluatedTransform(clip);
- const bounds = getClipDisplayBounds(clip, evaluatedTransform, ctx, state.currentTime);
+ const bounds = getClipDisplayBounds(clip, evaluatedTransform, ctx, state.currentFrame);
if (!bounds) return;
- const { startPercentageBox, startLineBox } = captureStartBoxes(clip, state.currentTime);
+ const { startPercentageBox, startLineBox } = captureStartBoxes(clip, state.currentFrame);
dragStateRef.current = {
dragType: "resize",
@@ -227,7 +227,7 @@ export function useTransformDrag({ displayScale, settings, assetMap }: UseTransf
if (!clip || clip.type === "audio") return;
const evaluatedTransform = getEvaluatedTransform(clip);
- const bounds = getClipDisplayBounds(clip, evaluatedTransform, ctx, state.currentTime);
+ const bounds = getClipDisplayBounds(clip, evaluatedTransform, ctx, state.currentFrame);
if (!bounds) return;
const centerX = bounds.x + bounds.width / 2;
diff --git a/apps/ui/src/components/header.tsx b/apps/ui/src/components/header.tsx
index ae1b66a..d18c36c 100644
--- a/apps/ui/src/components/header.tsx
+++ b/apps/ui/src/components/header.tsx
@@ -3,6 +3,7 @@ import { LogoIcon } from "@/components/logo";
import { Button } from "@/components/ui/button";
import { Link } from "@tanstack/react-router";
import { useScroll } from "@/hooks/use-scroll";
+import { GithubIcon } from "lucide-react";
export function Header() {
const scrolled = useScroll(10);
@@ -10,7 +11,7 @@ export function Header() {
return (
@@ -25,6 +26,7 @@ export function Header() {
diff --git a/apps/ui/src/components/timeline/canvas-timeline.tsx b/apps/ui/src/components/timeline/canvas-timeline.tsx
index ce2b932..6952fa3 100644
--- a/apps/ui/src/components/timeline/canvas-timeline.tsx
+++ b/apps/ui/src/components/timeline/canvas-timeline.tsx
@@ -11,6 +11,7 @@ import {
addAssetsToStores,
} from "./use-asset-store";
import { TRACK_HEADER_WIDTH, RULER_HEIGHT, TRACK_HEIGHT } from "./constants";
+import { secondsToFrames } from "@tooscut/render-engine";
import { PlusIcon } from "lucide-react";
/**
@@ -53,8 +54,8 @@ export function CanvasTimeline() {
useState
(null);
// Store state for keyboard shortcuts
- const currentTime = useVideoEditorStore((s) => s.currentTime);
- const duration = useVideoEditorStore((s) => s.duration);
+ const currentTime = useVideoEditorStore((s) => s.currentFrame);
+ const duration = useVideoEditorStore((s) => s.durationFrames);
const isPlaying = useVideoEditorStore((s) => s.isPlaying);
const selectedClipIds = useVideoEditorStore((s) => s.selectedClipIds);
const zoom = useVideoEditorStore((s) => s.zoom);
@@ -281,7 +282,7 @@ export function CanvasTimeline() {
];
// Convert screen coordinates to timeline coordinates
- const xToTime = useCallback(
+ const xToFrame = useCallback(
(x: number) => Math.max(0, (x - TRACK_HEADER_WIDTH + scrollX) / zoom),
[zoom, scrollX],
);
@@ -299,7 +300,7 @@ export function CanvasTimeline() {
const x = clientX - rect.left;
const y = clientY - rect.top;
- const time = Math.max(0, (x - TRACK_HEADER_WIDTH + scrollX) / zoom);
+ const frame = Math.max(0, (x - TRACK_HEADER_WIDTH + scrollX) / zoom);
const trackIndex = Math.floor((y - RULER_HEIGHT + scrollY) / TRACK_HEIGHT);
if (trackIndex < 0 || trackIndex >= allTracks.length) return null;
@@ -309,8 +310,8 @@ export function CanvasTimeline() {
if (clip.trackId !== track.fullId) continue;
if (clip.type === "audio") continue; // transitions don't apply to audio clips
const clipEnd = clip.startTime + clip.duration;
- if (time >= clip.startTime && time <= clipEnd) {
- const fraction = (time - clip.startTime) / clip.duration;
+ if (frame >= clip.startTime && frame <= clipEnd) {
+ const fraction = (frame - clip.startTime) / clip.duration;
const edge: "in" | "out" = fraction < 1 / 3 ? "in" : fraction > 2 / 3 ? "out" : "in";
const clipX = TRACK_HEADER_WIDTH + clip.startTime * zoom - scrollX;
const clipWidth = clip.duration * zoom;
@@ -341,7 +342,7 @@ export function CanvasTimeline() {
const x = clientX - rect.left;
const y = clientY - rect.top;
- const time = Math.max(0, (x - TRACK_HEADER_WIDTH + scrollX) / zoom);
+ const frame = Math.max(0, (x - TRACK_HEADER_WIDTH + scrollX) / zoom);
const trackIndex = Math.floor((y - RULER_HEIGHT + scrollY) / TRACK_HEIGHT);
if (trackIndex < 0 || trackIndex >= allTracks.length) return null;
@@ -368,7 +369,7 @@ export function CanvasTimeline() {
const boundaryTime = outgoingEnd;
// Check if cursor is near this boundary (within threshold of the boundary)
- if (Math.abs(time - boundaryTime) < thresholdTime) {
+ if (Math.abs(frame - boundaryTime) < thresholdTime) {
const boundaryX = TRACK_HEADER_WIDTH + boundaryTime * zoom - scrollX;
const clipY = RULER_HEIGHT + trackIndex * TRACK_HEIGHT - scrollY + 4;
return { outgoing, incoming, boundaryX, clipY, trackIndex };
@@ -381,7 +382,7 @@ export function CanvasTimeline() {
// Use refs for drag handler deps to avoid stale closures with native event listeners
const dragHandlerDepsRef = useRef({
- xToTime,
+ xToFrame,
yToTrackIndex,
allTracks,
zoom,
@@ -391,7 +392,7 @@ export function CanvasTimeline() {
getAdjacentClipBoundary,
});
dragHandlerDepsRef.current = {
- xToTime,
+ xToFrame,
yToTrackIndex,
allTracks,
zoom,
@@ -410,7 +411,7 @@ export function CanvasTimeline() {
e.preventDefault();
const {
- xToTime: _xToTime,
+ xToFrame: _xToFrame,
yToTrackIndex: _yToTrackIndex,
allTracks: _allTracks,
zoom: _zoom,
@@ -497,7 +498,7 @@ export function CanvasTimeline() {
const rect = el.getBoundingClientRect();
const x = e.clientX - rect.left;
const y = e.clientY - rect.top;
- const startTime = _xToTime(x);
+ const startTime = _xToFrame(x);
const rawTrackIndex = _yToTrackIndex(y);
// Find nearest video track (files are most likely video/image)
@@ -539,7 +540,7 @@ export function CanvasTimeline() {
const y = e.clientY - rect.top;
// Calculate timeline position
- const startTime = _xToTime(x);
+ const startTime = _xToFrame(x);
const rawTrackIndex = _yToTrackIndex(y);
// Find compatible tracks
@@ -641,7 +642,7 @@ export function CanvasTimeline() {
assets,
allTracks,
clips,
- xToTime,
+ xToFrame,
yToTrackIndex,
addClipToTrack,
setSelectedClipIds,
@@ -658,7 +659,7 @@ export function CanvasTimeline() {
assets,
allTracks,
clips,
- xToTime,
+ xToFrame,
yToTrackIndex,
addClipToTrack,
setSelectedClipIds,
@@ -723,7 +724,7 @@ export function CanvasTimeline() {
const x = e.clientX - rect.left;
const y = e.clientY - rect.top;
// Auto-place at timeline start when the timeline is empty
- const startTime = d.clips.length === 0 ? 0 : d.xToTime(x);
+ const startTime = d.clips.length === 0 ? 0 : d.xToFrame(x);
const rawTrackIndex = d.yToTrackIndex(y);
// Handle text template drop
@@ -832,16 +833,19 @@ export function CanvasTimeline() {
transform = { scale_x: scale, scale_y: scale };
}
+ // Convert asset duration from seconds (asset store) to frames (clip store)
+ const durationFrames = secondsToFrames(asset.duration, d.settings.fps);
+
// Image clips don't set assetDuration since they have no inherent duration limit
const clipId = d.addClipToTrack({
type: clipType,
trackId: track.fullId,
startTime,
- duration: asset.duration,
+ duration: durationFrames,
name: asset.name,
assetId: asset.id,
speed: 1,
- assetDuration: clipType === "image" ? undefined : asset.duration,
+ assetDuration: clipType === "image" ? undefined : durationFrames,
transform,
});
@@ -852,11 +856,11 @@ export function CanvasTimeline() {
type: "audio",
trackId: audioTrack.fullId,
startTime,
- duration: asset.duration,
+ duration: durationFrames,
name: `${asset.name} (Audio)`,
assetId: asset.id,
speed: 1,
- assetDuration: asset.duration,
+ assetDuration: durationFrames,
});
d.linkClipPair(clipId, audioClipId);
}
@@ -888,7 +892,7 @@ export function CanvasTimeline() {
const x = e.clientX - rect.left;
const y = e.clientY - rect.top;
const d = dropHandlerDepsRef.current;
- const dropStartTime = d.clips.length === 0 ? 0 : d.xToTime(x);
+ const dropStartTime = d.clips.length === 0 ? 0 : d.xToFrame(x);
const rawIdx = d.yToTrackIndex(y);
const isAudio = asset.type === "audio";
@@ -911,15 +915,17 @@ export function CanvasTimeline() {
transform = { scale_x: scale, scale_y: scale };
}
+ const fileDurationFrames = secondsToFrames(asset.duration, d.settings.fps);
+
const newClipId = d.addClipToTrack({
type: clipType,
trackId: track.fullId,
startTime: dropStartTime,
- duration: asset.duration,
+ duration: fileDurationFrames,
name: asset.name,
assetId: asset.id,
speed: 1,
- assetDuration: clipType === "image" ? undefined : asset.duration,
+ assetDuration: clipType === "image" ? undefined : fileDurationFrames,
transform,
});
@@ -930,11 +936,11 @@ export function CanvasTimeline() {
type: "audio",
trackId: audioTrack.fullId,
startTime: dropStartTime,
- duration: asset.duration,
+ duration: fileDurationFrames,
name: `${asset.name} (Audio)`,
assetId: asset.id,
speed: 1,
- assetDuration: asset.duration,
+ assetDuration: fileDurationFrames,
});
d.linkClipPair(newClipId, audioClipId);
}
diff --git a/apps/ui/src/components/timeline/constants.ts b/apps/ui/src/components/timeline/constants.ts
index 79e9222..d8abc2a 100644
--- a/apps/ui/src/components/timeline/constants.ts
+++ b/apps/ui/src/components/timeline/constants.ts
@@ -18,14 +18,14 @@ export const CLIP_PADDING = 4;
/** Snap threshold in pixels */
export const SNAP_THRESHOLD = 10;
-/** Maximum zoom level (pixels per second) */
-export const MAX_ZOOM = 500;
+/** Maximum zoom level (pixels per frame) */
+export const MAX_ZOOM = 20;
-/** Minimum zoom level (pixels per second) */
-export const MIN_ZOOM = 1;
+/** Minimum zoom level (pixels per frame) */
+export const MIN_ZOOM = 0.03;
-/** Default zoom level (pixels per second) */
-export const DEFAULT_ZOOM = 50;
+/** Default zoom level (pixels per frame) */
+export const DEFAULT_ZOOM = 1.67;
/** Colors for the timeline */
export const COLORS = {
diff --git a/apps/ui/src/components/timeline/keyframe-curve-editor.tsx b/apps/ui/src/components/timeline/keyframe-curve-editor.tsx
index 6927a30..4d9d941 100644
--- a/apps/ui/src/components/timeline/keyframe-curve-editor.tsx
+++ b/apps/ui/src/components/timeline/keyframe-curve-editor.tsx
@@ -337,12 +337,12 @@ function PropertyGraph({
const graphContentHeight = height - GRAPH_PADDING * 2;
// Coordinate conversion
- const timeToX = useCallback(
+ const frameToX = useCallback(
(time: number) => TRACK_HEADER_WIDTH + (clipStartTime + time) * zoom - scrollX,
[zoom, scrollX, clipStartTime],
);
- const xToTime = useCallback(
+ const xToFrame = useCallback(
(x: number) => {
const absoluteTime = (x - TRACK_HEADER_WIDTH + scrollX) / zoom;
return absoluteTime - clipStartTime;
@@ -393,7 +393,7 @@ function PropertyGraph({
if (type === "point") {
// Drag keyframe point
- const newTime = Math.max(0, xToTime(pos.x));
+ const newTime = Math.max(0, xToFrame(pos.x));
const newValue = yToValue(pos.y);
updateKeyframe(clipId, property, index, { time: newTime, value: newValue });
} else {
@@ -401,9 +401,9 @@ function PropertyGraph({
const nextKf = keyframes[index + 1];
if (!nextKf) return;
- const x1 = timeToX(kf.time);
+ const x1 = frameToX(kf.time);
const y1 = valueToY(kf.value);
- const x2 = timeToX(nextKf.time);
+ const x2 = frameToX(nextKf.time);
const y2 = valueToY(nextKf.value);
const dx = x2 - x1;
const dy = y2 - y1;
@@ -448,9 +448,9 @@ function PropertyGraph({
clipId,
property,
config,
- xToTime,
+ xToFrame,
yToValue,
- timeToX,
+ frameToX,
valueToY,
updateKeyframe,
],
@@ -486,15 +486,15 @@ function PropertyGraph({
value = k1.value + easedT * (k2.value - k1.value);
}
- points.push(timeToX(time), valueToY(value));
+ points.push(frameToX(time), valueToY(value));
}
}
return points;
- }, [keyframes, timeToX, valueToY]);
+ }, [keyframes, frameToX, valueToY]);
// Playhead position
- const playheadX = timeToX(currentTime - clipStartTime);
+ const playheadX = frameToX(currentTime - clipStartTime);
const curvePoints = generateCurvePoints();
// Value scale labels
@@ -551,7 +551,7 @@ function PropertyGraph({
const startTime = Math.floor(scrollX / zoom);
const endTime = Math.ceil((scrollX + graphWidth) / zoom);
for (let t = startTime; t <= endTime; t++) {
- const x = timeToX(t - clipStartTime);
+ const x = frameToX(t - clipStartTime);
if (x >= TRACK_HEADER_WIDTH && x <= width) {
lines.push(
,
@@ -590,7 +590,7 @@ function PropertyGraph({
{/* Keyframe points and bezier handles */}
{keyframes.map((kf, index) => {
- const x = timeToX(kf.time);
+ const x = frameToX(kf.time);
const y = valueToY(kf.value);
const nextKf = keyframes[index + 1];
const showHandles = nextKf && kf.interpolation === "Bezier";
@@ -600,7 +600,7 @@ function PropertyGraph({
{/* Bezier handles */}
{showHandles &&
(() => {
- const x2 = timeToX(nextKf.time);
+ const x2 = frameToX(nextKf.time);
const y2 = valueToY(nextKf.value);
const bezier = kf.easing.custom_bezier ?? CUBIC_BEZIER_PRESETS[kf.easing.preset];
const dx = x2 - x;
@@ -734,7 +734,7 @@ export function KeyframeCurveEditor({ width, clipId, properties }: KeyframeCurve
const scrollX = useVideoEditorStore((s) => s.scrollX);
const zoom = useVideoEditorStore((s) => s.zoom);
- const currentTime = useVideoEditorStore((s) => s.currentTime);
+ const currentTime = useVideoEditorStore((s) => s.currentFrame);
const clips = useVideoEditorStore((s) => s.clips);
const clip = clips.find((c) => c.id === clipId);
diff --git a/apps/ui/src/components/timeline/snap-utils.ts b/apps/ui/src/components/timeline/snap-utils.ts
index 6e98fa1..5185e59 100644
--- a/apps/ui/src/components/timeline/snap-utils.ts
+++ b/apps/ui/src/components/timeline/snap-utils.ts
@@ -1,19 +1,19 @@
import type { EditorClip } from "../../state/video-editor-store";
export interface SnapResult {
- time: number;
+ frame: number;
snapLines: number[];
}
/**
- * Collect all snap target times from clip edges and the playhead.
+ * Collect all snap target frames from clip edges and the playhead.
* Excludes clips in the `excludeIds` set.
- * Returns a sorted array of unique times.
+ * Returns a sorted array of unique frame positions.
*/
export function findSnapTargets(
clips: EditorClip[],
excludeIds: Set,
- currentTime: number,
+ currentFrame: number,
): number[] {
const targets = new Set();
@@ -23,30 +23,30 @@ export function findSnapTargets(
targets.add(clip.startTime + clip.duration);
}
- targets.add(currentTime);
+ targets.add(currentFrame);
return Array.from(targets).sort((a, b) => a - b);
}
/**
- * Find the closest snap target within a threshold (in time units).
- * Returns the snapped time and snap line positions.
+ * Find the closest snap target within a threshold (in frames).
+ * Returns the snapped frame and snap line positions.
*/
-export function snapTime(time: number, targets: number[], thresholdTime: number): SnapResult {
+export function snapFrame(frame: number, targets: number[], thresholdFrames: number): SnapResult {
let closest: number | null = null;
let closestDist = Infinity;
for (const target of targets) {
- const dist = Math.abs(time - target);
+ const dist = Math.abs(frame - target);
if (dist < closestDist) {
closestDist = dist;
closest = target;
}
}
- if (closest !== null && closestDist <= thresholdTime) {
- return { time: closest, snapLines: [closest] };
+ if (closest !== null && closestDist <= thresholdFrames) {
+ return { frame: closest, snapLines: [closest] };
}
- return { time, snapLines: [] };
+ return { frame, snapLines: [] };
}
diff --git a/apps/ui/src/components/timeline/timeline-stage.tsx b/apps/ui/src/components/timeline/timeline-stage.tsx
index a604210..1267c5b 100644
--- a/apps/ui/src/components/timeline/timeline-stage.tsx
+++ b/apps/ui/src/components/timeline/timeline-stage.tsx
@@ -2,17 +2,7 @@
import Konva from "konva";
import { useCallback, useMemo, useRef, useState } from "react";
-import {
- Group,
- Image as KonvaImage,
- Label,
- Layer,
- Line,
- Rect,
- Stage,
- Tag,
- Text,
-} from "react-konva";
+import { Group, Label, Layer, Line, Rect, Shape, Stage, Tag, Text } from "react-konva";
import { useVideoEditorStore } from "../../state/video-editor-store";
import {
CLIP_PADDING,
@@ -24,7 +14,8 @@ import {
TRACK_HEADER_WIDTH,
TRACK_HEIGHT,
} from "./constants";
-import { findSnapTargets, snapTime } from "./snap-utils";
+import { findSnapTargets, snapFrame } from "./snap-utils";
+import { framesToSeconds } from "@tooscut/render-engine";
import {
KonvaEyeIcon,
KonvaEyeOffIcon,
@@ -148,32 +139,52 @@ interface CrossTransitionResizeState {
}
/**
- * Get grid interval based on zoom level.
+ * Get grid interval in frames based on zoom level (pixels per frame) and fps.
+ * Returns frame counts for minor and major gridlines.
*/
-function getGridInterval(pixelsPerSecond: number): {
- minor: number;
- major: number;
-} {
- if (pixelsPerSecond >= 200) return { minor: 0.1, major: 1 };
- if (pixelsPerSecond >= 100) return { minor: 0.5, major: 5 };
- if (pixelsPerSecond >= 50) return { minor: 1, major: 5 };
- if (pixelsPerSecond >= 20) return { minor: 2, major: 10 };
- if (pixelsPerSecond >= 10) return { minor: 5, major: 30 };
- return { minor: 10, major: 60 };
+function getGridInterval(
+ pixelsPerFrame: number,
+ fpsFloat: number,
+): { minor: number; major: number } {
+ const fps = Math.round(fpsFloat);
+ const pps = pixelsPerFrame * fpsFloat;
+
+ // Very high zoom: individual frames visible
+ if (pps >= 600) return { minor: 1, major: Math.max(1, Math.round(fps / 6)) };
+ if (pps >= 400) return { minor: 1, major: Math.max(1, Math.round(fps / 2)) };
+ // High zoom: sub-second intervals
+ if (pps >= 200) return { minor: Math.max(1, Math.round(fps / 10)), major: fps };
+ if (pps >= 100) return { minor: Math.max(1, Math.round(fps / 2)), major: fps * 5 };
+ // Medium zoom: second intervals
+ if (pps >= 50) return { minor: fps, major: fps * 5 };
+ if (pps >= 20) return { minor: fps * 2, major: fps * 10 };
+ // Low zoom: multi-second intervals
+ if (pps >= 10) return { minor: fps * 5, major: fps * 30 };
+ if (pps >= 3) return { minor: fps * 10, major: fps * 60 };
+ // Very low zoom: minute intervals
+ return { minor: fps * 30, major: fps * 120 };
}
/**
- * Format time as MM:SS or MM:SS.ms
+ * Format a frame number as timecode. Adapts format based on magnitude:
+ * - Short durations: SS:FF (e.g., "5:12")
+ * - Medium: M:SS (e.g., "2:05")
+ * - Long: H:MM:SS (e.g., "1:02:05")
*/
-function formatTime(seconds: number, showMs = false): string {
- const mins = Math.floor(seconds / 60);
- const secs = Math.floor(seconds % 60);
- const ms = Math.floor((seconds % 1) * 100);
-
- if (showMs) {
- return `${mins}:${secs.toString().padStart(2, "0")}.${ms.toString().padStart(2, "0")}`;
+function formatFrameTimecode(frame: number, fpsFloat: number): string {
+ const totalSeconds = frame / fpsFloat;
+ const hrs = Math.floor(totalSeconds / 3600);
+ const mins = Math.floor((totalSeconds % 3600) / 60);
+ const secs = Math.floor(totalSeconds % 60);
+ const ff = Math.round(frame % fpsFloat);
+
+ if (hrs > 0) {
+ return `${hrs}:${mins.toString().padStart(2, "0")}:${secs.toString().padStart(2, "0")}`;
+ }
+ if (mins > 0) {
+ return `${mins}:${secs.toString().padStart(2, "0")}`;
}
- return `${mins}:${secs.toString().padStart(2, "0")}`;
+ return `${secs}:${ff.toString().padStart(2, "0")}`;
}
export function TimelineStage({
@@ -233,6 +244,8 @@ export function TimelineStage({
clipId: string;
startTime: number;
duration: number;
+ /** Updated inPoint for left-trim preview (frames) */
+ inPoint?: number;
// Linked clip preview
linkedClipId?: string;
linkedTrackIndex?: number;
@@ -242,6 +255,7 @@ export function TimelineStage({
clipId: string;
startTime: number;
duration: number;
+ inPoint?: number;
trackIndex: number;
linkedClipId?: string;
linkedTrackIndex?: number;
@@ -317,8 +331,10 @@ export function TimelineStage({
const zoom = useVideoEditorStore((s) => s.zoom);
const scrollX = useVideoEditorStore((s) => s.scrollX);
const scrollY = useVideoEditorStore((s) => s.scrollY);
- const currentTime = useVideoEditorStore((s) => s.currentTime);
- const duration = useVideoEditorStore((s) => s.duration);
+ const currentTime = useVideoEditorStore((s) => s.currentFrame);
+ const duration = useVideoEditorStore((s) => s.durationFrames);
+ const fps = useVideoEditorStore((s) => s.settings.fps);
+ const fpsFloat = fps.numerator / fps.denominator;
const tracks = useVideoEditorStore((s) => s.tracks);
const clips = useVideoEditorStore((s) => s.clips);
const selectedClipIds = useVideoEditorStore((s) => s.selectedClipIds);
@@ -407,12 +423,12 @@ export function TimelineStage({
const waveformMap = useClipWaveforms(thumbnailClips);
// Coordinate conversion
- const timeToX = useCallback(
+ const frameToX = useCallback(
(time: number) => TRACK_HEADER_WIDTH + time * zoom - scrollX,
[zoom, scrollX],
);
- const xToTime = useCallback(
+ const xToFrame = useCallback(
(x: number) => (x - TRACK_HEADER_WIDTH + scrollX) / zoom,
[zoom, scrollX],
);
@@ -431,25 +447,25 @@ export function TimelineStage({
const contentWidth = TRACK_HEADER_WIDTH + Math.max(duration, 60) * zoom;
const totalHeight = RULER_HEIGHT + allTracks.length * TRACK_HEIGHT;
- // Generate grid lines for ruler
+ // Generate grid lines for ruler (in frames)
const gridLines = useMemo(() => {
- const { minor, major } = getGridInterval(zoom);
- const lines: Array<{ x: number; isMajor: boolean; time: number }> = [];
- const startTime = Math.floor(scrollX / zoom / minor) * minor;
- const endTime = Math.ceil((scrollX + width) / zoom / minor) * minor;
-
- for (
- let time = startTime;
- time <= endTime && time <= Math.max(duration, 60) + 10;
- time += minor
- ) {
- if (time < 0) continue;
- const x = timeToX(time);
+ const { minor, major } = getGridInterval(zoom, fpsFloat);
+ // Ensure minor is at least 1 frame
+ const minorStep = Math.max(1, minor);
+ const majorStep = Math.max(1, major);
+ const lines: Array<{ x: number; isMajor: boolean; frame: number }> = [];
+ const startFrame = Math.floor(scrollX / zoom / minorStep) * minorStep;
+ const endFrame = Math.ceil((scrollX + width) / zoom / minorStep) * minorStep;
+ const maxFrame = Math.max(duration, Math.round(60 * fpsFloat));
+
+ for (let f = startFrame; f <= endFrame && f <= maxFrame + minorStep; f += minorStep) {
+ if (f < 0) continue;
+ const x = frameToX(f);
if (x < TRACK_HEADER_WIDTH || x > width) continue;
- lines.push({ x, isMajor: Math.abs(time % major) < 0.001, time });
+ lines.push({ x, isMajor: majorStep > 0 && f % majorStep === 0, frame: f });
}
return lines;
- }, [scrollX, zoom, width, duration, timeToX]);
+ }, [scrollX, zoom, width, duration, fpsFloat, frameToX]);
// Handle wheel for zoom/scroll
const handleWheel = useCallback(
@@ -518,20 +534,20 @@ export function TimelineStage({
if (trackIndex < 0 || trackIndex >= allTracks.length) return null;
const track = allTracks[trackIndex];
- const time = xToTime(x);
+ const frame = xToFrame(x);
for (const clip of clips) {
if (clip.trackId !== track.fullId) continue;
const clipEnd = clip.startTime + clip.duration;
- if (time >= clip.startTime && time <= clipEnd) {
+ if (frame >= clip.startTime && frame <= clipEnd) {
return { clip, trackIndex };
}
}
return null;
},
- [clips, allTracks, xToTime, yToTrackIndex],
+ [clips, allTracks, xToFrame, yToTrackIndex],
);
// Determine if mouse is near a trim handle
@@ -593,7 +609,7 @@ export function TimelineStage({
// Use actual clip overlap region
const overlapStart = incoming.startTime;
const overlapEnd = outgoing.startTime + outgoing.duration;
- const ctX = timeToX(overlapStart);
+ const ctX = frameToX(overlapStart);
const ctWidth = (overlapEnd - overlapStart) * zoom;
const ctY = trackIndexToY(trackIndex) + CLIP_PADDING;
const ctHeight = TRACK_HEIGHT - CLIP_PADDING * 2;
@@ -610,7 +626,7 @@ export function TimelineStage({
}
return null;
},
- [crossTransitions, clips, allTracks, timeToX, trackIndexToY, zoom],
+ [crossTransitions, clips, allTracks, frameToX, trackIndexToY, zoom],
);
// Handle mouse down on stage
@@ -633,8 +649,8 @@ export function TimelineStage({
// Clicking on ruler - start playhead drag
if (pos.y < RULER_HEIGHT && pos.x > TRACK_HEADER_WIDTH) {
isDraggingPlayheadRef.current = true;
- const time = Math.max(0, Math.min(duration, xToTime(pos.x)));
- seekTo(time);
+ const frame = Math.max(0, Math.min(duration, xToFrame(pos.x)));
+ seekTo(frame);
return;
}
@@ -721,12 +737,12 @@ export function TimelineStage({
// Razor tool: split clip at click position
if (activeTool === "razor") {
- const splitTime = xToTime(pos.x);
+ const splitTime = xToFrame(pos.x);
splitClipAtTime(clip.id, splitTime);
return;
}
- const clipX = timeToX(clip.startTime);
+ const clipX = frameToX(clip.startTime);
const clipWidth = clip.duration * zoom;
// Check for transition resize handle or transition body click
@@ -950,9 +966,9 @@ export function TimelineStage({
},
[
duration,
- xToTime,
+ xToFrame,
zoom,
- timeToX,
+ frameToX,
getClipAtPosition,
getTrimEdge,
seekTo,
@@ -993,8 +1009,8 @@ export function TimelineStage({
// Playhead dragging
if (isDraggingPlayheadRef.current) {
- const time = Math.max(0, Math.min(duration, xToTime(pos.x)));
- seekTo(time);
+ const frame = Math.max(0, Math.min(duration, xToFrame(pos.x)));
+ seekTo(frame);
return;
}
@@ -1032,8 +1048,8 @@ export function TimelineStage({
} else {
anchorNewStart = Math.max(0, originalStartTime + deltaTime);
}
- const snapResult = snapTime(anchorNewStart, snapTargetsRef.current, thresholdTime);
- anchorNewStart = snapResult.time;
+ const snapResult = snapFrame(anchorNewStart, snapTargetsRef.current, thresholdTime);
+ anchorNewStart = snapResult.frame;
setSnapLines(snapResult.snapLines);
const anchorDelta = anchorNewStart - originalStartTime;
@@ -1057,6 +1073,7 @@ export function TimelineStage({
clipId: mc.clipId,
startTime: clipNewStart,
duration: clipNewDuration,
+ inPoint: mc.originalInPoint + (clipNewStart - mc.originalStartTime) * mc.speed,
trackIndex: clipTrackIndex,
linkedClipId: mc.linkedClipId,
linkedTrackIndex: mc.linkedTrackIndex,
@@ -1069,6 +1086,7 @@ export function TimelineStage({
clipId,
startTime: anchorNewStart,
duration: anchorNewDuration,
+ inPoint: originalInPoint + (anchorNewStart - originalStartTime) * speed,
linkedClipId,
linkedTrackIndex,
isMulti: true,
@@ -1086,10 +1104,10 @@ export function TimelineStage({
Math.min(anchorMaxDuration, originalDuration + deltaTime),
);
const endTime = originalStartTime + anchorNewDuration;
- const snapResult = snapTime(endTime, snapTargetsRef.current, thresholdTime);
+ const snapResult = snapFrame(endTime, snapTargetsRef.current, thresholdTime);
anchorNewDuration = Math.max(
0.1,
- Math.min(anchorMaxDuration, snapResult.time - originalStartTime),
+ Math.min(anchorMaxDuration, snapResult.frame - originalStartTime),
);
setSnapLines(snapResult.snapLines);
const anchorDelta = anchorNewDuration - originalDuration;
@@ -1141,17 +1159,19 @@ export function TimelineStage({
newStartTime = Math.max(0, originalStartTime + deltaTime);
}
- const snapResult = snapTime(newStartTime, snapTargetsRef.current, thresholdTime);
- newStartTime = snapResult.time;
+ const snapResult = snapFrame(newStartTime, snapTargetsRef.current, thresholdTime);
+ newStartTime = snapResult.frame;
setSnapLines(snapResult.snapLines);
const newDuration = originalStartTime + originalDuration - newStartTime;
if (newDuration >= 0.1) {
+ const newInPoint = originalInPoint + (newStartTime - originalStartTime) * speed;
setTrimPreview({
clipId,
startTime: newStartTime,
duration: newDuration,
+ inPoint: newInPoint,
linkedClipId,
linkedTrackIndex,
});
@@ -1165,8 +1185,8 @@ export function TimelineStage({
let newDuration = Math.max(0.1, Math.min(maxDuration, originalDuration + deltaTime));
const endTime = originalStartTime + newDuration;
- const snapResult = snapTime(endTime, snapTargetsRef.current, thresholdTime);
- newDuration = snapResult.time - originalStartTime;
+ const snapResult = snapFrame(endTime, snapTargetsRef.current, thresholdTime);
+ newDuration = snapResult.frame - originalStartTime;
newDuration = Math.max(0.1, Math.min(maxDuration, newDuration));
setSnapLines(snapResult.snapLines);
@@ -1285,23 +1305,23 @@ export function TimelineStage({
const mcNewEnd = mcNewStart + mcClip.duration;
// Check left edge snap
- const leftSnap = snapTime(mcNewStart, snapTargetsRef.current, thresholdTime);
+ const leftSnap = snapFrame(mcNewStart, snapTargetsRef.current, thresholdTime);
if (leftSnap.snapLines.length > 0) {
- const dist = Math.abs(leftSnap.time - mcNewStart);
+ const dist = Math.abs(leftSnap.frame - mcNewStart);
if (dist < bestSnapDist) {
bestSnapDist = dist;
- bestSnapDelta = deltaTime + (leftSnap.time - mcNewStart);
+ bestSnapDelta = deltaTime + (leftSnap.frame - mcNewStart);
bestSnapLines = leftSnap.snapLines;
}
}
// Check right edge snap
- const rightSnap = snapTime(mcNewEnd, snapTargetsRef.current, thresholdTime);
+ const rightSnap = snapFrame(mcNewEnd, snapTargetsRef.current, thresholdTime);
if (rightSnap.snapLines.length > 0) {
- const dist = Math.abs(rightSnap.time - mcNewEnd);
+ const dist = Math.abs(rightSnap.frame - mcNewEnd);
if (dist < bestSnapDist) {
bestSnapDist = dist;
- bestSnapDelta = deltaTime + (rightSnap.time - mcNewEnd);
+ bestSnapDelta = deltaTime + (rightSnap.frame - mcNewEnd);
bestSnapLines = rightSnap.snapLines;
}
}
@@ -1332,7 +1352,7 @@ export function TimelineStage({
const mcNewStart = mc.originalStartTime + bestSnapDelta;
multiPreviews.push({
clipId: mc.clipId,
- x: timeToX(mcNewStart),
+ x: frameToX(mcNewStart),
y: trackIndexToY(mc.originalTrackIndex) + CLIP_PADDING,
trackIndex: mc.originalTrackIndex,
});
@@ -1345,7 +1365,7 @@ export function TimelineStage({
) {
multiPreviews.push({
clipId: mc.linkedClipId,
- x: timeToX(mcNewStart),
+ x: frameToX(mcNewStart),
y: trackIndexToY(mc.linkedOriginalTrackIndex) + CLIP_PADDING,
trackIndex: mc.linkedOriginalTrackIndex,
});
@@ -1356,7 +1376,7 @@ export function TimelineStage({
const anchorNewStart = originalStartTime + bestSnapDelta;
setDragPreview({
clipId,
- x: timeToX(anchorNewStart),
+ x: frameToX(anchorNewStart),
y: trackIndexToY(originalTrackIndex) + CLIP_PADDING,
trackIndex: originalTrackIndex,
isMulti: true,
@@ -1378,27 +1398,27 @@ export function TimelineStage({
let newStartTime = Math.max(0, originalStartTime + deltaTime);
- const leftSnap = snapTime(newStartTime, snapTargetsRef.current, thresholdTime);
+ const leftSnap = snapFrame(newStartTime, snapTargetsRef.current, thresholdTime);
const rightEdge = newStartTime + clip.duration;
- const rightSnap = snapTime(rightEdge, snapTargetsRef.current, thresholdTime);
+ const rightSnap = snapFrame(rightEdge, snapTargetsRef.current, thresholdTime);
- const leftDist = Math.abs(leftSnap.time - newStartTime);
- const rightDist = Math.abs(rightSnap.time - rightEdge);
+ const leftDist = Math.abs(leftSnap.frame - newStartTime);
+ const rightDist = Math.abs(rightSnap.frame - rightEdge);
if (leftSnap.snapLines.length > 0 || rightSnap.snapLines.length > 0) {
if (leftSnap.snapLines.length > 0 && rightSnap.snapLines.length > 0) {
if (leftDist <= rightDist) {
- newStartTime = leftSnap.time;
+ newStartTime = leftSnap.frame;
setSnapLines(leftSnap.snapLines);
} else {
- newStartTime = rightSnap.time - clip.duration;
+ newStartTime = rightSnap.frame - clip.duration;
setSnapLines(rightSnap.snapLines);
}
} else if (leftSnap.snapLines.length > 0) {
- newStartTime = leftSnap.time;
+ newStartTime = leftSnap.frame;
setSnapLines(leftSnap.snapLines);
} else {
- newStartTime = rightSnap.time - clip.duration;
+ newStartTime = rightSnap.frame - clip.duration;
setSnapLines(rightSnap.snapLines);
}
} else {
@@ -1419,7 +1439,7 @@ export function TimelineStage({
}
}
- const newX = timeToX(newStartTime);
+ const newX = frameToX(newStartTime);
const newY = trackIndexToY(newTrackIndex) + CLIP_PADDING;
let linkedX: number | undefined;
@@ -1445,7 +1465,7 @@ export function TimelineStage({
}
}
- linkedX = timeToX(newStartTime);
+ linkedX = frameToX(newStartTime);
linkedY = trackIndexToY(linkedTrackIndex) + CLIP_PADDING;
}
}
@@ -1484,7 +1504,7 @@ export function TimelineStage({
for (const clip of clips) {
const trackIndex = allTracks.findIndex((t) => t.fullId === clip.trackId);
if (trackIndex === -1) continue;
- const cx = timeToX(clip.startTime);
+ const cx = frameToX(clip.startTime);
const cy = trackIndexToY(trackIndex) + CLIP_PADDING;
const cw = clip.duration * zoom;
const ch = TRACK_HEIGHT - CLIP_PADDING * 2;
@@ -1541,7 +1561,7 @@ export function TimelineStage({
trackHeight: TRACK_HEIGHT - CLIP_PADDING * 2,
});
} else {
- const clipX = timeToX(clip.startTime);
+ const clipX = frameToX(clip.startTime);
const clipWidth = clip.duration * zoom;
// Check transition resize handles first
@@ -1591,8 +1611,8 @@ export function TimelineStage({
[
duration,
zoom,
- xToTime,
- timeToX,
+ xToFrame,
+ frameToX,
trackIndexToY,
allTracks,
clips,
@@ -1698,14 +1718,14 @@ export function TimelineStage({
.filter((mc) => selectedIds.has(mc.clipId))
.map((mc) => ({
clipId: mc.clipId,
- newStartTime: xToTime(mc.x),
+ newStartTime: xToFrame(mc.x),
}));
batchMoveClips(moves);
} else {
// Single-clip drag
const { clipId } = dragState;
const newTrack = allTracks[dragPreview.trackIndex];
- const newStartTime = xToTime(dragPreview.x);
+ const newStartTime = xToFrame(dragPreview.x);
if (newTrack) {
moveClipTimeAndTrack(clipId, newStartTime, newTrack.fullId);
@@ -1728,7 +1748,7 @@ export function TimelineStage({
transitionResizePreview,
allTracks,
clips,
- xToTime,
+ xToFrame,
moveClipTimeAndTrack,
batchMoveClips,
trimLeft,
@@ -1760,7 +1780,7 @@ export function TimelineStage({
}, []);
// Playhead X position
- const playheadX = timeToX(currentTime);
+ const playheadX = frameToX(currentTime);
// Render a clip
const renderClip = useCallback(
@@ -1788,7 +1808,7 @@ export function TimelineStage({
clipThumbnails?: ClipThumbnailData[],
clipWaveformMap?: Map,
) => {
- const x = overrideX ?? timeToX(clip.startTime);
+ const x = overrideX ?? frameToX(clip.startTime);
const y = overrideY ?? trackIndexToY(trackIndex) + CLIP_PADDING;
const clipWidth = clip.duration * zoom;
const clipHeight = TRACK_HEIGHT - CLIP_PADDING * 2;
@@ -1852,23 +1872,48 @@ export function TimelineStage({
>
{thumbnails.map((thumb) => {
if (!thumb.image) return null;
- // Calculate slot width and scale thumbnail to fit height
const slotWidth = clipWidth / thumbnails.length;
- const thumbAspect = thumb.image.width / thumb.image.height;
- const thumbHeight = clipHeight - 4;
- const thumbWidth = thumbHeight * thumbAspect;
- // Position relative to clip's current x (not stale thumb.x from hook)
const slotX = x + thumb.slotIndex * slotWidth;
- const thumbX = slotX + (slotWidth - thumbWidth) / 2;
+ const slotHeight = clipHeight - 4;
+ const slotY = y + 2;
+
+ // Crop-to-fill: scale image to cover the slot, clip overflow
+ const imgAspect = thumb.image.width / thumb.image.height;
+ const slotAspect = slotWidth / slotHeight;
+
+ let drawW: number;
+ let drawH: number;
+ let drawX: number;
+ let drawY: number;
+
+ if (imgAspect > slotAspect) {
+ drawH = slotHeight;
+ drawW = slotHeight * imgAspect;
+ drawX = slotX + (slotWidth - drawW) / 2;
+ drawY = slotY;
+ } else {
+ drawW = slotWidth;
+ drawH = slotWidth / imgAspect;
+ drawX = slotX;
+ drawY = slotY + (slotHeight - drawH) / 2;
+ }
+
+ const img = thumb.image;
return (
- {
+ const ctx = context._context;
+ ctx.save();
+ ctx.beginPath();
+ ctx.rect(slotX, slotY, slotWidth, slotHeight);
+ ctx.clip();
+ ctx.imageSmoothingEnabled = true;
+ ctx.imageSmoothingQuality = "high";
+ ctx.globalAlpha = baseOpacity;
+ ctx.drawImage(img, drawX, drawY, drawW, drawH);
+ ctx.restore();
+ }}
listening={false}
/>
);
@@ -1883,7 +1928,9 @@ export function TimelineStage({
(() => {
const wf = clipWaveformMap?.get(clip.assetId);
if (!wf) return null;
- const outPoint = clip.inPoint + clip.duration * clip.speed;
+ // Convert frame-based clip values to seconds to match waveform data
+ const inPointSec = framesToSeconds(clip.inPoint, fps);
+ const outPointSec = inPointSec + framesToSeconds(clip.duration, fps) * clip.speed;
return (
{
@@ -1912,8 +1959,8 @@ export function TimelineStage({
width={clipWidth}
height={clipHeight}
waveformData={wf.data}
- inPoint={clip.inPoint}
- outPoint={outPoint}
+ inPoint={inPointSec}
+ outPoint={outPointSec}
duration={wf.duration}
/>
@@ -2094,7 +2141,7 @@ export function TimelineStage({
);
},
[
- timeToX,
+ frameToX,
trackIndexToY,
zoom,
width,
@@ -2183,8 +2230,13 @@ export function TimelineStage({
if (trimPreview?.isMulti && trimPreview.multiClips) {
const mc = trimPreview.multiClips.find((m) => m.clipId === clip.id);
if (mc) {
+ const mcOverrides = {
+ startTime: mc.startTime,
+ duration: mc.duration,
+ ...(mc.inPoint !== undefined ? { inPoint: mc.inPoint } : {}),
+ };
return renderClip(
- { ...clip, startTime: mc.startTime, duration: mc.duration },
+ { ...clip, ...mcOverrides },
mc.trackIndex >= 0 ? mc.trackIndex : trackIndex,
false,
undefined,
@@ -2199,8 +2251,13 @@ export function TimelineStage({
if (linkedMc && linkedMc.linkedTrackIndex !== undefined) {
const linkedTrack = allTracks[linkedMc.linkedTrackIndex];
const linkedIsLocked = linkedTrack?.locked ?? false;
+ const linkedOverrides = {
+ startTime: linkedMc.startTime,
+ duration: linkedMc.duration,
+ ...(linkedMc.inPoint !== undefined ? { inPoint: linkedMc.inPoint } : {}),
+ };
return renderClip(
- { ...clip, startTime: linkedMc.startTime, duration: linkedMc.duration },
+ { ...clip, ...linkedOverrides },
linkedMc.linkedTrackIndex,
false,
undefined,
@@ -2211,10 +2268,15 @@ export function TimelineStage({
);
}
} else if (trimPreview) {
- // Single-clip trim
+ // Single-clip trim — apply startTime, duration, and inPoint (for left-trim)
+ const trimOverrides = {
+ startTime: trimPreview.startTime,
+ duration: trimPreview.duration,
+ ...(trimPreview.inPoint !== undefined ? { inPoint: trimPreview.inPoint } : {}),
+ };
if (trimPreview.clipId === clip.id) {
return renderClip(
- { ...clip, startTime: trimPreview.startTime, duration: trimPreview.duration },
+ { ...clip, ...trimOverrides },
trackIndex,
false,
undefined,
@@ -2231,7 +2293,7 @@ export function TimelineStage({
const linkedTrack = allTracks[trimPreview.linkedTrackIndex];
const linkedIsLocked = linkedTrack?.locked ?? false;
return renderClip(
- { ...clip, startTime: trimPreview.startTime, duration: trimPreview.duration },
+ { ...clip, ...trimOverrides },
trimPreview.linkedTrackIndex,
false,
undefined,
@@ -2272,7 +2334,7 @@ export function TimelineStage({
const overlapEnd = isResizing
? crossTransitionResizePreview.overlapEnd
: outgoing.startTime + outgoing.duration;
- const ctX = timeToX(overlapStart);
+ const ctX = frameToX(overlapStart);
const ctWidth = (overlapEnd - overlapStart) * zoom;
const ctY = trackIndexToY(trackIndex) + CLIP_PADDING;
const ctHeight = TRACK_HEIGHT - CLIP_PADDING * 2;
@@ -2353,7 +2415,7 @@ export function TimelineStage({
? dragPreview.multiClips.map((mc) => {
const mcClip = clips.find((c) => c.id === mc.clipId);
if (!mcClip) return null;
- const newStartTime = xToTime(mc.x);
+ const newStartTime = xToFrame(mc.x);
return renderClip(
{ ...mcClip, startTime: newStartTime },
mc.trackIndex,
@@ -2370,7 +2432,7 @@ export function TimelineStage({
const clip = clips.find((c) => c.id === dragPreview.clipId);
if (!clip) return null;
- const newStartTime = xToTime(dragPreview.x);
+ const newStartTime = xToFrame(dragPreview.x);
return renderClip(
{ ...clip, startTime: newStartTime },
dragPreview.trackIndex,
@@ -2394,7 +2456,7 @@ export function TimelineStage({
const linkedClip = clips.find((c) => c.id === dragPreview.linkedClipId);
if (!linkedClip) return null;
- const newStartTime = xToTime(dragPreview.linkedX);
+ const newStartTime = xToFrame(dragPreview.linkedX);
return renderClip(
{ ...linkedClip, startTime: newStartTime },
dragPreview.linkedTrackIndex,
@@ -2409,7 +2471,7 @@ export function TimelineStage({
{/* Snap lines */}
{snapLines.map((snapTime) => {
- const sx = timeToX(snapTime);
+ const sx = frameToX(snapTime);
if (sx < TRACK_HEADER_WIDTH || sx > width) return null;
return (
{/* Ruler time markers */}
- {gridLines.map((line, i) => (
-
-
- {line.isMajor && (
- {
+ const fps = Math.round(fpsFloat);
+ const isOnSecondBoundary = fps > 0 && line.frame % fps === 0;
+ // Show text only on major lines that fall on a whole-second boundary
+ const showLabel = line.isMajor && isOnSecondBoundary;
+ // Major sub-second lines get a medium tick (between major and minor height)
+ const tickTop = showLabel ? 20 : line.isMajor ? 25 : 30;
+
+ return (
+
+
- )}
-
- ))}
+ {showLabel && (
+
+ )}
+
+ );
+ })}
{/* Track headers background */}
s.zoom);
const setZoom = useVideoEditorStore((s) => s.setZoom);
+ const setScrollX = useVideoEditorStore((s) => s.setScrollX);
const activeTool = useVideoEditorStore((s) => s.activeTool);
const setActiveTool = useVideoEditorStore((s) => s.setActiveTool);
+ /** Zoom to a new level, adjusting scrollX to keep the playhead in place. */
+ const zoomAroundPlayhead = useCallback(
+ (newZoom: number) => {
+ const state = useVideoEditorStore.getState();
+ const playheadFrame = state.currentFrame;
+ const oldZoom = state.zoom;
+ const oldScrollX = state.scrollX;
+
+ // Current screen-x of the playhead
+ const playheadScreenX = TRACK_HEADER_WIDTH + playheadFrame * oldZoom - oldScrollX;
+
+ // New scrollX that keeps the playhead at the same screen position
+ const newScrollX = TRACK_HEADER_WIDTH + playheadFrame * newZoom - playheadScreenX;
+
+ setZoom(newZoom);
+ setScrollX(Math.max(0, newScrollX));
+ },
+ [setZoom, setScrollX],
+ );
+
// Zoom slider uses a log scale for more intuitive feel
// slider value 0..100 maps to MIN_ZOOM..MAX_ZOOM exponentially
const zoomToSlider = useCallback((z: number) => {
@@ -33,18 +54,18 @@ export function TimelineToolbar() {
const handleSliderChange = useCallback(
(value: number[]) => {
- setZoom(sliderToZoom(value[0]));
+ zoomAroundPlayhead(sliderToZoom(value[0]));
},
- [setZoom, sliderToZoom],
+ [zoomAroundPlayhead, sliderToZoom],
);
const handleZoomIn = useCallback(() => {
- setZoom(Math.min(MAX_ZOOM, zoom * 1.2));
- }, [zoom, setZoom]);
+ zoomAroundPlayhead(Math.min(MAX_ZOOM, zoom * 1.2));
+ }, [zoom, zoomAroundPlayhead]);
const handleZoomOut = useCallback(() => {
- setZoom(Math.max(MIN_ZOOM, zoom / 1.2));
- }, [zoom, setZoom]);
+ zoomAroundPlayhead(Math.max(MIN_ZOOM, zoom / 1.2));
+ }, [zoom, zoomAroundPlayhead]);
return (
diff --git a/apps/ui/src/components/timeline/use-asset-store.ts b/apps/ui/src/components/timeline/use-asset-store.ts
index 98fb7a4..97f21a2 100644
--- a/apps/ui/src/components/timeline/use-asset-store.ts
+++ b/apps/ui/src/components/timeline/use-asset-store.ts
@@ -2,6 +2,7 @@
* Asset store for managing imported media files.
*/
import { create } from "zustand";
+import { secondsToFrames } from "@tooscut/render-engine";
import { db } from "../../state/db";
import {
useVideoEditorStore,
@@ -517,12 +518,14 @@ export function handleNativeFileDrop(
*/
export function addAssetsToStores(imported: MediaAsset[]) {
useAssetStore.getState().addAssets(imported);
- const editorAssets = imported.map((a) => ({
+ const projectFps = useVideoEditorStore.getState().settings.fps;
+ const editorAssets: StoreMediaAsset[] = imported.map((a) => ({
id: a.id,
type: a.type,
name: a.name,
url: a.url,
- duration: a.duration,
+ // Convert source duration (seconds) to project frames
+ duration: a.type === "image" ? 0 : secondsToFrames(a.duration, projectFps),
width: a.width,
height: a.height,
thumbnailUrl: a.thumbnailUrl,
diff --git a/apps/ui/src/components/timeline/use-clip-thumbnails.ts b/apps/ui/src/components/timeline/use-clip-thumbnails.ts
index 91e37ad..8879f77 100644
--- a/apps/ui/src/components/timeline/use-clip-thumbnails.ts
+++ b/apps/ui/src/components/timeline/use-clip-thumbnails.ts
@@ -10,18 +10,17 @@
*/
import { useEffect, useRef, useState, useCallback } from "react";
-import { VideoFrameLoaderManager } from "@tooscut/render-engine";
+import { VideoFrameLoaderManager, framesToSeconds } from "@tooscut/render-engine";
import { useVideoEditorStore } from "../../state/video-editor-store";
-import {
- getCachedBitmap,
- getNearestCachedBitmap,
- setCachedBitmap,
- clearThumbnailCache,
-} from "./thumbnail-cache";
+import { getCachedBitmap, setCachedBitmap, clearThumbnailCache } from "./thumbnail-cache";
import { useAssetStore } from "./use-asset-store";
/** Width of each thumbnail slot in pixels */
const THUMBNAIL_SLOT_WIDTH = 80;
+/** Height to resize thumbnails to (accounts for HiDPI) */
+const THUMBNAIL_RESIZE_HEIGHT = Math.round(
+ 80 * (typeof window !== "undefined" ? (window.devicePixelRatio ?? 2) : 2),
+);
/** Buffer zone - number of slots to preload beyond visible area */
const BUFFER_SLOTS = 2;
@@ -99,6 +98,7 @@ export function useClipThumbnails({
const thumbnailDataRef = useRef([]);
const assets = useAssetStore((state) => state.assets);
+ const fps = useVideoEditorStore((s) => s.settings.fps);
// Initialize loader manager (dedicated instance for thumbnails)
useEffect(() => {
@@ -124,22 +124,21 @@ export function useClipThumbnails({
for (let i = 0; i < numSlots; i++) {
// Image clips use timestamp 0 for all slots (same image, no time variation)
- // Video clips compute media time from inPoint and speed
- const mediaTime =
+ // Video clips compute media time from inPoint and speed (all in frames),
+ // then convert to seconds for video frame extraction
+ const mediaTimeFrames =
clip.type === "image"
? 0
: clip.inPoint + (i + 0.5) * (clip.duration / numSlots) * clip.speed;
+ const mediaTime = framesToSeconds(mediaTimeFrames, fps);
- // Check exact cache first, then fall back to nearest cached frame
- const exactBitmap = getCachedBitmap(clip.assetId, mediaTime, THUMBNAIL_SLOT_WIDTH);
- const image =
- exactBitmap ?? getNearestCachedBitmap(clip.assetId, mediaTime, THUMBNAIL_SLOT_WIDTH);
+ const image = getCachedBitmap(clip.assetId, mediaTime, THUMBNAIL_SLOT_WIDTH);
slots.push({
x: 0, // Not used — render positions via slotIndex
timestamp: mediaTime,
image,
- needsLoad: !exactBitmap, // nearest-cache fallback still needs exact load
+ needsLoad: !image,
key: `${clip.id}-${i}`,
assetId: clip.assetId,
slotIndex: i,
@@ -226,7 +225,12 @@ export function useClipThumbnails({
const prioritized = prioritizeSlots(clipData.slots, clipData.startTime, clipData.duration);
for (const slot of prioritized) {
- allSlotsToLoad.push({ clipIdx, slot, assetUrl: asset.url, clipType: clipData.clipType });
+ allSlotsToLoad.push({
+ clipIdx,
+ slot,
+ assetUrl: asset.url,
+ clipType: clipData.clipType,
+ });
}
}
@@ -270,7 +274,11 @@ export function useClipThumbnails({
if (cached) {
const slotIdx = updates[clipIdx].slots.findIndex((s) => s.key === slot.key);
if (slotIdx !== -1) {
- updates[clipIdx].slots[slotIdx] = { ...slot, image: cached, needsLoad: false };
+ updates[clipIdx].slots[slotIdx] = {
+ ...slot,
+ image: cached,
+ needsLoad: false,
+ };
emitUpdate();
}
continue;
@@ -289,7 +297,14 @@ export function useClipThumbnails({
activeLoads--;
break;
}
- bitmap = await loader.getImageBitmap(slot.timestamp);
+ const fullBitmap = await loader.getImageBitmap(slot.timestamp);
+ const aspect = fullBitmap.width / fullBitmap.height;
+ bitmap = await createImageBitmap(fullBitmap, {
+ resizeWidth: Math.round(THUMBNAIL_RESIZE_HEIGHT * aspect),
+ resizeHeight: THUMBNAIL_RESIZE_HEIGHT,
+ resizeQuality: "high",
+ });
+ fullBitmap.close();
}
if (signal.aborted) {
@@ -305,13 +320,21 @@ export function useClipThumbnails({
for (let si = 0; si < updates[clipIdx].slots.length; si++) {
const s = updates[clipIdx].slots[si];
if (s.needsLoad) {
- updates[clipIdx].slots[si] = { ...s, image: bitmap, needsLoad: false };
+ updates[clipIdx].slots[si] = {
+ ...s,
+ image: bitmap,
+ needsLoad: false,
+ };
}
}
} else {
const slotIdx = updates[clipIdx].slots.findIndex((s) => s.key === slot.key);
if (slotIdx !== -1) {
- updates[clipIdx].slots[slotIdx] = { ...slot, image: bitmap, needsLoad: false };
+ updates[clipIdx].slots[slotIdx] = {
+ ...slot,
+ image: bitmap,
+ needsLoad: false,
+ };
}
}
emitUpdate();
diff --git a/apps/ui/src/components/timeline/waveform-display.tsx b/apps/ui/src/components/timeline/waveform-display.tsx
index 6222968..d43ffa6 100644
--- a/apps/ui/src/components/timeline/waveform-display.tsx
+++ b/apps/ui/src/components/timeline/waveform-display.tsx
@@ -7,56 +7,21 @@ interface WaveformDisplayProps {
width: number;
height: number;
waveformData: number[];
+ /** Start of visible region in seconds */
inPoint: number;
+ /** End of visible region in seconds */
outPoint: number;
+ /** Total source duration in seconds */
duration: number;
color?: string;
- clipColor?: string;
}
-// Target bar width in pixels for consistent visual density
-const TARGET_BAR_WIDTH = 2;
-const BAR_GAP = 1;
-
/**
- * Resample waveform data to a target number of bars.
- * Uses peak values when downsampling for better visual representation.
- */
-function resampleWaveform(data: number[], targetBars: number): number[] {
- if (data.length === 0) return [];
- if (data.length <= targetBars) {
- // Upsample: repeat samples to fill gaps
- const result: number[] = [];
- const step = data.length / targetBars;
- for (let i = 0; i < targetBars; i++) {
- const index = Math.min(Math.floor(i * step), data.length - 1);
- result.push(data[index]);
- }
- return result;
- }
-
- // Downsample: use RMS of each bucket so wide buckets don't saturate to 1.0
- const result: number[] = [];
- const samplesPerBar = data.length / targetBars;
-
- for (let i = 0; i < targetBars; i++) {
- const start = Math.floor(i * samplesPerBar);
- const end = Math.min(Math.floor((i + 1) * samplesPerBar), data.length);
-
- let sumSq = 0;
- for (let j = start; j < end; j++) {
- sumSq += data[j] * data[j];
- }
- result.push(Math.sqrt(sumSq / (end - start)));
- }
-
- return result;
-}
-
-/**
- * Renders an audio waveform visualization using Konva Shape.
- * Supports trimming via inPoint/outPoint.
- * Resamples waveform data to maintain consistent bar widths regardless of zoom.
+ * Renders an audio waveform as a mirrored filled area chart.
+ *
+ * Draws every data point at its natural time position (no resampling)
+ * so the waveform shape stays stable during trim drags — only the
+ * visible window shifts, never the point positions.
*/
export function WaveformDisplay({
x,
@@ -67,62 +32,53 @@ export function WaveformDisplay({
inPoint,
outPoint,
duration,
- color = "rgba(255, 255, 255, 0.6)",
- clipColor = "rgba(255, 255, 255, 0.3)",
+ color = "rgba(255, 255, 255, 0.5)",
}: WaveformDisplayProps) {
- if (!waveformData || waveformData.length === 0) {
- return null;
- }
-
- // Calculate which portion of waveform to display based on inPoint/outPoint
- const sourceDuration = duration;
- const startRatio = inPoint / sourceDuration;
- const endRatio = outPoint / sourceDuration;
-
- const startIndex = Math.floor(startRatio * waveformData.length);
- const endIndex = Math.ceil(endRatio * waveformData.length);
- const visibleData = waveformData.slice(startIndex, endIndex);
-
- if (visibleData.length === 0) {
+ if (!waveformData || waveformData.length === 0 || duration <= 0) {
return null;
}
- // Calculate target number of bars based on display width
- const targetBars = Math.max(1, Math.floor(width / (TARGET_BAR_WIDTH + BAR_GAP)));
+ const visibleDuration = outPoint - inPoint;
+ if (visibleDuration <= 0) return null;
- const resampledData = resampleWaveform(visibleData, targetBars);
+ // Find the range of data indices that fall within the visible window
+ const startIdx = Math.max(0, Math.floor((inPoint / duration) * waveformData.length));
+ const endIdx = Math.min(
+ waveformData.length,
+ Math.ceil((outPoint / duration) * waveformData.length),
+ );
+ if (endIdx <= startIdx) return null;
const sceneFunc = (context: Konva.Context) => {
const ctx = context._context;
- const barCount = resampledData.length;
- const barWidth = width / barCount;
- const centerY = height / 2;
- const maxHeight = height * 0.8;
+ const centerY = y + height / 2;
+ const maxAmp = height * 0.4;
ctx.beginPath();
- // Draw waveform as mirrored bars
- for (let i = 0; i < barCount; i++) {
- const value = resampledData[i];
- const barHeight = Math.max(1, value * maxHeight);
- const barX = x + i * barWidth + barWidth / 2;
-
- ctx.moveTo(barX, y + centerY - barHeight / 2);
- ctx.lineTo(barX, y + centerY + barHeight / 2);
+ // Top half (left to right) — each point at its natural time position
+ for (let i = startIdx; i < endIdx; i++) {
+ const t = (i / waveformData.length) * duration; // time in seconds
+ const px = x + ((t - inPoint) / visibleDuration) * width;
+ const py = centerY - waveformData[i] * maxAmp;
+ if (i === startIdx) {
+ ctx.moveTo(px, py);
+ } else {
+ ctx.lineTo(px, py);
+ }
}
- ctx.strokeStyle = color;
- ctx.lineWidth = Math.max(1, Math.min(TARGET_BAR_WIDTH, barWidth * 0.8));
- ctx.lineCap = "round";
- ctx.stroke();
+ // Bottom half (right to left, mirrored)
+ for (let i = endIdx - 1; i >= startIdx; i--) {
+ const t = (i / waveformData.length) * duration;
+ const px = x + ((t - inPoint) / visibleDuration) * width;
+ const py = centerY + waveformData[i] * maxAmp;
+ ctx.lineTo(px, py);
+ }
- // Fill center line
- ctx.beginPath();
- ctx.moveTo(x, y + centerY);
- ctx.lineTo(x + width, y + centerY);
- ctx.strokeStyle = clipColor;
- ctx.lineWidth = 1;
- ctx.stroke();
+ ctx.closePath();
+ ctx.fillStyle = color;
+ ctx.fill();
};
return ;
diff --git a/apps/ui/src/hooks/use-audio-engine.ts b/apps/ui/src/hooks/use-audio-engine.ts
index 1440afe..759a6e3 100644
--- a/apps/ui/src/hooks/use-audio-engine.ts
+++ b/apps/ui/src/hooks/use-audio-engine.ts
@@ -1,13 +1,17 @@
/**
* useAudioEngine - React hook for audio playback
*
- * Uses the WASM audio engine with streaming decode via MediaBunny.
- * Audio data is decoded and uploaded incrementally, so playback can
- * start as soon as the first chunks are available.
+ * Uses the WASM audio engine with windowed decode-ahead via MediaBunny.
+ * All store values are in frames; this hook converts to seconds at the
+ * audio engine boundary.
*/
import { useEffect, useRef, useCallback, useState } from "react";
-import { BrowserAudioEngine, type AudioTimelineState } from "@tooscut/render-engine";
+import {
+ BrowserAudioEngine,
+ framesToSeconds,
+ type AudioTimelineState,
+} from "@tooscut/render-engine";
import { useVideoEditorStore } from "../state/video-editor-store";
import { useAssetStore } from "../components/timeline/use-asset-store";
import audioWasmUrl from "@tooscut/render-engine/wasm/audio-engine/audio_engine_bg.wasm?url";
@@ -28,8 +32,9 @@ export function useAudioEngine() {
const clips = useVideoEditorStore((state) => state.clips);
const tracks = useVideoEditorStore((state) => state.tracks);
const isPlaying = useVideoEditorStore((state) => state.isPlaying);
- const currentTime = useVideoEditorStore((state) => state.currentTime);
+ const currentFrame = useVideoEditorStore((state) => state.currentFrame);
const seekVersion = useVideoEditorStore((state) => state.seekVersion);
+ const fps = useVideoEditorStore((state) => state.settings.fps);
const assets = useAssetStore((state) => state.assets);
@@ -79,7 +84,7 @@ export function useAudioEngine() {
}
}, [assets, isWasmReady]);
- // Sync timeline state to WASM engine
+ // Sync timeline state to WASM engine (convert frames → seconds)
useEffect(() => {
const engine = engineRef.current;
if (!engine || !isWasmReady) return;
@@ -90,9 +95,9 @@ export function useAudioEngine() {
id: clip.id,
sourceId: clip.assetId || clip.id,
trackId: clip.trackId,
- startTime: clip.startTime,
- duration: clip.duration,
- inPoint: clip.inPoint,
+ startTime: framesToSeconds(clip.startTime, fps),
+ duration: framesToSeconds(clip.duration, fps),
+ inPoint: framesToSeconds(clip.inPoint, fps),
speed: clip.speed,
gain: clip.volume ?? 1.0,
fadeIn: 0,
@@ -118,17 +123,18 @@ export function useAudioEngine() {
};
engine.setTimeline(timelineState);
- }, [clips, tracks, isWasmReady]);
+ }, [clips, tracks, fps, isWasmReady]);
- // Sync playback state
+ // Sync playback state (convert frame → seconds for seek)
useEffect(() => {
const engine = engineRef.current;
if (!engine || !isWasmReady) return;
if (isPlaying) {
- const time = useVideoEditorStore.getState().currentTime;
+ const frame = useVideoEditorStore.getState().currentFrame;
+ const seekFps = useVideoEditorStore.getState().settings.fps;
void engine.resume().then(() => {
- engine.seek(time);
+ engine.seek(framesToSeconds(frame, seekFps));
engine.setPlaying(true);
});
} else {
@@ -141,7 +147,8 @@ export function useAudioEngine() {
if (seekVersion === 0) return;
const engine = engineRef.current;
if (engine && isWasmReady) {
- engine.seek(useVideoEditorStore.getState().currentTime);
+ const state = useVideoEditorStore.getState();
+ engine.seek(framesToSeconds(state.currentFrame, state.settings.fps));
}
}, [seekVersion, isWasmReady]);
@@ -150,9 +157,9 @@ export function useAudioEngine() {
if (isPlaying) return;
const engine = engineRef.current;
if (engine && isWasmReady) {
- engine.seek(currentTime);
+ engine.seek(framesToSeconds(currentFrame, fps));
}
- }, [currentTime, isPlaying, isWasmReady]);
+ }, [currentFrame, isPlaying, fps, isWasmReady]);
// Resume audio context on user interaction
const resume = useCallback(async () => {
diff --git a/apps/ui/src/hooks/use-auto-save.ts b/apps/ui/src/hooks/use-auto-save.ts
index 79c22e1..c708974 100644
--- a/apps/ui/src/hooks/use-auto-save.ts
+++ b/apps/ui/src/hooks/use-auto-save.ts
@@ -33,7 +33,13 @@ async function generateThumbnail(projectId: string): Promise {
const compositor = getSharedCompositor();
if (!compositor?.isReady) return;
- const { clips, tracks, crossTransitions, settings, currentTime } = useVideoEditorStore.getState();
+ const {
+ clips,
+ tracks,
+ crossTransitions,
+ settings,
+ currentFrame: currentTime,
+ } = useVideoEditorStore.getState();
// Nothing to render if there are no clips
if (clips.length === 0) return;
diff --git a/apps/ui/src/hooks/use-mp4-export.ts b/apps/ui/src/hooks/use-mp4-export.ts
index c23755c..656551e 100644
--- a/apps/ui/src/hooks/use-mp4-export.ts
+++ b/apps/ui/src/hooks/use-mp4-export.ts
@@ -15,7 +15,7 @@ import {
QUALITY_HIGH,
} from "mediabunny";
import { useCallback, useRef, useState } from "react";
-import { EvaluatorManager, type AudioTimelineState } from "@tooscut/render-engine";
+import { EvaluatorManager, framesToSeconds, type AudioTimelineState } from "@tooscut/render-engine";
import initAudioWasm, {
AudioEngine as WasmAudioEngine,
} from "@tooscut/render-engine/wasm/audio-engine/audio_engine.js";
@@ -168,8 +168,8 @@ export function useMp4Export(): Mp4ExportHandle {
throw new Error("No content to export");
}
- const duration = contentDuration;
- const totalFrames = Math.ceil(duration * frameRate);
+ // contentDuration is in frames (project frame rate)
+ const totalFrames = contentDuration;
const workerCount = requestedWorkers ?? getOptimalWorkerCount();
let pool: FrameRendererPool | null = null;
@@ -377,9 +377,9 @@ export function useMp4Export(): Mp4ExportHandle {
id: clip.id,
sourceId: clip.assetId || clip.id,
trackId: clip.trackId,
- startTime: clip.startTime,
- duration: clip.duration,
- inPoint: clip.inPoint,
+ startTime: framesToSeconds(clip.startTime, settings.fps),
+ duration: framesToSeconds(clip.duration, settings.fps),
+ inPoint: framesToSeconds(clip.inPoint, settings.fps),
speed: clip.speed ?? 1,
gain: clip.volume ?? 1,
fadeIn: 0,
@@ -408,7 +408,8 @@ export function useMp4Export(): Mp4ExportHandle {
engine.set_playing(true);
// Render all audio in chunks
- const totalSamples = Math.ceil(duration * sampleRate);
+ const durationSeconds = framesToSeconds(contentDuration, settings.fps);
+ const totalSamples = Math.ceil(durationSeconds * sampleRate);
const fullOutput = new Float32Array(totalSamples * 2);
const chunkSize = 4096;
let rendered = 0;
@@ -495,17 +496,18 @@ export function useMp4Export(): Mp4ExportHandle {
});
// Build frame tasks
- const exportFrames = getExportFrames(duration, frameRate);
+ const exportFrames = getExportFrames(totalFrames);
const frameTasks: RenderFrameTask[] = [];
+ const exportFps = settings.fps;
- for (const { frameIndex, timelineTime } of exportFrames) {
- // Build render frame using layer-builder
+ for (const { frameIndex } of exportFrames) {
+ // Build render frame using layer-builder (timelineTime in frames)
const { frame, visibleMediaClips, crossTransitionTextureMap } = buildLayersForTime({
clips,
tracks,
crossTransitions,
settings: { ...settings, width, height },
- timelineTime,
+ timelineTime: frameIndex,
evaluatorManager,
includeMutedTracks: false,
});
@@ -518,7 +520,7 @@ export function useMp4Export(): Mp4ExportHandle {
const asset = assetMap.get(assetId);
if (!asset) continue;
- const sourceTime = calculateSourceTime(timelineTime, clip);
+ const sourceTime = calculateSourceTime(frameIndex, clip, exportFps);
textureRequests.push({
assetId,
sourceTime,
@@ -529,7 +531,7 @@ export function useMp4Export(): Mp4ExportHandle {
frameTasks.push({
frameIndex,
- timelineTime,
+ timelineFrame: frameIndex,
frame,
textureRequests,
});
@@ -652,7 +654,7 @@ export function useMp4Export(): Mp4ExportHandle {
return {
blob,
mimeType,
- duration,
+ duration: framesToSeconds(contentDuration, settings.fps),
size: buffer.byteLength,
renderTime,
};
diff --git a/apps/ui/src/lib/layer-builder.ts b/apps/ui/src/lib/layer-builder.ts
index 208404f..ba6c091 100644
--- a/apps/ui/src/lib/layer-builder.ts
+++ b/apps/ui/src/lib/layer-builder.ts
@@ -9,6 +9,7 @@
import {
buildRenderFrame,
KeyframeEvaluator,
+ framesToSeconds,
type TextLayerData,
type ShapeLayerData,
type LineLayerData,
@@ -22,6 +23,8 @@ import {
type CrossTransition,
type Transition,
type CrossTransitionRef,
+ type EditableTrack,
+ type FrameRate,
EvaluatorManager,
} from "@tooscut/render-engine";
import type {
@@ -33,13 +36,13 @@ import type {
LineClip,
ProjectSettings,
} from "../state/video-editor-store";
-import type { EditableTrack } from "@tooscut/render-engine";
export interface LayerBuilderInput {
clips: EditorClip[];
tracks: EditableTrack[];
crossTransitions: CrossTransitionRef[];
settings: ProjectSettings;
+ /** Current timeline position in frames (project frame rate) */
timelineTime: number;
evaluatorManager: EvaluatorManager;
/** If true, ignore muted track filtering (for export) */
@@ -233,9 +236,10 @@ export function buildLayersForTime(input: LayerBuilderInput): LayerBuilderOutput
id: mc.id,
assetId: textureId,
trackId: mc.trackId,
- startTime: mc.startTime,
- duration: mc.duration,
- inPoint: mc.inPoint,
+ // Convert frame-based clip fields to seconds for buildRenderFrame/buildMediaLayerData
+ startTime: framesToSeconds(mc.startTime, settings.fps),
+ duration: framesToSeconds(mc.duration, settings.fps),
+ inPoint: framesToSeconds(mc.inPoint, settings.fps),
transform: mc.transform,
effects: mc.effects,
keyframes: mc.keyframes,
@@ -265,15 +269,16 @@ export function buildLayersForTime(input: LayerBuilderInput): LayerBuilderOutput
let opacity = sc.effects?.opacity ?? 1;
if (sc.keyframes?.tracks?.length) {
- const localTime = timelineTime - sc.startTime;
+ // Keyframes are in seconds — convert frame-based local time to seconds
+ const localTimeSeconds = framesToSeconds(timelineTime - sc.startTime, settings.fps);
const evaluator = new KeyframeEvaluator(sc.keyframes);
- const ex = evaluator.evaluate("x", localTime);
- const ey = evaluator.evaluate("y", localTime);
- const ew = evaluator.evaluate("width", localTime);
- const eh = evaluator.evaluate("height", localTime);
- const cr = evaluator.evaluate("cornerRadius", localTime);
- const sw = evaluator.evaluate("strokeWidth", localTime);
- const op = evaluator.evaluate("opacity", localTime);
+ const ex = evaluator.evaluate("x", localTimeSeconds);
+ const ey = evaluator.evaluate("y", localTimeSeconds);
+ const ew = evaluator.evaluate("width", localTimeSeconds);
+ const eh = evaluator.evaluate("height", localTimeSeconds);
+ const cr = evaluator.evaluate("cornerRadius", localTimeSeconds);
+ const sw = evaluator.evaluate("strokeWidth", localTimeSeconds);
+ const op = evaluator.evaluate("opacity", localTimeSeconds);
if (!Number.isNaN(ex) || !Number.isNaN(ey) || !Number.isNaN(ew) || !Number.isNaN(eh)) {
box = {
@@ -314,14 +319,15 @@ export function buildLayersForTime(input: LayerBuilderInput): LayerBuilderOutput
let opacity = lc.effects?.opacity ?? 1;
if (lc.keyframes?.tracks?.length) {
- const localTime = timelineTime - lc.startTime;
+ // Keyframes are in seconds — convert frame-based local time to seconds
+ const localTimeSeconds = framesToSeconds(timelineTime - lc.startTime, settings.fps);
const evaluator = new KeyframeEvaluator(lc.keyframes);
- const x1 = evaluator.evaluate("x1", localTime);
- const y1 = evaluator.evaluate("y1", localTime);
- const x2 = evaluator.evaluate("x2", localTime);
- const y2 = evaluator.evaluate("y2", localTime);
- const sw = evaluator.evaluate("strokeWidth", localTime);
- const op = evaluator.evaluate("opacity", localTime);
+ const x1 = evaluator.evaluate("x1", localTimeSeconds);
+ const y1 = evaluator.evaluate("y1", localTimeSeconds);
+ const x2 = evaluator.evaluate("x2", localTimeSeconds);
+ const y2 = evaluator.evaluate("y2", localTimeSeconds);
+ const sw = evaluator.evaluate("strokeWidth", localTimeSeconds);
+ const op = evaluator.evaluate("opacity", localTimeSeconds);
if (!Number.isNaN(x1) || !Number.isNaN(y1) || !Number.isNaN(x2) || !Number.isNaN(y2)) {
box = {
@@ -352,6 +358,10 @@ export function buildLayersForTime(input: LayerBuilderInput): LayerBuilderOutput
// audio clips are skipped — not rendered visually
}
+ // Convert frame-based timeline time to seconds for the RenderFrame
+ // (compositor and keyframe evaluator expect seconds)
+ const timelineTimeSeconds = framesToSeconds(timelineTime, settings.fps);
+
const frame = buildRenderFrame({
mediaClips: mediaClipsForRender,
textLayers,
@@ -359,7 +369,7 @@ export function buildLayersForTime(input: LayerBuilderInput): LayerBuilderOutput
lineLayers,
tracks: renderTracks,
trackIndexMap,
- timelineTime,
+ timelineTime: timelineTimeSeconds,
width: settings.width,
height: settings.height,
evaluatorManager,
@@ -376,15 +386,18 @@ export function buildLayersForTime(input: LayerBuilderInput): LayerBuilderOutput
}
/**
- * Calculate source time for a clip given timeline time.
+ * Calculate source time (in seconds) for a clip given timeline frame.
+ * All clip fields are in frames. Returns seconds for video frame extraction.
*/
export function calculateSourceTime(
- timelineTime: number,
+ timelineFrame: number,
clip: { startTime: number; inPoint: number; speed?: number },
+ fps: FrameRate,
): number {
- const clipLocalTime = timelineTime - clip.startTime;
+ const clipLocalFrames = timelineFrame - clip.startTime;
const speed = clip.speed ?? 1;
- return clip.inPoint + clipLocalTime * speed;
+ const sourceFrames = clip.inPoint + clipLocalFrames * speed;
+ return framesToSeconds(sourceFrames, fps);
}
/**
@@ -403,24 +416,11 @@ export function getVisibleAssetIds(visibleClips: EditorClip[]): Set<string> {
/**
* Get all frames that need to be rendered for export.
- * Returns an array of { frameIndex, timelineTime } for each frame.
+ * @param durationFrames - Total project duration in frames
+ * @returns Array of frame indices
*/
-export function getExportFrames(
- duration: number,
- frameRate: number,
-): Array<{ frameIndex: number; timelineTime: number }> {
- const frames: Array<{ frameIndex: number; timelineTime: number }> = [];
- const frameDuration = 1 / frameRate;
- const totalFrames = Math.ceil(duration * frameRate);
-
- for (let i = 0; i < totalFrames; i++) {
- frames.push({
- frameIndex: i,
- timelineTime: i * frameDuration,
- });
- }
-
- return frames;
+export function getExportFrames(durationFrames: number): Array<{ frameIndex: number }> {
+ return Array.from({ length: durationFrames }, (_, i) => ({ frameIndex: i }));
}
/**
diff --git a/apps/ui/src/routes/__root.tsx b/apps/ui/src/routes/__root.tsx
index 2e33818..2f7f15d 100644
--- a/apps/ui/src/routes/__root.tsx
+++ b/apps/ui/src/routes/__root.tsx
@@ -23,11 +23,20 @@ export const Route = createRootRoute({
},
],
scripts: [
- {
- defer: true,
- src: "https://cloud.umami.is/script.js",
- "data-website-id": "7776ab6b-d097-4eb4-ad02-a4e6e4adbd3d",
- },
+ import.meta.env.VITE_UMAMI_WEBSITE_ID
+ ? {
+ defer: true,
+ src: "https://cloud.umami.is/script.js",
+ "data-website-id": import.meta.env.VITE_UMAMI_WEBSITE_ID,
+ }
+ : undefined,
+ import.meta.env.DEV
+ ? {
+ src: "https://unpkg.com/react-scan/dist/auto.global.js",
+ crossOrigin: "anonymous",
+ strategy: "beforeInteractive",
+ }
+ : undefined,
],
}),
diff --git a/apps/ui/src/routes/editor/$projectId.tsx b/apps/ui/src/routes/editor/$projectId.tsx
index 4aca1ff..ee049e5 100644
--- a/apps/ui/src/routes/editor/$projectId.tsx
+++ b/apps/ui/src/routes/editor/$projectId.tsx
@@ -23,10 +23,14 @@ export const Route = createFileRoute("/editor/$projectId")({
component: EditorPage,
ssr: false,
pendingComponent: EditorSkeleton,
+ validateSearch: (search: Record<string, unknown>) => ({
+ new: search.new === true || search.new === "true",
+ }),
});
function EditorPage() {
const { projectId } = Route.useParams();
+ const { new: isNewProject } = Route.useSearch();
const navigate = useNavigate();
const [loading, setLoading] = useState(true);
const [error, setError] = useState(null);
@@ -129,7 +133,7 @@ function EditorPage() {
return (
<>
}
+ toolbar={}
assetPanel={}
previewPanel={}
propertiesPanel={}
diff --git a/apps/ui/src/routes/projects.tsx b/apps/ui/src/routes/projects.tsx
index 54a0406..a474e27 100644
--- a/apps/ui/src/routes/projects.tsx
+++ b/apps/ui/src/routes/projects.tsx
@@ -75,7 +75,7 @@ function ProjectChooser() {
const project: LocalProject = {
id,
name: "Untitled Project",
- settings: { width: 1920, height: 1080, fps: 30 },
+ settings: { width: 1920, height: 1080, fps: { numerator: 30, denominator: 1 } },
content: {
tracks,
clips: [],
@@ -87,7 +87,11 @@ function ProjectChooser() {
};
await db.projects.add(project);
- void navigate({ to: "/editor/$projectId", params: { projectId: id } });
+ void navigate({
+ to: "/editor/$projectId",
+ params: { projectId: id },
+ search: { new: true } as any,
+ });
};
const handleConfirmDelete = async () => {
@@ -101,7 +105,11 @@ function ProjectChooser() {
};
const handleOpenProject = (projectId: string) => {
- void navigate({ to: "/editor/$projectId", params: { projectId } });
+ void navigate({
+ to: "/editor/$projectId",
+ params: { projectId },
+ search: { new: false } as any,
+ });
};
if (projects === undefined) {
diff --git a/apps/ui/src/state/db.ts b/apps/ui/src/state/db.ts
index 9718c45..795794e 100644
--- a/apps/ui/src/state/db.ts
+++ b/apps/ui/src/state/db.ts
@@ -36,6 +36,79 @@ class EditorDatabase extends Dexie {
projects: "id, updatedAt, name",
fileHandles: "id",
});
+
+ // V2: Migrate fps from number to FrameRate { numerator, denominator }
+ this.version(2)
+ .stores({
+ projects: "id, updatedAt, name",
+ fileHandles: "id",
+ })
+ .upgrade((tx) => {
+ return tx
+ .table("projects")
+ .toCollection()
+ .modify((project: any) => {
+ if (typeof project.settings?.fps === "number") {
+ project.settings.fps = {
+ numerator: project.settings.fps,
+ denominator: 1,
+ };
+ }
+ });
+ });
+
+ // V3: Convert all time-based values (seconds) to frame-based values (integer frames)
+ this.version(3)
+ .stores({
+ projects: "id, updatedAt, name",
+ fileHandles: "id",
+ })
+ .upgrade((tx) => {
+ return tx
+ .table("projects")
+ .toCollection()
+ .modify((project: any) => {
+ const fps = project.settings?.fps;
+ if (!fps?.numerator) return;
+
+ const fpsFloat = fps.numerator / fps.denominator;
+
+ // Convert clip time fields from seconds to frames
+ for (const clip of project.content?.clips ?? []) {
+ if (typeof clip.startTime === "number") {
+ clip.startTime = Math.round(clip.startTime * fpsFloat);
+ }
+ if (typeof clip.duration === "number" && clip.duration < 1000) {
+ // Heuristic: if duration < 1000, it's likely still in seconds
+ // (a 1000-frame clip at 30fps is ~33s, unlikely for seconds-based)
+ clip.duration = Math.max(1, Math.round(clip.duration * fpsFloat));
+ }
+ if (typeof clip.inPoint === "number") {
+ clip.inPoint = Math.round(clip.inPoint * fpsFloat);
+ }
+ if (typeof clip.assetDuration === "number" && clip.assetDuration < 100000) {
+ clip.assetDuration = Math.round(clip.assetDuration * fpsFloat);
+ }
+ }
+
+ // Convert cross-transition time fields
+ for (const ct of project.content?.crossTransitions ?? []) {
+ if (typeof ct.duration === "number" && ct.duration < 1000) {
+ ct.duration = Math.max(1, Math.round(ct.duration * fpsFloat));
+ }
+ if (typeof ct.boundary === "number") {
+ ct.boundary = Math.round(ct.boundary * fpsFloat);
+ }
+ }
+
+ // Convert asset durations
+ for (const asset of project.content?.assets ?? []) {
+ if (typeof asset.duration === "number" && asset.duration < 100000) {
+ asset.duration = Math.round(asset.duration * fpsFloat);
+ }
+ }
+ });
+ });
}
}
diff --git a/apps/ui/src/state/video-editor-store.ts b/apps/ui/src/state/video-editor-store.ts
index 2ca4569..728991b 100644
--- a/apps/ui/src/state/video-editor-store.ts
+++ b/apps/ui/src/state/video-editor-store.ts
@@ -30,6 +30,7 @@ import {
type CrossTransitionRef,
type CrossTransitionType,
type AudioEffectsParams,
+ type FrameRate,
addTrackPair,
removeTrackPair,
addClip,
@@ -143,16 +144,20 @@ export interface NewClipInput {
/**
* Media asset stored in the editor.
+ * Duration is in frame counts relative to the project frame rate.
*/
export interface MediaAsset {
id: string;
type: "video" | "audio" | "image";
name: string;
url: string;
+ /** Asset duration in frames (project frame rate) */
duration: number;
width?: number;
height?: number;
thumbnailUrl?: string;
+ /** Native frame rate of the source media (for frame-accurate extraction) */
+ sourceFps?: FrameRate;
}
/**
@@ -161,7 +166,7 @@ export interface MediaAsset {
export interface ProjectSettings {
width: number;
height: number;
- fps: number;
+ fps: FrameRate;
}
// ============================================================================
@@ -176,8 +181,10 @@ interface VideoEditorState {
tracks: EditableTrack[];
clips: EditorClip[];
crossTransitions: CrossTransitionRef[];
- currentTime: number;
- duration: number;
+ /** Current playhead position in frames (project frame rate) */
+ currentFrame: number;
+ /** Total project duration in frames (project frame rate) */
+ durationFrames: number;
isPlaying: boolean;
/** Incremented on user-initiated seeks so audio engine can detect them */
seekVersion: number;
@@ -213,9 +220,9 @@ interface VideoEditorState {
updateAssetUrl: (assetId: string, url: string) => void;
// Actions - Playback
- setCurrentTime: (time: number) => void;
- /** Seek to a time (user-initiated) — also increments seekVersion for audio sync */
- seekTo: (time: number) => void;
+ setCurrentFrame: (frame: number) => void;
+ /** Seek to a frame (user-initiated) — also increments seekVersion for audio sync */
+ seekTo: (frame: number) => void;
setIsPlaying: (playing: boolean) => void;
togglePlayback: () => void;
@@ -385,10 +392,14 @@ function availableExtensionBefore(clip: EditorClip): number {
return Math.max(0, clip.inPoint / speed);
}
-function calculateDuration(clips: EditorClip[]): number {
- if (clips.length === 0) return 30; // Default 30 seconds
+/** Calculate project duration in frames. Adds 150 frames (~5s at 30fps) padding after last clip. */
+function calculateDurationFrames(clips: EditorClip[], fps: FrameRate): number {
+ const fpsFloat = fps.numerator / fps.denominator;
+ const defaultDuration = Math.round(30 * fpsFloat); // 30 seconds default
+ const padding = Math.round(5 * fpsFloat); // 5 seconds padding
+ if (clips.length === 0) return defaultDuration;
const maxEnd = Math.max(...clips.map((c) => c.startTime + c.duration));
- return Math.max(30, maxEnd + 5);
+ return Math.max(defaultDuration, maxEnd + padding);
}
/**
@@ -633,14 +644,14 @@ export const useVideoEditorStore = create<VideoEditorState>()(
settings: {
width: 1920,
height: 1080,
- fps: 30,
+ fps: { numerator: 30, denominator: 1 },
},
tracks: [],
clips: [],
crossTransitions: [],
- currentTime: 0,
- duration: 30,
+ currentFrame: 0,
+ durationFrames: 900, // 30s at 30fps
isPlaying: false,
seekVersion: 0,
@@ -649,7 +660,7 @@ export const useVideoEditorStore = create()(
selectedCrossTransition: null,
clipboard: [],
- zoom: 50,
+ zoom: 1.67, // pixels per frame (~50px/s at 30fps)
scrollX: 0,
scrollY: 0,
activeTool: "select" as const,
@@ -665,12 +676,12 @@ export const useVideoEditorStore = create()(
crossTransitions: data.crossTransitions ?? [],
assets: data.assets,
settings: data.settings,
- currentTime: 0,
+ currentFrame: 0,
isPlaying: false,
selectedClipIds: [],
selectedTransition: null,
selectedCrossTransition: null,
- duration: calculateDuration(data.clips),
+ durationFrames: calculateDurationFrames(data.clips, data.settings.fps),
}),
resetStore: () =>
@@ -679,17 +690,17 @@ export const useVideoEditorStore = create()(
clips: [],
crossTransitions: [],
assets: [],
- settings: { width: 1920, height: 1080, fps: 30 },
- currentTime: 0,
+ settings: { width: 1920, height: 1080, fps: { numerator: 30, denominator: 1 } },
+ currentFrame: 0,
isPlaying: false,
selectedClipIds: [],
selectedTransition: null,
selectedCrossTransition: null,
- zoom: 50,
+ zoom: 1.67, // pixels per frame (~50px/s at 30fps)
scrollX: 0,
scrollY: 0,
activeTool: "select" as const,
- duration: 30,
+ durationFrames: 900, // 30s at 30fps
}),
setSettings: (updates) =>
@@ -703,17 +714,17 @@ export const useVideoEditorStore = create()(
})),
// Playback actions
- setCurrentTime: (time) => set({ currentTime: Math.max(0, time) }),
- seekTo: (time) =>
+ setCurrentFrame: (frame) => set({ currentFrame: Math.max(0, Math.round(frame)) }),
+ seekTo: (frame) =>
set((state) => ({
- currentTime: Math.max(0, time),
+ currentFrame: Math.max(0, Math.round(frame)),
seekVersion: state.seekVersion + 1,
})),
setIsPlaying: (isPlaying) => set({ isPlaying }),
togglePlayback: () => set((state) => ({ isPlaying: !state.isPlaying })),
// View actions
- setZoom: (zoom) => set({ zoom: Math.max(1, Math.min(500, zoom)) }),
+ setZoom: (zoom) => set({ zoom: Math.max(0.03, Math.min(20, zoom)) }),
setScrollX: (scrollX) => set({ scrollX: Math.max(0, scrollX) }),
setScrollY: (scrollY) => set({ scrollY: Math.max(0, scrollY) }),
setActiveTool: (activeTool) => set({ activeTool }),
@@ -747,7 +758,7 @@ export const useVideoEditorStore = create()(
// Calculate the offset: shift all pasted clips so the earliest starts at playhead
const earliestStart = Math.min(...state.clipboard.map((c) => c.startTime));
- const offset = state.currentTime - earliestStart;
+ const offset = state.currentFrame - earliestStart;
// Build an old-id → new-id map for relinking
const idMap = new Map<string, string>();
@@ -778,7 +789,7 @@ export const useVideoEditorStore = create()(
set((s) => {
let clips = addClip(s.clips, newClip);
clips = resolveOverlaps(clips, s.tracks, newId, newClip.startTime, newClip.trackId);
- return { clips, duration: calculateDuration(clips) };
+ return { clips, durationFrames: calculateDurationFrames(clips, get().settings.fps) };
});
}
@@ -807,7 +818,7 @@ export const useVideoEditorStore = create()(
tracks,
clips,
selectedClipIds: state.selectedClipIds.filter((id) => clips.some((c) => c.id === id)),
- duration: calculateDuration(clips),
+ durationFrames: calculateDurationFrames(clips, get().settings.fps),
};
}),
@@ -888,7 +899,7 @@ export const useVideoEditorStore = create()(
clips = resolveOverlaps(clips, state.tracks, id, newClip.startTime, trackId);
return {
clips,
- duration: calculateDuration(clips),
+ durationFrames: calculateDurationFrames(clips, get().settings.fps),
};
});
@@ -906,7 +917,7 @@ export const useVideoEditorStore = create()(
clips,
crossTransitions,
selectedClipIds: state.selectedClipIds.filter((id) => clips.some((c) => c.id === id)),
- duration: calculateDuration(clips),
+ durationFrames: calculateDurationFrames(clips, get().settings.fps),
};
}),
@@ -928,7 +939,7 @@ export const useVideoEditorStore = create()(
return {
clips,
crossTransitions: validateCrossTransitions(clips, state.crossTransitions),
- duration: calculateDuration(clips),
+ durationFrames: calculateDurationFrames(clips, get().settings.fps),
};
}),
@@ -991,7 +1002,7 @@ export const useVideoEditorStore = create()(
return {
clips,
crossTransitions: validateCrossTransitions(clips, state.crossTransitions),
- duration: calculateDuration(clips),
+ durationFrames: calculateDurationFrames(clips, get().settings.fps),
};
}
@@ -1010,7 +1021,7 @@ export const useVideoEditorStore = create()(
return {
clips,
crossTransitions: validateCrossTransitions(clips, state.crossTransitions),
- duration: calculateDuration(clips),
+ durationFrames: calculateDurationFrames(clips, get().settings.fps),
};
}
@@ -1030,7 +1041,7 @@ export const useVideoEditorStore = create()(
return {
clips,
crossTransitions: validateCrossTransitions(clips, state.crossTransitions),
- duration: calculateDuration(clips),
+ durationFrames: calculateDurationFrames(clips, get().settings.fps),
};
}),
@@ -1101,7 +1112,7 @@ export const useVideoEditorStore = create()(
return {
clips,
crossTransitions: validateCrossTransitions(clips, state.crossTransitions),
- duration: calculateDuration(clips),
+ durationFrames: calculateDurationFrames(clips, get().settings.fps),
};
}),
@@ -1123,7 +1134,7 @@ export const useVideoEditorStore = create()(
return {
clips,
crossTransitions: validateCrossTransitions(clips, state.crossTransitions),
- duration: calculateDuration(clips),
+ durationFrames: calculateDurationFrames(clips, get().settings.fps),
};
}),
@@ -1146,7 +1157,7 @@ export const useVideoEditorStore = create()(
return {
clips,
crossTransitions: validateCrossTransitions(clips, state.crossTransitions),
- duration: calculateDuration(clips),
+ durationFrames: calculateDurationFrames(clips, get().settings.fps),
};
}),
@@ -1193,7 +1204,7 @@ export const useVideoEditorStore = create()(
return {
clips,
crossTransitions: validateCrossTransitions(clips, state.crossTransitions),
- duration: calculateDuration(clips),
+ durationFrames: calculateDurationFrames(clips, get().settings.fps),
};
}),
@@ -1221,7 +1232,7 @@ export const useVideoEditorStore = create()(
return {
clips: sortClipsByStartTime(result.updatedClips),
- duration: calculateDuration(result.updatedClips),
+ durationFrames: calculateDurationFrames(result.updatedClips, get().settings.fps),
};
}),
diff --git a/apps/ui/src/workers/frame-renderer.worker.ts b/apps/ui/src/workers/frame-renderer.worker.ts
index ff127d2..f45eb72 100644
--- a/apps/ui/src/workers/frame-renderer.worker.ts
+++ b/apps/ui/src/workers/frame-renderer.worker.ts
@@ -30,7 +30,7 @@ export interface FrameRendererConfig {
export interface RenderFrameTask {
frameIndex: number;
- timelineTime: number;
+ timelineFrame: number;
frame: RenderFrame;
/** Asset IDs that need texture upload with their source timestamps */
textureRequests: Array<{
diff --git a/packages/render-engine/globals.d.ts b/packages/render-engine/globals.d.ts
index 8a3f2a1..b8c82d9 100644
--- a/packages/render-engine/globals.d.ts
+++ b/packages/render-engine/globals.d.ts
@@ -1,7 +1,24 @@
import type { GPU } from "@webgpu/types";
+import type { RenderFrame } from "./src/types";
+import type { SnapshotOptions } from "./src/testing/snapshot-tester";
declare global {
interface Navigator {
readonly gpu: GPU;
}
}
+
+declare module "vitest" {
+ interface Assertion {
+ toMatchRenderSnapshot(
+ frame: RenderFrame,
+ snapshotName: string,
+ options?: SnapshotOptions,
+ ): Promise<void>;
+ toMatchImageData(
+ frame: RenderFrame,
+ expected: ImageData,
+ options?: SnapshotOptions,
+ ): Promise<void>;
+ }
+}
diff --git a/packages/render-engine/src/clip-operations.ts b/packages/render-engine/src/clip-operations.ts
index c37ad31..6a59562 100644
--- a/packages/render-engine/src/clip-operations.ts
+++ b/packages/render-engine/src/clip-operations.ts
@@ -15,9 +15,11 @@ import type { CrossTransitionType, Easing } from "./types.js";
/**
* Full clip data for editing operations.
* Extends ClipBounds with all editable properties.
+ * All time values are in integer frame counts relative to the project frame rate.
*/
export interface EditableClip extends ClipBounds {
trackId: string;
+ /** In-point within the source media in frames */
inPoint: number;
linkedClipId?: string;
}
diff --git a/packages/render-engine/src/frame-builder.ts b/packages/render-engine/src/frame-builder.ts
index be0c9ab..330d58d 100644
--- a/packages/render-engine/src/frame-builder.ts
+++ b/packages/render-engine/src/frame-builder.ts
@@ -25,23 +25,28 @@ import { KeyframeEvaluator } from "./keyframe-evaluator.js";
/**
* Minimal clip interface for visibility checks.
+ * All time values are in integer frame counts relative to the project frame rate.
*/
export interface ClipBounds {
id: string;
+ /** Start position on the timeline in frames */
startTime: number;
+ /** Duration on the timeline in frames */
duration: number;
}
/**
* Cross transition reference linking two clips.
+ * All time values are in integer frame counts relative to the project frame rate.
*/
export interface CrossTransitionRef {
id: string;
outgoingClipId: string;
incomingClipId: string;
+ /** Transition duration in frames */
duration: number;
type: CrossTransitionType;
- /** Original cut point on the timeline. The transition region is [boundary - duration/2, boundary + duration/2]. */
+ /** Original cut point on the timeline in frames. The transition region is [boundary - duration/2, boundary + duration/2]. */
boundary: number;
easing: Easing;
}
diff --git a/packages/render-engine/src/testing/vitest-integration.ts b/packages/render-engine/src/testing/vitest-integration.ts
index 3f72ec7..c119021 100644
--- a/packages/render-engine/src/testing/vitest-integration.ts
+++ b/packages/render-engine/src/testing/vitest-integration.ts
@@ -257,21 +257,12 @@ export function setupRenderEngineMatchers() {
const expected = await ctx.storage.load(snapshotName);
if (!expected) {
- if (ctx.updateSnapshots) {
- // Save the new snapshot
- await ctx.storage.save(snapshotName, actual);
- return {
- pass: true,
- message: () => `Snapshot "${snapshotName}" created`,
- };
- } else {
- return {
- pass: false,
- message: () =>
- `Snapshot "${snapshotName}" not found at ${ctx.storage.getPath(snapshotName)}. ` +
- `Run with --update-snapshots to create it.`,
- };
- }
+ // Auto-create snapshot on first run (no reference exists yet)
+ await ctx.storage.save(snapshotName, actual);
+ return {
+ pass: true,
+ message: () => `Snapshot "${snapshotName}" created (first run)`,
+ };
}
// Compare
diff --git a/packages/render-engine/src/types.ts b/packages/render-engine/src/types.ts
index d194bd3..1b0f83e 100644
--- a/packages/render-engine/src/types.ts
+++ b/packages/render-engine/src/types.ts
@@ -394,3 +394,54 @@ export const ANIMATABLE_PROPERTIES = {
} as const;
export type AnimatableProperty = keyof typeof ANIMATABLE_PROPERTIES;
+
+// ============================================================================
+// Frame Rate
+// ============================================================================
+
+/**
+ * Rational frame rate representation.
+ *
+ * Uses numerator/denominator to exactly represent rates like 29.97fps (30000/1001)
+ * without floating-point error.
+ */
+export interface FrameRate {
+ numerator: number;
+ denominator: number;
+}
+
+/**
+ * Standard frame rate presets.
+ */
+export const FRAME_RATE_PRESETS = {
+ "23.976": { numerator: 24000, denominator: 1001 },
+ "24": { numerator: 24, denominator: 1 },
+ "25": { numerator: 25, denominator: 1 },
+ "29.97": { numerator: 30000, denominator: 1001 },
+ "30": { numerator: 30, denominator: 1 },
+ "50": { numerator: 50, denominator: 1 },
+ "59.94": { numerator: 60000, denominator: 1001 },
+ "60": { numerator: 60, denominator: 1 },
+} as const satisfies Record<string, FrameRate>;
+
+/**
+ * Convert a frame count to seconds using a rational frame rate.
+ */
+export function framesToSeconds(frames: number, fps: FrameRate): number {
+ return (frames * fps.denominator) / fps.numerator;
+}
+
+/**
+ * Convert seconds to a frame count using a rational frame rate.
+ * Rounds to the nearest frame.
+ */
+export function secondsToFrames(seconds: number, fps: FrameRate): number {
+ return Math.round((seconds * fps.numerator) / fps.denominator);
+}
+
+/**
+ * Get the frame rate as a floating-point number (e.g., 29.97).
+ */
+export function frameRateToFloat(fps: FrameRate): number {
+ return fps.numerator / fps.denominator;
+}
diff --git a/packages/render-engine/src/video-frame-loader.ts b/packages/render-engine/src/video-frame-loader.ts
index 91b7c2d..1794d46 100644
--- a/packages/render-engine/src/video-frame-loader.ts
+++ b/packages/render-engine/src/video-frame-loader.ts
@@ -96,6 +96,8 @@ class HTMLVideoElementAdapter implements VideoFrameSourceAdapter {
private objectUrl: string | null = null;
private seekPromise: Promise<void> | null = null;
private seekResolve: (() => void) | null = null;
+ /** Mutex to serialize getImageBitmap calls (prevents seek race conditions) */
+ private frameLock: Promise<void> = Promise.resolve();
private constructor(video: HTMLVideoElement, info: VideoAssetInfo, objectUrl: string | null) {
this.video = video;
@@ -180,25 +182,39 @@ class HTMLVideoElementAdapter implements VideoFrameSourceAdapter {
throw new Error("VideoFrameLoader has been disposed");
}
- const clampedTime = Math.max(0, Math.min(timestamp, this._info.duration));
+ // Serialize access to the video element to prevent seek race conditions.
+ let resolve!: () => void;
+ const nextLock = new Promise<void>((r) => {
+ resolve = r;
+ });
+ const prevLock = this.frameLock;
+ this.frameLock = nextLock;
- // Seek if needed
- if (Math.abs(this.video.currentTime - clampedTime) > 0.01) {
- await this.seekTo(clampedTime);
- }
+ await prevLock;
- // Wait for video to have data
- if (this.video.readyState < HTMLMediaElement.HAVE_CURRENT_DATA) {
- await new Promise<void>((resolve) => {
- const onCanPlay = () => {
- this.video.removeEventListener("canplay", onCanPlay);
- resolve();
- };
- this.video.addEventListener("canplay", onCanPlay);
- });
- }
+ try {
+ const clampedTime = Math.max(0, Math.min(timestamp, this._info.duration));
- return createImageBitmap(this.video);
+ // Seek if needed
+ if (Math.abs(this.video.currentTime - clampedTime) > 0.01) {
+ await this.seekTo(clampedTime);
+ }
+
+ // Wait for video to have data
+ if (this.video.readyState < HTMLMediaElement.HAVE_CURRENT_DATA) {
+ await new Promise<void>((r) => {
+ const onCanPlay = () => {
+ this.video.removeEventListener("canplay", onCanPlay);
+ r();
+ };
+ this.video.addEventListener("canplay", onCanPlay);
+ });
+ }
+
+ return await createImageBitmap(this.video);
+ } finally {
+ resolve();
+ }
}
private async seekTo(time: number): Promise<void> {
diff --git a/packages/render-engine/tests/__screenshots__/complex-compositions.test.ts/complex-compositions--1920x1080--complex-layering-renders-all-layer-types-with-interleaved-z-ordering-1.png b/packages/render-engine/tests/__screenshots__/complex-compositions.test.ts/complex-compositions--1920x1080--complex-layering-renders-all-layer-types-with-interleaved-z-ordering-1.png
index 45dadde..ab1bcf9 100644
Binary files a/packages/render-engine/tests/__screenshots__/complex-compositions.test.ts/complex-compositions--1920x1080--complex-layering-renders-all-layer-types-with-interleaved-z-ordering-1.png and b/packages/render-engine/tests/__screenshots__/complex-compositions.test.ts/complex-compositions--1920x1080--complex-layering-renders-all-layer-types-with-interleaved-z-ordering-1.png differ
diff --git a/packages/render-engine/tests/__screenshots__/complex-compositions.test.ts/complex-compositions--1920x1080--presentation-slides-renders-a-data-visualization-slide-with-annotations-1.png b/packages/render-engine/tests/__screenshots__/complex-compositions.test.ts/complex-compositions--1920x1080--presentation-slides-renders-a-data-visualization-slide-with-annotations-1.png
index e18d393..c02e634 100644
Binary files a/packages/render-engine/tests/__screenshots__/complex-compositions.test.ts/complex-compositions--1920x1080--presentation-slides-renders-a-data-visualization-slide-with-annotations-1.png and b/packages/render-engine/tests/__screenshots__/complex-compositions.test.ts/complex-compositions--1920x1080--presentation-slides-renders-a-data-visualization-slide-with-annotations-1.png differ
diff --git a/packages/render-engine/tests/__screenshots__/complex-compositions.test.ts/complex-compositions--1920x1080--social-media-post-designs-renders-a-YouTube-thumbnail-layout-1.png b/packages/render-engine/tests/__screenshots__/complex-compositions.test.ts/complex-compositions--1920x1080--social-media-post-designs-renders-a-YouTube-thumbnail-layout-1.png
index 1d9e26a..c74455d 100644
Binary files a/packages/render-engine/tests/__screenshots__/complex-compositions.test.ts/complex-compositions--1920x1080--social-media-post-designs-renders-a-YouTube-thumbnail-layout-1.png and b/packages/render-engine/tests/__screenshots__/complex-compositions.test.ts/complex-compositions--1920x1080--social-media-post-designs-renders-a-YouTube-thumbnail-layout-1.png differ
diff --git a/packages/render-engine/tests/__screenshots__/complex-compositions.test.ts/complex-compositions--1920x1080--social-media-post-designs-renders-an-Instagram-story-layout-1.png b/packages/render-engine/tests/__screenshots__/complex-compositions.test.ts/complex-compositions--1920x1080--social-media-post-designs-renders-an-Instagram-story-layout-1.png
index 26977f2..ef90028 100644
Binary files a/packages/render-engine/tests/__screenshots__/complex-compositions.test.ts/complex-compositions--1920x1080--social-media-post-designs-renders-an-Instagram-story-layout-1.png and b/packages/render-engine/tests/__screenshots__/complex-compositions.test.ts/complex-compositions--1920x1080--social-media-post-designs-renders-an-Instagram-story-layout-1.png differ
diff --git a/packages/render-engine/tests/__screenshots__/complex-compositions.test.ts/complex-compositions--1920x1080--tutorial-content-renders-a-software-tutorial-with-step-annotations-1.png b/packages/render-engine/tests/__screenshots__/complex-compositions.test.ts/complex-compositions--1920x1080--tutorial-content-renders-a-software-tutorial-with-step-annotations-1.png
index 98bb054..2ae8032 100644
Binary files a/packages/render-engine/tests/__screenshots__/complex-compositions.test.ts/complex-compositions--1920x1080--tutorial-content-renders-a-software-tutorial-with-step-annotations-1.png and b/packages/render-engine/tests/__screenshots__/complex-compositions.test.ts/complex-compositions--1920x1080--tutorial-content-renders-a-software-tutorial-with-step-annotations-1.png differ
diff --git a/packages/render-engine/tests/__screenshots__/compositor.test.ts/compositor-basic-rendering-renders-a-solid-red-layer-filling-the-canvas-2.png b/packages/render-engine/tests/__screenshots__/compositor.test.ts/compositor-basic-rendering-renders-a-solid-red-layer-filling-the-canvas-2.png
new file mode 100644
index 0000000..7e8b985
Binary files /dev/null and b/packages/render-engine/tests/__screenshots__/compositor.test.ts/compositor-basic-rendering-renders-a-solid-red-layer-filling-the-canvas-2.png differ
diff --git a/packages/render-engine/tests/complex-compositions.test.ts b/packages/render-engine/tests/complex-compositions.test.ts
index 430a6f6..1530cc2 100644
--- a/packages/render-engine/tests/complex-compositions.test.ts
+++ b/packages/render-engine/tests/complex-compositions.test.ts
@@ -46,7 +46,6 @@ describe("complex compositions (1920x1080)", () => {
});
afterEach(async () => {
- // Capture screenshot after each test for visual verification
await tester.captureScreenshot();
});
diff --git a/packages/render-engine/tests/compositor.test.ts b/packages/render-engine/tests/compositor.test.ts
index 753c0fa..b864bfe 100644
--- a/packages/render-engine/tests/compositor.test.ts
+++ b/packages/render-engine/tests/compositor.test.ts
@@ -36,7 +36,6 @@ describe("compositor", () => {
});
afterEach(async () => {
- // Capture screenshot after each test for visual verification
await tester.captureScreenshot();
});
@@ -56,6 +55,9 @@ describe("compositor", () => {
pixels.expectPixelAtPercent(50, 50).redGreaterThan(200);
pixels.expectPixelAtPercent(50, 50).greenLessThan(50);
pixels.expectPixelAtPercent(50, 50).blueLessThan(50);
+
+ // Visual snapshot (creates reference on first run, compares on subsequent)
+ await expect(tester).toMatchRenderSnapshot(frame, "basic-solid-red");
});
it("renders a solid green layer", async () => {
diff --git a/packages/render-engine/tests/frame-rate.test.ts b/packages/render-engine/tests/frame-rate.test.ts
new file mode 100644
index 0000000..bdd4da3
--- /dev/null
+++ b/packages/render-engine/tests/frame-rate.test.ts
@@ -0,0 +1,140 @@
+import { describe, it, expect } from "vitest";
+import {
+ framesToSeconds,
+ secondsToFrames,
+ frameRateToFloat,
+ FRAME_RATE_PRESETS,
+ type FrameRate,
+} from "../src/types";
+
+describe("FrameRate", () => {
+ describe("FRAME_RATE_PRESETS", () => {
+ it("has correct rational representations", () => {
+ expect(FRAME_RATE_PRESETS["24"]).toEqual({ numerator: 24, denominator: 1 });
+ expect(FRAME_RATE_PRESETS["30"]).toEqual({ numerator: 30, denominator: 1 });
+ expect(FRAME_RATE_PRESETS["29.97"]).toEqual({ numerator: 30000, denominator: 1001 });
+ expect(FRAME_RATE_PRESETS["23.976"]).toEqual({ numerator: 24000, denominator: 1001 });
+ expect(FRAME_RATE_PRESETS["59.94"]).toEqual({ numerator: 60000, denominator: 1001 });
+ });
+ });
+
+ describe("frameRateToFloat", () => {
+ it("converts integer rates exactly", () => {
+ expect(frameRateToFloat(FRAME_RATE_PRESETS["24"])).toBe(24);
+ expect(frameRateToFloat(FRAME_RATE_PRESETS["30"])).toBe(30);
+ expect(frameRateToFloat(FRAME_RATE_PRESETS["60"])).toBe(60);
+ });
+
+ it("converts drop-frame rates approximately", () => {
+ expect(frameRateToFloat(FRAME_RATE_PRESETS["29.97"])).toBeCloseTo(29.97, 2);
+ expect(frameRateToFloat(FRAME_RATE_PRESETS["23.976"])).toBeCloseTo(23.976, 2);
+ expect(frameRateToFloat(FRAME_RATE_PRESETS["59.94"])).toBeCloseTo(59.94, 2);
+ });
+ });
+
+ describe("framesToSeconds", () => {
+ it("converts frame 0 to 0 seconds", () => {
+ expect(framesToSeconds(0, FRAME_RATE_PRESETS["30"])).toBe(0);
+ expect(framesToSeconds(0, FRAME_RATE_PRESETS["29.97"])).toBe(0);
+ });
+
+ it("converts integer frame rates exactly", () => {
+ const fps30: FrameRate = FRAME_RATE_PRESETS["30"];
+ expect(framesToSeconds(30, fps30)).toBe(1);
+ expect(framesToSeconds(1, fps30)).toBeCloseTo(1 / 30, 10);
+ expect(framesToSeconds(150, fps30)).toBe(5);
+
+ const fps24: FrameRate = FRAME_RATE_PRESETS["24"];
+ expect(framesToSeconds(24, fps24)).toBe(1);
+ expect(framesToSeconds(48, fps24)).toBe(2);
+
+ const fps60: FrameRate = FRAME_RATE_PRESETS["60"];
+ expect(framesToSeconds(60, fps60)).toBe(1);
+ expect(framesToSeconds(600, fps60)).toBe(10);
+ });
+
+ it("converts 29.97fps correctly", () => {
+ const fps = FRAME_RATE_PRESETS["29.97"];
+ // 30000 frames at 30000/1001 fps = 1001 seconds
+ expect(framesToSeconds(30000, fps)).toBe(1001);
+ // 1 frame at 29.97fps
+ expect(framesToSeconds(1, fps)).toBeCloseTo(1001 / 30000, 10);
+ });
+
+ it("converts 23.976fps correctly", () => {
+ const fps = FRAME_RATE_PRESETS["23.976"];
+ // 24000 frames at 24000/1001 fps = 1001 seconds
+ expect(framesToSeconds(24000, fps)).toBe(1001);
+ });
+
+ it("handles large frame counts", () => {
+ const fps = FRAME_RATE_PRESETS["30"];
+ // 1 hour = 108000 frames at 30fps
+ expect(framesToSeconds(108000, fps)).toBe(3600);
+ });
+ });
+
+ describe("secondsToFrames", () => {
+ it("converts 0 seconds to frame 0", () => {
+ expect(secondsToFrames(0, FRAME_RATE_PRESETS["30"])).toBe(0);
+ expect(secondsToFrames(0, FRAME_RATE_PRESETS["29.97"])).toBe(0);
+ });
+
+ it("converts integer frame rates exactly", () => {
+ const fps30 = FRAME_RATE_PRESETS["30"];
+ expect(secondsToFrames(1, fps30)).toBe(30);
+ expect(secondsToFrames(5, fps30)).toBe(150);
+ expect(secondsToFrames(0.1, fps30)).toBe(3);
+
+ const fps60 = FRAME_RATE_PRESETS["60"];
+ expect(secondsToFrames(1, fps60)).toBe(60);
+ expect(secondsToFrames(10, fps60)).toBe(600);
+ });
+
+ it("rounds to nearest frame", () => {
+ const fps30 = FRAME_RATE_PRESETS["30"];
+ // 0.5 / 30 = 0.0167 seconds is between frames 0 and 1
+ // 0.02 seconds * 30 = 0.6 → rounds to 1
+ expect(secondsToFrames(0.02, fps30)).toBe(1);
+ // 0.01 seconds * 30 = 0.3 → rounds to 0
+ expect(secondsToFrames(0.01, fps30)).toBe(0);
+ });
+
+ it("converts 29.97fps correctly", () => {
+ const fps = FRAME_RATE_PRESETS["29.97"];
+ expect(secondsToFrames(1, fps)).toBe(30); // 1 * 30000/1001 = 29.97 → rounds to 30
+ expect(secondsToFrames(1001, fps)).toBe(30000); // exact
+ });
+
+ it("handles large durations", () => {
+ const fps = FRAME_RATE_PRESETS["30"];
+ expect(secondsToFrames(3600, fps)).toBe(108000); // 1 hour
+ });
+ });
+
+ describe("round-trip accuracy", () => {
+ const allPresets = Object.values(FRAME_RATE_PRESETS);
+
+ it("frames → seconds → frames is identity for all presets", () => {
+ for (const fps of allPresets) {
+ for (const frame of [0, 1, 10, 100, 1000, 30000, 108000]) {
+ const seconds = framesToSeconds(frame, fps);
+ const roundTrip = secondsToFrames(seconds, fps);
+ expect(roundTrip).toBe(frame);
+ }
+ }
+ });
+
+ it("seconds → frames → seconds is accurate within half-frame for all presets", () => {
+ for (const fps of allPresets) {
+ const halfFrame = framesToSeconds(1, fps) / 2;
+ for (const seconds of [0, 0.5, 1, 5, 10, 60, 300, 3600]) {
+ const frames = secondsToFrames(seconds, fps);
+ const roundTrip = framesToSeconds(frames, fps);
+ // Allow a tiny epsilon for floating-point math on top of the half-frame tolerance
+ expect(Math.abs(roundTrip - seconds)).toBeLessThanOrEqual(halfFrame + 1e-10);
+ }
+ }
+ });
+ });
+});
diff --git a/packages/render-engine/tests/video-frame-loader.test.ts b/packages/render-engine/tests/video-frame-loader.test.ts
index f6c8a8c..c5deb70 100644
--- a/packages/render-engine/tests/video-frame-loader.test.ts
+++ b/packages/render-engine/tests/video-frame-loader.test.ts
@@ -11,6 +11,7 @@
import { beforeAll, describe, expect, it } from "vitest";
import { VideoFrameLoader, VideoFrameLoaderManager } from "../src/video-frame-loader.js";
+import { Compositor } from "../src/compositor.js";
describe("VideoFrameLoader", () => {
let testVideoBlob: Blob;
@@ -58,7 +59,9 @@ describe("VideoFrameLoader", () => {
});
it("extracts a frame from URL-loaded video", async () => {
- const loader = await VideoFrameLoader.fromUrl(LOCAL_VIDEO_URL, { mode: "export" });
+ const loader = await VideoFrameLoader.fromUrl(LOCAL_VIDEO_URL, {
+ mode: "export",
+ });
const frame = await loader.getFrame(1);
@@ -71,7 +74,9 @@ describe("VideoFrameLoader", () => {
});
it("gets RGBA data from URL-loaded video", async () => {
- const loader = await VideoFrameLoader.fromUrl(LOCAL_VIDEO_URL, { mode: "export" });
+ const loader = await VideoFrameLoader.fromUrl(LOCAL_VIDEO_URL, {
+ mode: "export",
+ });
const rgbaData = await loader.getRgbaData(0.5);
@@ -87,7 +92,9 @@ describe("VideoFrameLoader", () => {
});
it("gets VideoFrame from URL-loaded video for GPU upload", async () => {
- const loader = await VideoFrameLoader.fromUrl(LOCAL_VIDEO_URL, { mode: "export" });
+ const loader = await VideoFrameLoader.fromUrl(LOCAL_VIDEO_URL, {
+ mode: "export",
+ });
const videoFrame = await loader.getVideoFrame(0.5);
@@ -105,7 +112,9 @@ describe("VideoFrameLoader", () => {
const SAMPLE_VIDEO_URL = "/tests/fixtures/videos/sample-480p.mp4";
it("loads a longer video and gets metadata", async () => {
- const loader = await VideoFrameLoader.fromUrl(SAMPLE_VIDEO_URL, { mode: "export" });
+ const loader = await VideoFrameLoader.fromUrl(SAMPLE_VIDEO_URL, {
+ mode: "export",
+ });
expect(loader.info.width).toBe(480);
expect(loader.info.height).toBe(270);
@@ -116,7 +125,9 @@ describe("VideoFrameLoader", () => {
});
it("extracts frame from middle of video", async () => {
- const loader = await VideoFrameLoader.fromUrl(SAMPLE_VIDEO_URL, { mode: "export" });
+ const loader = await VideoFrameLoader.fromUrl(SAMPLE_VIDEO_URL, {
+ mode: "export",
+ });
// Get a frame from the middle of the video
const frame = await loader.getFrame(15);
@@ -131,7 +142,9 @@ describe("VideoFrameLoader", () => {
});
it("extracts frame near end of video", async () => {
- const loader = await VideoFrameLoader.fromUrl(SAMPLE_VIDEO_URL, { mode: "export" });
+ const loader = await VideoFrameLoader.fromUrl(SAMPLE_VIDEO_URL, {
+ mode: "export",
+ });
const frame = await loader.getFrame(28);
@@ -143,7 +156,9 @@ describe("VideoFrameLoader", () => {
});
it("gets RGBA data from longer video", async () => {
- const loader = await VideoFrameLoader.fromUrl(SAMPLE_VIDEO_URL, { mode: "export" });
+ const loader = await VideoFrameLoader.fromUrl(SAMPLE_VIDEO_URL, {
+ mode: "export",
+ });
const rgbaData = await loader.getRgbaData(10);
@@ -159,7 +174,9 @@ describe("VideoFrameLoader", () => {
});
it("iterates over frames in a range", async () => {
- const loader = await VideoFrameLoader.fromUrl(SAMPLE_VIDEO_URL, { mode: "export" });
+ const loader = await VideoFrameLoader.fromUrl(SAMPLE_VIDEO_URL, {
+ mode: "export",
+ });
const frames: number[] = [];
// Get frames from 10s to 11s
@@ -181,7 +198,9 @@ describe("VideoFrameLoader", () => {
});
it("measures seek performance across video", async () => {
- const loader = await VideoFrameLoader.fromUrl(SAMPLE_VIDEO_URL, { mode: "export" });
+ const loader = await VideoFrameLoader.fromUrl(SAMPLE_VIDEO_URL, {
+ mode: "export",
+ });
const times: number[] = [];
// Random seeks across the video (worst case for decoder)
@@ -209,7 +228,9 @@ describe("VideoFrameLoader", () => {
});
it("measures sequential playback performance", async () => {
- const loader = await VideoFrameLoader.fromUrl(SAMPLE_VIDEO_URL, { mode: "export" });
+ const loader = await VideoFrameLoader.fromUrl(SAMPLE_VIDEO_URL, {
+ mode: "export",
+ });
const times: number[] = [];
// Simulate 30fps playback for 1 second starting at 15s
@@ -242,7 +263,9 @@ describe("VideoFrameLoader", () => {
describe("frame extraction", () => {
it("gets a frame at timestamp 0", async () => {
- const loader = await VideoFrameLoader.fromBlob(testVideoBlob, { mode: "export" });
+ const loader = await VideoFrameLoader.fromBlob(testVideoBlob, {
+ mode: "export",
+ });
const frame = await loader.getFrame(0);
@@ -255,7 +278,9 @@ describe("VideoFrameLoader", () => {
});
it("gets a frame at middle of video", async () => {
- const loader = await VideoFrameLoader.fromBlob(testVideoBlob, { mode: "export" });
+ const loader = await VideoFrameLoader.fromBlob(testVideoBlob, {
+ mode: "export",
+ });
const frame = await loader.getFrame(1.0);
@@ -268,7 +293,9 @@ describe("VideoFrameLoader", () => {
});
it("clamps timestamp to valid range", async () => {
- const loader = await VideoFrameLoader.fromBlob(testVideoBlob, { mode: "export" });
+ const loader = await VideoFrameLoader.fromBlob(testVideoBlob, {
+ mode: "export",
+ });
// Request beyond duration
const frame = await loader.getFrame(10.0);
@@ -282,7 +309,9 @@ describe("VideoFrameLoader", () => {
});
it("clamps negative timestamp to 0", async () => {
- const loader = await VideoFrameLoader.fromBlob(testVideoBlob, { mode: "export" });
+ const loader = await VideoFrameLoader.fromBlob(testVideoBlob, {
+ mode: "export",
+ });
const frame = await loader.getFrame(-5.0);
@@ -296,7 +325,9 @@ describe("VideoFrameLoader", () => {
describe("VideoFrame extraction", () => {
it("gets a WebCodecs VideoFrame", async () => {
- const loader = await VideoFrameLoader.fromBlob(testVideoBlob, { mode: "export" });
+ const loader = await VideoFrameLoader.fromBlob(testVideoBlob, {
+ mode: "export",
+ });
const videoFrame = await loader.getVideoFrame(0.5);
@@ -311,7 +342,9 @@ describe("VideoFrameLoader", () => {
describe("RGBA data extraction", () => {
it("gets raw RGBA pixel data", async () => {
- const loader = await VideoFrameLoader.fromBlob(testVideoBlob, { mode: "export" });
+ const loader = await VideoFrameLoader.fromBlob(testVideoBlob, {
+ mode: "export",
+ });
const result = await loader.getRgbaData(0);
@@ -337,7 +370,9 @@ describe("VideoFrameLoader", () => {
describe("frame iteration", () => {
it("iterates over frames in a range", async () => {
- const loader = await VideoFrameLoader.fromBlob(testVideoBlob, { mode: "export" });
+ const loader = await VideoFrameLoader.fromBlob(testVideoBlob, {
+ mode: "export",
+ });
const frames: number[] = [];
for await (const frame of loader.frames(0, 1.0)) {
@@ -358,7 +393,9 @@ describe("VideoFrameLoader", () => {
describe("disposal", () => {
it("throws after disposal", async () => {
- const loader = await VideoFrameLoader.fromBlob(testVideoBlob, { mode: "export" });
+ const loader = await VideoFrameLoader.fromBlob(testVideoBlob, {
+ mode: "export",
+ });
loader.dispose();
expect(loader.disposed).toBe(true);
@@ -368,7 +405,9 @@ describe("VideoFrameLoader", () => {
describe("performance", () => {
it("measures sequential frame access time", async () => {
- const loader = await VideoFrameLoader.fromBlob(testVideoBlob, { mode: "export" });
+ const loader = await VideoFrameLoader.fromBlob(testVideoBlob, {
+ mode: "export",
+ });
const times: number[] = [];
// Warm up
@@ -400,7 +439,9 @@ describe("VideoFrameLoader", () => {
});
it("measures random seek time", async () => {
- const loader = await VideoFrameLoader.fromBlob(testVideoBlob, { mode: "export" });
+ const loader = await VideoFrameLoader.fromBlob(testVideoBlob, {
+ mode: "export",
+ });
const times: number[] = [];
// Random seeks (worst case for decoder)
@@ -494,11 +535,6 @@ describe("VideoFrameLoader + Compositor integration", () => {
});
it("renders video frame through compositor", async () => {
- // This test demonstrates the full pipeline:
- // VideoFrameLoader → RGBA data → Compositor → Canvas
-
- const { Compositor } = await import("../src/compositor.js");
-
// Create compositor
const canvas = document.createElement("canvas");
canvas.width = 320;
@@ -506,7 +542,9 @@ describe("VideoFrameLoader + Compositor integration", () => {
const compositor = await Compositor.fromCanvas(canvas);
// Load video frame
- const loader = await VideoFrameLoader.fromBlob(testVideoBlob, { mode: "export" });
+ const loader = await VideoFrameLoader.fromBlob(testVideoBlob, {
+ mode: "export",
+ });
const rgbaData = await loader.getRgbaData(0.5);
// Upload to compositor
@@ -563,7 +601,9 @@ describe("VideoFrameLoader + Compositor integration", () => {
// This test demonstrates using WebCodecs VideoFrame
// which can be uploaded via uploadBitmap for zero-copy transfer
- const loader = await VideoFrameLoader.fromBlob(testVideoBlob, { mode: "export" });
+ const loader = await VideoFrameLoader.fromBlob(testVideoBlob, {
+ mode: "export",
+ });
// Get WebCodecs VideoFrame
const videoFrame = await loader.getVideoFrame(1.0);
diff --git a/packages/render-engine/tests/visual-layers.test.ts b/packages/render-engine/tests/visual-layers.test.ts
index fca1710..867b99b 100644
--- a/packages/render-engine/tests/visual-layers.test.ts
+++ b/packages/render-engine/tests/visual-layers.test.ts
@@ -24,6 +24,7 @@ import {
lineLayer,
} from "../src/testing/snapshot-tester.js";
import { generateSceneTexture } from "../src/testing/test-renderer.js";
+import { VideoFrameLoader } from "../src/video-frame-loader.js";
describe("visual layers", () => {
let tester: SnapshotTester;
@@ -41,7 +42,6 @@ describe("visual layers", () => {
});
afterEach(async () => {
- // Capture screenshot after each test for visual verification
await tester.captureScreenshot();
});
@@ -344,14 +344,6 @@ describe("visual layers", () => {
// ============================================================================
describe("video layers", () => {
- // Import VideoFrameLoader dynamically to avoid issues if mediabunny isn't available
- let VideoFrameLoader: typeof import("../src/video-frame-loader.js").VideoFrameLoader;
-
- beforeAll(async () => {
- const module = await import("../src/video-frame-loader.js");
- VideoFrameLoader = module.VideoFrameLoader;
- });
-
/**
* Helper to load a video frame and add it as a texture.
* Uses MediaBunny's VideoFrameLoader for frame-accurate video decoding.