From 12314adb9ad652bdba2fddad3c4ae2a708f3eedf Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Fri, 10 Jan 2025 07:45:29 -0500 Subject: [PATCH 01/23] Add typescript --- package.json | 6 ++++-- tsconfig.json | 3 +++ 2 files changed, 7 insertions(+), 2 deletions(-) create mode 100644 tsconfig.json diff --git a/package.json b/package.json index 05bc100b..f4a04299 100644 --- a/package.json +++ b/package.json @@ -33,6 +33,7 @@ "p-map": "^7.0.2" }, "scripts": { + "prepare": "tsc -b", "test": "node test.js", "lint": "eslint ." }, @@ -44,10 +45,11 @@ "editly": "cli.js" }, "devDependencies": { - "@types/fabric": "^5.2.4", + "@tsconfig/node-lts": "^22.0.1", "eslint": "^8.22.0", "eslint-config-airbnb-base": "^15.0.0", "eslint-plugin-import": "^2.29.1", - "eslint-plugin-import-exports-imports-resolver": "^1.0.1" + "eslint-plugin-import-exports-imports-resolver": "^1.0.1", + "typescript": "^5.7.3" } } diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 00000000..eae32087 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,3 @@ +{ + "extends": "@tsconfig/node-lts/tsconfig.json" +} From 0a696cb7ce181556ea3a8faff0177aa3e106f942 Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Fri, 10 Jan 2025 07:49:13 -0500 Subject: [PATCH 02/23] Convert util.{js,ts} --- package.json | 3 + types.ts | 20 +++++++ util.js => util.ts | 143 ++++++++++++++++++++++++--------------------- 3 files changed, 99 insertions(+), 67 deletions(-) create mode 100644 types.ts rename util.js => util.ts (50%) diff --git a/package.json b/package.json index f4a04299..524a893b 100644 --- a/package.json +++ b/package.json @@ -46,10 +46,13 @@ }, "devDependencies": { "@tsconfig/node-lts": "^22.0.1", + "@types/fs-extra": "^11.0.4", + "@types/lodash-es": "^4.17.12", "eslint": "^8.22.0", "eslint-config-airbnb-base": "^15.0.0", "eslint-plugin-import": "^2.29.1", "eslint-plugin-import-exports-imports-resolver": "^1.0.1", + "ts-node": "^10.9.2", "typescript": "^5.7.3" } } diff --git a/types.ts b/types.ts new 
file mode 100644 index 00000000..cd15170f --- /dev/null +++ b/types.ts @@ -0,0 +1,20 @@ +// Types used internally and not exposed through any external interfaces. +// TODO[ts]: Move these elsewhere + +export type Stream = { + codec_type: string; + r_frame_rate: string; + width?: number; + height?: number; + tags?: { + rotate: string; + }; + side_data_list?: { + rotation: string; + }[]; +}; + +export type Keyframe = { + t: number; + props: Record; +}; diff --git a/util.js b/util.ts similarity index 50% rename from util.js rename to util.ts index ea72fb06..3420c9bc 100644 --- a/util.js +++ b/util.ts @@ -1,9 +1,12 @@ import { execa } from 'execa'; import assert from 'assert'; -import sortBy from 'lodash-es/sortBy.js'; -import fsExtra from 'fs-extra'; +import { sortBy } from 'lodash-es'; +import { pathExists } from 'fs-extra'; -export function parseFps(fps) { +import type { Keyframe, Stream } from './types.js'; +import type { Position, PositionObject, Transition } from './index.js'; + +export function parseFps(fps?: string) { const match = typeof fps === 'string' && fps.match(/^([0-9]+)\/([0-9]+)$/); if (match) { const num = parseInt(match[1], 10); @@ -13,31 +16,33 @@ export function parseFps(fps) { return undefined; } -export async function readDuration(ffprobePath, p) { +export async function readDuration(ffprobePath: string, p: string) { const { stdout } = await execa(ffprobePath, ['-v', 'error', '-show_entries', 'format=duration', '-of', 'default=noprint_wrappers=1:nokey=1', p]); const parsed = parseFloat(stdout); assert(!Number.isNaN(parsed)); return parsed; } -export async function readFileStreams(ffprobePath, p) { +export async function readFileStreams(ffprobePath: string, p: string) { const { stdout } = await execa(ffprobePath, [ '-show_entries', 'stream', '-of', 'json', p, ]); - const json = JSON.parse(stdout); - return json.streams; + return JSON.parse(stdout).streams as Stream[]; } -export async function readVideoFileInfo(ffprobePath, p) { + +export async 
function readVideoFileInfo(ffprobePath: string, p: string) { const streams = await readFileStreams(ffprobePath, p); const stream = streams.find((s) => s.codec_type === 'video'); // TODO + if (!stream) return; // TODO[ts]: what's the right thing to do here? + const duration = await readDuration(ffprobePath, p); - let rotation = parseInt(stream.tags && stream.tags.rotate, 10); + let rotation = parseInt(stream.tags?.rotate ?? '', 10); // If we can't find rotation, try side_data_list - if (Number.isNaN(rotation) && Array.isArray(stream.side_data_list) && stream.side_data_list[0] && stream.side_data_list[0].rotation) { + if (Number.isNaN(rotation) && stream.side_data_list?.[0]?.rotation) { rotation = parseInt(stream.side_data_list[0].rotation, 10); } @@ -51,13 +56,13 @@ export async function readVideoFileInfo(ffprobePath, p) { }; } -export async function readAudioFileInfo(ffprobePath, p) { +export async function readAudioFileInfo(ffprobePath: string, p: string) { const duration = await readDuration(ffprobePath, p); return { duration }; } -export function toArrayInteger(buffer) { +export function toArrayInteger(buffer: Buffer) { if (buffer.length > 0) { const data = new Uint8ClampedArray(buffer.length); for (let i = 0; i < buffer.length; i += 1) { @@ -69,69 +74,71 @@ export function toArrayInteger(buffer) { } // x264 requires multiple of 2 -export const multipleOf2 = (x) => Math.round(x / 2) * 2; +export const multipleOf2 = (x: number) => Math.round(x / 2) * 2; -export function getPositionProps({ position, width, height }) { +export function getPositionProps({ position, width, height }: { position: Position | PositionObject, width: number, height: number }) { let originY = 'center'; let originX = 'center'; let top = height / 2; let left = width / 2; - const margin = 0.05; - if (position === 'top') { - originY = 'top'; - top = height * margin; - } else if (position === 'bottom') { - originY = 'bottom'; - top = height * (1 - margin); - } else if (position === 'center') { - 
originY = 'center'; - top = height / 2; - } else if (position === 'top-left') { - originX = 'left'; - originY = 'top'; - left = width * margin; - top = height * margin; - } else if (position === 'top-right') { - originX = 'right'; - originY = 'top'; - left = width * (1 - margin); - top = height * margin; - } else if (position === 'center-left') { - originX = 'left'; - originY = 'center'; - left = width * margin; - top = height / 2; - } else if (position === 'center-right') { - originX = 'right'; - originY = 'center'; - left = width * (1 - margin); - top = height / 2; - } else if (position === 'bottom-left') { - originX = 'left'; - originY = 'bottom'; - left = width * margin; - top = height * (1 - margin); - } else if (position === 'bottom-right') { - originX = 'right'; - originY = 'bottom'; - left = width * (1 - margin); - top = height * (1 - margin); - } - if (position && position.x != null) { - originX = position.originX || 'left'; - left = width * position.x; - } - if (position && position.y != null) { - originY = position.originY || 'top'; - top = height * position.y; + if (typeof position === 'string') { + if (position === 'top') { + originY = 'top'; + top = height * margin; + } else if (position === 'bottom') { + originY = 'bottom'; + top = height * (1 - margin); + } else if (position === 'center') { + originY = 'center'; + top = height / 2; + } else if (position === 'top-left') { + originX = 'left'; + originY = 'top'; + left = width * margin; + top = height * margin; + } else if (position === 'top-right') { + originX = 'right'; + originY = 'top'; + left = width * (1 - margin); + top = height * margin; + } else if (position === 'center-left') { + originX = 'left'; + originY = 'center'; + left = width * margin; + top = height / 2; + } else if (position === 'center-right') { + originX = 'right'; + originY = 'center'; + left = width * (1 - margin); + top = height / 2; + } else if (position === 'bottom-left') { + originX = 'left'; + originY = 'bottom'; + left = 
width * margin; + top = height * (1 - margin); + } else if (position === 'bottom-right') { + originX = 'right'; + originY = 'bottom'; + left = width * (1 - margin); + top = height * (1 - margin); + } + } else { + if (position?.x != null) { + originX = position.originX || 'left'; + left = width * position.x; + } + if (position?.y != null) { + originY = position.originY || 'top'; + top = height * position.y; + } } return { originX, originY, top, left }; } -export function getFrameByKeyFrames(keyframes, progress) { +export function getFrameByKeyFrames(keyframes: Keyframe[], progress: number) { if (keyframes.length < 2) throw new Error('Keyframes must be at least 2'); const sortedKeyframes = sortBy(keyframes, 't'); @@ -158,15 +165,17 @@ export function getFrameByKeyFrames(keyframes, progress) { return Object.fromEntries(Object.entries(prevKeyframe.props).map(([propName, prevVal]) => ([propName, prevVal + ((nextKeyframe.props[propName] - prevVal) * interProgress)]))); } -export const isUrl = (path) => /^https?:\/\//.test(path); +export const isUrl = (path: string) => /^https?:\/\//.test(path); -export const assertFileValid = async (path, allowRemoteRequests) => { +export const assertFileValid = async (path: string, allowRemoteRequests: boolean) => { if (isUrl(path)) { assert(allowRemoteRequests, 'Remote requests are not allowed'); return; } - assert(await fsExtra.pathExists(path), `File does not exist ${path}`); + assert(await pathExists(path), `File does not exist ${path}`); }; // See #16 -export const checkTransition = (transition) => assert(transition == null || typeof transition === 'object', 'Transition must be an object'); +export function checkTransition(transition?: Transition) { + assert(transition == null || typeof transition === 'object', 'Transition must be an object'); +} From ba16aa25def5cb5658ec59146951d202ac93f990 Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Fri, 10 Jan 2025 09:24:48 -0500 Subject: [PATCH 03/23] Convert transition.{js,ts} --- 
parseConfig.js | 2 +- transitions.js | 62 -------------------------------------------------- transitions.ts | 57 ++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 58 insertions(+), 63 deletions(-) delete mode 100644 transitions.js create mode 100644 transitions.ts diff --git a/parseConfig.js b/parseConfig.js index cbdf5811..146bee1f 100644 --- a/parseConfig.js +++ b/parseConfig.js @@ -128,7 +128,7 @@ export default async function parseConfig({ defaults: defaultsIn = {}, clips, ar const userClipDurationOrDefault = userClipDuration || defaults.duration; if (videoLayers.length === 0) assert(userClipDurationOrDefault, `Duration parameter is required for videoless clip ${clipIndex}`); - const transition = calcTransition(defaults, userTransition, clipIndex === clips.length - 1); + const transition = calcTransition(defaults.transition, userTransition, clipIndex === clips.length - 1); let layersOut = flatMap(await pMap(layers, async (layerIn) => { const globalLayerDefaults = defaults.layer || {}; diff --git a/transitions.js b/transitions.js deleted file mode 100644 index 6c8a64cb..00000000 --- a/transitions.js +++ /dev/null @@ -1,62 +0,0 @@ -import assert from 'assert'; - -const randomTransitionsSet = ['fade', 'fadegrayscale', 'directionalwarp', 'crosswarp', 'dreamyzoom', 'burn', 'crosszoom', 'simplezoom', 'linearblur', 'directional-left', 'directional-right', 'directional-up', 'directional-down']; - -function getRandomTransition() { - return randomTransitionsSet[Math.floor(Math.random() * randomTransitionsSet.length)]; -} - -// https://easings.net/ - -export function easeOutExpo(x) { - return x === 1 ? 1 : 1 - (2 ** (-10 * x)); -} - -export function easeInOutCubic(x) { - return x < 0.5 ? 
4 * x * x * x : 1 - ((-2 * x + 2) ** 3) / 2; -} - -function getTransitionEasingFunction(easing, transitionName) { - if (easing !== null) { - if (easing) return { easeOutExpo }[easing]; - if (transitionName === 'directional') return easeOutExpo; - } - return (progress) => progress; -} - -export function calcTransition(defaults, transition, isLastClip) { - if (transition === null || isLastClip) return { duration: 0 }; - - const getTransitionDefault = (key) => (defaults.transition ? defaults.transition[key] : undefined); - - let transitionOrDefault = { - name: (transition && transition.name) || getTransitionDefault('name'), - duration: (transition && transition.duration != null) ? transition.duration : getTransitionDefault('duration'), - params: (transition && transition.params) || getTransitionDefault('params'), - easing: (transition && transition.easing !== undefined) ? transition.easing : getTransitionDefault('easing'), - audioOutCurve: (transition && transition.audioOutCurve) || getTransitionDefault('audioOutCurve'), - audioInCurve: (transition && transition.audioInCurve) || getTransitionDefault('audioInCurve'), - }; - - assert(!transitionOrDefault.duration || transitionOrDefault.name, 'Please specify transition name or set duration to 0'); - - if (transitionOrDefault.name === 'random' && transitionOrDefault.duration) { - transitionOrDefault = { ...transitionOrDefault, name: getRandomTransition() }; - } - - const aliasedTransition = { - 'directional-left': { name: 'directional', params: { direction: [1, 0] } }, - 'directional-right': { name: 'directional', params: { direction: [-1, 0] } }, - 'directional-down': { name: 'directional', params: { direction: [0, 1] } }, - 'directional-up': { name: 'directional', params: { direction: [0, -1] } }, - }[transitionOrDefault.name]; - if (aliasedTransition) { - transitionOrDefault = { ...transitionOrDefault, ...aliasedTransition }; - } - - return { - ...transitionOrDefault, - duration: transitionOrDefault.duration || 0, - 
easingFunction: getTransitionEasingFunction(transitionOrDefault.easing, transitionOrDefault.name), - }; -} diff --git a/transitions.ts b/transitions.ts new file mode 100644 index 00000000..8e6a94c8 --- /dev/null +++ b/transitions.ts @@ -0,0 +1,57 @@ +import assert from 'assert'; +import type { Transition } from './index.js'; + +const randomTransitionsSet = ['fade', 'fadegrayscale', 'directionalwarp', 'crosswarp', 'dreamyzoom', 'burn', 'crosszoom', 'simplezoom', 'linearblur', 'directional-left', 'directional-right', 'directional-up', 'directional-down']; + +function getRandomTransition() { + return randomTransitionsSet[Math.floor(Math.random() * randomTransitionsSet.length)]; +} + +// https://easings.net/ + +export function easeOutExpo(x: number) { + return x === 1 ? 1 : 1 - (2 ** (-10 * x)); +} + +export function easeInOutCubic(x: number) { + return x < 0.5 ? 4 * x * x * x : 1 - ((-2 * x + 2) ** 3) / 2; +} + +function getTransitionEasingFunction(easing: string | null | undefined, transitionName?: string) { + if (easing !== null) { + // FIXME[TS]: `easing` always appears to be null or undefined, so this never gets called + if (easing) return { easeOutExpo }[easing]; + if (transitionName === 'directional') return easeOutExpo; + } + return (progress: number) => progress; +} + +const TransitionAliases: Record> = { + 'directional-left': { name: 'directional', params: { direction: [1, 0] } }, + 'directional-right': { name: 'directional', params: { direction: [-1, 0] } }, + 'directional-down': { name: 'directional', params: { direction: [0, 1] } }, + 'directional-up': { name: 'directional', params: { direction: [0, -1] } }, +} + +export function calcTransition(defaults: Transition | undefined, transition: Transition, isLastClip: boolean) { + if (transition === null || isLastClip) return { duration: 0 }; + + let transitionOrDefault: Transition = { ...defaults, ...transition } + + assert(!transitionOrDefault.duration || transitionOrDefault.name, 'Please specify transition 
name or set duration to 0'); + + if (transitionOrDefault.name === 'random' && transitionOrDefault.duration) { + transitionOrDefault = { ...transitionOrDefault, name: getRandomTransition() }; + } + + const aliasedTransition = transitionOrDefault.name ? TransitionAliases[transitionOrDefault.name] : undefined; + if (aliasedTransition) { + transitionOrDefault = { ...transitionOrDefault, ...aliasedTransition }; + } + + return { + ...transitionOrDefault, + duration: transitionOrDefault.duration || 0, + easingFunction: getTransitionEasingFunction(transitionOrDefault.easing, transitionOrDefault.name), + }; +} From 72f576ce5743fd76e88ce76fb672fa5c852fec9b Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Fri, 10 Jan 2025 09:33:29 -0500 Subject: [PATCH 04/23] Convert test.{js,ts} --- test.js => test.ts | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename test.js => test.ts (100%) diff --git a/test.js b/test.ts similarity index 100% rename from test.js rename to test.ts From b230e890955e2576f50481adb858faf9d41c5997 Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Fri, 10 Jan 2025 09:43:17 -0500 Subject: [PATCH 05/23] Convert ffmpeg.{js,ts} --- ffmpeg.js | 31 ------------------------------- ffmpeg.ts | 37 +++++++++++++++++++++++++++++++++++++ 2 files changed, 37 insertions(+), 31 deletions(-) delete mode 100644 ffmpeg.js create mode 100644 ffmpeg.ts diff --git a/ffmpeg.js b/ffmpeg.js deleted file mode 100644 index 351675f7..00000000 --- a/ffmpeg.js +++ /dev/null @@ -1,31 +0,0 @@ -import fsExtra from 'fs-extra'; -import { execa } from 'execa'; -import assert from 'assert'; -import { compareVersions } from 'compare-versions'; - -export const getFfmpegCommonArgs = ({ enableFfmpegLog }) => (enableFfmpegLog ? [] : ['-hide_banner', '-loglevel', 'error']); - -export const getCutFromArgs = ({ cutFrom }) => (cutFrom ? ['-ss', cutFrom] : []); - -export const getCutToArgs = ({ cutTo, cutFrom, speedFactor }) => (cutTo ? 
['-t', (cutTo - cutFrom) * speedFactor] : []); - -export async function createConcatFile(segments, concatFilePath) { - // https://superuser.com/questions/787064/filename-quoting-in-ffmpeg-concat - await fsExtra.writeFile(concatFilePath, segments.map((seg) => `file '${seg.replace(/'/g, "'\\''")}'`).join('\n')); -} - -export async function testFf(exePath, name) { - const minRequiredVersion = '4.3.1'; - - try { - const { stdout } = await execa(exePath, ['-version']); - const firstLine = stdout.split('\n')[0]; - const match = firstLine.match(`${name} version ([0-9.]+)`); - assert(match, 'Unknown version string'); - const versionStr = match[1]; - console.log(`${name} version ${versionStr}`); - assert(compareVersions(versionStr, minRequiredVersion, '>='), 'Version is outdated'); - } catch (err) { - console.error(`WARNING: ${name}:`, err.message); - } -} diff --git a/ffmpeg.ts b/ffmpeg.ts new file mode 100644 index 00000000..db557e06 --- /dev/null +++ b/ffmpeg.ts @@ -0,0 +1,37 @@ +import fsExtra from 'fs-extra'; +import { execa } from 'execa'; +import assert from 'assert'; +import { compareVersions } from 'compare-versions'; + +export function getFfmpegCommonArgs({ enableFfmpegLog }: { enableFfmpegLog?: boolean }) { + return enableFfmpegLog ? [] : ['-hide_banner', '-loglevel', 'error']; +} + +export function getCutFromArgs({ cutFrom }: { cutFrom?: number }) { + return cutFrom ? ['-ss', cutFrom] : []; +} + +export function getCutToArgs({ cutTo, cutFrom, speedFactor }: { cutTo?: number; cutFrom?: number; speedFactor: number }) { + return cutFrom && cutTo ? 
['-t', (cutTo - cutFrom) * speedFactor] : []; +} + +export async function createConcatFile(segments: string[], concatFilePath: string) { + // https://superuser.com/questions/787064/filename-quoting-in-ffmpeg-concat + await fsExtra.writeFile(concatFilePath, segments.map((seg) => `file '${seg.replace(/'/g, "'\\''")}'`).join('\n')); +} + +export async function testFf(exePath: string, name: string) { + const minRequiredVersion = '4.3.1'; + + try { + const { stdout } = await execa(exePath, ['-version']); + const firstLine = stdout.split('\n')[0]; + const match = firstLine.match(`${name} version ([0-9.]+)`); + assert(match, 'Unknown version string'); + const versionStr = match[1]; + console.log(`${name} version ${versionStr}`); + assert(compareVersions(versionStr, minRequiredVersion), 'Version is outdated'); + } catch (err) { + console.error(`WARNING: ${name}:`, err); + } +} From 001b0621b220ee61b37052a5cd0c17a98e25abe8 Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Fri, 10 Jan 2025 09:54:09 -0500 Subject: [PATCH 06/23] Convert sources/glFrameSource.{js,ts} --- package.json | 2 ++ .../{glFrameSource.js => glFrameSource.ts} | 35 ++++++++++++++----- 2 files changed, 29 insertions(+), 8 deletions(-) rename sources/{glFrameSource.js => glFrameSource.ts} (67%) diff --git a/package.json b/package.json index 524a893b..436e8d6a 100644 --- a/package.json +++ b/package.json @@ -47,6 +47,8 @@ "devDependencies": { "@tsconfig/node-lts": "^22.0.1", "@types/fs-extra": "^11.0.4", + "@types/gl": "^6.0.5", + "@types/gl-shader": "^4.2.5", "@types/lodash-es": "^4.17.12", "eslint": "^8.22.0", "eslint-config-airbnb-base": "^15.0.0", diff --git a/sources/glFrameSource.js b/sources/glFrameSource.ts similarity index 67% rename from sources/glFrameSource.js rename to sources/glFrameSource.ts index 48e81847..fc3d074a 100644 --- a/sources/glFrameSource.js +++ b/sources/glFrameSource.ts @@ -1,10 +1,23 @@ import GL from 'gl'; import createShader from 'gl-shader'; -import fsExtra from 
'fs-extra'; +import { readFile } from 'node:fs/promises'; // I have no idea what I'm doing but it works ¯\_(ツ)_/¯ -export default async function createGlFrameSource({ width, height, channels, params }) { +export type CreateGlFrameSourceOptions = { + width: number; + height: number; + channels: number; + params: { + vertexPath?: string; + fragmentPath?: string; + vertexSrc?: string; + fragmentSrc?: string; + speed?: number; + }; +} + +export default async function createGlFrameSource({ width, height, channels, params }: CreateGlFrameSourceOptions) { const gl = GL(width, height); const defaultVertexSrc = ` @@ -13,24 +26,30 @@ export default async function createGlFrameSource({ width, height, channels, par gl_Position = vec4(position, 0.0, 1.0 ); } `; - const { vertexPath, fragmentPath, vertexSrc: vertexSrcIn, fragmentSrc: fragmentSrcIn, speed = 1 } = params; + const { + vertexPath, + fragmentPath, + vertexSrc: vertexSrcIn, + fragmentSrc: fragmentSrcIn, + speed = 1 + } = params; let fragmentSrc = fragmentSrcIn; let vertexSrc = vertexSrcIn; - if (fragmentPath) fragmentSrc = await fsExtra.readFile(fragmentPath); - if (vertexPath) vertexSrc = await fsExtra.readFile(vertexPath); + if (fragmentPath) fragmentSrc = (await readFile(fragmentPath)).toString(); + if (vertexPath) vertexSrc = (await readFile(vertexPath)).toString(); if (!vertexSrc) vertexSrc = defaultVertexSrc; - const shader = createShader(gl, vertexSrc, fragmentSrc); + const shader = createShader(gl, vertexSrc, fragmentSrc ?? 
''); const buffer = gl.createBuffer(); gl.bindBuffer(gl.ARRAY_BUFFER, buffer); // https://blog.mayflower.de/4584-Playing-around-with-pixel-shaders-in-WebGL.html gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([-1, -1, 1, -1, 1, 1, -1, 1]), gl.STATIC_DRAW); - async function readNextFrame(progress) { + async function readNextFrame(progress: number) { shader.bind(); shader.attributes.position.pointer(); @@ -56,6 +75,6 @@ export default async function createGlFrameSource({ width, height, channels, par return { readNextFrame, - close: () => {}, + close: () => { }, }; } From 1bf2a0c5f3e383156ddad6299e14f9b568454b79 Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Fri, 10 Jan 2025 10:36:00 -0500 Subject: [PATCH 07/23] Convert colors.{js,ts} --- colors.js => colors.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename colors.js => colors.ts (98%) diff --git a/colors.js b/colors.ts similarity index 98% rename from colors.js rename to colors.ts index 81e1b8f4..5c625a52 100644 --- a/colors.js +++ b/colors.ts @@ -178,7 +178,7 @@ export function getRandomColor(colors = allColors) { return { remainingColors, color: colors[index] || allColors[0] }; } -export function getRandomColors(num) { +export function getRandomColors(num: number) { let colors = allColors; const out = []; for (let i = 0; i < Math.min(num, allColors.length); i += 1) { From d940f5d9d02c12017b10ff1b7f9dcbcee0e01cd2 Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Mon, 13 Jan 2025 17:09:42 -0500 Subject: [PATCH 08/23] Convert audio.{js,ts} --- audio.js => audio.ts | 62 ++++++++++++++++++++++++++++---------------- ffmpeg.ts | 2 +- 2 files changed, 41 insertions(+), 23 deletions(-) rename audio.js => audio.ts (78%) diff --git a/audio.js b/audio.ts similarity index 78% rename from audio.js rename to audio.ts index b99057eb..f04d375d 100644 --- a/audio.js +++ b/audio.ts @@ -1,17 +1,33 @@ import pMap from 'p-map'; import { join, basename, resolve } from 'path'; import { execa } from 'execa'; -import 
flatMap from 'lodash-es/flatMap.js'; +import { flatMap } from 'lodash-es'; import { getFfmpegCommonArgs, getCutFromArgs } from './ffmpeg.js'; import { readFileStreams } from './util.js'; -export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }) => { - async function createMixedAudioClips({ clips, keepSourceAudio }) { +import type { AudioLayer, AudioNormalizationOptions, AudioTrack, Clip, Config, Layer, Transition, VideoLayer } from './index.js' + +export type AudioOptions = { + ffmpegPath: string; + ffprobePath: string; + enableFfmpegLog: boolean; + verbose: boolean; + tmpDir: string; +} + +export type EditAudioOptions = Required> & { + arbitraryAudio: AudioTrack[] +}; + +type LayerWithAudio = (AudioLayer | VideoLayer) & { speedFactor: number }; + +export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }: AudioOptions) => { + async function createMixedAudioClips({ clips, keepSourceAudio }: { clips: Clip[], keepSourceAudio: boolean }) { return pMap(clips, async (clip, i) => { const { duration, layers, transition } = clip; - async function runInner() { + async function runInner(): Promise<{ clipAudioPath: string, silent: boolean }> { const clipAudioPath = join(tmpDir, `clip${i}-audio.flac`); async function createSilence() { @@ -20,7 +36,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }) = '-f', 'lavfi', '-i', 'anullsrc=channel_layout=stereo:sample_rate=44100', '-sample_fmt', 's32', '-ar', '48000', - '-t', duration, + '-t', duration!.toString(), '-c:a', 'flac', '-y', clipAudioPath, @@ -33,10 +49,11 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }) = // Has user enabled keep source audio? if (!keepSourceAudio) return createSilence(); - const audioLayers = layers.filter(({ type, start, stop }) => ( + // TODO:[ts]: Layers is always an array once config is parsed. 
Fix this in types + const audioLayers = (layers as Layer[]).filter(({ type, start, stop }) => ( ['audio', 'video'].includes(type) // TODO: We don't support audio for start/stop layers - && !start && stop == null)); + && !start && stop == null)) as LayerWithAudio[]; if (audioLayers.length === 0) return createSilence(); @@ -60,13 +77,13 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }) = atempoFilter = `atempo=${atempo}`; } - const cutToArg = (cutTo - cutFrom) * speedFactor; + const cutToArg = (cutTo! - cutFrom!) * speedFactor; const args = [ ...getFfmpegCommonArgs({ enableFfmpegLog }), ...getCutFromArgs({ cutFrom }), '-i', path, - '-t', cutToArg, + '-t', cutToArg!.toString(), '-sample_fmt', 's32', '-ar', '48000', '-map', 'a:0', '-c:a', 'flac', @@ -78,10 +95,10 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }) = // console.log(args); await execa(ffmpegPath, args); - return { + return [ layerAudioPath, audioLayer, - }; + ]; } catch (err) { if (verbose) console.error('Cannot extract audio from video', path, err); // Fall back to silence @@ -89,17 +106,17 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }) = } }, { concurrency: 4 }); - const processedAudioLayers = processedAudioLayersRaw.filter((p) => p); + const processedAudioLayers = processedAudioLayersRaw.filter((r): r is [string, LayerWithAudio] => r !== undefined); if (processedAudioLayers.length < 1) return createSilence(); - if (processedAudioLayers.length === 1) return { clipAudioPath: processedAudioLayers[0].layerAudioPath }; + if (processedAudioLayers.length === 1) return { clipAudioPath: processedAudioLayers[0][0], silent: false }; // Merge/mix all layers' audio - const weights = processedAudioLayers.map(({ audioLayer }) => (audioLayer.mixVolume != null ? audioLayer.mixVolume : 1)); + const weights = processedAudioLayers.map(([_, { mixVolume }]) => mixVolume ?? 
1); const args = [ ...getFfmpegCommonArgs({ enableFfmpegLog }), - ...flatMap(processedAudioLayers, ({ layerAudioPath }) => ['-i', layerAudioPath]), + ...flatMap(processedAudioLayers, ([layerAudioPath]) => ['-i', layerAudioPath]), '-filter_complex', `amix=inputs=${processedAudioLayers.length}:duration=longest:weights=${weights.join(' ')}`, '-c:a', 'flac', '-y', @@ -107,7 +124,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }) = ]; await execa(ffmpegPath, args); - return { clipAudioPath }; + return { clipAudioPath, silent: false }; } const { clipAudioPath, silent } = await runInner(); @@ -120,7 +137,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }) = }, { concurrency: 4 }); } - async function crossFadeConcatClipAudio(clipAudio) { + async function crossFadeConcatClipAudio(clipAudio: { path: string, transition?: Transition | null }[]) { if (clipAudio.length < 2) { return clipAudio[0].path; } @@ -134,7 +151,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }) = const outStream = `[concat${i}]`; const epsilon = 0.0001; // If duration is 0, ffmpeg seems to default to 1 sec instead, hence epsilon. - let ret = `${inStream}[${i + 1}:a]acrossfade=d=${Math.max(epsilon, transition.duration)}:c1=${transition.audioOutCurve || 'tri'}:c2=${transition.audioInCurve || 'tri'}`; + let ret = `${inStream}[${i + 1}:a]acrossfade=d=${Math.max(epsilon, transition?.duration ?? 0)}:c1=${transition?.audioOutCurve ?? 'tri'}:c2=${transition?.audioInCurve ?? 'tri'}`; inStream = outStream; @@ -156,7 +173,8 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }) = return outPath; } - async function mixArbitraryAudio({ streams, audioNorm, outputVolume }) { + // FIXME[ts]: parseConfig sets `loop` on arbitrary audio tracks. Should that be part of the `AudioTrack` interface? 
+ async function mixArbitraryAudio({ streams, audioNorm, outputVolume }: { streams: (AudioTrack & { loop?: number })[], audioNorm: AudioNormalizationOptions, outputVolume: number | string }) { let maxGain = 30; let gaussSize = 5; if (audioNorm) { @@ -182,7 +200,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }) = const args = [ ...getFfmpegCommonArgs({ enableFfmpegLog }), ...(flatMap(streams, ({ path, loop }) => ([ - '-stream_loop', (loop || 0), + '-stream_loop', (loop || 0).toString(), '-i', path, ]))), '-vn', @@ -199,7 +217,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }) = return mixedAudioPath; } - async function editAudio({ keepSourceAudio, clips, arbitraryAudio, clipsAudioVolume, audioNorm, outputVolume }) { + async function editAudio({ keepSourceAudio, clips, arbitraryAudio, clipsAudioVolume, audioNorm, outputVolume }: EditAudioOptions) { // We need clips to process audio, because we need to know duration if (clips.length === 0) return undefined; @@ -217,7 +235,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }) = // Merge & fade the clip audio files const concatedClipAudioPath = await crossFadeConcatClipAudio(clipAudio); - const streams = [ + const streams: AudioTrack[] = [ // The first stream is required, as it determines the length of the output audio. // All other streams will be truncated to its length { path: concatedClipAudioPath, mixVolume: clipsAudioVolume }, diff --git a/ffmpeg.ts b/ffmpeg.ts index db557e06..323a218f 100644 --- a/ffmpeg.ts +++ b/ffmpeg.ts @@ -8,7 +8,7 @@ export function getFfmpegCommonArgs({ enableFfmpegLog }: { enableFfmpegLog?: boo } export function getCutFromArgs({ cutFrom }: { cutFrom?: number }) { - return cutFrom ? ['-ss', cutFrom] : []; + return cutFrom ? 
['-ss', cutFrom.toString()] : []; } export function getCutToArgs({ cutTo, cutFrom, speedFactor }: { cutTo?: number; cutFrom?: number; speedFactor: number }) { From aab3c057c21ffd34789cb671eca9e8594dcef270 Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Tue, 14 Jan 2025 21:19:01 -0500 Subject: [PATCH 09/23] Convert parseConfig.{ts,js} --- index.d.ts | 125 ++++++++---------------- parseConfig.js => parseConfig.ts | 141 +++++++++++++++------------ sources/fabric.js | 2 - sources/fabric/fabricFrameSources.js | 8 +- transitions.ts | 2 +- util.ts | 8 +- 6 files changed, 132 insertions(+), 154 deletions(-) rename parseConfig.js => parseConfig.ts (66%) diff --git a/index.d.ts b/index.d.ts index 83527dd7..41cbc467 100644 --- a/index.d.ts +++ b/index.d.ts @@ -281,6 +281,32 @@ declare namespace Editly { } + interface TextLayer extends BaseLayer { + /** + * Subtitle text to show. + */ + text: string; + + /** + * Text color. + * Defaults to '#ffffff'. + */ + textColor?: string; + + /** + * Set font (`.ttf`). + * Defaults to system font. + */ + fontPath?: string; + + /** + * WARNING: Undocumented feature! + * The font family to use. Must already be registered using `fontPath`. + * If `fontPath` is also provided, this will be ignored. + */ + fontFamily?: string; + } + interface VideoPostProcessingFunctionArgs { canvas: Fabric.Canvas; image: Fabric.FabricImage; @@ -500,30 +526,13 @@ declare namespace Editly { } - interface TitleLayer extends BaseLayer, KenBurns { + interface TitleLayer extends TextLayer, KenBurns { /** * Layer type. */ type: 'title'; - /** - * Title text to show, keep it short. - */ - text: string; - - /** - * Text color. - * Defaults to '#ffffff'. - */ - textColor?: string; - - /** - * Set font (`.ttf`). - * Defaults to system font. - */ - fontPath?: string; - /** * Position. */ @@ -531,30 +540,13 @@ declare namespace Editly { } - interface SubtitleLayer extends BaseLayer { + interface SubtitleLayer extends TextLayer { /** * Layer type. 
*/ type: 'subtitle'; - /** - * Subtitle text to show. - */ - text: string; - - /** - * Text color. - * Defaults to '#ffffff'. - */ - textColor?: string; - - /** - * Set font (`.ttf`). - * Defaults to system font. - */ - fontPath?: string; - /** * WARNING: Undocumented feature! */ @@ -565,30 +557,13 @@ declare namespace Editly { /** * Title with background. */ - interface TitleBackgroundLayer extends BaseLayer { + interface TitleBackgroundLayer extends TextLayer { /** * Layer type. */ type: 'title-background'; - /** - * Title text to show, keep it short. - */ - text: string; - - /** - * Text color. - * Defaults to '#ffffff'. - */ - textColor?: string; - - /** - * Set font (`.ttf`). - * Defaults to system font. - */ - fontPath?: string; - /** * Background layer. * Defaults to random background. @@ -597,30 +572,13 @@ declare namespace Editly { } - interface NewsTitleLayer extends BaseLayer { + interface NewsTitleLayer extends TextLayer { /** * Layer type. */ type: 'news-title'; - /** - * Title text to show, keep it short. - */ - text: string; - - /** - * Text color. - * Defaults to '#ffffff'. - */ - textColor?: string; - - /** - * Set font (`.ttf`). - * Defaults to system font. - */ - fontPath?: string; - /** * Background color. * Defaults to '#d02a42'. @@ -634,24 +592,13 @@ declare namespace Editly { } - interface SlideInTextLayer extends BaseLayer { + interface SlideInTextLayer extends TextLayer { /** * Layer type. */ type: 'slide-in-text'; - /** - * Title text to show, keep it short. - */ - text: string; - - /** - * Set font (`.ttf`). - * Defaults to system font. - */ - fontPath?: string; - /** * Font size. */ @@ -664,6 +611,7 @@ declare namespace Editly { /** * Color. + * @deprecated use `fontColor` instead. */ color?: string; @@ -715,7 +663,7 @@ declare namespace Editly { * Array of two colors. * Defaults to random colors. */ - colors?: [ string, string ]; + colors?: [string, string]; } @@ -730,7 +678,7 @@ declare namespace Editly { * Array of two colors. 
* Defaults to random colors. */ - colors?: [ string, string ]; + colors?: [string, string]; } @@ -831,6 +779,11 @@ declare namespace Editly { */ type: 'editly-banner'; + /** + * Set font (`.ttf`). + * Defaults to system font. + */ + fontPath?: string; } /** diff --git a/parseConfig.js b/parseConfig.ts similarity index 66% rename from parseConfig.js rename to parseConfig.ts index 146bee1f..148b16ce 100644 --- a/parseConfig.js +++ b/parseConfig.ts @@ -10,15 +10,29 @@ import { assertFileValid, checkTransition, } from './util.js'; -import { registerFont } from './sources/fabric.js'; +import { registerFont } from 'canvas'; import { calcTransition } from './transitions.js'; +import type { AudioTrack, CanvasLayer, Config, EditlyBannerLayer, FabricLayer, GlLayer, ImageLayer, ImageOverlayLayer, Layer, LinearGradientLayer, NewsTitleLayer, SlideInTextLayer, SubtitleLayer, TitleBackgroundLayer, TitleLayer, VideoLayer } from './index.js'; + +type LayerDuration = T & { + layerDuration: number; +} + +type ProcessedVideoLayer = LayerDuration & { + framerateStr: string; + inputWidth: number; + inputHeight: number; + speedFactor: number; +} + +type ProcessedLayer = LayerDuration> | ProcessedVideoLayer; const dirname = fileURLToPath(new URL('.', import.meta.url)); // Cache -const loadedFonts = []; +const loadedFonts: string[] = []; -async function validateArbitraryAudio(audio, allowRemoteRequests) { +async function validateArbitraryAudio(audio: AudioTrack[] | undefined, allowRemoteRequests?: boolean) { assert(audio === undefined || Array.isArray(audio)); if (audio) { @@ -34,7 +48,12 @@ async function validateArbitraryAudio(audio, allowRemoteRequests) { } } -export default async function parseConfig({ defaults: defaultsIn = {}, clips, arbitraryAudio: arbitraryAudioIn, backgroundAudioPath, backgroundAudioVolume, loopAudio, allowRemoteRequests, ffprobePath }) { +type ParseConfigOptions = Required> & { + arbitraryAudio: AudioTrack[]; + backgroundAudioPath?: string; +}; + +export default 
async function parseConfig({ defaults: defaultsIn = {}, clips, arbitraryAudio: arbitraryAudioIn, backgroundAudioPath, backgroundAudioVolume, loopAudio, allowRemoteRequests, ffprobePath }: ParseConfigOptions) { const defaults = { duration: 4, ...defaultsIn, @@ -47,57 +66,63 @@ export default async function parseConfig({ defaults: defaultsIn = {}, clips, ar }, }; - async function handleLayer(layer) { - const { type, ...restLayer } = layer; - + async function handleLayer(layer: Layer): Promise { // https://github.com/mifi/editly/issues/39 - if (['image', 'image-overlay'].includes(type)) { - await assertFileValid(restLayer.path, allowRemoteRequests); - } else if (type === 'gl') { - await assertFileValid(restLayer.fragmentPath, allowRemoteRequests); + if (layer.type === 'image' || layer.type === 'image-overlay') { + await assertFileValid((layer as (ImageOverlayLayer | ImageLayer)).path, allowRemoteRequests); + } else if (layer.type === 'gl') { + await assertFileValid(layer.fragmentPath, allowRemoteRequests); } - if (['fabric', 'canvas'].includes(type)) assert(typeof layer.func === 'function', '"func" must be a function'); + if (['fabric', 'canvas'].includes(layer.type)) { + assert(typeof (layer as FabricLayer | CanvasLayer).func === 'function', '"func" must be a function'); + } - if (['image', 'image-overlay', 'fabric', 'canvas', 'gl', 'radial-gradient', 'linear-gradient', 'fill-color'].includes(type)) return layer; + if (['image', 'image-overlay', 'fabric', 'canvas', 'gl', 'radial-gradient', 'linear-gradient', 'fill-color'].includes(layer.type)) { + return layer; + } // TODO if random-background radial-gradient linear etc - if (type === 'pause') return handleLayer({ ...restLayer, type: 'fill-color' }); + if (layer.type === 'pause') { + return handleLayer({ ...layer, type: 'fill-color' }); + } - if (type === 'rainbow-colors') return handleLayer({ type: 'gl', fragmentPath: join(dirname, 'shaders/rainbow-colors.frag') }); + if (layer.type === 'rainbow-colors') { + return 
handleLayer({ type: 'gl', fragmentPath: join(dirname, 'shaders/rainbow-colors.frag') } as GlLayer); + } - if (type === 'editly-banner') { - const { fontPath } = layer; + if (layer.type === 'editly-banner') { + const { fontPath } = layer as EditlyBannerLayer; return [ - await handleLayer({ type: 'linear-gradient' }), - await handleLayer({ fontPath, type: 'title', text: 'Made with\nEDITLY\nmifi.no' }), - ]; + await handleLayer({ type: 'linear-gradient' } as LinearGradientLayer), + await handleLayer({ type: 'title', text: 'Made with\nEDITLY\nmifi.no', fontPath } as TitleLayer), + ].flat(); } // For convenience - if (type === 'title-background') { - const { text, textColor, background, fontFamily, fontPath } = layer; + if (layer.type === 'title-background') { + const { text, textColor, background, fontFamily, fontPath } = layer as TitleBackgroundLayer; const outLayers = []; if (background) { if (background.type === 'radial-gradient') outLayers.push(await handleLayer({ type: 'radial-gradient', colors: background.colors })); else if (background.type === 'linear-gradient') outLayers.push(await handleLayer({ type: 'linear-gradient', colors: background.colors })); else if (background.color) outLayers.push(await handleLayer({ type: 'fill-color', color: background.color })); } else { - const backgroundTypes = ['radial-gradient', 'linear-gradient', 'fill-color']; + const backgroundTypes: ('radial-gradient' | 'linear-gradient' | 'fill-color')[] = ['radial-gradient', 'linear-gradient', 'fill-color']; const randomType = backgroundTypes[Math.floor(Math.random() * backgroundTypes.length)]; outLayers.push(await handleLayer({ type: randomType })); } outLayers.push(await handleLayer({ type: 'title', fontFamily, fontPath, text, textColor })); - return outLayers; + return outLayers.flat(); } - if (['title', 'subtitle', 'news-title', 'slide-in-text'].includes(type)) { - assert(layer.text, 'Please specify a text'); + if (['title', 'subtitle', 'news-title', 
'slide-in-text'].includes(layer.type)) { + const { fontPath, ...rest } = layer as TitleLayer | SubtitleLayer | NewsTitleLayer | SlideInTextLayer; + assert(rest.text, 'Please specify a text'); - let { fontFamily } = layer; - const { fontPath, ...rest } = layer; + let { fontFamily } = rest; if (fontPath) { - fontFamily = Buffer.from(basename(fontPath)).toString('base64'); + const fontFamily = Buffer.from(basename(fontPath)).toString('base64'); if (!loadedFonts.includes(fontFamily)) { registerFont(fontPath, { family: fontFamily, weight: 'regular', style: 'normal' }); loadedFonts.push(fontFamily); @@ -106,10 +131,10 @@ export default async function parseConfig({ defaults: defaultsIn = {}, clips, ar return { ...rest, fontFamily }; } - throw new Error(`Invalid layer type ${type}`); + throw new Error(`Invalid layer type ${layer.type}`); } - const detachedAudioByClip = {}; + const detachedAudioByClip: Record = {}; let clipsOut = await pMap(clips, async (clip, clipIndex) => { assert(typeof clip === 'object', '"clips" must contain objects with one or more layers'); @@ -130,15 +155,14 @@ export default async function parseConfig({ defaults: defaultsIn = {}, clips, ar const transition = calcTransition(defaults.transition, userTransition, clipIndex === clips.length - 1); - let layersOut = flatMap(await pMap(layers, async (layerIn) => { + let layersOut: Layer[] = flatMap(await pMap(layers, async (layerIn: T) => { const globalLayerDefaults = defaults.layer || {}; const thisLayerDefaults = (defaults.layerType || {})[layerIn.type]; - const layer = { ...globalLayerDefaults, ...thisLayerDefaults, ...layerIn }; - const { type, path } = layer; + const layer: T = { ...globalLayerDefaults, ...thisLayerDefaults, ...layerIn }; - if (type === 'video') { - const { duration: fileDuration, width: widthIn, height: heightIn, framerateStr, rotation } = await readVideoFileInfo(ffprobePath, path); + if (layer.type === 'video') { + const { duration: fileDuration, width: widthIn, height: heightIn, 
framerateStr, rotation } = await readVideoFileInfo(ffprobePath, layer.path); let { cutFrom, cutTo } = layer; if (!cutFrom) cutFrom = 0; cutFrom = Math.max(cutFrom, 0); @@ -149,41 +173,41 @@ export default async function parseConfig({ defaults: defaultsIn = {}, clips, ar cutTo = Math.min(cutTo, fileDuration); assert(cutFrom < cutTo, 'cutFrom must be lower than cutTo'); - const inputDuration = cutTo - cutFrom; + const layerDuration = cutTo - cutFrom; - const isRotated = [-90, 90, 270, -270].includes(rotation); + const isRotated = rotation && [-90, 90, 270, -270].includes(rotation); const inputWidth = isRotated ? heightIn : widthIn; const inputHeight = isRotated ? widthIn : heightIn; - return { ...layer, cutFrom, cutTo, inputDuration, framerateStr, inputWidth, inputHeight }; + return { ...layer, cutFrom, cutTo, layerDuration, framerateStr, inputWidth, inputHeight } as ProcessedVideoLayer; } // Audio is handled later - if (['audio', 'detached-audio'].includes(type)) return layer; + if (['audio', 'detached-audio'].includes(layer.type)) return layer; return handleLayer(layer); }, { concurrency: 1 })); let clipDuration = userClipDurationOrDefault; - const firstVideoLayer = layersOut.find((layer) => layer.type === 'video'); - if (firstVideoLayer && !userClipDuration) clipDuration = firstVideoLayer.inputDuration; + const firstVideoLayer = layersOut.find((layer): layer is ProcessedVideoLayer => layer.type === 'video'); + if (firstVideoLayer && !userClipDuration) clipDuration = firstVideoLayer.layerDuration; assert(clipDuration); // We need to map again, because for audio, we need to know the correct clipDuration - layersOut = await pMap(layersOut, async (layerIn) => { - const { type, path, stop, start = 0 } = layerIn; + layersOut = (await pMap(layersOut, async (layerIn: T) => { + if (!layerIn.start) layerIn.start = 0 // This feature allows the user to show another layer overlayed (or replacing) parts of the lower layers (start - stop) - const layerDuration = ((stop || 
clipDuration) - start); - assert(layerDuration > 0 && layerDuration <= clipDuration, `Invalid start ${start} or stop ${stop} (${clipDuration})`); + const layerDuration = ((layerIn.stop || clipDuration) - layerIn.start); + assert(layerDuration > 0 && layerDuration <= clipDuration, `Invalid start ${layerIn.start} or stop ${layerIn.stop} (${clipDuration})`); // TODO Also need to handle video layers (speedFactor etc) // TODO handle audio in case of start/stop - const layer = { ...layerIn, start, layerDuration }; + const layer: LayerDuration = { ...layerIn, layerDuration }; - if (type === 'audio') { - const { duration: fileDuration } = await readAudioFileInfo(ffprobePath, path); + if (layer.type === 'audio') { + const { duration: fileDuration } = await readAudioFileInfo(ffprobePath, layer.path); let { cutFrom, cutTo } = layer; // console.log({ cutFrom, cutTo, fileDuration, clipDuration }); @@ -197,22 +221,20 @@ export default async function parseConfig({ defaults: defaultsIn = {}, clips, ar cutTo = Math.min(cutTo, fileDuration); assert(cutFrom < cutTo, 'cutFrom must be lower than cutTo'); - const inputDuration = cutTo - cutFrom; + const layerDuration = cutTo - cutFrom; - const speedFactor = clipDuration / inputDuration; + const speedFactor = clipDuration / layerDuration; return { ...layer, cutFrom, cutTo, speedFactor }; } - if (type === 'video') { - const { inputDuration } = layer; - + if (layer.type === 'video') { let speedFactor; // If user explicitly specified duration for clip, it means that should be the output duration of the video if (userClipDuration) { // Later we will speed up or slow down video using this factor - speedFactor = userClipDuration / inputDuration; + speedFactor = userClipDuration / layerDuration; } else { speedFactor = 1; } @@ -222,15 +244,14 @@ export default async function parseConfig({ defaults: defaultsIn = {}, clips, ar // These audio tracks are detached from the clips (can run over multiple clips) // This is useful so we can have audio 
start relative to their parent clip's start time - if (type === 'detached-audio') { - const { cutFrom, cutTo, mixVolume } = layer; + if (layer.type === 'detached-audio') { if (!detachedAudioByClip[clipIndex]) detachedAudioByClip[clipIndex] = []; - detachedAudioByClip[clipIndex].push({ path, cutFrom, cutTo, mixVolume, start }); + detachedAudioByClip[clipIndex].push(layer); return undefined; // Will be filtered out } return layer; - }); + })).filter((l) => l !== undefined); // Filter out deleted layers layersOut = layersOut.filter((l) => l); @@ -243,7 +264,7 @@ export default async function parseConfig({ defaults: defaultsIn = {}, clips, ar }, { concurrency: 1 }); let totalClipDuration = 0; - const clipDetachedAudio = []; + const clipDetachedAudio: AudioTrack[] = []; // Need to map again because now we know all clip durations, and we can adjust transitions so they are safe clipsOut = await pMap(clipsOut, async (clip, i) => { diff --git a/sources/fabric.js b/sources/fabric.js index 9b19a00b..98ad32ee 100644 --- a/sources/fabric.js +++ b/sources/fabric.js @@ -2,8 +2,6 @@ import * as fabric from 'fabric/node'; import { createCanvas, ImageData } from 'canvas'; import { boxBlurImage } from '../BoxBlur.js'; -export { registerFont } from 'canvas'; - // Fabric is used as a fundament for compositing layers in editly export function canvasToRgba(ctx) { diff --git a/sources/fabric/fabricFrameSources.js b/sources/fabric/fabricFrameSources.js index cc70da9c..08fbdfda 100644 --- a/sources/fabric/fabricFrameSources.js +++ b/sources/fabric/fabricFrameSources.js @@ -379,14 +379,18 @@ async function getFadedObject({ object, progress }) { return fadedImage; } -export async function slideInTextFrameSource({ width, height, params: { position, text, fontSize = 0.05, charSpacing = 0.1, color = '#ffffff', fontFamily = defaultFontFamily } = {} }) { +export async function slideInTextFrameSource({ width, height, params: { position, text, fontSize = 0.05, charSpacing = 0.1, textColor = 
'#ffffff', color = undefined, fontFamily = defaultFontFamily } = {} }) { + if (color) { + console.warn('slide-in-text: color is deprecated, use textColor.'); + } + async function onRender(progress, canvas) { const fontSizeAbs = Math.round(width * fontSize); const { left, top, originX, originY } = getPositionProps({ position, width, height }); const textBox = new fabric.FabricText(text, { - fill: color, + fill: color ?? textColor, fontFamily, fontSize: fontSizeAbs, charSpacing: width * charSpacing, diff --git a/transitions.ts b/transitions.ts index 8e6a94c8..3f8b005c 100644 --- a/transitions.ts +++ b/transitions.ts @@ -33,7 +33,7 @@ const TransitionAliases: Record> = { 'directional-up': { name: 'directional', params: { direction: [0, -1] } }, } -export function calcTransition(defaults: Transition | undefined, transition: Transition, isLastClip: boolean) { +export function calcTransition(defaults: Transition | null | undefined, transition: Transition | null | undefined, isLastClip: boolean) { if (transition === null || isLastClip) return { duration: 0 }; let transitionOrDefault: Transition = { ...defaults, ...transition } diff --git a/util.ts b/util.ts index 3420c9bc..832a6d32 100644 --- a/util.ts +++ b/util.ts @@ -35,7 +35,9 @@ export async function readVideoFileInfo(ffprobePath: string, p: string) { const streams = await readFileStreams(ffprobePath, p); const stream = streams.find((s) => s.codec_type === 'video'); // TODO - if (!stream) return; // TODO[ts]: what's the right thing to do here? 
+ if (!stream) { + throw new Error(`Could not find a video stream in ${p}`); + } const duration = await readDuration(ffprobePath, p); @@ -167,7 +169,7 @@ export function getFrameByKeyFrames(keyframes: Keyframe[], progress: number) { export const isUrl = (path: string) => /^https?:\/\//.test(path); -export const assertFileValid = async (path: string, allowRemoteRequests: boolean) => { +export const assertFileValid = async (path: string, allowRemoteRequests?: boolean) => { if (isUrl(path)) { assert(allowRemoteRequests, 'Remote requests are not allowed'); return; @@ -176,6 +178,6 @@ export const assertFileValid = async (path: string, allowRemoteRequests: boolean }; // See #16 -export function checkTransition(transition?: Transition) { +export function checkTransition(transition?: Transition | null) { assert(transition == null || typeof transition === 'object', 'Transition must be an object'); } From 16f5cba4a1a6e89aea511c508f0edee3fff3c58f Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Wed, 15 Jan 2025 10:11:30 -0500 Subject: [PATCH 10/23] Convert sources/fabric.{js,ts} --- BoxBlur.d.ts | 10 ++++++ index.d.ts | 23 ++++++++----- sources/{fabric.js => fabric.ts} | 57 ++++++++++++++++++-------------- sources/glFrameSource.ts | 17 ++-------- types.ts | 16 +++++++++ 5 files changed, 75 insertions(+), 48 deletions(-) create mode 100644 BoxBlur.d.ts rename sources/{fabric.js => fabric.ts} (65%) diff --git a/BoxBlur.d.ts b/BoxBlur.d.ts new file mode 100644 index 00000000..84ab2193 --- /dev/null +++ b/BoxBlur.d.ts @@ -0,0 +1,10 @@ +import type { CanvasRenderingContext2D } from "canvas"; + +declare function boxBlurImage( + context: CanvasRenderingContext2D, + width: number, + height: number, + radius: number, + blurAlphaChannel: boolean, + iterations: number +); diff --git a/index.d.ts b/index.d.ts index 41cbc467..1af76eb0 100644 --- a/index.d.ts +++ b/index.d.ts @@ -1,4 +1,5 @@ import type * as Fabric from 'fabric/node'; +import type { Canvas } from "canvas" /** * Edit 
and render videos. @@ -691,21 +692,23 @@ declare namespace Editly { } - type OnRenderCallback = (progress: number, canvas: Fabric.Canvas) => OptionalPromise; - type OnCloseCallback = () => OptionalPromise; - - interface CustomFunctionCallbacks { - onRender: OnRenderCallback; - onClose?: OnCloseCallback; + interface CustomFabricFunctionCallbacks { + onRender: (progress: number, canvas: Fabric.Canvas) => OptionalPromise; + onClose?: () => OptionalPromise; } interface CustomCanvasFunctionArgs { width: number; height: number; - canvas: Fabric.Canvas; + canvas: Canvas; + } + + interface CustomCanvasFunctionCallbacks { + onRender: (progress: number) => OptionalPromise; + onClose?: () => OptionalPromise; } - type CustomCanvasFunction = (args: CustomCanvasFunctionArgs) => OptionalPromise; + type CustomCanvasFunction = (args: CustomCanvasFunctionArgs) => OptionalPromise; interface CanvasLayer extends BaseLayer { @@ -729,7 +732,7 @@ declare namespace Editly { params: any; } - type CustomFabricFunction = (args: CustomFabricFunctionArgs) => OptionalPromise; + type CustomFabricFunction = (args: CustomFabricFunctionArgs) => OptionalPromise; interface FabricLayer extends BaseLayer { @@ -767,6 +770,8 @@ declare namespace Editly { */ speed?: number; + vertexSrc?: string; + fragmentSrc?: string; } /** diff --git a/sources/fabric.js b/sources/fabric.ts similarity index 65% rename from sources/fabric.js rename to sources/fabric.ts index 98ad32ee..88780f73 100644 --- a/sources/fabric.js +++ b/sources/fabric.ts @@ -1,20 +1,14 @@ import * as fabric from 'fabric/node'; -import { createCanvas, ImageData } from 'canvas'; +import { type CanvasRenderingContext2D, createCanvas, ImageData } from 'canvas'; import { boxBlurImage } from '../BoxBlur.js'; +import type { CreateFrameSourceOptions } from '../types.js'; +import type { CanvasLayer, CustomFabricFunctionCallbacks, Layer } from '../index.js'; -// Fabric is used as a fundament for compositing layers in editly - -export function 
canvasToRgba(ctx) { - // const bgra = canvas.toBuffer('raw'); +export type FabricFrameSourceCallback = (options: CreateFrameSourceOptions & { fabric: typeof fabric }) => CustomFabricFunctionCallbacks; - /* const rgba = Buffer.allocUnsafe(bgra.length); - for (let i = 0; i < bgra.length; i += 4) { - rgba[i + 0] = bgra[i + 2]; - rgba[i + 1] = bgra[i + 1]; - rgba[i + 2] = bgra[i + 0]; - rgba[i + 3] = bgra[i + 3]; - } */ +// Fabric is used as a fundament for compositing layers in editly +export function canvasToRgba(ctx: CanvasRenderingContext2D) { // We cannot use toBuffer('raw') because it returns pre-multiplied alpha data (a different format) // https://gamedev.stackexchange.com/questions/138813/whats-the-difference-between-alpha-and-premulalpha // https://github.com/Automattic/node-canvas#image-pixel-formats-experimental @@ -22,7 +16,7 @@ export function canvasToRgba(ctx) { return Buffer.from(imageData.data); } -export function fabricCanvasToRgba(fabricCanvas) { +export function fabricCanvasToRgba(fabricCanvas: fabric.Canvas) { const internalCanvas = fabricCanvas.getNodeCanvas(); const ctx = internalCanvas.getContext('2d'); @@ -32,11 +26,11 @@ export function fabricCanvasToRgba(fabricCanvas) { return canvasToRgba(ctx); } -export function createFabricCanvas({ width, height }) { +export function createFabricCanvas({ width, height }: { width: number, height: number }) { return new fabric.StaticCanvas(null, { width, height }); } -export async function renderFabricCanvas(canvas) { +export async function renderFabricCanvas(canvas: fabric.Canvas) { // console.time('canvas.renderAll'); canvas.renderAll(); // console.timeEnd('canvas.renderAll'); @@ -46,7 +40,7 @@ export async function renderFabricCanvas(canvas) { return rgba; } -export function toUint8ClampedArray(buffer) { +export function toUint8ClampedArray(buffer: Buffer) { // return Uint8ClampedArray.from(buffer); // Some people are finding that manual copying is orders of magnitude faster than Uint8ClampedArray.from // 
Since I'm getting similar times for both methods, then why not: @@ -57,12 +51,12 @@ export function toUint8ClampedArray(buffer) { return data; } -export async function rgbaToFabricImage({ width, height, rgba }) { +export async function rgbaToFabricImage({ width, height, rgba }: { width: number, height: number, rgba: Buffer }) { const canvas = createCanvas(width, height); // FIXME: Fabric tries to add a class to this, but DOM is not defined. Because node? // https://github.com/fabricjs/fabric.js/issues/10032 - canvas.classList = new Set(); + (canvas as any).classList = new Set(); const ctx = canvas.getContext('2d'); // https://developer.mozilla.org/en-US/docs/Web/API/ImageData/ImageData @@ -72,10 +66,13 @@ export async function rgbaToFabricImage({ width, height, rgba }) { return new fabric.FabricImage(canvas); } -export async function createFabricFrameSource(func, { width, height, ...rest }) { - const onInit = async () => func(({ width, height, fabric, ...rest })); +export async function createFabricFrameSource( + func: FabricFrameSourceCallback, + options: CreateFrameSourceOptions +) { + const onInit = async () => func(({ fabric, ...options })); - const { onRender = () => {}, onClose = () => {} } = await onInit() || {}; + const { onRender = () => { }, onClose = () => { } } = await onInit() || {}; return { readNextFrame: onRender, @@ -83,13 +80,18 @@ export async function createFabricFrameSource(func, { width, height, ...rest }) }; } -export async function createCustomCanvasFrameSource({ width, height, params }) { +interface FrameSource { + readNextFrame(progress: number): Promise; + close(): Promise; +} + +export async function createCustomCanvasFrameSource({ width, height, params }: Pick, "width" | "height" | "params">) { const canvas = createCanvas(width, height); const context = canvas.getContext('2d'); const { onClose, onRender } = await params.func(({ width, height, canvas })); - async function readNextFrame(progress) { + async function readNextFrame(progress: 
number) { context.clearRect(0, 0, canvas.width, canvas.height); await onRender(progress); // require('fs').writeFileSync(`${new Date().getTime()}.png`, canvas.toBuffer('image/png')); @@ -103,9 +105,14 @@ export async function createCustomCanvasFrameSource({ width, height, params }) { close: onClose, }; } +export type BlurImageOptions = { + mutableImg: fabric.FabricImage, + width: number, + height: number, +} -export async function blurImage({ mutableImg, width, height }) { - mutableImg.setOptions({ scaleX: width / mutableImg.width, scaleY: height / mutableImg.height }); +export async function blurImage({ mutableImg, width, height }: BlurImageOptions) { + mutableImg.set({ scaleX: width / mutableImg.width, scaleY: height / mutableImg.height }); const canvas = mutableImg.toCanvasElement(); const ctx = canvas.getContext('2d'); diff --git a/sources/glFrameSource.ts b/sources/glFrameSource.ts index fc3d074a..e65ea613 100644 --- a/sources/glFrameSource.ts +++ b/sources/glFrameSource.ts @@ -1,23 +1,12 @@ import GL from 'gl'; import createShader from 'gl-shader'; import { readFile } from 'node:fs/promises'; +import type { CreateFrameSourceOptions } from '../types.js'; +import type { GlLayer } from '../index.js'; // I have no idea what I'm doing but it works ¯\_(ツ)_/¯ -export type CreateGlFrameSourceOptions = { - width: number; - height: number; - channels: number; - params: { - vertexPath?: string; - fragmentPath?: string; - vertexSrc?: string; - fragmentSrc?: string; - speed?: number; - }; -} - -export default async function createGlFrameSource({ width, height, channels, params }: CreateGlFrameSourceOptions) { +export default async function createGlFrameSource({ width, height, channels, params }: CreateFrameSourceOptions) { const gl = GL(width, height); const defaultVertexSrc = ` diff --git a/types.ts b/types.ts index cd15170f..5ef45e2c 100644 --- a/types.ts +++ b/types.ts @@ -1,6 +1,8 @@ // Types used internally and not exposed through any external interfaces. 
// TODO[ts]: Move these elsewhere +import { Layer } from "./index.js"; + export type Stream = { codec_type: string; r_frame_rate: string; @@ -18,3 +20,17 @@ export type Keyframe = { t: number; props: Record; }; + +export type CreateFrameSourceOptions = { + ffmpegPath: string; + ffprobePath: string; + width: number, + height: number, + duration: number, + channels: number, + verbose: boolean, + logTimes: boolean, + enableFfmpegLog: boolean, + framerateStr: string, + params: T, +} From 0de155f92bc73b4cbaccee2892fbad27c3e276af Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Wed, 15 Jan 2025 12:13:44 -0500 Subject: [PATCH 11/23] Convert sources/videoFrameSource.{js,ts} --- parseConfig.ts | 16 ++-------- ...ideoFrameSource.js => videoFrameSource.ts} | 29 ++++++++++--------- types.ts | 18 ++++++++++-- 3 files changed, 33 insertions(+), 30 deletions(-) rename sources/{videoFrameSource.js => videoFrameSource.ts} (89%) diff --git a/parseConfig.ts b/parseConfig.ts index 148b16ce..b74c7fc8 100644 --- a/parseConfig.ts +++ b/parseConfig.ts @@ -12,20 +12,8 @@ import { } from './util.js'; import { registerFont } from 'canvas'; import { calcTransition } from './transitions.js'; -import type { AudioTrack, CanvasLayer, Config, EditlyBannerLayer, FabricLayer, GlLayer, ImageLayer, ImageOverlayLayer, Layer, LinearGradientLayer, NewsTitleLayer, SlideInTextLayer, SubtitleLayer, TitleBackgroundLayer, TitleLayer, VideoLayer } from './index.js'; - -type LayerDuration = T & { - layerDuration: number; -} - -type ProcessedVideoLayer = LayerDuration & { - framerateStr: string; - inputWidth: number; - inputHeight: number; - speedFactor: number; -} - -type ProcessedLayer = LayerDuration> | ProcessedVideoLayer; +import type { AudioTrack, CanvasLayer, Config, EditlyBannerLayer, FabricLayer, GlLayer, ImageLayer, ImageOverlayLayer, Layer, LinearGradientLayer, NewsTitleLayer, SlideInTextLayer, SubtitleLayer, TitleBackgroundLayer, TitleLayer } from './index.js'; +import type { ProcessedVideoLayer, 
LayerDuration } from "./types.js" const dirname = fileURLToPath(new URL('.', import.meta.url)); diff --git a/sources/videoFrameSource.js b/sources/videoFrameSource.ts similarity index 89% rename from sources/videoFrameSource.js rename to sources/videoFrameSource.ts index 57b8bdb3..0a6ce9f3 100644 --- a/sources/videoFrameSource.js +++ b/sources/videoFrameSource.ts @@ -8,8 +8,9 @@ import { rgbaToFabricImage, blurImage, } from './fabric.js'; +import type { CreateFrameSourceOptions, ProcessedVideoLayer } from '../types.js'; -export default async ({ width: canvasWidth, height: canvasHeight, channels, framerateStr, verbose, logTimes, ffmpegPath, ffprobePath, enableFfmpegLog, params }) => { +export default async ({ width: canvasWidth, height: canvasHeight, channels, framerateStr, verbose, logTimes, ffmpegPath, ffprobePath, enableFfmpegLog, params }: CreateFrameSourceOptions) => { const { path, cutFrom, cutTo, resizeMode = 'contain-blur', speedFactor, inputWidth, inputHeight, width: requestedWidthRel, height: requestedHeightRel, left: leftRel = 0, top: topRel = 0, originX = 'left', originY = 'top', fabricImagePostProcessing = null } = params; const requestedWidth = requestedWidthRel ? 
Math.round(requestedWidthRel * canvasWidth) : canvasWidth; @@ -77,8 +78,8 @@ export default async ({ width: canvasWidth, height: canvasHeight, channels, fram // https://superuser.com/a/1116905/658247 let inputCodec; - if (firstVideoStream.codec_name === 'vp8') inputCodec = 'libvpx'; - else if (firstVideoStream.codec_name === 'vp9') inputCodec = 'libvpx-vp9'; + if (firstVideoStream?.codec_name === 'vp8') inputCodec = 'libvpx'; + else if (firstVideoStream?.codec_name === 'vp9') inputCodec = 'libvpx-vp9'; // http://zulko.github.io/blog/2013/09/27/read-and-write-video-frames-in-python-using-ffmpeg/ // Testing: ffmpeg -i 'vid.mov' -t 1 -vcodec rawvideo -pix_fmt rgba -f image2pipe - | ffmpeg -f rawvideo -vcodec rawvideo -pix_fmt rgba -s 2166x1650 -i - -vf format=yuv420p -vcodec libx264 -y out.mp4 @@ -86,9 +87,9 @@ export default async ({ width: canvasWidth, height: canvasHeight, channels, fram const args = [ ...getFfmpegCommonArgs({ enableFfmpegLog }), ...(inputCodec ? ['-vcodec', inputCodec] : []), - ...(cutFrom ? ['-ss', cutFrom] : []), + ...(cutFrom ? ['-ss', cutFrom.toString()] : []), '-i', path, - ...(cutTo ? ['-t', (cutTo - cutFrom) * speedFactor] : []), + ...(cutTo ? ['-t', ((cutTo - cutFrom!) 
* speedFactor).toString()] : []), '-vf', `${ptsFilter}fps=${framerateStr},${scaleFilter}`, '-map', 'v:0', '-vcodec', 'rawvideo', @@ -100,9 +101,9 @@ export default async ({ width: canvasWidth, height: canvasHeight, channels, fram const ps = execa(ffmpegPath, args, { encoding: null, buffer: false, stdin: 'ignore', stdout: 'pipe', stderr: process.stderr }); - const stream = ps.stdout; + const stream = ps.stdout!; - let timeout; + let timeout: NodeJS.Timeout; let ended = false; stream.once('end', () => { @@ -124,8 +125,8 @@ export default async ({ width: canvasWidth, height: canvasHeight, channels, fram return null; } - async function readNextFrame(progress, canvas, time) { - const rgba = await new Promise((resolve, reject) => { + async function readNextFrame(progress: number, canvas: fabric.Canvas, time: number) { + const rgba = await new Promise | void>((resolve, reject) => { const frame = getNextFrame(); if (frame) { resolve(frame); @@ -151,7 +152,7 @@ export default async ({ width: canvasWidth, height: canvasHeight, channels, fram stream.removeListener('error', reject); } - function handleChunk(chunk) { + function handleChunk(chunk: Buffer) { const nCopied = Math.min(buf.length - length, chunk.length); chunk.copy(buf, length, 0, nCopied); length += nCopied; @@ -197,7 +198,7 @@ export default async ({ width: canvasWidth, height: canvasHeight, channels, fram const img = await rgbaToFabricImage({ width: targetWidth, height: targetHeight, rgba }); if (logTimes) console.timeEnd('rgbaToFabricImage'); - img.setOptions({ + img.set({ originX, originY, }); @@ -211,15 +212,15 @@ export default async ({ width: canvasWidth, height: canvasHeight, channels, fram centerOffsetY = (dirY * (requestedHeight - targetHeight)) / 2; } - img.setOptions({ + img.set({ left: left + centerOffsetX, top: top + centerOffsetY, }); if (resizeMode === 'contain-blur') { - const mutableImg = img.cloneAsImage(); + const mutableImg = img.cloneAsImage({}); const blurredImg = await blurImage({ 
mutableImg, width: requestedWidth, height: requestedHeight }); - blurredImg.setOptions({ + blurredImg.set({ left, top, originX, diff --git a/types.ts b/types.ts index 5ef45e2c..77179b29 100644 --- a/types.ts +++ b/types.ts @@ -1,10 +1,11 @@ // Types used internally and not exposed through any external interfaces. // TODO[ts]: Move these elsewhere -import { Layer } from "./index.js"; +import { Layer, VideoLayer } from "./index.js"; export type Stream = { codec_type: string; + codec_name: string; r_frame_rate: string; width?: number; height?: number; @@ -33,4 +34,17 @@ export type CreateFrameSourceOptions = { enableFfmpegLog: boolean, framerateStr: string, params: T, -} +}; + +export type LayerDuration = T & { + layerDuration: number; +}; + +export type ProcessedLayer = LayerDuration> | ProcessedVideoLayer; + +export type ProcessedVideoLayer = LayerDuration & { + framerateStr: string; + inputWidth: number; + inputHeight: number; + speedFactor: number; +}; From 4fd3fed2571d9972206d137307c0da9eaf2ba078 Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Wed, 15 Jan 2025 15:17:17 -0500 Subject: [PATCH 12/23] Convert sources/fabricFrameSources.{js,ts} --- index.d.ts | 14 ++-- sources/fabric.ts | 3 +- ...cFrameSources.js => fabricFrameSources.ts} | 69 ++++++++++--------- util.ts | 7 +- 4 files changed, 48 insertions(+), 45 deletions(-) rename sources/fabric/{fabricFrameSources.js => fabricFrameSources.ts} (80%) diff --git a/index.d.ts b/index.d.ts index 1af76eb0..ad15364b 100644 --- a/index.d.ts +++ b/index.d.ts @@ -13,15 +13,9 @@ declare namespace Editly { /** Little utility */ type OptionalPromise = Promise | T; - type OriginX = - 'left' | - 'center' | - 'right'; + type OriginX = Fabric.TOriginX; - type OriginY = - 'top' | - 'center' | - 'bottom'; + type OriginY = Fabric.TOriginY; /** * How to fit image to screen. 
Can be one of: @@ -553,6 +547,8 @@ declare namespace Editly { */ backgroundColor?: string; + delay: number; + speed: number; } /** @@ -591,6 +587,8 @@ declare namespace Editly { */ position?: Position; + delay: number; + speed: number; } interface SlideInTextLayer extends TextLayer { diff --git a/sources/fabric.ts b/sources/fabric.ts index 88780f73..9af9a870 100644 --- a/sources/fabric.ts +++ b/sources/fabric.ts @@ -4,7 +4,8 @@ import { boxBlurImage } from '../BoxBlur.js'; import type { CreateFrameSourceOptions } from '../types.js'; import type { CanvasLayer, CustomFabricFunctionCallbacks, Layer } from '../index.js'; -export type FabricFrameSourceCallback = (options: CreateFrameSourceOptions & { fabric: typeof fabric }) => CustomFabricFunctionCallbacks; +export type FabricFrameSourceOptions = CreateFrameSourceOptions & { fabric: typeof fabric }; +export type FabricFrameSourceCallback = (options: FabricFrameSourceOptions) => CustomFabricFunctionCallbacks; // Fabric is used as a fundament for compositing layers in editly diff --git a/sources/fabric/fabricFrameSources.js b/sources/fabric/fabricFrameSources.ts similarity index 80% rename from sources/fabric/fabricFrameSources.js rename to sources/fabric/fabricFrameSources.ts index 08fbdfda..802fdf18 100644 --- a/sources/fabric/fabricFrameSources.js +++ b/sources/fabric/fabricFrameSources.ts @@ -4,15 +4,16 @@ import fileUrl from 'file-url'; import { getRandomGradient, getRandomColors } from '../../colors.js'; import { easeOutExpo, easeInOutCubic } from '../../transitions.js'; import { getPositionProps, getFrameByKeyFrames, isUrl } from '../../util.js'; -import { blurImage } from '../fabric.js'; +import { blurImage, type FabricFrameSourceOptions } from '../fabric.js'; +import type { FillColorLayer, ImageLayer, ImageOverlayLayer, KenBurns, LinearGradientLayer, NewsTitleLayer, RadialGradientLayer, SlideInTextLayer, SubtitleLayer, TitleLayer } from '../../index.js'; // http://fabricjs.com/kitchensink const defaultFontFamily 
= 'sans-serif'; -const loadImage = (pathOrUrl) => fabric.util.loadImage(isUrl(pathOrUrl) ? pathOrUrl : fileUrl(pathOrUrl)); +const loadImage = (pathOrUrl: string) => fabric.util.loadImage(isUrl(pathOrUrl) ? pathOrUrl : fileUrl(pathOrUrl)); -function getZoomParams({ progress, zoomDirection, zoomAmount }) { +function getZoomParams({ progress, zoomDirection, zoomAmount = 0.1 }: KenBurns & { progress: number }) { let scaleFactor = 1; if (zoomDirection === 'left' || zoomDirection === 'right') return 1.3 + zoomAmount; if (zoomDirection === 'in') scaleFactor = (1 + zoomAmount * progress); @@ -20,7 +21,7 @@ function getZoomParams({ progress, zoomDirection, zoomAmount }) { return scaleFactor; } -function getTranslationParams({ progress, zoomDirection, zoomAmount }) { +function getTranslationParams({ progress, zoomDirection, zoomAmount = 0.1 }: KenBurns & { progress: number }) { let translation = 0; const range = zoomAmount * 1000; @@ -30,7 +31,7 @@ function getTranslationParams({ progress, zoomDirection, zoomAmount }) { return translation; } -export async function imageFrameSource({ verbose, params, width, height }) { +export async function imageFrameSource({ verbose, params, width, height }: FabricFrameSourceOptions) { const { path, zoomDirection = 'in', zoomAmount = 0.1, resizeMode = 'contain-blur' } = params; if (verbose) console.log('Loading', path); @@ -44,7 +45,7 @@ export async function imageFrameSource({ verbose, params, width, height }) { top: height / 2, }); - let blurredImg; + let blurredImg: fabric.FabricImage; // Blurred version if (resizeMode === 'contain-blur') { // If we dispose mutableImg, seems to cause issues with the rendering of blurredImg @@ -53,7 +54,7 @@ export async function imageFrameSource({ verbose, params, width, height }) { blurredImg = await blurImage({ mutableImg, width, height }); } - async function onRender(progress, canvas) { + async function onRender(progress: number, canvas: fabric.Canvas) { const img = createImg(); const scaleFactor = 
getZoomParams({ progress, zoomDirection, zoomAmount }); @@ -77,7 +78,7 @@ export async function imageFrameSource({ verbose, params, width, height }) { img.scaleToHeight(height * scaleFactor); } } else if (resizeMode === 'stretch') { - img.setOptions({ scaleX: (width / img.width) * scaleFactor, scaleY: (height / img.height) * scaleFactor }); + img.set({ scaleX: (width / img.width) * scaleFactor, scaleY: (height / img.height) * scaleFactor }); } if (blurredImg) canvas.add(blurredImg); @@ -92,12 +93,12 @@ export async function imageFrameSource({ verbose, params, width, height }) { return { onRender, onClose }; } -export async function fillColorFrameSource({ params, width, height }) { +export async function fillColorFrameSource({ params, width, height }: FabricFrameSourceOptions) { const { color } = params; const randomColor = getRandomColors(1)[0]; - async function onRender(progress, canvas) { + async function onRender(progress: number, canvas: fabric.Canvas) { const rect = new fabric.Rect({ left: 0, right: 0, @@ -111,17 +112,17 @@ export async function fillColorFrameSource({ params, width, height }) { return { onRender }; } -function getRekt(width, height) { +function getRekt(width: number, height: number) { // width and height with room to rotate return new fabric.Rect({ originX: 'center', originY: 'center', left: width / 2, top: height / 2, width: width * 2, height: height * 2 }); } -export async function radialGradientFrameSource({ width, height, params }) { +export async function radialGradientFrameSource({ width, height, params }: FabricFrameSourceOptions) { const { colors: inColors } = params; const randomColors = getRandomGradient(); - async function onRender(progress, canvas) { + async function onRender(progress: number, canvas: fabric.Canvas) { // console.log('progress', progress); const max = Math.max(width, height); @@ -158,13 +159,13 @@ export async function radialGradientFrameSource({ width, height, params }) { return { onRender }; } -export async 
function linearGradientFrameSource({ width, height, params }) { +export async function linearGradientFrameSource({ width, height, params }: FabricFrameSourceOptions) { const { colors: inColors } = params; const randomColors = getRandomGradient(); const colors = inColors && inColors.length === 2 ? inColors : randomColors; - async function onRender(progress, canvas) { + async function onRender(progress: number, canvas: fabric.Canvas) { const rect = getRekt(width, height); rect.set('fill', new fabric.Gradient({ @@ -187,9 +188,9 @@ export async function linearGradientFrameSource({ width, height, params }) { return { onRender }; } -export async function subtitleFrameSource({ width, height, params }) { +export async function subtitleFrameSource({ width, height, params }: FabricFrameSourceOptions) { const { text, textColor = '#ffffff', backgroundColor = 'rgba(0,0,0,0.3)', fontFamily = defaultFontFamily, delay = 0, speed = 1 } = params; - async function onRender(progress, canvas) { + async function onRender(progress: number, canvas: fabric.Canvas) { const easedProgress = easeOutExpo(Math.max(0, Math.min((progress - delay) * speed, 1))); const min = Math.min(width, height); @@ -226,7 +227,7 @@ export async function subtitleFrameSource({ width, height, params }) { return { onRender }; } -export async function imageOverlayFrameSource({ params, width, height }) { +export async function imageOverlayFrameSource({ params, width, height }: FabricFrameSourceOptions) { const { path, position, width: relWidth, height: relHeight, zoomDirection, zoomAmount = 0.1 } = params; const imgData = await loadImage(path); @@ -240,7 +241,7 @@ export async function imageOverlayFrameSource({ params, width, height }) { top, }); - async function onRender(progress, canvas) { + async function onRender(progress: number, canvas: fabric.Canvas) { const scaleFactor = getZoomParams({ progress, zoomDirection, zoomAmount }); const translationParams = getTranslationParams({ progress, zoomDirection, zoomAmount 
}); @@ -261,10 +262,10 @@ export async function imageOverlayFrameSource({ params, width, height }) { return { onRender }; } -export async function titleFrameSource({ width, height, params }) { +export async function titleFrameSource({ width, height, params }: FabricFrameSourceOptions) { const { text, textColor = '#ffffff', fontFamily = defaultFontFamily, position = 'center', zoomDirection = 'in', zoomAmount = 0.2 } = params; - async function onRender(progress, canvas) { + async function onRender(progress: number, canvas: fabric.Canvas) { // console.log('progress', progress); const min = Math.min(width, height); @@ -284,7 +285,7 @@ export async function titleFrameSource({ width, height, params }) { }); // We need the text as an image in order to scale it - const textImage = textBox.cloneAsImage(); + const textImage = textBox.cloneAsImage({}); const { left, top, originX, originY } = getPositionProps({ position, width, height }); @@ -302,10 +303,10 @@ export async function titleFrameSource({ width, height, params }) { return { onRender }; } -export async function newsTitleFrameSource({ width, height, params }) { +export async function newsTitleFrameSource({ width, height, params }: FabricFrameSourceOptions) { const { text, textColor = '#ffffff', backgroundColor = '#d02a42', fontFamily = defaultFontFamily, delay = 0, speed = 1 } = params; - async function onRender(progress, canvas) { + async function onRender(progress: number, canvas: fabric.Canvas) { const min = Math.min(width, height); const fontSize = Math.round(min * 0.05); @@ -345,7 +346,7 @@ export async function newsTitleFrameSource({ width, height, params }) { return { onRender }; } -async function getFadedObject({ object, progress }) { +async function getFadedObject({ object, progress }: { object: T, progress: number }) { const rect = new fabric.Rect({ left: 0, width: object.width, @@ -366,10 +367,10 @@ async function getFadedObject({ object, progress }) { ], })); - const gradientMaskImg = rect.cloneAsImage(); 
- const fadedImage = object.cloneAsImage(); + const gradientMaskImg = rect.cloneAsImage({}); + const fadedImage = object.cloneAsImage({}); - fadedImage.filters.push(new fabric.FabricImage.filters.BlendImage({ + fadedImage.filters.push(new fabric.filters.BlendImage({ image: gradientMaskImg, mode: 'multiply', })); @@ -379,12 +380,14 @@ async function getFadedObject({ object, progress }) { return fadedImage; } -export async function slideInTextFrameSource({ width, height, params: { position, text, fontSize = 0.05, charSpacing = 0.1, textColor = '#ffffff', color = undefined, fontFamily = defaultFontFamily } = {} }) { +export async function slideInTextFrameSource({ width, height, params }: FabricFrameSourceOptions) { + const { position, text, fontSize = 0.05, charSpacing = 0.1, textColor = '#ffffff', color = undefined, fontFamily = defaultFontFamily } = params; + if (color) { console.warn('slide-in-text: color is deprecated, use textColor.'); } - async function onRender(progress, canvas) { + async function onRender(progress: number, canvas: fabric.Canvas) { const fontSizeAbs = Math.round(width * fontSize); const { left, top, originX, originY } = getPositionProps({ position, width, height }); @@ -404,7 +407,7 @@ export async function slideInTextFrameSource({ width, height, params: { position ], progress); const fadedObject = await getFadedObject({ object: textBox, progress: easeInOutCubic(textSlide) }); - fadedObject.setOptions({ + fadedObject.set({ originX, originY, top, @@ -418,6 +421,6 @@ export async function slideInTextFrameSource({ width, height, params: { position return { onRender }; } -export async function customFabricFrameSource({ canvas, width, height, params }) { - return params.func(({ width, height, fabric, canvas, params })); +export async function customFabricFrameSource({ width, height, fabric, params, ...other }: FabricFrameSourceOptions) { + return params.func(({ width, height, fabric, params })); } diff --git a/util.ts b/util.ts index 
832a6d32..fa7be3c4 100644 --- a/util.ts +++ b/util.ts @@ -5,6 +5,7 @@ import { pathExists } from 'fs-extra'; import type { Keyframe, Stream } from './types.js'; import type { Position, PositionObject, Transition } from './index.js'; +import type { TOriginX, TOriginY } from 'fabric'; export function parseFps(fps?: string) { const match = typeof fps === 'string' && fps.match(/^([0-9]+)\/([0-9]+)$/); @@ -78,9 +79,9 @@ export function toArrayInteger(buffer: Buffer) { // x264 requires multiple of 2 export const multipleOf2 = (x: number) => Math.round(x / 2) * 2; -export function getPositionProps({ position, width, height }: { position: Position | PositionObject, width: number, height: number }) { - let originY = 'center'; - let originX = 'center'; +export function getPositionProps({ position, width, height }: { position?: Position | PositionObject, width: number, height: number }) { + let originY: TOriginY = 'center'; + let originX: TOriginX = 'center'; let top = height / 2; let left = width / 2; const margin = 0.05; From a33fcd1578b3fbe047458ab57de12da5d53c7b1e Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Wed, 15 Jan 2025 17:57:54 -0500 Subject: [PATCH 13/23] Convert frameSource.{js,ts} --- index.d.ts | 5 +-- sources/fabric.ts | 21 ++++------ sources/fabric/fabricFrameSources.ts | 22 +++++----- sources/{frameSource.js => frameSource.ts} | 48 +++++++++++++++------- sources/glFrameSource.ts | 4 +- sources/videoFrameSource.ts | 2 +- types.ts | 12 +++++- 7 files changed, 67 insertions(+), 47 deletions(-) rename sources/{frameSource.js => frameSource.ts} (71%) diff --git a/index.d.ts b/index.d.ts index ad15364b..b8f9b407 100644 --- a/index.d.ts +++ b/index.d.ts @@ -303,7 +303,7 @@ declare namespace Editly { } interface VideoPostProcessingFunctionArgs { - canvas: Fabric.Canvas; + canvas: Fabric.StaticCanvas; image: Fabric.FabricImage; fabric: typeof Fabric, progress: number; @@ -691,7 +691,7 @@ declare namespace Editly { } interface CustomFabricFunctionCallbacks { - 
onRender: (progress: number, canvas: Fabric.Canvas) => OptionalPromise; + onRender: (progress: number, canvas: Fabric.StaticCanvas) => OptionalPromise; onClose?: () => OptionalPromise; } @@ -726,7 +726,6 @@ declare namespace Editly { width: number; height: number; fabric: typeof Fabric; - canvas: Fabric.Canvas; params: any; } diff --git a/sources/fabric.ts b/sources/fabric.ts index 9af9a870..0c47b9bd 100644 --- a/sources/fabric.ts +++ b/sources/fabric.ts @@ -1,11 +1,11 @@ import * as fabric from 'fabric/node'; import { type CanvasRenderingContext2D, createCanvas, ImageData } from 'canvas'; import { boxBlurImage } from '../BoxBlur.js'; -import type { CreateFrameSourceOptions } from '../types.js'; +import type { CreateFrameSourceOptions, FrameSource } from '../types.js'; import type { CanvasLayer, CustomFabricFunctionCallbacks, Layer } from '../index.js'; export type FabricFrameSourceOptions = CreateFrameSourceOptions & { fabric: typeof fabric }; -export type FabricFrameSourceCallback = (options: FabricFrameSourceOptions) => CustomFabricFunctionCallbacks; +export type FabricFrameSourceCallback = (options: FabricFrameSourceOptions) => Promise; // Fabric is used as a fundament for compositing layers in editly @@ -17,7 +17,7 @@ export function canvasToRgba(ctx: CanvasRenderingContext2D) { return Buffer.from(imageData.data); } -export function fabricCanvasToRgba(fabricCanvas: fabric.Canvas) { +export function fabricCanvasToRgba(fabricCanvas: fabric.StaticCanvas) { const internalCanvas = fabricCanvas.getNodeCanvas(); const ctx = internalCanvas.getContext('2d'); @@ -31,7 +31,7 @@ export function createFabricCanvas({ width, height }: { width: number, height: n return new fabric.StaticCanvas(null, { width, height }); } -export async function renderFabricCanvas(canvas: fabric.Canvas) { +export async function renderFabricCanvas(canvas: fabric.StaticCanvas) { // console.time('canvas.renderAll'); canvas.renderAll(); // console.timeEnd('canvas.renderAll'); @@ -70,10 +70,8 @@ 
export async function rgbaToFabricImage({ width, height, rgba }: { width: number export async function createFabricFrameSource( func: FabricFrameSourceCallback, options: CreateFrameSourceOptions -) { - const onInit = async () => func(({ fabric, ...options })); - - const { onRender = () => { }, onClose = () => { } } = await onInit() || {}; +): Promise { + const { onRender = () => { }, onClose = () => { } } = await func({ fabric, ...options }) || {}; return { readNextFrame: onRender, @@ -81,12 +79,7 @@ export async function createFabricFrameSource( }; } -interface FrameSource { - readNextFrame(progress: number): Promise; - close(): Promise; -} - -export async function createCustomCanvasFrameSource({ width, height, params }: Pick, "width" | "height" | "params">) { +export async function createCustomCanvasFrameSource({ width, height, params }: Pick, "width" | "height" | "params">): Promise { const canvas = createCanvas(width, height); const context = canvas.getContext('2d'); diff --git a/sources/fabric/fabricFrameSources.ts b/sources/fabric/fabricFrameSources.ts index 802fdf18..66e7719b 100644 --- a/sources/fabric/fabricFrameSources.ts +++ b/sources/fabric/fabricFrameSources.ts @@ -5,7 +5,7 @@ import { getRandomGradient, getRandomColors } from '../../colors.js'; import { easeOutExpo, easeInOutCubic } from '../../transitions.js'; import { getPositionProps, getFrameByKeyFrames, isUrl } from '../../util.js'; import { blurImage, type FabricFrameSourceOptions } from '../fabric.js'; -import type { FillColorLayer, ImageLayer, ImageOverlayLayer, KenBurns, LinearGradientLayer, NewsTitleLayer, RadialGradientLayer, SlideInTextLayer, SubtitleLayer, TitleLayer } from '../../index.js'; +import type { FabricLayer, FillColorLayer, ImageLayer, ImageOverlayLayer, KenBurns, LinearGradientLayer, NewsTitleLayer, RadialGradientLayer, SlideInTextLayer, SubtitleLayer, TitleLayer } from '../../index.js'; // http://fabricjs.com/kitchensink @@ -54,7 +54,7 @@ export async function 
imageFrameSource({ verbose, params, width, height }: Fabri blurredImg = await blurImage({ mutableImg, width, height }); } - async function onRender(progress: number, canvas: fabric.Canvas) { + async function onRender(progress: number, canvas: fabric.StaticCanvas) { const img = createImg(); const scaleFactor = getZoomParams({ progress, zoomDirection, zoomAmount }); @@ -98,7 +98,7 @@ export async function fillColorFrameSource({ params, width, height }: FabricFram const randomColor = getRandomColors(1)[0]; - async function onRender(progress: number, canvas: fabric.Canvas) { + async function onRender(progress: number, canvas: fabric.StaticCanvas) { const rect = new fabric.Rect({ left: 0, right: 0, @@ -122,7 +122,7 @@ export async function radialGradientFrameSource({ width, height, params }: Fabri const randomColors = getRandomGradient(); - async function onRender(progress: number, canvas: fabric.Canvas) { + async function onRender(progress: number, canvas: fabric.StaticCanvas) { // console.log('progress', progress); const max = Math.max(width, height); @@ -165,7 +165,7 @@ export async function linearGradientFrameSource({ width, height, params }: Fabri const randomColors = getRandomGradient(); const colors = inColors && inColors.length === 2 ? 
inColors : randomColors; - async function onRender(progress: number, canvas: fabric.Canvas) { + async function onRender(progress: number, canvas: fabric.StaticCanvas) { const rect = getRekt(width, height); rect.set('fill', new fabric.Gradient({ @@ -190,7 +190,7 @@ export async function linearGradientFrameSource({ width, height, params }: Fabri export async function subtitleFrameSource({ width, height, params }: FabricFrameSourceOptions) { const { text, textColor = '#ffffff', backgroundColor = 'rgba(0,0,0,0.3)', fontFamily = defaultFontFamily, delay = 0, speed = 1 } = params; - async function onRender(progress: number, canvas: fabric.Canvas) { + async function onRender(progress: number, canvas: fabric.StaticCanvas) { const easedProgress = easeOutExpo(Math.max(0, Math.min((progress - delay) * speed, 1))); const min = Math.min(width, height); @@ -241,7 +241,7 @@ export async function imageOverlayFrameSource({ params, width, height }: FabricF top, }); - async function onRender(progress: number, canvas: fabric.Canvas) { + async function onRender(progress: number, canvas: fabric.StaticCanvas) { const scaleFactor = getZoomParams({ progress, zoomDirection, zoomAmount }); const translationParams = getTranslationParams({ progress, zoomDirection, zoomAmount }); @@ -265,7 +265,7 @@ export async function imageOverlayFrameSource({ params, width, height }: FabricF export async function titleFrameSource({ width, height, params }: FabricFrameSourceOptions) { const { text, textColor = '#ffffff', fontFamily = defaultFontFamily, position = 'center', zoomDirection = 'in', zoomAmount = 0.2 } = params; - async function onRender(progress: number, canvas: fabric.Canvas) { + async function onRender(progress: number, canvas: fabric.StaticCanvas) { // console.log('progress', progress); const min = Math.min(width, height); @@ -306,7 +306,7 @@ export async function titleFrameSource({ width, height, params }: FabricFrameSou export async function newsTitleFrameSource({ width, height, params }: 
FabricFrameSourceOptions) { const { text, textColor = '#ffffff', backgroundColor = '#d02a42', fontFamily = defaultFontFamily, delay = 0, speed = 1 } = params; - async function onRender(progress: number, canvas: fabric.Canvas) { + async function onRender(progress: number, canvas: fabric.StaticCanvas) { const min = Math.min(width, height); const fontSize = Math.round(min * 0.05); @@ -387,7 +387,7 @@ export async function slideInTextFrameSource({ width, height, params }: FabricFr console.warn('slide-in-text: color is deprecated, use textColor.'); } - async function onRender(progress: number, canvas: fabric.Canvas) { + async function onRender(progress: number, canvas: fabric.StaticCanvas) { const fontSizeAbs = Math.round(width * fontSize); const { left, top, originX, originY } = getPositionProps({ position, width, height }); @@ -421,6 +421,6 @@ export async function slideInTextFrameSource({ width, height, params }: FabricFr return { onRender }; } -export async function customFabricFrameSource({ width, height, fabric, params, ...other }: FabricFrameSourceOptions) { +export async function customFabricFrameSource({ width, height, fabric, params }: FabricFrameSourceOptions) { return params.func(({ width, height, fabric, params })); } diff --git a/sources/frameSource.js b/sources/frameSource.ts similarity index 71% rename from sources/frameSource.js rename to sources/frameSource.ts index 8a13607e..ae01fa91 100644 --- a/sources/frameSource.js +++ b/sources/frameSource.ts @@ -7,6 +7,7 @@ import { createFabricFrameSource, createFabricCanvas, renderFabricCanvas, + type FabricFrameSourceCallback, } from './fabric.js'; import { customFabricFrameSource, @@ -22,8 +23,10 @@ import { } from './fabric/fabricFrameSources.js'; import createVideoFrameSource from './videoFrameSource.js'; import createGlFrameSource from './glFrameSource.js'; +import type { CreateFrameSource, CreateFrameSourceOptions, LayerDuration } from '../types.js'; +import type { Clip, Layer } from '../index.js'; 
-const fabricFrameSources = { +const fabricFrameSources: Record> = { fabric: customFabricFrameSource, image: imageFrameSource, 'image-overlay': imageOverlayFrameSource, @@ -36,39 +39,56 @@ const fabricFrameSources = { 'slide-in-text': slideInTextFrameSource, }; -export async function createFrameSource({ clip, clipIndex, width, height, channels, verbose, logTimes, ffmpegPath, ffprobePath, enableFfmpegLog, framerateStr }) { +const frameSources: Record> = { + video: createVideoFrameSource, + gl: createGlFrameSource, + canvas: createCustomCanvasFrameSource, +}; + +type FrameSourceOptions = { + clip: Clip; + clipIndex: number; + ffmpegPath: string; + ffprobePath: string; + width: number, + height: number, + duration: number, + channels: number, + verbose: boolean, + logTimes: boolean, + enableFfmpegLog: boolean, + framerateStr: string, +} + +export async function createFrameSource({ clip, clipIndex, width, height, channels, verbose, logTimes, ffmpegPath, ffprobePath, enableFfmpegLog, framerateStr }: FrameSourceOptions) { const { layers, duration } = clip; - const visualLayers = layers.filter((layer) => layer.type !== 'audio'); + const visualLayers = (layers as LayerDuration[]).filter((layer) => layer.type !== 'audio'); const layerFrameSources = await pMap(visualLayers, async (layer, layerIndex) => { const { type, ...params } = layer; if (verbose) console.log('createFrameSource', type, 'clip', clipIndex, 'layer', layerIndex); - let createFrameSourceFunc; + let createFrameSourceFunc: CreateFrameSource; if (fabricFrameSources[type]) { - createFrameSourceFunc = async (opts) => createFabricFrameSource(fabricFrameSources[type], opts); + createFrameSourceFunc = async (opts: CreateFrameSourceOptions) => createFabricFrameSource(fabricFrameSources[type], opts); } else { - createFrameSourceFunc = { - video: createVideoFrameSource, - gl: createGlFrameSource, - canvas: createCustomCanvasFrameSource, - }[type]; + createFrameSourceFunc = frameSources[type]; } 
assert(createFrameSourceFunc, `Invalid type ${type}`); - const frameSource = await createFrameSourceFunc({ ffmpegPath, ffprobePath, width, height, duration, channels, verbose, logTimes, enableFfmpegLog, framerateStr, params }); + const frameSource = await createFrameSourceFunc({ ffmpegPath, ffprobePath, width, height, duration: duration!, channels, verbose, logTimes, enableFfmpegLog, framerateStr, params }); return { layer, frameSource }; }, { concurrency: 1 }); - async function readNextFrame({ time }) { + async function readNextFrame({ time }: { time: number }) { const canvas = createFabricCanvas({ width, height }); // eslint-disable-next-line no-restricted-syntax for (const { frameSource, layer } of layerFrameSources) { // console.log({ start: layer.start, stop: layer.stop, layerDuration: layer.layerDuration, time }); - const offsetTime = time - layer.start; + const offsetTime = time - (layer?.start ?? 0); const offsetProgress = offsetTime / layer.layerDuration; // console.log({ offsetProgress }); const shouldDrawLayer = offsetProgress >= 0 && offsetProgress <= 1; @@ -102,7 +122,7 @@ export async function createFrameSource({ clip, clipIndex, width, height, channe } async function close() { - await pMap(layerFrameSources, async ({ frameSource }) => frameSource.close()); + await pMap(layerFrameSources, async ({ frameSource }) => frameSource.close?.()); } return { diff --git a/sources/glFrameSource.ts b/sources/glFrameSource.ts index e65ea613..dd5f0543 100644 --- a/sources/glFrameSource.ts +++ b/sources/glFrameSource.ts @@ -1,12 +1,12 @@ import GL from 'gl'; import createShader from 'gl-shader'; import { readFile } from 'node:fs/promises'; -import type { CreateFrameSourceOptions } from '../types.js'; +import type { CreateFrameSourceOptions, FrameSource } from '../types.js'; import type { GlLayer } from '../index.js'; // I have no idea what I'm doing but it works ¯\_(ツ)_/¯ -export default async function createGlFrameSource({ width, height, channels, params }: 
CreateFrameSourceOptions) { +export default async function createGlFrameSource({ width, height, channels, params }: CreateFrameSourceOptions): Promise { const gl = GL(width, height); const defaultVertexSrc = ` diff --git a/sources/videoFrameSource.ts b/sources/videoFrameSource.ts index 0a6ce9f3..40f2136f 100644 --- a/sources/videoFrameSource.ts +++ b/sources/videoFrameSource.ts @@ -125,7 +125,7 @@ export default async ({ width: canvasWidth, height: canvasHeight, channels, fram return null; } - async function readNextFrame(progress: number, canvas: fabric.Canvas, time: number) { + async function readNextFrame(progress: number, canvas: fabric.StaticCanvas, time: number) { const rgba = await new Promise | void>((resolve, reject) => { const frame = getNextFrame(); if (frame) { diff --git a/types.ts b/types.ts index 77179b29..62afdd73 100644 --- a/types.ts +++ b/types.ts @@ -1,7 +1,8 @@ // Types used internally and not exposed through any external interfaces. // TODO[ts]: Move these elsewhere -import { Layer, VideoLayer } from "./index.js"; +import type { Layer, OptionalPromise, VideoLayer } from "./index.js"; +import { StaticCanvas } from 'fabric/node'; export type Stream = { codec_type: string; @@ -22,6 +23,11 @@ export type Keyframe = { props: Record; }; +export interface FrameSource { + readNextFrame(progress: number, canvas: StaticCanvas, offsetTime: number): OptionalPromise; + close?(): OptionalPromise; +} + export type CreateFrameSourceOptions = { ffmpegPath: string; ffprobePath: string; @@ -33,9 +39,11 @@ export type CreateFrameSourceOptions = { logTimes: boolean, enableFfmpegLog: boolean, framerateStr: string, - params: T, + params: Omit, }; +export type CreateFrameSource = (options: CreateFrameSourceOptions) => Promise; + export type LayerDuration = T & { layerDuration: number; }; From f94036a7a59358af45926232f1e71ddfcbdfc9e5 Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Wed, 15 Jan 2025 19:24:31 -0500 Subject: [PATCH 14/23] Convert 
glTransitions.{js,ts} --- glTransitions.js => glTransitions.ts | 14 +++++++++++--- types/gl-buffer.d.ts | 3 +++ types/gl-texture2d.d.ts | 5 +++++ types/gl-transition.d.ts | 6 ++++++ types/gl-transitions.d.ts | 15 +++++++++++++++ 5 files changed, 40 insertions(+), 3 deletions(-) rename glTransitions.js => glTransitions.ts (87%) create mode 100644 types/gl-buffer.d.ts create mode 100644 types/gl-texture2d.d.ts create mode 100644 types/gl-transition.d.ts create mode 100644 types/gl-transitions.d.ts diff --git a/glTransitions.js b/glTransitions.ts similarity index 87% rename from glTransitions.js rename to glTransitions.ts index 1a6b0c88..fee7f739 100644 --- a/glTransitions.js +++ b/glTransitions.ts @@ -7,15 +7,23 @@ import createTexture from 'gl-texture2d'; const { default: createTransition } = glTransition; -export default ({ width, height, channels }) => { +type RunTransitionOptions = { + fromFrame: Buffer; + toFrame: Buffer; + progress: number; + transitionName: string; + transitionParams?: any; +} + +export default ({ width, height, channels }: { width: number, height: number, channels: number }) => { const gl = GL(width, height); if (!gl) { throw new Error('gl returned null, this probably means that some dependencies are not installed. 
See README.'); } - function runTransitionOnFrame({ fromFrame, toFrame, progress, transitionName, transitionParams = {} }) { - function convertFrame(buf) { + function runTransitionOnFrame({ fromFrame, toFrame, progress, transitionName, transitionParams = {} }: RunTransitionOptions) { + function convertFrame(buf: Buffer) { // @see https://github.com/stackgl/gl-texture2d/issues/16 return ndarray(buf, [width, height, channels], [channels, width * channels, 1]); } diff --git a/types/gl-buffer.d.ts b/types/gl-buffer.d.ts new file mode 100644 index 00000000..517a313d --- /dev/null +++ b/types/gl-buffer.d.ts @@ -0,0 +1,3 @@ +declare module 'gl-buffer' { + export default function createBuffer(gl: WebGLRenderingContext, data: number[], target: number, usage: number): WebGLBuffer; +} diff --git a/types/gl-texture2d.d.ts b/types/gl-texture2d.d.ts new file mode 100644 index 00000000..681a4646 --- /dev/null +++ b/types/gl-texture2d.d.ts @@ -0,0 +1,5 @@ +declare module 'gl-texture2d' { + function createTexture(gl: WebGLRenderingContext, data: any): WebGLTexture; + + export default createTexture; +} diff --git a/types/gl-transition.d.ts b/types/gl-transition.d.ts new file mode 100644 index 00000000..8de64848 --- /dev/null +++ b/types/gl-transition.d.ts @@ -0,0 +1,6 @@ +declare module 'gl-transition' { + import type GL from 'gl'; + function createTransition(gl: GL, transitionSource: any, options: { resizeMode?: string }): any; + + const _default: { default: typeof createTransition }; export = _default; +} diff --git a/types/gl-transitions.d.ts b/types/gl-transitions.d.ts new file mode 100644 index 00000000..e739d2a7 --- /dev/null +++ b/types/gl-transitions.d.ts @@ -0,0 +1,15 @@ +declare module 'gl-transitions' { + type GlTransition = { + name: string, + author: string, + license: string, + glsl: string, + defaultParams: { [key: string]: unknown }, + paramsTypes: { [key: string]: string }, + createdAt: string, + updatedAt: string, + } + + declare const _default: GlTransition[]; + export default _default; +} 
From 14e14f1cc463787d06a465d3b8998bea7109953a Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Wed, 15 Jan 2025 20:41:22 -0500 Subject: [PATCH 15/23] Move existing types --- audio.ts | 2 +- index.d.ts | 1113 -------------------------- parseConfig.ts | 3 +- sources/fabric.ts | 3 +- sources/fabric/fabricFrameSources.ts | 2 +- sources/frameSource.ts | 3 +- sources/glFrameSource.ts | 3 +- transitions.ts | 2 +- types.ts | 1098 ++++++++++++++++++++++++- util.ts | 2 +- 10 files changed, 1102 insertions(+), 1129 deletions(-) delete mode 100644 index.d.ts diff --git a/audio.ts b/audio.ts index f04d375d..c609d0f8 100644 --- a/audio.ts +++ b/audio.ts @@ -6,7 +6,7 @@ import { flatMap } from 'lodash-es'; import { getFfmpegCommonArgs, getCutFromArgs } from './ffmpeg.js'; import { readFileStreams } from './util.js'; -import type { AudioLayer, AudioNormalizationOptions, AudioTrack, Clip, Config, Layer, Transition, VideoLayer } from './index.js' +import type { AudioLayer, AudioNormalizationOptions, AudioTrack, Clip, Config, Layer, Transition, VideoLayer } from './types.js' export type AudioOptions = { ffmpegPath: string; diff --git a/index.d.ts b/index.d.ts deleted file mode 100644 index b8f9b407..00000000 --- a/index.d.ts +++ /dev/null @@ -1,1113 +0,0 @@ -import type * as Fabric from 'fabric/node'; -import type { Canvas } from "canvas" - -/** - * Edit and render videos. - * - * @param config - Config. - */ -declare function Editly(config: Editly.Config): Promise; - -declare namespace Editly { - - /** Little utility */ - type OptionalPromise = Promise | T; - - type OriginX = Fabric.TOriginX; - - type OriginY = Fabric.TOriginY; - - /** - * How to fit image to screen. Can be one of: - * - `'contain'` - All the video will be contained within the frame and letterboxed. - * - `'contain-blur'` - Like contain, but with a blurred copy as the letterbox. - * - `'cover'` - Video be cropped to cover the whole screen (aspect ratio preserved). 
- * - `'stretch'` - Video will be stretched to cover the whole screen (aspect ratio ignored). - * - * @default 'contain-blur' - * @see [Example 'image.json5']{@link https://github.com/mifi/editly/blob/master/examples/image.json5} - * @see [Example 'videos.json5']{@link https://github.com/mifi/editly/blob/master/examples/videos.json5} - */ - type ResizeMode = - 'contain' | - 'contain-blur' | - 'cover' | - 'stretch'; - - /** - * An object, where `{ x: 0, y: 0 }` is the upper left corner of the screen and `{ x: 1, y: 1 }` is the lower right corner. - */ - interface PositionObject { - - /** - * X-position relative to video width. - */ - x: number; - - /** - * Y-position relative to video height. - */ - y: number; - - /** - * X-anchor position of the object. - */ - originX?: OriginX; - - /** - * Y-anchor position of the object. - */ - originY?: OriginY; - - } - - /** - * Certain layers support the position parameter. - * - * @see [Position parameter]{@link https://github.com/mifi/editly#position-parameter} - * @see [Example 'position.json5']{@link https://github.com/mifi/editly/blob/master/examples/position.json5} - */ - type Position = - 'top' | - 'top-left' | - 'top-right' | - 'center' | - 'center-left' | - 'center-right' | - 'bottom' | - 'bottom-left' | - 'bottom-right' | - PositionObject; - - /** - * @see [Curve types]{@link https://trac.ffmpeg.org/wiki/AfadeCurves} - */ - type CurveType = - 'tri' | - 'qsin' | - 'hsin' | - 'esin' | - 'log' | - 'ipar' | - 'qua' | - 'cub' | - 'squ' | - 'cbr' | - 'par' | - 'exp' | - 'iqsin' | - 'ihsin' | - 'dese' | - 'desi' | - 'losi' | - 'nofade' | - string; - - /** - * @see [Transition types]{@link https://github.com/mifi/editly#transition-types} - */ - type TransitionType = - 'directional-left' | - 'directional-right' | - 'directional-up' | - 'directional-down' | - 'random' | - 'dummy' | - string; - - /** - * WARNING: Undocumented feature! 
- */ - type GLTextureLike = { - bind: (unit: number) => number, - shape: [number, number], - }; - - /** - * WARNING: Undocumented feature! - */ - interface TransitionParams { - - /** - * WARNING: Undocumented feature! - */ - [key: string]: number | boolean | GLTextureLike | number[]; - - } - - interface Transition { - - /** - * Transition duration. - * - * @default 0.5 - */ - duration?: number; - - /** - * Transition type. - * - * @default 'random' - * @see [Transition types]{@link https://github.com/mifi/editly#transition-types} - */ - name?: TransitionType; - - /** - * [Fade out curve]{@link https://trac.ffmpeg.org/wiki/AfadeCurves} in audio cross fades. - * - * @default 'tri' - */ - audioOutCurve?: CurveType; - - /** - * [Fade in curve]{@link https://trac.ffmpeg.org/wiki/AfadeCurves} in audio cross fades. - * - * @default 'tri' - */ - audioInCurve?: CurveType; - - /** - * WARNING: Undocumented feature! - */ - easing?: string | null; - - /** - * WARNING: Undocumented feature! - */ - params?: TransitionParams; - - } - - /** - * @see [Arbitrary audio tracks]{@link https://github.com/mifi/editly#arbitrary-audio-tracks} - */ - interface AudioTrack { - - /** - * File path for this track. - */ - path: string; - - /** - * Relative volume for this track. - * - * @default 1 - */ - mixVolume?: number | string; - - /** - * Time value to cut source file from (in seconds). - * - * @default 0 - */ - cutFrom?: number; - - /** - * Time value to cut source file to (in seconds). - */ - cutTo?: number; - - /** - * How many seconds into video to start this audio track. - * - * @default 0 - */ - start?: number; - - } - - /** - * @see [Ken Burns parameters]{@link https://github.com/mifi/editly#ken-burns-parameters} - */ - interface KenBurns { - - /** - * Zoom direction for Ken Burns effect. - * Use `null` to disable. - */ - zoomDirection?: 'in' | 'out' | 'left' | `right` | null; - - /** - * Zoom amount for Ken Burns effect. 
- * - * @default 0.1 - */ - zoomAmount?: number; - - } - - type LayerType = - 'video' | - 'audio' | - 'detached-audio' | - 'image' | - 'image-overlay' | - 'title' | - 'subtitle' | - 'title-background' | - 'news-title' | - 'slide-in-text' | - 'fill-color' | - 'pause' | - 'radial-gradient' | - 'linear-gradient' | - 'rainbow-colors' | - 'canvas' | - 'fabric' | - 'gl' | - 'editly-banner'; - - interface BaseLayer { - - /** - * Layer type. - */ - type: LayerType; - - /** - * What time into the clip should this layer start (in seconds). - */ - start?: number; - - /** - * What time into the clip should this layer stop (in seconds). - */ - stop?: number; - - } - - interface TextLayer extends BaseLayer { - /** - * Subtitle text to show. - */ - text: string; - - /** - * Text color. - * Defaults to '#ffffff'. - */ - textColor?: string; - - /** - * Set font (`.ttf`). - * Defaults to system font. - */ - fontPath?: string; - - /** - * WARNING: Undocumented feature! - * The font family to use. Must already be registered using `fontPath`. - * If `fontPath` is also provided, this will be ignored. - */ - fontFamily?: string; - } - - interface VideoPostProcessingFunctionArgs { - canvas: Fabric.StaticCanvas; - image: Fabric.FabricImage; - fabric: typeof Fabric, - progress: number; - time: number; - } - - /** - * For video layers, if parent `clip.duration` is specified, the video will be slowed/sped-up to match `clip.duration`. - * If `cutFrom`/`cutTo` is set, the resulting segment (`cutTo`-`cutFrom`) will be slowed/sped-up to fit `clip.duration`. - * If the layer has audio, it will be kept (and mixed with other audio layers if present). - */ - interface VideoLayer extends BaseLayer { - - /** - * Layer type. - */ - type: 'video'; - - /** - * Path to video file. - */ - path: string; - - /** - * How to fit video to screen. 
- * - * @default 'contain-blur' - * @see [Resize modes]{@link https://github.com/mifi/editly#resize-modes} - */ - resizeMode?: ResizeMode; - - /** - * Time value to cut from (in seconds). - * - * @default 0 - */ - cutFrom?: number; - - /** - * Time value to cut to (in seconds). - * Defaults to *end of video*. - */ - cutTo?: number; - - /** - * Width relative to screen width. - * Must be between 0 and 1. - * - * @default 1 - */ - width?: number; - - /** - * Height relative to screen height. - * Must be between 0 and 1. - * - * @default 1 - */ - height?: number; - - /** - * X-position relative to screen width. - * Must be between 0 and 1. - * - * @default 0 - */ - left?: number; - - /** - * Y-position relative to screen height. - * Must be between 0 and 1. - * - * @default 0 - */ - top?: number; - - /** - * X-anchor. - * - * @default 'left' - */ - originX?: OriginX; - - /** - * Y-anchor. - * - * @default 'top' - */ - originY?: OriginY; - - /** - * Relative volume when mixing this video's audio track with others. - * - * @default 1 - */ - mixVolume?: number | string; - - /** - * Post-processing function after calling rgbaToFabricImage but before adding it to StaticCanvas. - */ - fabricImagePostProcessing?: (data: VideoPostProcessingFunctionArgs) => Promise; - } - - /** - * Audio layers will be mixed together. - * If `cutFrom`/`cutTo` is set, the resulting segment (`cutTo`-`cutFrom`) will be slowed/sped-up to fit `clip.duration`. - * The slow down/speed-up operation is limited to values between `0.5x` and `100x`. - */ - interface AudioLayer extends BaseLayer { - - /** - * Layer type. - */ - type: 'audio'; - - /** - * Path to audio file. - */ - path: string; - - /** - * Time value to cut from (in seconds). - * - * @default 0 - */ - cutFrom?: number; - - /** - * Time value to cut to (in seconds). - * Defaults to `clip.duration`. - */ - cutTo?: number; - - /** - * Relative volume when mixing this audio track with others. 
- * - * @default 1 - */ - mixVolume?: number | string; - - } - - /** - * This is a special case of `audioTracks` that makes it easier to start the audio relative to clips start times, - * without having to calculate global start times. - * - * This layer has the exact same properties as [`audioTracks`]{@link https://github.com/mifi/editly#arbitrary-audio-tracks}, - * except `start` time is relative to the clip's start. - */ - interface DetachedAudioLayer extends BaseLayer, AudioTrack { - - /** - * Layer type. - */ - type: 'detached-audio'; - - } - - /** - * Full screen image. - */ - interface ImageLayer extends BaseLayer, KenBurns { - - /** - * Layer type. - */ - type: 'image'; - - /** - * Path to image file. - */ - path: string; - - /** - * How to fit image to screen. - */ - resizeMode?: ResizeMode; - - /** - * WARNING: Undocumented feature! - */ - duration?: number; - - } - - /** - * Image overlay with a custom position and size on the screen. - */ - interface ImageOverlayLayer extends BaseLayer, KenBurns { - - /** - * Layer type. - */ - type: 'image-overlay'; - - /** - * Path to image file. - */ - path: string; - - /** - * Position. - */ - position?: Position; - - /** - * Width (from 0 to 1) where 1 is screen width. - */ - width?: number; - - /** - * Height (from 0 to 1) where 1 is screen height. - */ - height?: number; - - } - - interface TitleLayer extends TextLayer, KenBurns { - - /** - * Layer type. - */ - type: 'title'; - - /** - * Position. - */ - position?: Position; - - } - - interface SubtitleLayer extends TextLayer { - - /** - * Layer type. - */ - type: 'subtitle'; - - /** - * WARNING: Undocumented feature! - */ - backgroundColor?: string; - - delay: number; - speed: number; - } - - /** - * Title with background. - */ - interface TitleBackgroundLayer extends TextLayer { - - /** - * Layer type. - */ - type: 'title-background'; - - /** - * Background layer. - * Defaults to random background. 
- */ - background?: BackgroundLayer; - - } - - interface NewsTitleLayer extends TextLayer { - - /** - * Layer type. - */ - type: 'news-title'; - - /** - * Background color. - * Defaults to '#d02a42'. - */ - backgroundColor?: string; - - /** - * Position. - */ - position?: Position; - - delay: number; - speed: number; - } - - interface SlideInTextLayer extends TextLayer { - - /** - * Layer type. - */ - type: 'slide-in-text'; - - /** - * Font size. - */ - fontSize?: number; - - /** - * Char spacing. - */ - charSpacing?: number; - - /** - * Color. - * @deprecated use `fontColor` instead. - */ - color?: string; - - /** - * Position. - */ - position?: Position; - - } - - interface FillColorLayer extends BaseLayer { - - /** - * Layer type. - */ - type: 'fill-color'; - - /** - * Color to fill background. - * Defaults to random color. - */ - color?: string; - - } - - interface PauseLayer extends BaseLayer { - - /** - * Layer type. - */ - type: 'pause'; - - /** - * Color to fill background. - * Defaults to random color. - */ - color?: string; - - } - - interface RadialGradientLayer extends BaseLayer { - - /** - * Layer type. - */ - type: 'radial-gradient'; - - /** - * Array of two colors. - * Defaults to random colors. - */ - colors?: [string, string]; - - } - - interface LinearGradientLayer extends BaseLayer { - - /** - * Layer type. - */ - type: 'linear-gradient'; - - /** - * Array of two colors. - * Defaults to random colors. - */ - colors?: [string, string]; - - } - - interface RainbowColorsLayer extends BaseLayer { - - /** - * Layer type. 
- */ - type: 'rainbow-colors'; - - } - - interface CustomFabricFunctionCallbacks { - onRender: (progress: number, canvas: Fabric.StaticCanvas) => OptionalPromise; - onClose?: () => OptionalPromise; - } - - interface CustomCanvasFunctionArgs { - width: number; - height: number; - canvas: Canvas; - } - - interface CustomCanvasFunctionCallbacks { - onRender: (progress: number) => OptionalPromise; - onClose?: () => OptionalPromise; - } - - type CustomCanvasFunction = (args: CustomCanvasFunctionArgs) => OptionalPromise; - - interface CanvasLayer extends BaseLayer { - - /** - * Layer type. - */ - type: 'canvas'; - - /** - * Custom JavaScript function. - */ - func: CustomCanvasFunction; - - } - - interface CustomFabricFunctionArgs { - width: number; - height: number; - fabric: typeof Fabric; - params: any; - } - - type CustomFabricFunction = (args: CustomFabricFunctionArgs) => OptionalPromise; - - interface FabricLayer extends BaseLayer { - - /** - * Layer type. - */ - type: 'fabric'; - - /** - * Custom JavaScript function. - */ - func: CustomFabricFunction; - - } - - interface GlLayer extends BaseLayer { - - /** - * Layer type. - */ - type: 'gl'; - - /** - * Fragment path (`.frag` file) - */ - fragmentPath: string; - - /** - * Vertex path (`.vert` file). - */ - vertexPath?: string; - - /** - * WARNING: Undocumented feature! - */ - speed?: number; - - vertexSrc?: string; - fragmentSrc?: string; - } - - /** - * WARNING: Undocumented feature! - */ - interface EditlyBannerLayer extends BaseLayer { - - /** - * Layer type. - */ - type: 'editly-banner'; - - /** - * Set font (`.ttf`). - * Defaults to system font. 
- */ - fontPath?: string; - } - - /** - * @see [Examples]{@link https://github.com/mifi/editly/tree/master/examples} - * @see [Example 'commonFeatures.json5']{@link https://github.com/mifi/editly/blob/master/examples/commonFeatures.json5} - */ - type Layer = - VideoLayer | - AudioLayer | - DetachedAudioLayer | - ImageLayer | - ImageOverlayLayer | - TitleLayer | - SubtitleLayer | - TitleBackgroundLayer | - NewsTitleLayer | - SlideInTextLayer | - FillColorLayer | - PauseLayer | - RadialGradientLayer | - LinearGradientLayer | - RainbowColorsLayer | - CanvasLayer | - FabricLayer | - GlLayer | - EditlyBannerLayer; - - /** - * Special layers that can be used f.e. in the 'title-background' layer. - */ - type BackgroundLayer = - RadialGradientLayer | - LinearGradientLayer | - FillColorLayer; - - interface Clip { - - /** - * List of layers within the current clip that will be overlaid in their natural order (final layer on top). - */ - layers: Layer[] | Layer; - - /** - * Clip duration. - * If unset, the clip duration will be that of the first video layer. - * Defaults to `defaults.duration`. - */ - duration?: number; - - /** - * Specify transition at the end of this clip. - * Defaults to `defaults.transition`. - * Set to `null` to disable transitions. - */ - transition?: Transition | null; - - } - - interface DefaultLayerOptions { - - /** - * Set default font (`.ttf`). - * Defaults to system font. - */ - fontPath?: string; - - /** - * Set any layer parameter that all layers will inherit. - */ - [key: string]: any; - - } - - type DefaultLayerTypeOptions = { - - /** - * Set any layer parameter that all layers of the same type (specified in key) will inherit. - */ - [P in LayerType]?: Partial, 'type'>>; - - } - - interface DefaultTransitionOptions extends Transition { - } - - interface DefaultOptions { - - /** - * Set default clip duration for clips that don't have an own duration (in seconds). 
- * - * @default 4 - */ - duration?: number; - - /** - * An object describing the default layer options. - */ - layer?: DefaultLayerOptions; - - /** - * Defaults for each individual layer types. - */ - layerType?: DefaultLayerTypeOptions; - - /** - * An object describing the default transition. - * Set to `null` to disable transitions. - */ - transition?: DefaultTransitionOptions | null; - - } - - /** - * You can enable audio normalization of the final output audio. - * This is useful if you want to achieve Audio Ducking (e.g. automatically lower volume of all other tracks when voice-over speaks). - * - * @see [Dynaudnorm]{@link https://ffmpeg.org/ffmpeg-filters.html#dynaudnorm} - * @see [Example of audio ducking]{@link https://github.com/mifi/editly/blob/master/examples/audio2.json5} - */ - interface AudioNormalizationOptions { - - /** - * Enable audio normalization? - * - * @default false - * @see [Audio normalization]{@link https://github.com/mifi/editly#audio-normalization} - */ - enable?: boolean; - - /** - * Audio normalization gauss size. - * - * @default 5 - * @see [Audio normalization]{@link https://github.com/mifi/editly#audio-normalization} - */ - gaussSize?: number; - - /** - * Audio normalization max gain. - * - * @default 30 - * @see [Audio normalization]{@link https://github.com/mifi/editly#audio-normalization} - */ - maxGain?: number; - - } - - interface Config { - - /** - * Output path (`.mp4` or `.mkv`, can also be a `.gif`). - */ - outPath: string; - - /** - * List of clip objects that will be played in sequence. - * Each clip can have one or more layers. - * - * @default [] - */ - clips: Clip[]; - - /** - * Width which all media will be converted to. - * - * @default 640 - */ - width?: number; - - /** - * Height which all media will be converted to. - * Decides height based on `width` and aspect ratio of the first video by default. - */ - height?: number; - - /** - * FPS which all videos will be converted to. 
- * Defaults to first video's FPS or `25`. - */ - fps?: number; - - /** - * Specify custom output codec/format arguments for ffmpeg. - * Automatically adds codec options (normally `h264`) by default. - * - * @see [Example]{@link https://github.com/mifi/editly/blob/master/examples/customOutputArgs.json5} - */ - customOutputArgs?: string[]; - - /** - * Allow remote URLs as paths. - * - * @default false - */ - allowRemoteRequests?: boolean; - - /** - * Fast mode (low resolution and FPS, useful for getting a quick preview ⏩). - * - * @default false - */ - fast?: boolean; - - /** - * An object describing default options for clips and layers. - */ - defaults?: DefaultOptions; - - /** - * List of arbitrary audio tracks. - * - * @default [] - * @see [Audio tracks]{@link https://github.com/mifi/editly#arbitrary-audio-tracks} - */ - audioTracks?: AudioTrack[]; - - /** - * Set an audio track for the whole video.. - * - * @see [Audio tracks]{@link https://github.com/mifi/editly#arbitrary-audio-tracks} - */ - audioFilePath?: string; - - /** - * Background Volume - * - * @see [Audio tracks]{@link https://github.com/mifi/editly#arbitrary-audio-tracks} - */ - backgroundAudioVolume?: string | number; - - /** - * Loop the audio track if it is shorter than video? - * - * @default false - */ - loopAudio?: boolean; - - /** - * Keep source audio from `clips`? - * - * @default false - */ - keepSourceAudio?: boolean; - - /** - * Volume of audio from `clips` relative to `audioTracks`. - * - * @default 1 - * @see [Audio tracks]{@link https://github.com/mifi/editly#arbitrary-audio-tracks} - */ - clipsAudioVolume?: number | string; - - /** - * Adjust output [volume]{@link http://ffmpeg.org/ffmpeg-filters.html#volume} (final stage). - * - * @default 1 - * @see [Example]{@link https://github.com/mifi/editly/blob/master/examples/audio-volume.json5} - * @example - * 0.5 - * @example - * '10db' - */ - outputVolume?: number | string; - - /** - * Audio normalization. 
- */ - audioNorm?: AudioNormalizationOptions; - - /** - * WARNING: Undocumented feature! - */ - ffmpegPath?: string; - - /** - * WARNING: Undocumented feature! - */ - ffprobePath?: string; - - /** - * WARNING: Undocumented feature! - */ - enableFfmpegLog?: boolean; - - /** - * WARNING: Undocumented feature! - */ - verbose?: boolean; - - /** - * WARNING: Undocumented feature! - */ - logTimes?: boolean; - - /** - * WARNING: Undocumented feature! - */ - keepTmp?: boolean; - - } - - interface RenderSingleFrameConfig extends Config { - - /** - * Output path (`.mp4` or `.mkv`, can also be a `.gif`). - */ - outPath: string; - - /** - * Timestamp to render. - */ - time?: number; - - } - - /** - * WARNING: Undocumented feature! - * Pure function to get a frame at a certain time. - * - * @param config - Config. - */ - function renderSingleFrame(config: RenderSingleFrameConfig): Promise; -} - -export = Editly; diff --git a/parseConfig.ts b/parseConfig.ts index b74c7fc8..4d3c8d03 100644 --- a/parseConfig.ts +++ b/parseConfig.ts @@ -12,8 +12,7 @@ import { } from './util.js'; import { registerFont } from 'canvas'; import { calcTransition } from './transitions.js'; -import type { AudioTrack, CanvasLayer, Config, EditlyBannerLayer, FabricLayer, GlLayer, ImageLayer, ImageOverlayLayer, Layer, LinearGradientLayer, NewsTitleLayer, SlideInTextLayer, SubtitleLayer, TitleBackgroundLayer, TitleLayer } from './index.js'; -import type { ProcessedVideoLayer, LayerDuration } from "./types.js" +import type { AudioTrack, CanvasLayer, Config, EditlyBannerLayer, FabricLayer, GlLayer, ImageLayer, ImageOverlayLayer, Layer, LinearGradientLayer, NewsTitleLayer, SlideInTextLayer, SubtitleLayer, TitleBackgroundLayer, TitleLayer, ProcessedVideoLayer, LayerDuration } from './types.js'; const dirname = fileURLToPath(new URL('.', import.meta.url)); diff --git a/sources/fabric.ts b/sources/fabric.ts index 0c47b9bd..6f6e11ba 100644 --- a/sources/fabric.ts +++ b/sources/fabric.ts @@ -1,8 +1,7 @@ import * as 
fabric from 'fabric/node'; import { type CanvasRenderingContext2D, createCanvas, ImageData } from 'canvas'; import { boxBlurImage } from '../BoxBlur.js'; -import type { CreateFrameSourceOptions, FrameSource } from '../types.js'; -import type { CanvasLayer, CustomFabricFunctionCallbacks, Layer } from '../index.js'; +import type { CreateFrameSourceOptions, FrameSource, CanvasLayer, CustomFabricFunctionCallbacks, Layer } from '../types.js'; export type FabricFrameSourceOptions = CreateFrameSourceOptions & { fabric: typeof fabric }; export type FabricFrameSourceCallback = (options: FabricFrameSourceOptions) => Promise; diff --git a/sources/fabric/fabricFrameSources.ts b/sources/fabric/fabricFrameSources.ts index 66e7719b..0972b3c9 100644 --- a/sources/fabric/fabricFrameSources.ts +++ b/sources/fabric/fabricFrameSources.ts @@ -5,7 +5,7 @@ import { getRandomGradient, getRandomColors } from '../../colors.js'; import { easeOutExpo, easeInOutCubic } from '../../transitions.js'; import { getPositionProps, getFrameByKeyFrames, isUrl } from '../../util.js'; import { blurImage, type FabricFrameSourceOptions } from '../fabric.js'; -import type { FabricLayer, FillColorLayer, ImageLayer, ImageOverlayLayer, KenBurns, LinearGradientLayer, NewsTitleLayer, RadialGradientLayer, SlideInTextLayer, SubtitleLayer, TitleLayer } from '../../index.js'; +import type { FabricLayer, FillColorLayer, ImageLayer, ImageOverlayLayer, KenBurns, LinearGradientLayer, NewsTitleLayer, RadialGradientLayer, SlideInTextLayer, SubtitleLayer, TitleLayer } from '../../types.js'; // http://fabricjs.com/kitchensink diff --git a/sources/frameSource.ts b/sources/frameSource.ts index ae01fa91..47fd36dd 100644 --- a/sources/frameSource.ts +++ b/sources/frameSource.ts @@ -23,8 +23,7 @@ import { } from './fabric/fabricFrameSources.js'; import createVideoFrameSource from './videoFrameSource.js'; import createGlFrameSource from './glFrameSource.js'; -import type { CreateFrameSource, CreateFrameSourceOptions, 
LayerDuration } from '../types.js'; -import type { Clip, Layer } from '../index.js'; +import type { CreateFrameSource, CreateFrameSourceOptions, LayerDuration, Clip, Layer } from '../types.js'; const fabricFrameSources: Record> = { fabric: customFabricFrameSource, diff --git a/sources/glFrameSource.ts b/sources/glFrameSource.ts index dd5f0543..07fa4a03 100644 --- a/sources/glFrameSource.ts +++ b/sources/glFrameSource.ts @@ -1,8 +1,7 @@ import GL from 'gl'; import createShader from 'gl-shader'; import { readFile } from 'node:fs/promises'; -import type { CreateFrameSourceOptions, FrameSource } from '../types.js'; -import type { GlLayer } from '../index.js'; +import type { GlLayer, CreateFrameSourceOptions, FrameSource } from '../types.js'; // I have no idea what I'm doing but it works ¯\_(ツ)_/¯ diff --git a/transitions.ts b/transitions.ts index 3f8b005c..d71c59b2 100644 --- a/transitions.ts +++ b/transitions.ts @@ -1,5 +1,5 @@ import assert from 'assert'; -import type { Transition } from './index.js'; +import type { Transition } from './types.js'; const randomTransitionsSet = ['fade', 'fadegrayscale', 'directionalwarp', 'crosswarp', 'dreamyzoom', 'burn', 'crosszoom', 'simplezoom', 'linearblur', 'directional-left', 'directional-right', 'directional-up', 'directional-down']; diff --git a/types.ts b/types.ts index 62afdd73..6291aba1 100644 --- a/types.ts +++ b/types.ts @@ -1,8 +1,1098 @@ -// Types used internally and not exposed through any external interfaces. // TODO[ts]: Move these elsewhere -import type { Layer, OptionalPromise, VideoLayer } from "./index.js"; -import { StaticCanvas } from 'fabric/node'; +import type * as Fabric from 'fabric/node'; +import type { Canvas } from "canvas" + +/** Little utility */ +export type OptionalPromise = Promise | T; + +export type OriginX = Fabric.TOriginX; + +export type OriginY = Fabric.TOriginY; + +/** + * How to fit image to screen. 
Can be one of: + * - `'contain'` - All the video will be contained within the frame and letterboxed. + * - `'contain-blur'` - Like contain, but with a blurred copy as the letterbox. + * - `'cover'` - Video be cropped to cover the whole screen (aspect ratio preserved). + * - `'stretch'` - Video will be stretched to cover the whole screen (aspect ratio ignored). + * + * @default 'contain-blur' + * @see [Example 'image.json5']{@link https://github.com/mifi/editly/blob/master/examples/image.json5} + * @see [Example 'videos.json5']{@link https://github.com/mifi/editly/blob/master/examples/videos.json5} + */ +export type ResizeMode = + 'contain' | + 'contain-blur' | + 'cover' | + 'stretch'; + +/** + * An object, where `{ x: 0, y: 0 }` is the upper left corner of the screen and `{ x: 1, y: 1 }` is the lower right corner. + */ +export interface PositionObject { + + /** + * X-position relative to video width. + */ + x: number; + + /** + * Y-position relative to video height. + */ + y: number; + + /** + * X-anchor position of the object. + */ + originX?: OriginX; + + /** + * Y-anchor position of the object. + */ + originY?: OriginY; + +} + +/** + * Certain layers support the position parameter. 
+ * + * @see [Position parameter]{@link https://github.com/mifi/editly#position-parameter} + * @see [Example 'position.json5']{@link https://github.com/mifi/editly/blob/master/examples/position.json5} + */ +export type Position = + 'top' | + 'top-left' | + 'top-right' | + 'center' | + 'center-left' | + 'center-right' | + 'bottom' | + 'bottom-left' | + 'bottom-right' | + PositionObject; + +/** + * @see [Curve types]{@link https://trac.ffmpeg.org/wiki/AfadeCurves} + */ +export type CurveType = + 'tri' | + 'qsin' | + 'hsin' | + 'esin' | + 'log' | + 'ipar' | + 'qua' | + 'cub' | + 'squ' | + 'cbr' | + 'par' | + 'exp' | + 'iqsin' | + 'ihsin' | + 'dese' | + 'desi' | + 'losi' | + 'nofade' | + string; + +/** + * @see [Transition types]{@link https://github.com/mifi/editly#transition-types} + */ +export type TransitionType = + 'directional-left' | + 'directional-right' | + 'directional-up' | + 'directional-down' | + 'random' | + 'dummy' | + string; + +/** + * WARNING: Undocumented feature! + */ +export type GLTextureLike = { + bind: (unit: number) => number, + shape: [number, number], +}; + +/** + * WARNING: Undocumented feature! + */ +export interface TransitionParams { + + /** + * WARNING: Undocumented feature! + */ + [key: string]: number | boolean | GLTextureLike | number[]; + +} + +export interface Transition { + /** + * Transition duration. + * + * @default 0.5 + */ + duration?: number; + + /** + * Transition type. + * + * @default 'random' + * @see [Transition types]{@link https://github.com/mifi/editly#transition-types} + */ + name?: TransitionType; + + /** + * [Fade out curve]{@link https://trac.ffmpeg.org/wiki/AfadeCurves} in audio cross fades. + * + * @default 'tri' + */ + audioOutCurve?: CurveType; + + /** + * [Fade in curve]{@link https://trac.ffmpeg.org/wiki/AfadeCurves} in audio cross fades. + * + * @default 'tri' + */ + audioInCurve?: CurveType; + + /** + * WARNING: Undocumented feature! + */ + easing?: string | null; + + /** + * WARNING: Undocumented feature! 
+ */ + params?: TransitionParams; + +} + +/** + * @see [Arbitrary audio tracks]{@link https://github.com/mifi/editly#arbitrary-audio-tracks} + */ +export interface AudioTrack { + + /** + * File path for this track. + */ + path: string; + + /** + * Relative volume for this track. + * + * @default 1 + */ + mixVolume?: number | string; + + /** + * Time value to cut source file from (in seconds). + * + * @default 0 + */ + cutFrom?: number; + + /** + * Time value to cut source file to (in seconds). + */ + cutTo?: number; + + /** + * How many seconds into video to start this audio track. + * + * @default 0 + */ + start?: number; + +} + +/** + * @see [Ken Burns parameters]{@link https://github.com/mifi/editly#ken-burns-parameters} + */ +export interface KenBurns { + + /** + * Zoom direction for Ken Burns effect. + * Use `null` to disable. + */ + zoomDirection?: 'in' | 'out' | 'left' | `right` | null; + + /** + * Zoom amount for Ken Burns effect. + * + * @default 0.1 + */ + zoomAmount?: number; + +} + +export type LayerType = + 'video' | + 'audio' | + 'detached-audio' | + 'image' | + 'image-overlay' | + 'title' | + 'subtitle' | + 'title-background' | + 'news-title' | + 'slide-in-text' | + 'fill-color' | + 'pause' | + 'radial-gradient' | + 'linear-gradient' | + 'rainbow-colors' | + 'canvas' | + 'fabric' | + 'gl' | + 'editly-banner'; + +export interface BaseLayer { + + /** + * Layer type. + */ + type: LayerType; + + /** + * What time into the clip should this layer start (in seconds). + */ + start?: number; + + /** + * What time into the clip should this layer stop (in seconds). + */ + stop?: number; + +} + +export interface TextLayer extends BaseLayer { + /** + * Subtitle text to show. + */ + text: string; + + /** + * Text color. + * Defaults to '#ffffff'. + */ + textColor?: string; + + /** + * Set font (`.ttf`). + * Defaults to system font. + */ + fontPath?: string; + + /** + * WARNING: Undocumented feature! + * The font family to use. 
Must already be registered using `fontPath`. + * If `fontPath` is also provided, this will be ignored. + */ + fontFamily?: string; +} + +export interface VideoPostProcessingFunctionArgs { + canvas: Fabric.StaticCanvas; + image: Fabric.FabricImage; + fabric: typeof Fabric, + progress: number; + time: number; +} + +/** + * For video layers, if parent `clip.duration` is specified, the video will be slowed/sped-up to match `clip.duration`. + * If `cutFrom`/`cutTo` is set, the resulting segment (`cutTo`-`cutFrom`) will be slowed/sped-up to fit `clip.duration`. + * If the layer has audio, it will be kept (and mixed with other audio layers if present). + */ +export interface VideoLayer extends BaseLayer { + + /** + * Layer type. + */ + type: 'video'; + + /** + * Path to video file. + */ + path: string; + + /** + * How to fit video to screen. + * + * @default 'contain-blur' + * @see [Resize modes]{@link https://github.com/mifi/editly#resize-modes} + */ + resizeMode?: ResizeMode; + + /** + * Time value to cut from (in seconds). + * + * @default 0 + */ + cutFrom?: number; + + /** + * Time value to cut to (in seconds). + * Defaults to *end of video*. + */ + cutTo?: number; + + /** + * Width relative to screen width. + * Must be between 0 and 1. + * + * @default 1 + */ + width?: number; + + /** + * Height relative to screen height. + * Must be between 0 and 1. + * + * @default 1 + */ + height?: number; + + /** + * X-position relative to screen width. + * Must be between 0 and 1. + * + * @default 0 + */ + left?: number; + + /** + * Y-position relative to screen height. + * Must be between 0 and 1. + * + * @default 0 + */ + top?: number; + + /** + * X-anchor. + * + * @default 'left' + */ + originX?: OriginX; + + /** + * Y-anchor. + * + * @default 'top' + */ + originY?: OriginY; + + /** + * Relative volume when mixing this video's audio track with others. 
+ * + * @default 1 + */ + mixVolume?: number | string; + + /** + * Post-processing function after calling rgbaToFabricImage but before adding it to StaticCanvas. + */ + fabricImagePostProcessing?: (data: VideoPostProcessingFunctionArgs) => Promise; +} + +/** + * Audio layers will be mixed together. + * If `cutFrom`/`cutTo` is set, the resulting segment (`cutTo`-`cutFrom`) will be slowed/sped-up to fit `clip.duration`. + * The slow down/speed-up operation is limited to values between `0.5x` and `100x`. + */ +export interface AudioLayer extends BaseLayer { + + /** + * Layer type. + */ + type: 'audio'; + + /** + * Path to audio file. + */ + path: string; + + /** + * Time value to cut from (in seconds). + * + * @default 0 + */ + cutFrom?: number; + + /** + * Time value to cut to (in seconds). + * Defaults to `clip.duration`. + */ + cutTo?: number; + + /** + * Relative volume when mixing this audio track with others. + * + * @default 1 + */ + mixVolume?: number | string; + +} + +/** + * This is a special case of `audioTracks` that makes it easier to start the audio relative to clips start times, + * without having to calculate global start times. + * + * This layer has the exact same properties as [`audioTracks`]{@link https://github.com/mifi/editly#arbitrary-audio-tracks}, + * except `start` time is relative to the clip's start. + */ +export interface DetachedAudioLayer extends BaseLayer, AudioTrack { + + /** + * Layer type. + */ + type: 'detached-audio'; + +} + +/** + * Full screen image. + */ +export interface ImageLayer extends BaseLayer, KenBurns { + + /** + * Layer type. + */ + type: 'image'; + + /** + * Path to image file. + */ + path: string; + + /** + * How to fit image to screen. + */ + resizeMode?: ResizeMode; + + /** + * WARNING: Undocumented feature! + */ + duration?: number; + +} + +/** + * Image overlay with a custom position and size on the screen. + */ +export interface ImageOverlayLayer extends BaseLayer, KenBurns { + + /** + * Layer type. 
+ */ + type: 'image-overlay'; + + /** + * Path to image file. + */ + path: string; + + /** + * Position. + */ + position?: Position; + + /** + * Width (from 0 to 1) where 1 is screen width. + */ + width?: number; + + /** + * Height (from 0 to 1) where 1 is screen height. + */ + height?: number; + +} + +export interface TitleLayer extends TextLayer, KenBurns { + + /** + * Layer type. + */ + type: 'title'; + + /** + * Position. + */ + position?: Position; + +} + +export interface SubtitleLayer extends TextLayer { + + /** + * Layer type. + */ + type: 'subtitle'; + + /** + * WARNING: Undocumented feature! + */ + backgroundColor?: string; + + delay: number; + speed: number; +} + +/** + * Title with background. + */ +export interface TitleBackgroundLayer extends TextLayer { + + /** + * Layer type. + */ + type: 'title-background'; + + /** + * Background layer. + * Defaults to random background. + */ + background?: BackgroundLayer; + +} + +export interface NewsTitleLayer extends TextLayer { + + /** + * Layer type. + */ + type: 'news-title'; + + /** + * Background color. + * Defaults to '#d02a42'. + */ + backgroundColor?: string; + + /** + * Position. + */ + position?: Position; + + delay: number; + speed: number; +} + +export interface SlideInTextLayer extends TextLayer { + + /** + * Layer type. + */ + type: 'slide-in-text'; + + /** + * Font size. + */ + fontSize?: number; + + /** + * Char spacing. + */ + charSpacing?: number; + + /** + * Color. + * @deprecated use `fontColor` instead. + */ + color?: string; + + /** + * Position. + */ + position?: Position; + +} + +export interface FillColorLayer extends BaseLayer { + + /** + * Layer type. + */ + type: 'fill-color'; + + /** + * Color to fill background. + * Defaults to random color. + */ + color?: string; + +} + +export interface PauseLayer extends BaseLayer { + + /** + * Layer type. + */ + type: 'pause'; + + /** + * Color to fill background. + * Defaults to random color. 
+ */ + color?: string; + +} + +export interface RadialGradientLayer extends BaseLayer { + + /** + * Layer type. + */ + type: 'radial-gradient'; + + /** + * Array of two colors. + * Defaults to random colors. + */ + colors?: [string, string]; + +} + +export interface LinearGradientLayer extends BaseLayer { + + /** + * Layer type. + */ + type: 'linear-gradient'; + + /** + * Array of two colors. + * Defaults to random colors. + */ + colors?: [string, string]; + +} + +export interface RainbowColorsLayer extends BaseLayer { + + /** + * Layer type. + */ + type: 'rainbow-colors'; + +} + +export interface CustomFabricFunctionCallbacks { + onRender: (progress: number, canvas: Fabric.StaticCanvas) => OptionalPromise; + onClose?: () => OptionalPromise; +} + +export interface CustomCanvasFunctionArgs { + width: number; + height: number; + canvas: Canvas; +} + +export interface CustomCanvasFunctionCallbacks { + onRender: (progress: number) => OptionalPromise; + onClose?: () => OptionalPromise; +} + +export type CustomCanvasFunction = (args: CustomCanvasFunctionArgs) => OptionalPromise; + +export interface CanvasLayer extends BaseLayer { + + /** + * Layer type. + */ + type: 'canvas'; + + /** + * Custom JavaScript function. + */ + func: CustomCanvasFunction; + +} + +export interface CustomFabricFunctionArgs { + width: number; + height: number; + fabric: typeof Fabric; + params: any; +} + +export type CustomFabricFunction = (args: CustomFabricFunctionArgs) => OptionalPromise; + +export interface FabricLayer extends BaseLayer { + + /** + * Layer type. + */ + type: 'fabric'; + + /** + * Custom JavaScript function. + */ + func: CustomFabricFunction; + +} + +export interface GlLayer extends BaseLayer { + + /** + * Layer type. + */ + type: 'gl'; + + /** + * Fragment path (`.frag` file) + */ + fragmentPath: string; + + /** + * Vertex path (`.vert` file). + */ + vertexPath?: string; + + /** + * WARNING: Undocumented feature! 
+ */ + speed?: number; + + vertexSrc?: string; + fragmentSrc?: string; +} + +/** + * WARNING: Undocumented feature! + */ +export interface EditlyBannerLayer extends BaseLayer { + + /** + * Layer type. + */ + type: 'editly-banner'; + + /** + * Set font (`.ttf`). + * Defaults to system font. + */ + fontPath?: string; +} + +/** + * @see [Examples]{@link https://github.com/mifi/editly/tree/master/examples} + * @see [Example 'commonFeatures.json5']{@link https://github.com/mifi/editly/blob/master/examples/commonFeatures.json5} + */ +export type Layer = + VideoLayer | + AudioLayer | + DetachedAudioLayer | + ImageLayer | + ImageOverlayLayer | + TitleLayer | + SubtitleLayer | + TitleBackgroundLayer | + NewsTitleLayer | + SlideInTextLayer | + FillColorLayer | + PauseLayer | + RadialGradientLayer | + LinearGradientLayer | + RainbowColorsLayer | + CanvasLayer | + FabricLayer | + GlLayer | + EditlyBannerLayer; + +/** + * Special layers that can be used f.e. in the 'title-background' layer. + */ +export type BackgroundLayer = + RadialGradientLayer | + LinearGradientLayer | + FillColorLayer; + +export interface Clip { + + /** + * List of layers within the current clip that will be overlaid in their natural order (final layer on top). + */ + layers: Layer[] | Layer; + + /** + * Clip duration. + * If unset, the clip duration will be that of the first video layer. + * Defaults to `defaults.duration`. + */ + duration?: number; + + /** + * Specify transition at the end of this clip. + * Defaults to `defaults.transition`. + * Set to `null` to disable transitions. + */ + transition?: Transition | null; + +} + +export interface DefaultLayerOptions { + + /** + * Set default font (`.ttf`). + * Defaults to system font. + */ + fontPath?: string; + + /** + * Set any layer parameter that all layers will inherit. + */ + [key: string]: any; + +} + +export type DefaultLayerTypeOptions = { + + /** + * Set any layer parameter that all layers of the same type (specified in key) will inherit. 
+ */ + [P in LayerType]?: Partial, 'type'>>; + +} + +export interface DefaultTransitionOptions extends Transition { +} + +export interface DefaultOptions { + + /** + * Set default clip duration for clips that don't have an own duration (in seconds). + * + * @default 4 + */ + duration?: number; + + /** + * An object describing the default layer options. + */ + layer?: DefaultLayerOptions; + + /** + * Defaults for each individual layer types. + */ + layerType?: DefaultLayerTypeOptions; + + /** + * An object describing the default transition. + * Set to `null` to disable transitions. + */ + transition?: DefaultTransitionOptions | null; + +} + +/** + * You can enable audio normalization of the final output audio. + * This is useful if you want to achieve Audio Ducking (e.g. automatically lower volume of all other tracks when voice-over speaks). + * + * @see [Dynaudnorm]{@link https://ffmpeg.org/ffmpeg-filters.html#dynaudnorm} + * @see [Example of audio ducking]{@link https://github.com/mifi/editly/blob/master/examples/audio2.json5} + */ +export interface AudioNormalizationOptions { + + /** + * Enable audio normalization? + * + * @default false + * @see [Audio normalization]{@link https://github.com/mifi/editly#audio-normalization} + */ + enable?: boolean; + + /** + * Audio normalization gauss size. + * + * @default 5 + * @see [Audio normalization]{@link https://github.com/mifi/editly#audio-normalization} + */ + gaussSize?: number; + + /** + * Audio normalization max gain. + * + * @default 30 + * @see [Audio normalization]{@link https://github.com/mifi/editly#audio-normalization} + */ + maxGain?: number; + +} + +export interface Config { + /** + * Output path (`.mp4` or `.mkv`, can also be a `.gif`). + */ + outPath: string; + + /** + * List of clip objects that will be played in sequence. + * Each clip can have one or more layers. + * + * @default [] + */ + clips: Clip[]; + + /** + * Width which all media will be converted to. 
+ * + * @default 640 + */ + width?: number; + + /** + * Height which all media will be converted to. + * Decides height based on `width` and aspect ratio of the first video by default. + */ + height?: number; + + /** + * FPS which all videos will be converted to. + * Defaults to first video's FPS or `25`. + */ + fps?: number; + + /** + * Specify custom output codec/format arguments for ffmpeg. + * Automatically adds codec options (normally `h264`) by default. + * + * @see [Example]{@link https://github.com/mifi/editly/blob/master/examples/customOutputArgs.json5} + */ + customOutputArgs?: string[]; + + /** + * Allow remote URLs as paths. + * + * @default false + */ + allowRemoteRequests?: boolean; + + /** + * Fast mode (low resolution and FPS, useful for getting a quick preview ⏩). + * + * @default false + */ + fast?: boolean; + + /** + * An object describing default options for clips and layers. + */ + defaults?: DefaultOptions; + + /** + * List of arbitrary audio tracks. + * + * @default [] + * @see [Audio tracks]{@link https://github.com/mifi/editly#arbitrary-audio-tracks} + */ + audioTracks?: AudioTrack[]; + + /** + * Set an audio track for the whole video.. + * + * @see [Audio tracks]{@link https://github.com/mifi/editly#arbitrary-audio-tracks} + */ + audioFilePath?: string; + + /** + * Background Volume + * + * @see [Audio tracks]{@link https://github.com/mifi/editly#arbitrary-audio-tracks} + */ + backgroundAudioVolume?: string | number; + + /** + * Loop the audio track if it is shorter than video? + * + * @default false + */ + loopAudio?: boolean; + + /** + * Keep source audio from `clips`? + * + * @default false + */ + keepSourceAudio?: boolean; + + /** + * Volume of audio from `clips` relative to `audioTracks`. + * + * @default 1 + * @see [Audio tracks]{@link https://github.com/mifi/editly#arbitrary-audio-tracks} + */ + clipsAudioVolume?: number | string; + + /** + * Adjust output [volume]{@link http://ffmpeg.org/ffmpeg-filters.html#volume} (final stage). 
+ * + * @default 1 + * @see [Example]{@link https://github.com/mifi/editly/blob/master/examples/audio-volume.json5} + * @example + * 0.5 + * @example + * '10db' + */ + outputVolume?: number | string; + + /** + * Audio normalization. + */ + audioNorm?: AudioNormalizationOptions; + + /** + * WARNING: Undocumented feature! + */ + ffmpegPath?: string; + + /** + * WARNING: Undocumented feature! + */ + ffprobePath?: string; + + /** + * WARNING: Undocumented feature! + */ + enableFfmpegLog?: boolean; + + /** + * WARNING: Undocumented feature! + */ + verbose?: boolean; + + /** + * WARNING: Undocumented feature! + */ + logTimes?: boolean; + + /** + * WARNING: Undocumented feature! + */ + keepTmp?: boolean; + +} + +export interface RenderSingleFrameConfig extends Config { + + /** + * Output path (`.mp4` or `.mkv`, can also be a `.gif`). + */ + outPath: string; + + /** + * Timestamp to render. + */ + time?: number; + +} + +// Internal types export type Stream = { codec_type: string; @@ -24,7 +1114,7 @@ export type Keyframe = { }; export interface FrameSource { - readNextFrame(progress: number, canvas: StaticCanvas, offsetTime: number): OptionalPromise; + readNextFrame(progress: number, canvas: Fabric.StaticCanvas, offsetTime: number): OptionalPromise; close?(): OptionalPromise; } diff --git a/util.ts b/util.ts index fa7be3c4..a7ffaba9 100644 --- a/util.ts +++ b/util.ts @@ -4,7 +4,7 @@ import { sortBy } from 'lodash-es'; import { pathExists } from 'fs-extra'; import type { Keyframe, Stream } from './types.js'; -import type { Position, PositionObject, Transition } from './index.js'; +import type { Position, PositionObject, Transition } from './types.js'; import type { TOriginX, TOriginY } from 'fabric'; export function parseFps(fps?: string) { From 196dfaacd8e1da0f000a267e6b7eb0f647a6e741 Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Wed, 15 Jan 2025 23:41:24 -0500 Subject: [PATCH 16/23] Convert index.{js,ts} --- audio.ts | 10 ++-- glTransitions.ts | 6 +-- index.js => 
index.ts | 92 +++++++++++++++++++++---------------- parseConfig.ts | 34 +++++++++----- sources/frameSource.ts | 14 ++---- sources/videoFrameSource.ts | 12 ++--- transitions.ts | 21 +++++++-- types.ts | 59 +++++++++--------------- 8 files changed, 133 insertions(+), 115 deletions(-) rename index.js => index.ts (87%) diff --git a/audio.ts b/audio.ts index c609d0f8..1d03191a 100644 --- a/audio.ts +++ b/audio.ts @@ -16,14 +16,14 @@ export type AudioOptions = { tmpDir: string; } -export type EditAudioOptions = Required> & { +export type EditAudioOptions = Pick & { arbitraryAudio: AudioTrack[] }; type LayerWithAudio = (AudioLayer | VideoLayer) & { speedFactor: number }; export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }: AudioOptions) => { - async function createMixedAudioClips({ clips, keepSourceAudio }: { clips: Clip[], keepSourceAudio: boolean }) { + async function createMixedAudioClips({ clips, keepSourceAudio }: { clips: Clip[], keepSourceAudio?: boolean }) { return pMap(clips, async (clip, i) => { const { duration, layers, transition } = clip; @@ -36,7 +36,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }: A '-f', 'lavfi', '-i', 'anullsrc=channel_layout=stereo:sample_rate=44100', '-sample_fmt', 's32', '-ar', '48000', - '-t', duration!.toString(), + '-t', duration.toString(), '-c:a', 'flac', '-y', clipAudioPath, @@ -50,7 +50,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }: A if (!keepSourceAudio) return createSilence(); // TODO:[ts]: Layers is always an array once config is parsed. 
Fix this in types - const audioLayers = (layers as Layer[]).filter(({ type, start, stop }) => ( + const audioLayers = layers.filter(({ type, start, stop }) => ( ['audio', 'video'].includes(type) // TODO: We don't support audio for start/stop layers && !start && stop == null)) as LayerWithAudio[]; @@ -174,7 +174,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }: A } // FIXME[ts]: parseConfig sets `loop` on arbitrary audio tracks. Should that be part of the `AudioTrack` interface? - async function mixArbitraryAudio({ streams, audioNorm, outputVolume }: { streams: (AudioTrack & { loop?: number })[], audioNorm: AudioNormalizationOptions, outputVolume: number | string }) { + async function mixArbitraryAudio({ streams, audioNorm, outputVolume }: { streams: (AudioTrack & { loop?: number })[], audioNorm?: AudioNormalizationOptions, outputVolume?: number | string }) { let maxGain = 30; let gaussSize = 5; if (audioNorm) { diff --git a/glTransitions.ts b/glTransitions.ts index fee7f739..5cf23035 100644 --- a/glTransitions.ts +++ b/glTransitions.ts @@ -7,11 +7,11 @@ import createTexture from 'gl-texture2d'; const { default: createTransition } = glTransition; -type RunTransitionOptions = { +export type RunTransitionOptions = { fromFrame: Buffer; toFrame: Buffer; progress: number; - transitionName: string; + transitionName?: string; transitionParams?: any; } @@ -40,7 +40,7 @@ export default ({ width, height, channels }: { width: number, height: number, ch try { const resizeMode = 'stretch'; - const transitionSource = glTransitions.find((t) => t.name.toLowerCase() === transitionName.toLowerCase()); + const transitionSource = glTransitions.find((t) => t.name.toLowerCase() === transitionName?.toLowerCase()); transition = createTransition(gl, transitionSource, { resizeMode }); diff --git a/index.js b/index.ts similarity index 87% rename from index.js rename to index.ts index 9a6df3e4..4c55bf11 100644 --- a/index.js +++ b/index.ts @@ -1,4 +1,4 @@ 
-import { execa } from 'execa'; +import { execa, ExecaChildProcess } from 'execa'; import assert from 'assert'; import { join, dirname } from 'path'; import JSON5 from 'json5'; @@ -10,19 +10,27 @@ import { parseFps, multipleOf2, assertFileValid, checkTransition } from './util. import { createFabricCanvas, rgbaToFabricImage } from './sources/fabric.js'; import { createFrameSource } from './sources/frameSource.js'; import parseConfig from './parseConfig.js'; -import GlTransitions from './glTransitions.js'; +import GlTransitions, { type RunTransitionOptions } from './glTransitions.js'; import Audio from './audio.js'; +import type { Clip, Config, Layer, RenderSingleFrameConfig } from './types.js'; const channels = 4; -async function Editly(config = {}) { +export type * from './types.js'; + +/** + * Edit and render videos. + * + * @param config - Config. + */ +async function Editly(config: Config): Promise { const { // Testing options: enableFfmpegLog = false, verbose = false, logTimes = false, keepTmp = false, - fast, + fast = false, outPath, clips: clipsIn, @@ -87,15 +95,15 @@ async function Editly(config = {}) { return false; })); - let width; - let height; + let width: number; + let height: number; let desiredWidth; if (requestedWidth) desiredWidth = requestedWidth; else if (isGif) desiredWidth = 320; - const roundDimension = (val) => (isGif ? Math.round(val) : multipleOf2(val)); + const roundDimension = (val: number) => (isGif ? Math.round(val) : multipleOf2(val)); if (firstVideoWidth && firstVideoHeight) { if (desiredWidth) { @@ -138,8 +146,8 @@ async function Editly(config = {}) { height = Math.max(2, height); } - let fps; - let framerateStr; + let fps: number; + let framerateStr: string; if (fast) { fps = 15; @@ -151,7 +159,7 @@ async function Editly(config = {}) { fps = 10; framerateStr = String(fps); } else if (firstVideoFramerateStr) { - fps = parseFps(firstVideoFramerateStr); + fps = parseFps(firstVideoFramerateStr) ?? 
25; framerateStr = firstVideoFramerateStr; } else { fps = 25; @@ -170,7 +178,7 @@ async function Editly(config = {}) { const { runTransitionOnFrame: runGlTransitionOnFrame } = GlTransitions({ width, height, channels }); - function runTransitionOnFrame({ fromFrame, toFrame, progress, transitionName, transitionParams }) { + function runTransitionOnFrame({ fromFrame, toFrame, progress, transitionName, transitionParams }: RunTransitionOptions) { // A dummy transition can be used to have an audio transition without a video transition // (Note: You will lose a portion from both clips due to overlap) if (transitionName === 'dummy') return progress > 0.5 ? toFrame : fromFrame; @@ -186,7 +194,7 @@ async function Editly(config = {}) { // https://superuser.com/questions/556029/how-do-i-convert-a-video-to-gif-using-ffmpeg-with-reasonable-quality const videoOutputArgs = isGif ? [ '-vf', `format=rgb24,fps=${fps},scale=${width}:${height}:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse`, - '-loop', 0, + '-loop', '0', ] : [ '-vf', 'format=yuv420p', '-vcodec', 'libx264', @@ -226,7 +234,7 @@ async function Editly(config = {}) { return execa(ffmpegPath, args, { encoding: null, buffer: false, stdin: 'pipe', stdout: process.stdout, stderr: process.stderr }); } - let outProcess; + let outProcess: ExecaChildProcess> | undefined = undefined; let outProcessExitCode; let frameSource1; @@ -244,7 +252,7 @@ async function Editly(config = {}) { const getTransitionFromClip = () => clips[transitionFromClipId]; const getTransitionToClip = () => clips[getTransitionToClipId()]; - const getSource = async (clip, clipIndex) => createFrameSource({ clip, clipIndex, width, height, channels, verbose, logTimes, ffmpegPath, ffprobePath, enableFfmpegLog, framerateStr }); + const getSource = async (clip: Clip, clipIndex: number) => createFrameSource({ clip, clipIndex, width, height, channels, verbose, logTimes, ffmpegPath, ffprobePath, enableFfmpegLog, framerateStr }); const 
getTransitionFromSource = async () => getSource(getTransitionFromClip(), transitionFromClipId); const getTransitionToSource = async () => (getTransitionToClip() && getSource(getTransitionToClip(), getTransitionToClipId())); @@ -279,9 +287,9 @@ async function Editly(config = {}) { const fromClipTime = transitionFromClip.duration * fromClipProgress; const toClipTime = transitionToClip && transitionToClip.duration * toClipProgress; - const currentTransition = transitionFromClip.transition; + const currentTransition = transitionFromClip.transition!; - const transitionNumFrames = Math.round(currentTransition.duration * fps); + const transitionNumFrames = Math.round(currentTransition.duration! * fps); // Each clip has two transitions, make sure we leave enough room: const transitionNumFramesSafe = Math.floor(Math.min(Math.min(fromClipNumFrames, toClipNumFrames != null ? toClipNumFrames : Number.MAX_SAFE_INTEGER) / 2, transitionNumFrames)); @@ -341,7 +349,7 @@ async function Editly(config = {}) { const easedProgress = currentTransition.easingFunction(progress); if (logTimes) console.time('runTransitionOnFrame'); - outFrameData = runTransitionOnFrame({ fromFrame: frameSource1Data, toFrame: frameSource2Data, progress: easedProgress, transitionName: currentTransition.name, transitionParams: currentTransition.params }); + outFrameData = runTransitionOnFrame({ fromFrame: frameSource1Data!, toFrame: frameSource2Data, progress: easedProgress, transitionName: currentTransition.name, transitionParams: currentTransition.params }); if (logTimes) console.timeEnd('runTransitionOnFrame'); } else { console.warn('Got no frame data from transitionToClip!'); @@ -364,7 +372,7 @@ async function Editly(config = {}) { if (logTimes) console.time('outProcess.write'); // If we don't wait, then we get EINVAL when dealing with high resolution files (big writes) - if (!nullOutput) await new Promise((r) => outProcess.stdin.write(outFrameData, r)); + if (!nullOutput) await new Promise((r) => 
outProcess?.stdin?.write(outFrameData, r)); if (logTimes) console.timeEnd('outProcess.write'); @@ -375,9 +383,9 @@ async function Editly(config = {}) { if (isInTransition) toClipFrameAt += 1; } // End while loop - outProcess.stdin.end(); + outProcess.stdin?.end(); } catch (err) { - outProcess.kill(); + outProcess?.kill(); throw err; } finally { if (verbose) console.log('Cleanup'); @@ -389,7 +397,7 @@ async function Editly(config = {}) { if (verbose) console.log('Waiting for output ffmpeg process to finish'); await outProcess; } catch (err) { - if (outProcessExitCode !== 0 && !err.killed) throw err; + if (outProcessExitCode !== 0 && !(err as any).killed) throw err; } } finally { if (!keepTmp) await fsExtra.remove(tmpDir); @@ -400,24 +408,30 @@ async function Editly(config = {}) { console.log(outPath); } -// Pure function to get a frame at a certain time -// TODO I think this does not respect transition durations -async function renderSingleFrame({ - time = 0, - defaults, - width = 800, - height = 600, - clips: clipsIn, - - verbose, - logTimes, - enableFfmpegLog, - allowRemoteRequests, - ffprobePath = 'ffprobe', - ffmpegPath = 'ffmpeg', - outPath = `${Math.floor(Math.random() * 1e12)}.png`, -}) { - const clips = await parseConfig({ defaults, clips: clipsIn, arbitraryAudio: [], allowRemoteRequests, ffprobePath }); +/** + * Pure function to get a frame at a certain time. + * TODO: I think this does not respect transition durations + * + * @param config - Config. 
+ */ +export async function renderSingleFrame(config: RenderSingleFrameConfig): Promise { + const { + time = 0, + defaults = {}, + width = 800, + height = 600, + clips: clipsIn, + + verbose, + logTimes, + enableFfmpegLog, + allowRemoteRequests, + ffprobePath = 'ffprobe', + ffmpegPath = 'ffmpeg', + outPath = `${Math.floor(Math.random() * 1e12)}.png`, + } = config; + + const { clips } = await parseConfig({ defaults, clips: clipsIn, arbitraryAudio: [], allowRemoteRequests, ffprobePath }); let clipStartTime = 0; const clip = clips.find((c) => { if (clipStartTime <= time && clipStartTime + c.duration > time) return true; diff --git a/parseConfig.ts b/parseConfig.ts index 4d3c8d03..30e3aa7c 100644 --- a/parseConfig.ts +++ b/parseConfig.ts @@ -11,8 +11,14 @@ import { checkTransition, } from './util.js'; import { registerFont } from 'canvas'; -import { calcTransition } from './transitions.js'; -import type { AudioTrack, CanvasLayer, Config, EditlyBannerLayer, FabricLayer, GlLayer, ImageLayer, ImageOverlayLayer, Layer, LinearGradientLayer, NewsTitleLayer, SlideInTextLayer, SubtitleLayer, TitleBackgroundLayer, TitleLayer, ProcessedVideoLayer, LayerDuration } from './types.js'; +import { calcTransition, type CalculatedTransition } from './transitions.js'; +import type { AudioTrack, CanvasLayer, EditlyBannerLayer, FabricLayer, GlLayer, ImageLayer, ImageOverlayLayer, Layer, LinearGradientLayer, NewsTitleLayer, SlideInTextLayer, SubtitleLayer, TitleBackgroundLayer, TitleLayer, DefaultOptions, Clip, Transition, VideoLayer } from './types.js'; + +export type ProcessedClip = { + layers: Layer[]; + duration: number; + transition: CalculatedTransition; +} const dirname = fileURLToPath(new URL('.', import.meta.url)); @@ -35,9 +41,15 @@ async function validateArbitraryAudio(audio: AudioTrack[] | undefined, allowRemo } } -type ParseConfigOptions = Required> & { - arbitraryAudio: AudioTrack[]; +type ParseConfigOptions = { + defaults: DefaultOptions; + clips: Clip[]; + 
backgroundAudioVolume?: string | number; backgroundAudioPath?: string; + loopAudio?: boolean; + allowRemoteRequests?: boolean; + ffprobePath: string; + arbitraryAudio: AudioTrack[]; }; export default async function parseConfig({ defaults: defaultsIn = {}, clips, arbitraryAudio: arbitraryAudioIn, backgroundAudioPath, backgroundAudioVolume, loopAudio, allowRemoteRequests, ffprobePath }: ParseConfigOptions) { @@ -123,7 +135,7 @@ export default async function parseConfig({ defaults: defaultsIn = {}, clips, ar const detachedAudioByClip: Record = {}; - let clipsOut = await pMap(clips, async (clip, clipIndex) => { + let clipsOut: ProcessedClip[] = await pMap(clips, async (clip, clipIndex) => { assert(typeof clip === 'object', '"clips" must contain objects with one or more layers'); const { transition: userTransition, duration: userClipDuration, layers: layersIn } = clip; @@ -142,7 +154,7 @@ export default async function parseConfig({ defaults: defaultsIn = {}, clips, ar const transition = calcTransition(defaults.transition, userTransition, clipIndex === clips.length - 1); - let layersOut: Layer[] = flatMap(await pMap(layers, async (layerIn: T) => { + let layersOut = flatMap(await pMap(layers, async (layerIn: T) => { const globalLayerDefaults = defaults.layer || {}; const thisLayerDefaults = (defaults.layerType || {})[layerIn.type]; @@ -166,7 +178,7 @@ export default async function parseConfig({ defaults: defaultsIn = {}, clips, ar const inputWidth = isRotated ? heightIn : widthIn; const inputHeight = isRotated ? 
widthIn : heightIn; - return { ...layer, cutFrom, cutTo, layerDuration, framerateStr, inputWidth, inputHeight } as ProcessedVideoLayer; + return { ...layer, cutFrom, cutTo, layerDuration, framerateStr, inputWidth, inputHeight } as T; } // Audio is handled later @@ -177,8 +189,8 @@ export default async function parseConfig({ defaults: defaultsIn = {}, clips, ar let clipDuration = userClipDurationOrDefault; - const firstVideoLayer = layersOut.find((layer): layer is ProcessedVideoLayer => layer.type === 'video'); - if (firstVideoLayer && !userClipDuration) clipDuration = firstVideoLayer.layerDuration; + const firstVideoLayer = layersOut.find((layer): layer is VideoLayer => layer.type === 'video'); + if (firstVideoLayer && !userClipDuration) clipDuration = firstVideoLayer.layerDuration!; assert(clipDuration); // We need to map again, because for audio, we need to know the correct clipDuration @@ -191,7 +203,7 @@ export default async function parseConfig({ defaults: defaultsIn = {}, clips, ar // TODO Also need to handle video layers (speedFactor etc) // TODO handle audio in case of start/stop - const layer: LayerDuration = { ...layerIn, layerDuration }; + const layer: T = { ...layerIn, layerDuration }; if (layer.type === 'audio') { const { duration: fileDuration } = await readAudioFileInfo(ffprobePath, layer.path); @@ -263,7 +275,7 @@ export default async function parseConfig({ defaults: defaultsIn = {}, clips, ar let safeTransitionDuration = 0; if (nextClip) { // Each clip can have two transitions, make sure we leave enough room: - safeTransitionDuration = Math.min(clip.duration / 2, nextClip.duration / 2, clip.transition.duration); + safeTransitionDuration = Math.min(clip.duration / 2, nextClip.duration / 2, clip.transition!.duration!); } // We now know all clip durations so we can calculate the offset for detached audio tracks diff --git a/sources/frameSource.ts b/sources/frameSource.ts index 47fd36dd..12f856fc 100644 --- a/sources/frameSource.ts +++ 
b/sources/frameSource.ts @@ -23,7 +23,7 @@ import { } from './fabric/fabricFrameSources.js'; import createVideoFrameSource from './videoFrameSource.js'; import createGlFrameSource from './glFrameSource.js'; -import type { CreateFrameSource, CreateFrameSourceOptions, LayerDuration, Clip, Layer } from '../types.js'; +import type { CreateFrameSource, CreateFrameSourceOptions, Clip, DebugOptions } from '../types.js'; const fabricFrameSources: Record> = { fabric: customFabricFrameSource, @@ -44,25 +44,21 @@ const frameSources: Record> = { canvas: createCustomCanvasFrameSource, }; -type FrameSourceOptions = { +type FrameSourceOptions = DebugOptions & { clip: Clip; clipIndex: number; ffmpegPath: string; ffprobePath: string; width: number, height: number, - duration: number, channels: number, - verbose: boolean, - logTimes: boolean, - enableFfmpegLog: boolean, framerateStr: string, } export async function createFrameSource({ clip, clipIndex, width, height, channels, verbose, logTimes, ffmpegPath, ffprobePath, enableFfmpegLog, framerateStr }: FrameSourceOptions) { const { layers, duration } = clip; - const visualLayers = (layers as LayerDuration[]).filter((layer) => layer.type !== 'audio'); + const visualLayers = layers.filter((layer) => layer.type !== 'audio'); const layerFrameSources = await pMap(visualLayers, async (layer, layerIndex) => { const { type, ...params } = layer; @@ -77,7 +73,7 @@ export async function createFrameSource({ clip, clipIndex, width, height, channe assert(createFrameSourceFunc, `Invalid type ${type}`); - const frameSource = await createFrameSourceFunc({ ffmpegPath, ffprobePath, width, height, duration: duration!, channels, verbose, logTimes, enableFfmpegLog, framerateStr, params }); + const frameSource = await createFrameSourceFunc({ ffmpegPath, ffprobePath, width, height, duration, channels, verbose, logTimes, enableFfmpegLog, framerateStr, params }); return { layer, frameSource }; }, { concurrency: 1 }); @@ -88,7 +84,7 @@ export async function 
createFrameSource({ clip, clipIndex, width, height, channe for (const { frameSource, layer } of layerFrameSources) { // console.log({ start: layer.start, stop: layer.stop, layerDuration: layer.layerDuration, time }); const offsetTime = time - (layer?.start ?? 0); - const offsetProgress = offsetTime / layer.layerDuration; + const offsetProgress = offsetTime / layer.layerDuration!; // console.log({ offsetProgress }); const shouldDrawLayer = offsetProgress >= 0 && offsetProgress <= 1; diff --git a/sources/videoFrameSource.ts b/sources/videoFrameSource.ts index 40f2136f..4abdb280 100644 --- a/sources/videoFrameSource.ts +++ b/sources/videoFrameSource.ts @@ -8,9 +8,9 @@ import { rgbaToFabricImage, blurImage, } from './fabric.js'; -import type { CreateFrameSourceOptions, ProcessedVideoLayer } from '../types.js'; +import type { CreateFrameSourceOptions, VideoLayer } from '../types.js'; -export default async ({ width: canvasWidth, height: canvasHeight, channels, framerateStr, verbose, logTimes, ffmpegPath, ffprobePath, enableFfmpegLog, params }: CreateFrameSourceOptions) => { +export default async ({ width: canvasWidth, height: canvasHeight, channels, framerateStr, verbose, logTimes, ffmpegPath, ffprobePath, enableFfmpegLog, params }: CreateFrameSourceOptions) => { const { path, cutFrom, cutTo, resizeMode = 'contain-blur', speedFactor, inputWidth, inputHeight, width: requestedWidthRel, height: requestedHeightRel, left: leftRel = 0, top: topRel = 0, originX = 'left', originY = 'top', fabricImagePostProcessing = null } = params; const requestedWidth = requestedWidthRel ? 
Math.round(requestedWidthRel * canvasWidth) : canvasWidth; @@ -19,9 +19,9 @@ export default async ({ width: canvasWidth, height: canvasHeight, channels, fram const left = leftRel * canvasWidth; const top = topRel * canvasHeight; - const ratioW = requestedWidth / inputWidth; - const ratioH = requestedHeight / inputHeight; - const inputAspectRatio = inputWidth / inputHeight; + const ratioW = requestedWidth / inputWidth!; + const ratioH = requestedHeight / inputHeight!; + const inputAspectRatio = inputWidth! / inputHeight!; let targetWidth = requestedWidth; let targetHeight = requestedHeight; @@ -89,7 +89,7 @@ export default async ({ width: canvasWidth, height: canvasHeight, channels, fram ...(inputCodec ? ['-vcodec', inputCodec] : []), ...(cutFrom ? ['-ss', cutFrom.toString()] : []), '-i', path, - ...(cutTo ? ['-t', ((cutTo - cutFrom!) * speedFactor).toString()] : []), + ...(cutTo ? ['-t', ((cutTo - cutFrom!) * speedFactor!).toString()] : []), '-vf', `${ptsFilter}fps=${framerateStr},${scaleFilter}`, '-map', 'v:0', '-vcodec', 'rawvideo', diff --git a/transitions.ts b/transitions.ts index d71c59b2..db674c45 100644 --- a/transitions.ts +++ b/transitions.ts @@ -1,6 +1,13 @@ import assert from 'assert'; import type { Transition } from './types.js'; +export type EasingFunction = (progress: number) => number; + +export type CalculatedTransition = Transition & { + duration: number; + easingFunction: EasingFunction; +} + const randomTransitionsSet = ['fade', 'fadegrayscale', 'directionalwarp', 'crosswarp', 'dreamyzoom', 'burn', 'crosszoom', 'simplezoom', 'linearblur', 'directional-left', 'directional-right', 'directional-up', 'directional-down']; function getRandomTransition() { @@ -17,13 +24,17 @@ export function easeInOutCubic(x: number) { return x < 0.5 ? 
4 * x * x * x : 1 - ((-2 * x + 2) ** 3) / 2; } -function getTransitionEasingFunction(easing: string | null | undefined, transitionName?: string) { +export function linear(x: number) { + return x; +} + +function getTransitionEasingFunction(easing: string | null | undefined, transitionName?: string): EasingFunction { if (easing !== null) { // FIXME[TS]: `easing` always appears to be null or undefined, so this never gets called - if (easing) return { easeOutExpo }[easing]; + if (easing) return { easeOutExpo }[easing] || linear; if (transitionName === 'directional') return easeOutExpo; } - return (progress: number) => progress; + return linear; } const TransitionAliases: Record> = { @@ -33,8 +44,8 @@ const TransitionAliases: Record> = { 'directional-up': { name: 'directional', params: { direction: [0, -1] } }, } -export function calcTransition(defaults: Transition | null | undefined, transition: Transition | null | undefined, isLastClip: boolean) { - if (transition === null || isLastClip) return { duration: 0 }; +export function calcTransition(defaults: Transition | null | undefined, transition: Transition | null | undefined, isLastClip: boolean): CalculatedTransition { + if (transition === null || isLastClip) return { duration: 0, easingFunction: linear }; let transitionOrDefault: Transition = { ...defaults, ...transition } diff --git a/types.ts b/types.ts index 6291aba1..0d94489e 100644 --- a/types.ts +++ b/types.ts @@ -266,6 +266,11 @@ export interface BaseLayer { */ stop?: number; + /** + * FIXME[ts]: This is used internally and should be removed after some refactoring. + * @private + */ + layerDuration?: number; } export interface TextLayer extends BaseLayer { @@ -397,6 +402,12 @@ export interface VideoLayer extends BaseLayer { * Post-processing function after calling rgbaToFabricImage but before adding it to StaticCanvas. 
*/ fabricImagePostProcessing?: (data: VideoPostProcessingFunctionArgs) => Promise; + + // FIXME[TS]: Used internally, but should be removed after refactoring + framerateStr?: string; + inputWidth?: number; + inputHeight?: number; + speedFactor?: number; } /** @@ -818,14 +829,14 @@ export interface Clip { /** * List of layers within the current clip that will be overlaid in their natural order (final layer on top). */ - layers: Layer[] | Layer; + layers: Layer[]; /** * Clip duration. * If unset, the clip duration will be that of the first video layer. * Defaults to `defaults.duration`. */ - duration?: number; + duration: number; /** * Specify transition at the end of this clip. @@ -925,7 +936,13 @@ export interface AudioNormalizationOptions { } -export interface Config { +export interface DebugOptions { + enableFfmpegLog?: boolean; + verbose?: boolean; + logTimes?: boolean; +} + +export interface Config extends DebugOptions { /** * Output path (`.mp4` or `.mkv`, can also be a `.gif`). */ @@ -1056,27 +1073,11 @@ export interface Config { */ ffprobePath?: string; - /** - * WARNING: Undocumented feature! - */ - enableFfmpegLog?: boolean; - - /** - * WARNING: Undocumented feature! - */ - verbose?: boolean; - - /** - * WARNING: Undocumented feature! - */ - logTimes?: boolean; - /** * WARNING: Undocumented feature! 
*/ keepTmp?: boolean; - -} +}; export interface RenderSingleFrameConfig extends Config { @@ -1118,31 +1119,15 @@ export interface FrameSource { close?(): OptionalPromise; } -export type CreateFrameSourceOptions = { +export type CreateFrameSourceOptions = DebugOptions & { ffmpegPath: string; ffprobePath: string; width: number, height: number, duration: number, channels: number, - verbose: boolean, - logTimes: boolean, - enableFfmpegLog: boolean, framerateStr: string, params: Omit, }; export type CreateFrameSource = (options: CreateFrameSourceOptions) => Promise; - -export type LayerDuration = T & { - layerDuration: number; -}; - -export type ProcessedLayer = LayerDuration> | ProcessedVideoLayer; - -export type ProcessedVideoLayer = LayerDuration & { - framerateStr: string; - inputWidth: number; - inputHeight: number; - speedFactor: number; -}; From 215c17be8f73bab1e5591e2d2214f02e0d8d16e8 Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Thu, 16 Jan 2025 00:02:22 -0500 Subject: [PATCH 17/23] Convert cli.{js,ts} --- audio.ts | 2 +- cli.js => cli.ts | 25 +++++++++++++++---------- sources/frameSource.ts | 3 ++- types.ts | 2 +- 4 files changed, 19 insertions(+), 13 deletions(-) rename cli.js => cli.ts (86%) diff --git a/audio.ts b/audio.ts index 1d03191a..23925c28 100644 --- a/audio.ts +++ b/audio.ts @@ -36,7 +36,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }: A '-f', 'lavfi', '-i', 'anullsrc=channel_layout=stereo:sample_rate=44100', '-sample_fmt', 's32', '-ar', '48000', - '-t', duration.toString(), + '-t', duration!.toString(), '-c:a', 'flac', '-y', clipAudioPath, diff --git a/cli.js b/cli.ts similarity index 86% rename from cli.js rename to cli.ts index e33eaac1..7524afb6 100644 --- a/cli.js +++ b/cli.ts @@ -6,7 +6,7 @@ import pMap from 'p-map'; import JSON5 from 'json5'; import assert from 'assert'; -import Editly from './index.js'; +import Editly, { Config, Layer } from './index.js'; // See also readme const cli = meow(` @@ 
-53,13 +53,18 @@ const cli = meow(` keepSourceAudio: { type: 'boolean' }, allowRemoteRequests: { type: 'boolean' }, fast: { type: 'boolean', alias: 'f' }, + transitionName: { type: 'string' }, transitionDuration: { type: 'number' }, clipDuration: { type: 'number' }, width: { type: 'number' }, height: { type: 'number' }, fps: { type: 'number' }, + fontPath: { type: 'string' }, loopAudio: { type: 'boolean' }, outputVolume: { type: 'string' }, + json: { type: 'string' }, + out: { type: 'string' }, + audioFilePath: { type: 'string' }, }, }); @@ -68,7 +73,7 @@ const cli = meow(` // eslint-disable-next-line prefer-destructuring if (cli.input.length === 1 && /\.(json|json5|js)$/.test(cli.input[0].toLowerCase())) json = cli.input[0]; - let params = { + let params: Partial = { defaults: {}, }; @@ -78,7 +83,7 @@ const cli = meow(` const clipsIn = cli.input; if (clipsIn.length < 1) cli.showHelp(); - const clips = await pMap(clipsIn, async (clip) => { + const clips: Layer[] = await pMap(clipsIn, async (clip) => { let match = clip.match(/^title:(.+)$/); if (match) return { type: 'title-background', text: match[1] }; @@ -91,7 +96,7 @@ const cli = meow(` cli.showHelp(); } - const { mime } = fileType; + const mime = fileType!.mime; if (mime.startsWith('video')) return { type: 'video', path: clip }; if (mime.startsWith('image')) return { type: 'image', path: clip }; @@ -107,15 +112,15 @@ const cli = meow(` const { verbose, transitionName, transitionDuration, clipDuration, width, height, fps, audioFilePath, fontPath, fast, out: outPath, keepSourceAudio, loopAudio, outputVolume, allowRemoteRequests } = cli.flags; if (transitionName || transitionDuration != null) { - params.defaults.transition = {}; - if (transitionName) params.defaults.transition.name = transitionName; - if (transitionDuration) params.defaults.transition.duration = transitionDuration; + params.defaults!.transition = {}; + if (transitionName) params.defaults!.transition!.name = transitionName; + if 
(transitionDuration) params.defaults!.transition!.duration = transitionDuration; } - if (clipDuration) params.defaults.duration = clipDuration; + if (clipDuration) params.defaults!.duration = clipDuration; if (fontPath) { - params.defaults.layer = { + params.defaults!.layer = { fontPath, }; } @@ -137,7 +142,7 @@ const cli = meow(` if (!params.outPath) params.outPath = './editly-out.mp4'; - await Editly(params); + await Editly(params as Config); })().catch((err) => { console.error('Caught error', err); process.exitCode = 1; diff --git a/sources/frameSource.ts b/sources/frameSource.ts index 12f856fc..d3e6924c 100644 --- a/sources/frameSource.ts +++ b/sources/frameSource.ts @@ -24,6 +24,7 @@ import { import createVideoFrameSource from './videoFrameSource.js'; import createGlFrameSource from './glFrameSource.js'; import type { CreateFrameSource, CreateFrameSourceOptions, Clip, DebugOptions } from '../types.js'; +import { ProcessedClip } from '../parseConfig.js'; const fabricFrameSources: Record> = { fabric: customFabricFrameSource, @@ -45,7 +46,7 @@ const frameSources: Record> = { }; type FrameSourceOptions = DebugOptions & { - clip: Clip; + clip: ProcessedClip; clipIndex: number; ffmpegPath: string; ffprobePath: string; diff --git a/types.ts b/types.ts index 0d94489e..9f14ae9f 100644 --- a/types.ts +++ b/types.ts @@ -836,7 +836,7 @@ export interface Clip { * If unset, the clip duration will be that of the first video layer. * Defaults to `defaults.duration`. */ - duration: number; + duration?: number; /** * Specify transition at the end of this clip. 
From ab2337f86155234a3d6a04909663d0e38fb89bfe Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Thu, 16 Jan 2025 08:54:00 -0500 Subject: [PATCH 18/23] Move typescript files into src, build in dist --- .gitignore | 1 + package.json | 17 +++++++++++------ BoxBlur.d.ts => src/BoxBlur.d.ts | 0 BoxBlur.js => src/BoxBlur.js | 0 audio.ts => src/audio.ts | 0 cli.ts => src/cli.ts | 0 colors.ts => src/colors.ts | 0 ffmpeg.ts => src/ffmpeg.ts | 0 glTransitions.ts => src/glTransitions.ts | 0 index.ts => src/index.ts | 4 ++-- parseConfig.ts => src/parseConfig.ts | 2 +- {sources => src/sources}/fabric.ts | 0 .../sources}/fabric/fabricFrameSources.ts | 0 {sources => src/sources}/frameSource.ts | 0 {sources => src/sources}/glFrameSource.ts | 0 {sources => src/sources}/videoFrameSource.ts | 0 transitions.ts => src/transitions.ts | 0 types.ts => src/types.ts | 0 {types => src/types}/gl-buffer.d.ts | 0 {types => src/types}/gl-texture2d.d.ts | 0 {types => src/types}/gl-transition.d.ts | 0 {types => src/types}/gl-transitions.d.ts | 0 util.ts => src/util.ts | 0 test.ts => test/index.ts | 5 +++-- tsconfig.json | 5 ++++- 25 files changed, 22 insertions(+), 12 deletions(-) rename BoxBlur.d.ts => src/BoxBlur.d.ts (100%) rename BoxBlur.js => src/BoxBlur.js (100%) rename audio.ts => src/audio.ts (100%) rename cli.ts => src/cli.ts (100%) rename colors.ts => src/colors.ts (100%) rename ffmpeg.ts => src/ffmpeg.ts (100%) rename glTransitions.ts => src/glTransitions.ts (100%) rename index.ts => src/index.ts (98%) rename parseConfig.ts => src/parseConfig.ts (99%) rename {sources => src/sources}/fabric.ts (100%) rename {sources => src/sources}/fabric/fabricFrameSources.ts (100%) rename {sources => src/sources}/frameSource.ts (100%) rename {sources => src/sources}/glFrameSource.ts (100%) rename {sources => src/sources}/videoFrameSource.ts (100%) rename transitions.ts => src/transitions.ts (100%) rename types.ts => src/types.ts (100%) rename {types => src/types}/gl-buffer.d.ts (100%) rename {types 
=> src/types}/gl-texture2d.d.ts (100%) rename {types => src/types}/gl-transition.d.ts (100%) rename {types => src/types}/gl-transitions.d.ts (100%) rename util.ts => src/util.ts (100%) rename test.ts => test/index.ts (88%) diff --git a/.gitignore b/.gitignore index e293efc3..e93bdac6 100644 --- a/.gitignore +++ b/.gitignore @@ -114,3 +114,4 @@ editly-tmp-*/ pnpm-lock.yaml yarn.lock package-lock.json +dist diff --git a/package.json b/package.json index 436e8d6a..5bf81387 100644 --- a/package.json +++ b/package.json @@ -2,8 +2,11 @@ "name": "editly", "description": "Simple, sexy, declarative video editing", "version": "0.14.2", - "main": "index.js", - "types": "index.d.ts", + "module": "./dist/index.js", + "types": "dist/index.d.ts", + "exports": { + ".": "./dist/index.js" + }, "author": "Mikael Finstad ", "contributors": [ "Patrick Connolly (https://github.com/patcon)", @@ -33,8 +36,8 @@ "p-map": "^7.0.2" }, "scripts": { - "prepare": "tsc -b", - "test": "node test.js", + "prepare": "pkgroll --clean-dist --sourcemap", + "test": "tsx test/index.ts", "lint": "eslint ." 
}, "repository": { @@ -42,7 +45,7 @@ "url": "git+https://github.com/mifi/editly.git" }, "bin": { - "editly": "cli.js" + "editly": "dist/cli.js" }, "devDependencies": { "@tsconfig/node-lts": "^22.0.1", @@ -50,11 +53,13 @@ "@types/gl": "^6.0.5", "@types/gl-shader": "^4.2.5", "@types/lodash-es": "^4.17.12", + "@types/ndarray": "^1.0.14", "eslint": "^8.22.0", "eslint-config-airbnb-base": "^15.0.0", "eslint-plugin-import": "^2.29.1", "eslint-plugin-import-exports-imports-resolver": "^1.0.1", - "ts-node": "^10.9.2", + "pkgroll": "^2.6.1", + "tsx": "^4.19.2", "typescript": "^5.7.3" } } diff --git a/BoxBlur.d.ts b/src/BoxBlur.d.ts similarity index 100% rename from BoxBlur.d.ts rename to src/BoxBlur.d.ts diff --git a/BoxBlur.js b/src/BoxBlur.js similarity index 100% rename from BoxBlur.js rename to src/BoxBlur.js diff --git a/audio.ts b/src/audio.ts similarity index 100% rename from audio.ts rename to src/audio.ts diff --git a/cli.ts b/src/cli.ts similarity index 100% rename from cli.ts rename to src/cli.ts diff --git a/colors.ts b/src/colors.ts similarity index 100% rename from colors.ts rename to src/colors.ts diff --git a/ffmpeg.ts b/src/ffmpeg.ts similarity index 100% rename from ffmpeg.ts rename to src/ffmpeg.ts diff --git a/glTransitions.ts b/src/glTransitions.ts similarity index 100% rename from glTransitions.ts rename to src/glTransitions.ts diff --git a/index.ts b/src/index.ts similarity index 98% rename from index.ts rename to src/index.ts index 4c55bf11..9f25b2a9 100644 --- a/index.ts +++ b/src/index.ts @@ -9,7 +9,7 @@ import { testFf } from './ffmpeg.js'; import { parseFps, multipleOf2, assertFileValid, checkTransition } from './util.js'; import { createFabricCanvas, rgbaToFabricImage } from './sources/fabric.js'; import { createFrameSource } from './sources/frameSource.js'; -import parseConfig from './parseConfig.js'; +import parseConfig, { ProcessedClip } from './parseConfig.js'; import GlTransitions, { type RunTransitionOptions } from './glTransitions.js'; 
import Audio from './audio.js'; import type { Clip, Config, Layer, RenderSingleFrameConfig } from './types.js'; @@ -252,7 +252,7 @@ async function Editly(config: Config): Promise { const getTransitionFromClip = () => clips[transitionFromClipId]; const getTransitionToClip = () => clips[getTransitionToClipId()]; - const getSource = async (clip: Clip, clipIndex: number) => createFrameSource({ clip, clipIndex, width, height, channels, verbose, logTimes, ffmpegPath, ffprobePath, enableFfmpegLog, framerateStr }); + const getSource = async (clip: ProcessedClip, clipIndex: number) => createFrameSource({ clip, clipIndex, width, height, channels, verbose, logTimes, ffmpegPath, ffprobePath, enableFfmpegLog, framerateStr }); const getTransitionFromSource = async () => getSource(getTransitionFromClip(), transitionFromClipId); const getTransitionToSource = async () => (getTransitionToClip() && getSource(getTransitionToClip(), getTransitionToClipId())); diff --git a/parseConfig.ts b/src/parseConfig.ts similarity index 99% rename from parseConfig.ts rename to src/parseConfig.ts index 30e3aa7c..561a519d 100644 --- a/parseConfig.ts +++ b/src/parseConfig.ts @@ -20,7 +20,7 @@ export type ProcessedClip = { transition: CalculatedTransition; } -const dirname = fileURLToPath(new URL('.', import.meta.url)); +const dirname = fileURLToPath(new URL('..', import.meta.url)); // Cache const loadedFonts: string[] = []; diff --git a/sources/fabric.ts b/src/sources/fabric.ts similarity index 100% rename from sources/fabric.ts rename to src/sources/fabric.ts diff --git a/sources/fabric/fabricFrameSources.ts b/src/sources/fabric/fabricFrameSources.ts similarity index 100% rename from sources/fabric/fabricFrameSources.ts rename to src/sources/fabric/fabricFrameSources.ts diff --git a/sources/frameSource.ts b/src/sources/frameSource.ts similarity index 100% rename from sources/frameSource.ts rename to src/sources/frameSource.ts diff --git a/sources/glFrameSource.ts b/src/sources/glFrameSource.ts 
similarity index 100% rename from sources/glFrameSource.ts rename to src/sources/glFrameSource.ts diff --git a/sources/videoFrameSource.ts b/src/sources/videoFrameSource.ts similarity index 100% rename from sources/videoFrameSource.ts rename to src/sources/videoFrameSource.ts diff --git a/transitions.ts b/src/transitions.ts similarity index 100% rename from transitions.ts rename to src/transitions.ts diff --git a/types.ts b/src/types.ts similarity index 100% rename from types.ts rename to src/types.ts diff --git a/types/gl-buffer.d.ts b/src/types/gl-buffer.d.ts similarity index 100% rename from types/gl-buffer.d.ts rename to src/types/gl-buffer.d.ts diff --git a/types/gl-texture2d.d.ts b/src/types/gl-texture2d.d.ts similarity index 100% rename from types/gl-texture2d.d.ts rename to src/types/gl-texture2d.d.ts diff --git a/types/gl-transition.d.ts b/src/types/gl-transition.d.ts similarity index 100% rename from types/gl-transition.d.ts rename to src/types/gl-transition.d.ts diff --git a/types/gl-transitions.d.ts b/src/types/gl-transitions.d.ts similarity index 100% rename from types/gl-transitions.d.ts rename to src/types/gl-transitions.d.ts diff --git a/util.ts b/src/util.ts similarity index 100% rename from util.ts rename to src/util.ts diff --git a/test.ts b/test/index.ts similarity index 88% rename from test.ts rename to test/index.ts index 80f68ac6..34e1d29b 100644 --- a/test.ts +++ b/test/index.ts @@ -1,8 +1,9 @@ import { execa } from 'execa'; // todo use jest -await execa('node', [ - 'cli.js', +await execa('npx', [ + 'tsx', + 'src/cli.ts', '--allow-remote-requests', "title:'My video'", 'https://raw.githubusercontent.com/mifi/editly-assets/main/overlay.svg', diff --git a/tsconfig.json b/tsconfig.json index eae32087..0df87c8f 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -1,3 +1,6 @@ { - "extends": "@tsconfig/node-lts/tsconfig.json" + "extends": "@tsconfig/node-lts/tsconfig.json", + "compilerOptions": { + "noEmit": true + } } From 
e56a8a2bf22f542fa6ec6e7b7d3e50fc10524d3a Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Thu, 16 Jan 2025 09:34:36 -0500 Subject: [PATCH 19/23] Convert examples to ts --- .gitignore | 1 + examples/customCanvas.js | 37 ----------------- examples/customCanvas.ts | 40 +++++++++++++++++++ examples/customFabric.js | 36 ----------------- examples/customFabric.ts | 35 ++++++++++++++++ ...essing.js => fabricImagePostProcessing.ts} | 13 +++--- examples/renderSingleFrame.js | 12 ------ examples/renderSingleFrame.ts | 13 ++++++ examples/run-all-examples.sh | 19 ++++----- 9 files changed, 104 insertions(+), 102 deletions(-) delete mode 100644 examples/customCanvas.js create mode 100644 examples/customCanvas.ts delete mode 100644 examples/customFabric.js create mode 100644 examples/customFabric.ts rename examples/{fabricImagePostProcessing.js => fabricImagePostProcessing.ts} (82%) delete mode 100644 examples/renderSingleFrame.js create mode 100644 examples/renderSingleFrame.ts diff --git a/.gitignore b/.gitignore index e93bdac6..e44c706a 100644 --- a/.gitignore +++ b/.gitignore @@ -108,6 +108,7 @@ dist editly-tmp-*/ *.mp4 *.gif +*.png # Other .idea diff --git a/examples/customCanvas.js b/examples/customCanvas.js deleted file mode 100644 index 0fd4c165..00000000 --- a/examples/customCanvas.js +++ /dev/null @@ -1,37 +0,0 @@ -import editly from '../index.js'; - -async function func({ canvas }) { - async function onRender(progress) { - const context = canvas.getContext('2d'); - const centerX = canvas.width / 2; - const centerY = canvas.height / 2; - const radius = 40 * (1 + progress * 0.5); - - context.beginPath(); - context.arc(centerX, centerY, radius, 0, 2 * Math.PI, false); - context.fillStyle = 'hsl(350, 100%, 37%)'; - context.fill(); - context.lineWidth = 5; - context.strokeStyle = '#ffffff'; - context.stroke(); - } - - function onClose() { - // Cleanup if you initialized anything - } - - return { onRender, onClose }; -} - -editly({ - // fast: true, - // outPath: 
'./customCanvas.mp4', - outPath: './customCanvas.gif', - clips: [ - { duration: 2, - layers: [ - { type: 'rainbow-colors' }, - { type: 'canvas', func }, - ] }, - ], -}).catch(console.error); diff --git a/examples/customCanvas.ts b/examples/customCanvas.ts new file mode 100644 index 00000000..1caffef5 --- /dev/null +++ b/examples/customCanvas.ts @@ -0,0 +1,40 @@ +import editly from 'editly'; +import type { CustomCanvasFunctionArgs, CustomCanvasFunctionCallbacks } from 'editly'; + +function func({ canvas }: CustomCanvasFunctionArgs): CustomCanvasFunctionCallbacks { + return { + async onRender(progress) { + const context = canvas.getContext('2d'); + const centerX = canvas.width / 2; + const centerY = canvas.height / 2; + const radius = 40 * (1 + progress * 0.5); + + context.beginPath(); + context.arc(centerX, centerY, radius, 0, 2 * Math.PI, false); + context.fillStyle = 'hsl(350, 100%, 37%)'; + context.fill(); + context.lineWidth = 5; + context.strokeStyle = '#ffffff'; + context.stroke(); + }, + + onClose() { + // Cleanup if you initialized anything + } + }; +} + +editly({ + // fast: true, + // outPath: './customCanvas.mp4', + outPath: './customCanvas.gif', + clips: [ + { + duration: 2, + layers: [ + { type: 'rainbow-colors' }, + { type: 'canvas', func }, + ] + }, + ], +}).catch(console.error); diff --git a/examples/customFabric.js b/examples/customFabric.js deleted file mode 100644 index cf4c223c..00000000 --- a/examples/customFabric.js +++ /dev/null @@ -1,36 +0,0 @@ -import editly from '../index.js'; - -/* eslint-disable spaced-comment,no-param-reassign */ - -async function func({ width, height, fabric }) { - async function onRender(progress, canvas) { - canvas.backgroundColor = 'hsl(33, 100%, 50%)'; - - const text = new fabric.Text(`PROGRESS\n${Math.floor(progress * 100)}%`, { - originX: 'center', - originY: 'center', - left: width / 2, - top: (height / 2) * (1 + (progress * 0.1 - 0.05)), - fontSize: 20, - textAlign: 'center', - fill: 'white', - }); - - 
canvas.add(text); - } - - function onClose() { - // Cleanup if you initialized anything - } - - return { onRender, onClose }; -} - -editly({ - // fast: true, - outPath: './customFabric.gif', - // outPath: './customFabric.mp4', - clips: [ - { duration: 2, layers: [{ type: 'fabric', func }] }, - ], -}).catch(console.error); diff --git a/examples/customFabric.ts b/examples/customFabric.ts new file mode 100644 index 00000000..6b82d517 --- /dev/null +++ b/examples/customFabric.ts @@ -0,0 +1,35 @@ +import editly from 'editly'; +import { CustomFabricFunctionArgs, CustomFabricFunctionCallbacks } from '../dist/index.js'; + +function func({ width, height, fabric }: CustomFabricFunctionArgs): CustomFabricFunctionCallbacks { + return { + async onRender(progress, canvas) { + canvas.backgroundColor = 'hsl(33, 100%, 50%)'; + + const text = new fabric.Text(`PROGRESS\n${Math.floor(progress * 100)}%`, { + originX: 'center', + originY: 'center', + left: width / 2, + top: (height / 2) * (1 + (progress * 0.1 - 0.05)), + fontSize: 20, + textAlign: 'center', + fill: 'white', + }); + + canvas.add(text); + }, + + onClose() { + // Cleanup if you initialized anything + } + }; +} + +await editly({ + // fast: true, + outPath: './customFabric.gif', + // outPath: './customFabric.mp4', + clips: [ + { duration: 2, layers: [{ type: 'fabric', func }] }, + ], +}); diff --git a/examples/fabricImagePostProcessing.js b/examples/fabricImagePostProcessing.ts similarity index 82% rename from examples/fabricImagePostProcessing.js rename to examples/fabricImagePostProcessing.ts index f13c1f0a..2af3dc6c 100644 --- a/examples/fabricImagePostProcessing.js +++ b/examples/fabricImagePostProcessing.ts @@ -1,8 +1,8 @@ -import editly from '../index.js'; +import editly from 'editly'; // See https://github.com/mifi/editly/pull/222 -editly({ +await editly({ outPath: './fabricImagePostProcessing.mp4', clips: [{ duration: 4, @@ -21,14 +21,14 @@ editly({ width: 0.5, height: 0.5, fabricImagePostProcessing: async ({ image, 
fabric, canvas }) => { - const circleArgs = { + const circleArgs: ConstructorParameters[0] = { radius: Math.min(image.width, image.height) * 0.4, originX: 'center', originY: 'center', stroke: 'white', strokeWidth: 22, }; - image.setOptions({ clipPath: new fabric.Circle(circleArgs) }); + image.set({ clipPath: new fabric.Circle(circleArgs) }); canvas.add(new fabric.Circle({ ...circleArgs, left: image.getCenterPoint().x, @@ -36,6 +36,7 @@ editly({ })); }, }, - ] }, + ] + }, ], -}).catch(console.error); +}); diff --git a/examples/renderSingleFrame.js b/examples/renderSingleFrame.js deleted file mode 100644 index d396538d..00000000 --- a/examples/renderSingleFrame.js +++ /dev/null @@ -1,12 +0,0 @@ -import { parse } from 'json5'; -import fsExtra from 'fs-extra'; - -// eslint-disable-next-line import/named -import { renderSingleFrame } from '../index.js'; - -(async () => { - await renderSingleFrame({ - time: 0, - clips: parse(await fsExtra.readFile('./videos.json5', 'utf-8')).clips, - }); -})().catch(console.error); diff --git a/examples/renderSingleFrame.ts b/examples/renderSingleFrame.ts new file mode 100644 index 00000000..c8e3f164 --- /dev/null +++ b/examples/renderSingleFrame.ts @@ -0,0 +1,13 @@ +import JSON from 'json5'; +import fsExtra from 'fs-extra'; + +// eslint-disable-next-line import/named +import { renderSingleFrame } from 'editly'; + +(async () => { + await renderSingleFrame({ + time: 0, + clips: JSON.parse(await fsExtra.readFile('./videos.json5', 'utf-8')).clips, + outPath: 'renderSingleFrame.png' + }); +})().catch(console.error); diff --git a/examples/run-all-examples.sh b/examples/run-all-examples.sh index 74a98757..8a94dff9 100755 --- a/examples/run-all-examples.sh +++ b/examples/run-all-examples.sh @@ -1,13 +1,10 @@ #/bin/bash -set -e +set -ex -node ../cli.js --json gl.json5 -node ../cli.js --json image.json5 -node ../cli.js --json losslesscut.json5 -node ../cli.js --json resizeHorizontal.json5 -node ../cli.js --json resizeVertical.json5 -node 
../cli.js --json speedTest.json5 -node ../cli.js --json subtitle.json5 -node ../cli.js --json transitionEasing.json5 -node ../cli.js --json transparentGradient.json5 -node ../cli.js --json commonFeatures.json5 +node ../dist/cli.js --json gl.json5 +node ../dist/cli.js --json image.json5 +node ../dist/cli.js --json speedTest.json5 +node ../dist/cli.js --json subtitle.json5 +node ../dist/cli.js --json transitionEasing.json5 +node ../dist/cli.js --json transparentGradient.json5 +node ../dist/cli.js --json commonFeatures.json5 From ab9861b97830a92eab0ee90451d90d205b5a4841 Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Thu, 16 Jan 2025 09:35:33 -0500 Subject: [PATCH 20/23] Move types to runtime deps so they are available to users --- package.json | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/package.json b/package.json index 5bf81387..842b477e 100644 --- a/package.json +++ b/package.json @@ -15,6 +15,11 @@ "type": "module", "license": "MIT", "dependencies": { + "@types/fs-extra": "^11.0.4", + "@types/gl": "^6.0.5", + "@types/gl-shader": "^4.2.5", + "@types/lodash-es": "^4.17.12", + "@types/ndarray": "^1.0.14", "canvas": "^2.11.2", "compare-versions": "^6.1.0", "execa": "^6.1.0", @@ -49,11 +54,6 @@ }, "devDependencies": { "@tsconfig/node-lts": "^22.0.1", - "@types/fs-extra": "^11.0.4", - "@types/gl": "^6.0.5", - "@types/gl-shader": "^4.2.5", - "@types/lodash-es": "^4.17.12", - "@types/ndarray": "^1.0.14", "eslint": "^8.22.0", "eslint-config-airbnb-base": "^15.0.0", "eslint-plugin-import": "^2.29.1", From 365d2b47b800f70b3d961a4ee16ed7d81a2a77dc Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Thu, 16 Jan 2025 09:46:38 -0500 Subject: [PATCH 21/23] Enable basic tsc linting --- src/audio.ts | 4 ++-- src/index.ts | 2 +- src/parseConfig.ts | 2 +- src/sources/fabric/fabricFrameSources.ts | 2 +- src/sources/frameSource.ts | 2 +- tsconfig.json | 8 +++++++- 6 files changed, 13 insertions(+), 7 deletions(-) diff --git a/src/audio.ts 
b/src/audio.ts index 23925c28..7cba4edf 100644 --- a/src/audio.ts +++ b/src/audio.ts @@ -6,7 +6,7 @@ import { flatMap } from 'lodash-es'; import { getFfmpegCommonArgs, getCutFromArgs } from './ffmpeg.js'; import { readFileStreams } from './util.js'; -import type { AudioLayer, AudioNormalizationOptions, AudioTrack, Clip, Config, Layer, Transition, VideoLayer } from './types.js' +import type { AudioLayer, AudioNormalizationOptions, AudioTrack, Clip, Config, Transition, VideoLayer } from './types.js' export type AudioOptions = { ffmpegPath: string; @@ -193,7 +193,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }: A const volumeArg = outputVolume != null ? `,volume=${outputVolume}` : ''; const audioNormArg = enableAudioNorm ? `,dynaudnorm=g=${gaussSize}:maxgain=${maxGain}` : ''; - filterComplex += `;${streams.map((s, i) => `[a${i}]`).join('')}amix=inputs=${streams.length}:duration=first:dropout_transition=0:weights=${streams.map((s) => (s.mixVolume != null ? s.mixVolume : 1)).join(' ')}${audioNormArg}${volumeArg}`; + filterComplex += `;${streams.map((_, i) => `[a${i}]`).join('')}amix=inputs=${streams.length}:duration=first:dropout_transition=0:weights=${streams.map((s) => (s.mixVolume != null ? 
s.mixVolume : 1)).join(' ')}${audioNormArg}${volumeArg}`; const mixedAudioPath = join(tmpDir, 'audio-mixed.flac'); diff --git a/src/index.ts b/src/index.ts index 9f25b2a9..da06b66e 100644 --- a/src/index.ts +++ b/src/index.ts @@ -12,7 +12,7 @@ import { createFrameSource } from './sources/frameSource.js'; import parseConfig, { ProcessedClip } from './parseConfig.js'; import GlTransitions, { type RunTransitionOptions } from './glTransitions.js'; import Audio from './audio.js'; -import type { Clip, Config, Layer, RenderSingleFrameConfig } from './types.js'; +import type { Config, RenderSingleFrameConfig } from './types.js'; const channels = 4; diff --git a/src/parseConfig.ts b/src/parseConfig.ts index 561a519d..e27c15ec 100644 --- a/src/parseConfig.ts +++ b/src/parseConfig.ts @@ -12,7 +12,7 @@ import { } from './util.js'; import { registerFont } from 'canvas'; import { calcTransition, type CalculatedTransition } from './transitions.js'; -import type { AudioTrack, CanvasLayer, EditlyBannerLayer, FabricLayer, GlLayer, ImageLayer, ImageOverlayLayer, Layer, LinearGradientLayer, NewsTitleLayer, SlideInTextLayer, SubtitleLayer, TitleBackgroundLayer, TitleLayer, DefaultOptions, Clip, Transition, VideoLayer } from './types.js'; +import type { AudioTrack, CanvasLayer, EditlyBannerLayer, FabricLayer, GlLayer, ImageLayer, ImageOverlayLayer, Layer, LinearGradientLayer, NewsTitleLayer, SlideInTextLayer, SubtitleLayer, TitleBackgroundLayer, TitleLayer, DefaultOptions, Clip, VideoLayer } from './types.js'; export type ProcessedClip = { layers: Layer[]; diff --git a/src/sources/fabric/fabricFrameSources.ts b/src/sources/fabric/fabricFrameSources.ts index 0972b3c9..9f75cd83 100644 --- a/src/sources/fabric/fabricFrameSources.ts +++ b/src/sources/fabric/fabricFrameSources.ts @@ -98,7 +98,7 @@ export async function fillColorFrameSource({ params, width, height }: FabricFram const randomColor = getRandomColors(1)[0]; - async function onRender(progress: number, canvas: fabric.StaticCanvas) 
{ + async function onRender(_: number, canvas: fabric.StaticCanvas) { const rect = new fabric.Rect({ left: 0, right: 0, diff --git a/src/sources/frameSource.ts b/src/sources/frameSource.ts index d3e6924c..d054afba 100644 --- a/src/sources/frameSource.ts +++ b/src/sources/frameSource.ts @@ -23,7 +23,7 @@ import { } from './fabric/fabricFrameSources.js'; import createVideoFrameSource from './videoFrameSource.js'; import createGlFrameSource from './glFrameSource.js'; -import type { CreateFrameSource, CreateFrameSourceOptions, Clip, DebugOptions } from '../types.js'; +import type { CreateFrameSource, CreateFrameSourceOptions, DebugOptions } from '../types.js'; import { ProcessedClip } from '../parseConfig.js'; const fabricFrameSources: Record> = { diff --git a/tsconfig.json b/tsconfig.json index 0df87c8f..d2f61b7d 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -1,6 +1,12 @@ { "extends": "@tsconfig/node-lts/tsconfig.json", "compilerOptions": { - "noEmit": true + "noEmit": true, + + /* Linting */ + "noUnusedLocals": true, + "noUnusedParameters": true, + "noFallthroughCasesInSwitch": true, + "noUncheckedSideEffectImports": true } } From 32b8e305985e981babd2da96515cc7242e36775d Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Thu, 16 Jan 2025 10:21:34 -0500 Subject: [PATCH 22/23] Update eslint, configure for typescript --- .eslintrc.cjs | 23 ----------------------- .gitignore | 1 - eslint.config.mjs | 10 ++++++++++ package.json | 9 ++++----- 4 files changed, 14 insertions(+), 29 deletions(-) delete mode 100644 .eslintrc.cjs create mode 100644 eslint.config.mjs diff --git a/.eslintrc.cjs b/.eslintrc.cjs deleted file mode 100644 index 9c76c7e7..00000000 --- a/.eslintrc.cjs +++ /dev/null @@ -1,23 +0,0 @@ -module.exports = { - extends: 'airbnb-base', - parserOptions: { - ecmaVersion: 2022, - }, - env: { - node: true, - }, - settings: { - 'import/resolver': { - [require.resolve('eslint-plugin-import-exports-imports-resolver')]: {}, - }, - 'import/extensions': ['.js'], - 
}, - rules: { - 'max-len': 0, - 'no-console': 0, - 'object-curly-newline': 0, - 'no-await-in-loop': 0, - 'no-promise-executor-return': 0, - 'import/extensions': ['error', 'ignorePackages'], - }, -}; diff --git a/.gitignore b/.gitignore index e44c706a..8498279d 100644 --- a/.gitignore +++ b/.gitignore @@ -115,4 +115,3 @@ editly-tmp-*/ pnpm-lock.yaml yarn.lock package-lock.json -dist diff --git a/eslint.config.mjs b/eslint.config.mjs new file mode 100644 index 00000000..b3749a7e --- /dev/null +++ b/eslint.config.mjs @@ -0,0 +1,10 @@ +// @ts-check + +import eslint from '@eslint/js'; +import tseslint from 'typescript-eslint'; + +export default tseslint.config( + eslint.configs.recommended, + tseslint.configs.recommended, + { ignores: ["dist/"] }, +); diff --git a/package.json b/package.json index 842b477e..152fe62d 100644 --- a/package.json +++ b/package.json @@ -53,13 +53,12 @@ "editly": "dist/cli.js" }, "devDependencies": { + "@eslint/js": "^9.18.0", "@tsconfig/node-lts": "^22.0.1", - "eslint": "^8.22.0", - "eslint-config-airbnb-base": "^15.0.0", - "eslint-plugin-import": "^2.29.1", - "eslint-plugin-import-exports-imports-resolver": "^1.0.1", + "eslint": "^9.18.0", "pkgroll": "^2.6.1", "tsx": "^4.19.2", - "typescript": "^5.7.3" + "typescript": "^5.7.3", + "typescript-eslint": "^8.20.0" } } From 18b3a936f1f4f097af48fbee31e70a7fc7e53cea Mon Sep 17 00:00:00 2001 From: Brandon Keepers Date: Thu, 16 Jan 2025 11:16:09 -0500 Subject: [PATCH 23/23] Fix lint errors --- examples/renderSingleFrame.ts | 2 -- src/audio.ts | 2 +- src/cli.ts | 1 - src/glTransitions.ts | 5 +-- src/index.ts | 5 ++- src/parseConfig.ts | 4 +-- src/sources/fabric.ts | 4 ++- src/sources/frameSource.ts | 10 ++++-- src/sources/videoFrameSource.ts | 1 - src/types.ts | 12 +++---- src/types/gl-texture2d.d.ts | 5 ++- src/types/gl-transition.d.ts | 56 +++++++++++++++++++++++++++++++-- src/util.ts | 1 - 13 files changed, 81 insertions(+), 27 deletions(-) diff --git a/examples/renderSingleFrame.ts 
b/examples/renderSingleFrame.ts index c8e3f164..896ac939 100644 --- a/examples/renderSingleFrame.ts +++ b/examples/renderSingleFrame.ts @@ -1,7 +1,5 @@ import JSON from 'json5'; import fsExtra from 'fs-extra'; - -// eslint-disable-next-line import/named import { renderSingleFrame } from 'editly'; (async () => { diff --git a/src/audio.ts b/src/audio.ts index 7cba4edf..b98a4516 100644 --- a/src/audio.ts +++ b/src/audio.ts @@ -113,7 +113,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }: A if (processedAudioLayers.length === 1) return { clipAudioPath: processedAudioLayers[0][0], silent: false }; // Merge/mix all layers' audio - const weights = processedAudioLayers.map(([_, { mixVolume }]) => mixVolume ?? 1); + const weights = processedAudioLayers.map(([, { mixVolume }]) => mixVolume ?? 1); const args = [ ...getFfmpegCommonArgs({ enableFfmpegLog }), ...flatMap(processedAudioLayers, ([layerAudioPath]) => ['-i', layerAudioPath]), diff --git a/src/cli.ts b/src/cli.ts index 7524afb6..515bf020 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -70,7 +70,6 @@ const cli = meow(` (async () => { let { json } = cli.flags; - // eslint-disable-next-line prefer-destructuring if (cli.input.length === 1 && /\.(json|json5|js)$/.test(cli.input[0].toLowerCase())) json = cli.input[0]; let params: Partial = { diff --git a/src/glTransitions.ts b/src/glTransitions.ts index 5cf23035..0387b308 100644 --- a/src/glTransitions.ts +++ b/src/glTransitions.ts @@ -4,6 +4,7 @@ import createBuffer from 'gl-buffer'; import glTransitions from 'gl-transitions'; import glTransition from 'gl-transition'; import createTexture from 'gl-texture2d'; +import { TransitionParams } from './types.js'; const { default: createTransition } = glTransition; @@ -12,7 +13,7 @@ export type RunTransitionOptions = { toFrame: Buffer; progress: number; transitionName?: string; - transitionParams?: any; + transitionParams?: TransitionParams; } export default ({ width, height, channels }: { width: 
number, height: number, channels: number }) => { @@ -42,7 +43,7 @@ export default ({ width, height, channels }: { width: number, height: number, ch const transitionSource = glTransitions.find((t) => t.name.toLowerCase() === transitionName?.toLowerCase()); - transition = createTransition(gl, transitionSource, { resizeMode }); + transition = createTransition(gl, transitionSource!, { resizeMode }); gl.clear(gl.COLOR_BUFFER_BIT); diff --git a/src/index.ts b/src/index.ts index da06b66e..71fdd151 100644 --- a/src/index.ts +++ b/src/index.ts @@ -276,8 +276,7 @@ async function Editly(config: Config): Promise { frameSource1 = await getTransitionFromSource(); frameSource2 = await getTransitionToSource(); - // eslint-disable-next-line no-constant-condition - while (true) { + while (!outProcessError) { const transitionToClip = getTransitionToClip(); const transitionFromClip = getTransitionFromClip(); const fromClipNumFrames = Math.round(transitionFromClip.duration * fps); @@ -323,7 +322,6 @@ async function Editly(config: Config): Promise { fromClipFrameAt = transitionLastFrameIndex; toClipFrameAt = 0; - // eslint-disable-next-line no-continue continue; } @@ -397,6 +395,7 @@ async function Editly(config: Config): Promise { if (verbose) console.log('Waiting for output ffmpeg process to finish'); await outProcess; } catch (err) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any if (outProcessExitCode !== 0 && !(err as any).killed) throw err; } } finally { diff --git a/src/parseConfig.ts b/src/parseConfig.ts index e27c15ec..6b00aacd 100644 --- a/src/parseConfig.ts +++ b/src/parseConfig.ts @@ -29,7 +29,6 @@ async function validateArbitraryAudio(audio: AudioTrack[] | undefined, allowRemo assert(audio === undefined || Array.isArray(audio)); if (audio) { - // eslint-disable-next-line no-restricted-syntax for (const { path, cutFrom, cutTo, start } of audio) { await assertFileValid(path, allowRemoteRequests); @@ -121,7 +120,7 @@ export default async function 
parseConfig({ defaults: defaultsIn = {}, clips, ar let { fontFamily } = rest; if (fontPath) { - const fontFamily = Buffer.from(basename(fontPath)).toString('base64'); + fontFamily = Buffer.from(basename(fontPath)).toString('base64'); if (!loadedFonts.includes(fontFamily)) { registerFont(fontPath, { family: fontFamily, weight: 'regular', style: 'normal' }); loadedFonts.push(fontFamily); @@ -279,7 +278,6 @@ export default async function parseConfig({ defaults: defaultsIn = {}, clips, ar } // We now know all clip durations so we can calculate the offset for detached audio tracks - // eslint-disable-next-line no-restricted-syntax for (const { start, ...rest } of (detachedAudioByClip[i] || [])) { clipDetachedAudio.push({ ...rest, start: totalClipDuration + (start || 0) }); } diff --git a/src/sources/fabric.ts b/src/sources/fabric.ts index 6f6e11ba..4af4ba59 100644 --- a/src/sources/fabric.ts +++ b/src/sources/fabric.ts @@ -2,9 +2,10 @@ import * as fabric from 'fabric/node'; import { type CanvasRenderingContext2D, createCanvas, ImageData } from 'canvas'; import { boxBlurImage } from '../BoxBlur.js'; import type { CreateFrameSourceOptions, FrameSource, CanvasLayer, CustomFabricFunctionCallbacks, Layer } from '../types.js'; +import { OptionalPromise } from '../../dist/index.js'; export type FabricFrameSourceOptions = CreateFrameSourceOptions & { fabric: typeof fabric }; -export type FabricFrameSourceCallback = (options: FabricFrameSourceOptions) => Promise; +export type FabricFrameSourceCallback = (options: FabricFrameSourceOptions) => OptionalPromise; // Fabric is used as a fundament for compositing layers in editly @@ -56,6 +57,7 @@ export async function rgbaToFabricImage({ width, height, rgba }: { width: number // FIXME: Fabric tries to add a class to this, but DOM is not defined. Because node? 
// https://github.com/fabricjs/fabric.js/issues/10032 + // eslint-disable-next-line @typescript-eslint/no-explicit-any (canvas as any).classList = new Set(); const ctx = canvas.getContext('2d'); diff --git a/src/sources/frameSource.ts b/src/sources/frameSource.ts index d054afba..2de0652d 100644 --- a/src/sources/frameSource.ts +++ b/src/sources/frameSource.ts @@ -26,7 +26,9 @@ import createGlFrameSource from './glFrameSource.js'; import type { CreateFrameSource, CreateFrameSourceOptions, DebugOptions } from '../types.js'; import { ProcessedClip } from '../parseConfig.js'; -const fabricFrameSources: Record> = { +// FIXME[ts] +// eslint-disable-next-line @typescript-eslint/no-explicit-any +const fabricFrameSources: Record> = { fabric: customFabricFrameSource, image: imageFrameSource, 'image-overlay': imageOverlayFrameSource, @@ -39,6 +41,8 @@ const fabricFrameSources: Record> = { video: createVideoFrameSource, gl: createGlFrameSource, @@ -67,6 +71,8 @@ export async function createFrameSource({ clip, clipIndex, width, height, channe let createFrameSourceFunc: CreateFrameSource; if (fabricFrameSources[type]) { + // FIXME[TS] + // eslint-disable-next-line @typescript-eslint/no-explicit-any createFrameSourceFunc = async (opts: CreateFrameSourceOptions) => createFabricFrameSource(fabricFrameSources[type], opts); } else { createFrameSourceFunc = frameSources[type]; @@ -81,7 +87,7 @@ export async function createFrameSource({ clip, clipIndex, width, height, channe async function readNextFrame({ time }: { time: number }) { const canvas = createFabricCanvas({ width, height }); - // eslint-disable-next-line no-restricted-syntax + for (const { frameSource, layer } of layerFrameSources) { // console.log({ start: layer.start, stop: layer.stop, layerDuration: layer.layerDuration, time }); const offsetTime = time - (layer?.start ?? 
0); diff --git a/src/sources/videoFrameSource.ts b/src/sources/videoFrameSource.ts index 4abdb280..c57f348b 100644 --- a/src/sources/videoFrameSource.ts +++ b/src/sources/videoFrameSource.ts @@ -146,7 +146,6 @@ export default async ({ width: canvasWidth, height: canvasHeight, channels, fram function cleanup() { stream.pause(); - // eslint-disable-next-line no-use-before-define stream.removeListener('data', handleChunk); stream.removeListener('end', onEnd); stream.removeListener('error', reject); diff --git a/src/types.ts b/src/types.ts index 9f14ae9f..2b3ddd1a 100644 --- a/src/types.ts +++ b/src/types.ts @@ -729,7 +729,7 @@ export interface CustomFabricFunctionArgs { width: number; height: number; fabric: typeof Fabric; - params: any; + params: unknown; } export type CustomFabricFunction = (args: CustomFabricFunctionArgs) => OptionalPromise; @@ -858,7 +858,8 @@ export interface DefaultLayerOptions { /** * Set any layer parameter that all layers will inherit. */ - [key: string]: any; + // FIXME[ts]: Define a type for this + [key: string]: unknown; } @@ -871,9 +872,6 @@ export type DefaultLayerTypeOptions = { } -export interface DefaultTransitionOptions extends Transition { -} - export interface DefaultOptions { /** @@ -897,7 +895,7 @@ export interface DefaultOptions { * An object describing the default transition. * Set to `null` to disable transitions. 
*/ - transition?: DefaultTransitionOptions | null; + transition?: Transition | null; } @@ -1111,7 +1109,7 @@ export type Stream = { export type Keyframe = { t: number; - props: Record; + props: { [key: string]: number }; }; export interface FrameSource { diff --git a/src/types/gl-texture2d.d.ts b/src/types/gl-texture2d.d.ts index 681a4646..a0edb0fc 100644 --- a/src/types/gl-texture2d.d.ts +++ b/src/types/gl-texture2d.d.ts @@ -1,5 +1,8 @@ declare module 'gl-texture2d' { - declare function createTexture(gl: WebGLRenderingContext, data: any): WebGLTexture; + import ndarray from 'ndarray'; + + // There are other overloads for this function, but we only care about this one. + declare function createTexture(gl: WebGLRenderingContext, data: ndarray): WebGLTexture; export default createTexture; } diff --git a/src/types/gl-transition.d.ts b/src/types/gl-transition.d.ts index 8de64848..63138b97 100644 --- a/src/types/gl-transition.d.ts +++ b/src/types/gl-transition.d.ts @@ -1,6 +1,58 @@ declare module 'gl-transition' { - import type GL from 'gl'; - declare function createTransition(gl: GL, transitionSource: any, { resizeMode: string }): any; + type TransitionObjectLike = { + glsl: string, + defaultParams: { [key: string]: mixed }, + paramsTypes: { [key: string]: string }, + }; + + + type GLTextureLike = { + bind: (unit: number) => number, + shape: [number, number], + }; + + type Options = { + resizeMode?: "cover" | "contain" | "stretch", + }; + + declare function createTransition( + gl: WebGLRenderingContext, + transition: TransitionObjectLike, + options: Options = {} + ): { + // renders one frame of the transition (up to you to run the animation loop the way you want) + draw: ( + progress: number, + from: GLTextureLike, + to: GLTextureLike, + width: number = gl.drawingBufferWidth, + height: number = gl.drawingBufferHeight, + params: { [key: string]: number | number[] | boolean | GLTextureLike } = {} + ) => void, + // dispose and destroy all objects created by the function 
call. + dispose: () => void, + }; export = { default: createTransition }; } + +/* + +( + gl: WebGLRenderingContext, + transition: TransitionObjectLike, + options: Options = {} +) => { + // renders one frame of the transition (up to you to run the animation loop the way you want) + draw: ( + progress: number, + from: GLTextureLike, + to: GLTextureLike, + width: number = gl.drawingBufferWidth, + height: number = gl.drawingBufferHeight, + params: { [key: string]: number | boolean | GLTextureLike } = {} + ) => void, + // dispose and destroy all objects created by the function call. + dispose: () => void, +} +*/ diff --git a/src/util.ts b/src/util.ts index a7ffaba9..54f44a52 100644 --- a/src/util.ts +++ b/src/util.ts @@ -156,7 +156,6 @@ export function getFrameByKeyFrames(keyframes: Keyframe[], progress: number) { if (invalidKeyframe) throw new Error('Invalid keyframe'); let prevKeyframe = [...sortedKeyframes].reverse().find((k) => k.t < progress); - // eslint-disable-next-line prefer-destructuring if (!prevKeyframe) prevKeyframe = sortedKeyframes[0]; let nextKeyframe = sortedKeyframes.find((k) => k.t >= progress);