diff --git a/.eslintrc.cjs b/.eslintrc.cjs deleted file mode 100644 index 9c76c7e7..00000000 --- a/.eslintrc.cjs +++ /dev/null @@ -1,23 +0,0 @@ -module.exports = { - extends: 'airbnb-base', - parserOptions: { - ecmaVersion: 2022, - }, - env: { - node: true, - }, - settings: { - 'import/resolver': { - [require.resolve('eslint-plugin-import-exports-imports-resolver')]: {}, - }, - 'import/extensions': ['.js'], - }, - rules: { - 'max-len': 0, - 'no-console': 0, - 'object-curly-newline': 0, - 'no-await-in-loop': 0, - 'no-promise-executor-return': 0, - 'import/extensions': ['error', 'ignorePackages'], - }, -}; diff --git a/.gitignore b/.gitignore index e293efc3..8498279d 100644 --- a/.gitignore +++ b/.gitignore @@ -108,6 +108,7 @@ dist editly-tmp-*/ *.mp4 *.gif +*.png # Other .idea diff --git a/eslint.config.mjs b/eslint.config.mjs new file mode 100644 index 00000000..b3749a7e --- /dev/null +++ b/eslint.config.mjs @@ -0,0 +1,10 @@ +// @ts-check + +import eslint from '@eslint/js'; +import tseslint from 'typescript-eslint'; + +export default tseslint.config( + eslint.configs.recommended, + tseslint.configs.recommended, + { ignores: ["dist/"] }, +); diff --git a/examples/customCanvas.js b/examples/customCanvas.js deleted file mode 100644 index 0fd4c165..00000000 --- a/examples/customCanvas.js +++ /dev/null @@ -1,37 +0,0 @@ -import editly from '../index.js'; - -async function func({ canvas }) { - async function onRender(progress) { - const context = canvas.getContext('2d'); - const centerX = canvas.width / 2; - const centerY = canvas.height / 2; - const radius = 40 * (1 + progress * 0.5); - - context.beginPath(); - context.arc(centerX, centerY, radius, 0, 2 * Math.PI, false); - context.fillStyle = 'hsl(350, 100%, 37%)'; - context.fill(); - context.lineWidth = 5; - context.strokeStyle = '#ffffff'; - context.stroke(); - } - - function onClose() { - // Cleanup if you initialized anything - } - - return { onRender, onClose }; -} - -editly({ - // fast: true, - // outPath: './customCanvas.mp4', - outPath: './customCanvas.gif', - clips: [ - { duration: 2, - layers: [ - { type: 'rainbow-colors' }, - { type: 'canvas', func }, - ] }, - ], -}).catch(console.error); diff --git a/examples/customCanvas.ts b/examples/customCanvas.ts new file mode 100644 index 00000000..1caffef5 --- /dev/null +++ b/examples/customCanvas.ts @@ -0,0 +1,40 @@ +import editly from 'editly'; +import type { CustomCanvasFunctionArgs, CustomCanvasFunctionCallbacks } from 'editly'; + +function func({ canvas }: CustomCanvasFunctionArgs): CustomCanvasFunctionCallbacks { + return { + async onRender(progress) { + const context = canvas.getContext('2d'); + const centerX = canvas.width / 2; + const centerY = canvas.height / 2; + const radius = 40 * (1 + progress * 0.5); + + context.beginPath(); + context.arc(centerX, centerY, radius, 0, 2 * Math.PI, false); + context.fillStyle = 'hsl(350, 100%, 37%)'; + context.fill(); + context.lineWidth = 5; + context.strokeStyle = '#ffffff'; + context.stroke(); + }, + + onClose() { + // Cleanup if you initialized anything + } + }; +} + +editly({ + // fast: true, + // outPath: './customCanvas.mp4', + outPath: './customCanvas.gif', + clips: [ + { + duration: 2, + layers: [ + { type: 'rainbow-colors' }, + { type: 'canvas', func }, + ] + }, + ], +}).catch(console.error); diff --git a/examples/customFabric.js b/examples/customFabric.js deleted file mode 100644 index cf4c223c..00000000 --- a/examples/customFabric.js +++ /dev/null @@ -1,36 +0,0 @@ -import editly from '../index.js'; - -/* eslint-disable 
spaced-comment,no-param-reassign */ - -async function func({ width, height, fabric }) { - async function onRender(progress, canvas) { - canvas.backgroundColor = 'hsl(33, 100%, 50%)'; - - const text = new fabric.Text(`PROGRESS\n${Math.floor(progress * 100)}%`, { - originX: 'center', - originY: 'center', - left: width / 2, - top: (height / 2) * (1 + (progress * 0.1 - 0.05)), - fontSize: 20, - textAlign: 'center', - fill: 'white', - }); - - canvas.add(text); - } - - function onClose() { - // Cleanup if you initialized anything - } - - return { onRender, onClose }; -} - -editly({ - // fast: true, - outPath: './customFabric.gif', - // outPath: './customFabric.mp4', - clips: [ - { duration: 2, layers: [{ type: 'fabric', func }] }, - ], -}).catch(console.error); diff --git a/examples/customFabric.ts b/examples/customFabric.ts new file mode 100644 index 00000000..6b82d517 --- /dev/null +++ b/examples/customFabric.ts @@ -0,0 +1,35 @@ +import editly from 'editly'; +import { CustomFabricFunctionArgs, CustomFabricFunctionCallbacks } from 'editly'; + +function func({ width, height, fabric }: CustomFabricFunctionArgs): CustomFabricFunctionCallbacks { + return { + async onRender(progress, canvas) { + canvas.backgroundColor = 'hsl(33, 100%, 50%)'; + + const text = new fabric.Text(`PROGRESS\n${Math.floor(progress * 100)}%`, { + originX: 'center', + originY: 'center', + left: width / 2, + top: (height / 2) * (1 + (progress * 0.1 - 0.05)), + fontSize: 20, + textAlign: 'center', + fill: 'white', + }); + + canvas.add(text); + }, + + onClose() { + // Cleanup if you initialized anything + } + }; +} + +await editly({ + // fast: true, outPath: './customFabric.gif', + // outPath: './customFabric.mp4', + clips: [ + { duration: 2, layers: [{ type: 'fabric', func }] }, + ], +}); diff --git a/examples/fabricImagePostProcessing.js b/examples/fabricImagePostProcessing.ts similarity index 82% rename from examples/fabricImagePostProcessing.js rename to examples/fabricImagePostProcessing.ts index f13c1f0a..2af3dc6c 100644 --- a/examples/fabricImagePostProcessing.js +++ b/examples/fabricImagePostProcessing.ts @@ -1,8 +1,8 @@ -import editly from '../index.js'; +import editly from 'editly'; // See https://github.com/mifi/editly/pull/222 -editly({ +await editly({ outPath: './fabricImagePostProcessing.mp4', clips: [{ duration: 4, @@ -21,14 +21,14 @@ editly({ width: 0.5, height: 0.5, fabricImagePostProcessing: async ({ image, fabric, canvas }) => { - const circleArgs = { + const circleArgs: ConstructorParameters<typeof fabric.Circle>[0] = { radius: Math.min(image.width, image.height) * 0.4, originX: 'center', originY: 'center', stroke: 'white', strokeWidth: 22, }; - image.setOptions({ clipPath: new fabric.Circle(circleArgs) }); + image.set({ clipPath: new fabric.Circle(circleArgs) }); canvas.add(new fabric.Circle({ ...circleArgs, left: image.getCenterPoint().x, top: image.getCenterPoint().y, })); }, }, - ] }, + ] + }, ], -}).catch(console.error); +}); diff --git a/examples/renderSingleFrame.js b/examples/renderSingleFrame.js deleted file mode 100644 index d396538d..00000000 --- a/examples/renderSingleFrame.js +++ /dev/null @@ -1,12 +0,0 @@ -import { parse } from 'json5'; -import fsExtra from 'fs-extra'; - -// eslint-disable-next-line import/named -import { renderSingleFrame } from '../index.js'; - -(async () => { - await renderSingleFrame({ - time: 0, - clips: parse(await fsExtra.readFile('./videos.json5', 'utf-8')).clips, - }); -})().catch(console.error); diff --git a/examples/renderSingleFrame.ts b/examples/renderSingleFrame.ts new file mode
100644 index 00000000..896ac939 --- /dev/null +++ b/examples/renderSingleFrame.ts @@ -0,0 +1,11 @@ +import JSON from 'json5'; +import fsExtra from 'fs-extra'; +import { renderSingleFrame } from 'editly'; + +(async () => { + await renderSingleFrame({ + time: 0, + clips: JSON.parse(await fsExtra.readFile('./videos.json5', 'utf-8')).clips, + outPath: 'renderSingleFrame.png' + }); +})().catch(console.error); diff --git a/examples/run-all-examples.sh b/examples/run-all-examples.sh index 74a98757..8a94dff9 100755 --- a/examples/run-all-examples.sh +++ b/examples/run-all-examples.sh @@ -1,13 +1,10 @@ #/bin/bash -set -e +set -ex -node ../cli.js --json gl.json5 -node ../cli.js --json image.json5 -node ../cli.js --json losslesscut.json5 -node ../cli.js --json resizeHorizontal.json5 -node ../cli.js --json resizeVertical.json5 -node ../cli.js --json speedTest.json5 -node ../cli.js --json subtitle.json5 -node ../cli.js --json transitionEasing.json5 -node ../cli.js --json transparentGradient.json5 -node ../cli.js --json commonFeatures.json5 +node ../dist/cli.js --json gl.json5 +node ../dist/cli.js --json image.json5 +node ../dist/cli.js --json speedTest.json5 +node ../dist/cli.js --json subtitle.json5 +node ../dist/cli.js --json transitionEasing.json5 +node ../dist/cli.js --json transparentGradient.json5 +node ../dist/cli.js --json commonFeatures.json5 diff --git a/ffmpeg.js b/ffmpeg.js deleted file mode 100644 index 351675f7..00000000 --- a/ffmpeg.js +++ /dev/null @@ -1,31 +0,0 @@ -import fsExtra from 'fs-extra'; -import { execa } from 'execa'; -import assert from 'assert'; -import { compareVersions } from 'compare-versions'; - -export const getFfmpegCommonArgs = ({ enableFfmpegLog }) => (enableFfmpegLog ? [] : ['-hide_banner', '-loglevel', 'error']); - -export const getCutFromArgs = ({ cutFrom }) => (cutFrom ? ['-ss', cutFrom] : []); - -export const getCutToArgs = ({ cutTo, cutFrom, speedFactor }) => (cutTo ? ['-t', (cutTo - cutFrom) * speedFactor] : []); - -export async function createConcatFile(segments, concatFilePath) { - // https://superuser.com/questions/787064/filename-quoting-in-ffmpeg-concat - await fsExtra.writeFile(concatFilePath, segments.map((seg) => `file '${seg.replace(/'/g, "'\\''")}'`).join('\n')); -} - -export async function testFf(exePath, name) { - const minRequiredVersion = '4.3.1'; - - try { - const { stdout } = await execa(exePath, ['-version']); - const firstLine = stdout.split('\n')[0]; - const match = firstLine.match(`${name} version ([0-9.]+)`); - assert(match, 'Unknown version string'); - const versionStr = match[1]; - console.log(`${name} version ${versionStr}`); - assert(compareVersions(versionStr, minRequiredVersion, '>='), 'Version is outdated'); - } catch (err) { - console.error(`WARNING: ${name}:`, err.message); - } -} diff --git a/index.d.ts b/index.d.ts deleted file mode 100644 index 83527dd7..00000000 --- a/index.d.ts +++ /dev/null @@ -1,1158 +0,0 @@ -import type * as Fabric from 'fabric/node'; - -/** - * Edit and render videos. - * - * @param config - Config. - */ -declare function Editly(config: Editly.Config): Promise<void>; - -declare namespace Editly { - - /** Little utility */ - type OptionalPromise<T> = Promise<T> | T; - - type OriginX = - 'left' | - 'center' | - 'right'; - - type OriginY = - 'top' | - 'center' | - 'bottom'; - - /** - * How to fit image to screen. Can be one of: - * - `'contain'` - All the video will be contained within the frame and letterboxed. - * - `'contain-blur'` - Like contain, but with a blurred copy as the letterbox.
- * - `'cover'` - Video be cropped to cover the whole screen (aspect ratio preserved). - * - `'stretch'` - Video will be stretched to cover the whole screen (aspect ratio ignored). - * - * @default 'contain-blur' - * @see [Example 'image.json5']{@link https://github.com/mifi/editly/blob/master/examples/image.json5} - * @see [Example 'videos.json5']{@link https://github.com/mifi/editly/blob/master/examples/videos.json5} - */ - type ResizeMode = - 'contain' | - 'contain-blur' | - 'cover' | - 'stretch'; - - /** - * An object, where `{ x: 0, y: 0 }` is the upper left corner of the screen and `{ x: 1, y: 1 }` is the lower right corner. - */ - interface PositionObject { - - /** - * X-position relative to video width. - */ - x: number; - - /** - * Y-position relative to video height. - */ - y: number; - - /** - * X-anchor position of the object. - */ - originX?: OriginX; - - /** - * Y-anchor position of the object. - */ - originY?: OriginY; - - } - - /** - * Certain layers support the position parameter. - * - * @see [Position parameter]{@link https://github.com/mifi/editly#position-parameter} - * @see [Example 'position.json5']{@link https://github.com/mifi/editly/blob/master/examples/position.json5} - */ - type Position = - 'top' | - 'top-left' | - 'top-right' | - 'center' | - 'center-left' | - 'center-right' | - 'bottom' | - 'bottom-left' | - 'bottom-right' | - PositionObject; - - /** - * @see [Curve types]{@link https://trac.ffmpeg.org/wiki/AfadeCurves} - */ - type CurveType = - 'tri' | - 'qsin' | - 'hsin' | - 'esin' | - 'log' | - 'ipar' | - 'qua' | - 'cub' | - 'squ' | - 'cbr' | - 'par' | - 'exp' | - 'iqsin' | - 'ihsin' | - 'dese' | - 'desi' | - 'losi' | - 'nofade' | - string; - - /** - * @see [Transition types]{@link https://github.com/mifi/editly#transition-types} - */ - type TransitionType = - 'directional-left' | - 'directional-right' | - 'directional-up' | - 'directional-down' | - 'random' | - 'dummy' | - string; - - /** - * WARNING: Undocumented feature! - */ - type GLTextureLike = { - bind: (unit: number) => number, - shape: [number, number], - }; - - /** - * WARNING: Undocumented feature! - */ - interface TransitionParams { - - /** - * WARNING: Undocumented feature! - */ - [key: string]: number | boolean | GLTextureLike | number[]; - - } - - interface Transition { - - /** - * Transition duration. - * - * @default 0.5 - */ - duration?: number; - - /** - * Transition type. - * - * @default 'random' - * @see [Transition types]{@link https://github.com/mifi/editly#transition-types} - */ - name?: TransitionType; - - /** - * [Fade out curve]{@link https://trac.ffmpeg.org/wiki/AfadeCurves} in audio cross fades. - * - * @default 'tri' - */ - audioOutCurve?: CurveType; - - /** - * [Fade in curve]{@link https://trac.ffmpeg.org/wiki/AfadeCurves} in audio cross fades. - * - * @default 'tri' - */ - audioInCurve?: CurveType; - - /** - * WARNING: Undocumented feature! - */ - easing?: string | null; - - /** - * WARNING: Undocumented feature! - */ - params?: TransitionParams; - - } - - /** - * @see [Arbitrary audio tracks]{@link https://github.com/mifi/editly#arbitrary-audio-tracks} - */ - interface AudioTrack { - - /** - * File path for this track. - */ - path: string; - - /** - * Relative volume for this track. - * - * @default 1 - */ - mixVolume?: number | string; - - /** - * Time value to cut source file from (in seconds). - * - * @default 0 - */ - cutFrom?: number; - - /** - * Time value to cut source file to (in seconds). 
- */ - cutTo?: number; - - /** - * How many seconds into video to start this audio track. - * - * @default 0 - */ - start?: number; - - } - - /** - * @see [Ken Burns parameters]{@link https://github.com/mifi/editly#ken-burns-parameters} - */ - interface KenBurns { - - /** - * Zoom direction for Ken Burns effect. - * Use `null` to disable. - */ - zoomDirection?: 'in' | 'out' | 'left' | `right` | null; - - /** - * Zoom amount for Ken Burns effect. - * - * @default 0.1 - */ - zoomAmount?: number; - - } - - type LayerType = - 'video' | - 'audio' | - 'detached-audio' | - 'image' | - 'image-overlay' | - 'title' | - 'subtitle' | - 'title-background' | - 'news-title' | - 'slide-in-text' | - 'fill-color' | - 'pause' | - 'radial-gradient' | - 'linear-gradient' | - 'rainbow-colors' | - 'canvas' | - 'fabric' | - 'gl' | - 'editly-banner'; - - interface BaseLayer { - - /** - * Layer type. - */ - type: LayerType; - - /** - * What time into the clip should this layer start (in seconds). - */ - start?: number; - - /** - * What time into the clip should this layer stop (in seconds). - */ - stop?: number; - - } - - interface VideoPostProcessingFunctionArgs { - canvas: Fabric.Canvas; - image: Fabric.FabricImage; - fabric: typeof Fabric, - progress: number; - time: number; - } - - /** - * For video layers, if parent `clip.duration` is specified, the video will be slowed/sped-up to match `clip.duration`. - * If `cutFrom`/`cutTo` is set, the resulting segment (`cutTo`-`cutFrom`) will be slowed/sped-up to fit `clip.duration`. - * If the layer has audio, it will be kept (and mixed with other audio layers if present). - */ - interface VideoLayer extends BaseLayer { - - /** - * Layer type. - */ - type: 'video'; - - /** - * Path to video file. - */ - path: string; - - /** - * How to fit video to screen. - * - * @default 'contain-blur' - * @see [Resize modes]{@link https://github.com/mifi/editly#resize-modes} - */ - resizeMode?: ResizeMode; - - /** - * Time value to cut from (in seconds). - * - * @default 0 - */ - cutFrom?: number; - - /** - * Time value to cut to (in seconds). - * Defaults to *end of video*. - */ - cutTo?: number; - - /** - * Width relative to screen width. - * Must be between 0 and 1. - * - * @default 1 - */ - width?: number; - - /** - * Height relative to screen height. - * Must be between 0 and 1. - * - * @default 1 - */ - height?: number; - - /** - * X-position relative to screen width. - * Must be between 0 and 1. - * - * @default 0 - */ - left?: number; - - /** - * Y-position relative to screen height. - * Must be between 0 and 1. - * - * @default 0 - */ - top?: number; - - /** - * X-anchor. - * - * @default 'left' - */ - originX?: OriginX; - - /** - * Y-anchor. - * - * @default 'top' - */ - originY?: OriginY; - - /** - * Relative volume when mixing this video's audio track with others. - * - * @default 1 - */ - mixVolume?: number | string; - - /** - * Post-processing function after calling rgbaToFabricImage but before adding it to StaticCanvas. - */ - fabricImagePostProcessing?: (data: VideoPostProcessingFunctionArgs) => Promise<void>; - } - - /** - * Audio layers will be mixed together. - * If `cutFrom`/`cutTo` is set, the resulting segment (`cutTo`-`cutFrom`) will be slowed/sped-up to fit `clip.duration`. - * The slow down/speed-up operation is limited to values between `0.5x` and `100x`. - */ - interface AudioLayer extends BaseLayer { - - /** - * Layer type. - */ - type: 'audio'; - - /** - * Path to audio file. - */ - path: string; - - /** - * Time value to cut from (in seconds).
- * - * @default 0 - */ - cutFrom?: number; - - /** - * Time value to cut to (in seconds). - * Defaults to `clip.duration`. - */ - cutTo?: number; - - /** - * Relative volume when mixing this audio track with others. - * - * @default 1 - */ - mixVolume?: number | string; - - } - - /** - * This is a special case of `audioTracks` that makes it easier to start the audio relative to clips start times, - * without having to calculate global start times. - * - * This layer has the exact same properties as [`audioTracks`]{@link https://github.com/mifi/editly#arbitrary-audio-tracks}, - * except `start` time is relative to the clip's start. - */ - interface DetachedAudioLayer extends BaseLayer, AudioTrack { - - /** - * Layer type. - */ - type: 'detached-audio'; - - } - - /** - * Full screen image. - */ - interface ImageLayer extends BaseLayer, KenBurns { - - /** - * Layer type. - */ - type: 'image'; - - /** - * Path to image file. - */ - path: string; - - /** - * How to fit image to screen. - */ - resizeMode?: ResizeMode; - - /** - * WARNING: Undocumented feature! - */ - duration?: number; - - } - - /** - * Image overlay with a custom position and size on the screen. - */ - interface ImageOverlayLayer extends BaseLayer, KenBurns { - - /** - * Layer type. - */ - type: 'image-overlay'; - - /** - * Path to image file. - */ - path: string; - - /** - * Position. - */ - position?: Position; - - /** - * Width (from 0 to 1) where 1 is screen width. - */ - width?: number; - - /** - * Height (from 0 to 1) where 1 is screen height. - */ - height?: number; - - } - - interface TitleLayer extends BaseLayer, KenBurns { - - /** - * Layer type. - */ - type: 'title'; - - /** - * Title text to show, keep it short. - */ - text: string; - - /** - * Text color. - * Defaults to '#ffffff'. - */ - textColor?: string; - - /** - * Set font (`.ttf`). - * Defaults to system font. - */ - fontPath?: string; - - /** - * Position. - */ - position?: Position; - - } - - interface SubtitleLayer extends BaseLayer { - - /** - * Layer type. - */ - type: 'subtitle'; - - /** - * Subtitle text to show. - */ - text: string; - - /** - * Text color. - * Defaults to '#ffffff'. - */ - textColor?: string; - - /** - * Set font (`.ttf`). - * Defaults to system font. - */ - fontPath?: string; - - /** - * WARNING: Undocumented feature! - */ - backgroundColor?: string; - - } - - /** - * Title with background. - */ - interface TitleBackgroundLayer extends BaseLayer { - - /** - * Layer type. - */ - type: 'title-background'; - - /** - * Title text to show, keep it short. - */ - text: string; - - /** - * Text color. - * Defaults to '#ffffff'. - */ - textColor?: string; - - /** - * Set font (`.ttf`). - * Defaults to system font. - */ - fontPath?: string; - - /** - * Background layer. - * Defaults to random background. - */ - background?: BackgroundLayer; - - } - - interface NewsTitleLayer extends BaseLayer { - - /** - * Layer type. - */ - type: 'news-title'; - - /** - * Title text to show, keep it short. - */ - text: string; - - /** - * Text color. - * Defaults to '#ffffff'. - */ - textColor?: string; - - /** - * Set font (`.ttf`). - * Defaults to system font. - */ - fontPath?: string; - - /** - * Background color. - * Defaults to '#d02a42'. - */ - backgroundColor?: string; - - /** - * Position. - */ - position?: Position; - - } - - interface SlideInTextLayer extends BaseLayer { - - /** - * Layer type. - */ - type: 'slide-in-text'; - - /** - * Title text to show, keep it short. - */ - text: string; - - /** - * Set font (`.ttf`). 
- * Defaults to system font. - */ - fontPath?: string; - - /** - * Font size. - */ - fontSize?: number; - - /** - * Char spacing. - */ - charSpacing?: number; - - /** - * Color. - */ - color?: string; - - /** - * Position. - */ - position?: Position; - - } - - interface FillColorLayer extends BaseLayer { - - /** - * Layer type. - */ - type: 'fill-color'; - - /** - * Color to fill background. - * Defaults to random color. - */ - color?: string; - - } - - interface PauseLayer extends BaseLayer { - - /** - * Layer type. - */ - type: 'pause'; - - /** - * Color to fill background. - * Defaults to random color. - */ - color?: string; - - } - - interface RadialGradientLayer extends BaseLayer { - - /** - * Layer type. - */ - type: 'radial-gradient'; - - /** - * Array of two colors. - * Defaults to random colors. - */ - colors?: [ string, string ]; - - } - - interface LinearGradientLayer extends BaseLayer { - - /** - * Layer type. - */ - type: 'linear-gradient'; - - /** - * Array of two colors. - * Defaults to random colors. - */ - colors?: [ string, string ]; - - } - - interface RainbowColorsLayer extends BaseLayer { - - /** - * Layer type. - */ - type: 'rainbow-colors'; - - } - - type OnRenderCallback = (progress: number, canvas: Fabric.Canvas) => OptionalPromise<void>; - type OnCloseCallback = () => OptionalPromise<void>; - - interface CustomFunctionCallbacks { - onRender: OnRenderCallback; - onClose?: OnCloseCallback; - } - - interface CustomCanvasFunctionArgs { - width: number; - height: number; - canvas: Fabric.Canvas; - } - - type CustomCanvasFunction = (args: CustomCanvasFunctionArgs) => OptionalPromise<CustomFunctionCallbacks>; - - interface CanvasLayer extends BaseLayer { - - /** - * Layer type. - */ - type: 'canvas'; - - /** - * Custom JavaScript function. - */ - func: CustomCanvasFunction; - - } - - interface CustomFabricFunctionArgs { - width: number; - height: number; - fabric: typeof Fabric; - canvas: Fabric.Canvas; - params: any; - } - - type CustomFabricFunction = (args: CustomFabricFunctionArgs) => OptionalPromise<CustomFunctionCallbacks>; - - interface FabricLayer extends BaseLayer { - - /** - * Layer type. - */ - type: 'fabric'; - - /** - * Custom JavaScript function. - */ - func: CustomFabricFunction; - - } - - interface GlLayer extends BaseLayer { - - /** - * Layer type. - */ - type: 'gl'; - - /** - * Fragment path (`.frag` file) - */ - fragmentPath: string; - - /** - * Vertex path (`.vert` file). - */ - vertexPath?: string; - - /** - * WARNING: Undocumented feature! - */ - speed?: number; - - } - - /** - * WARNING: Undocumented feature! - */ - interface EditlyBannerLayer extends BaseLayer { - - /** - * Layer type. - */ - type: 'editly-banner'; - - } - - /** - * @see [Examples]{@link https://github.com/mifi/editly/tree/master/examples} - * @see [Example 'commonFeatures.json5']{@link https://github.com/mifi/editly/blob/master/examples/commonFeatures.json5} - */ - type Layer = - VideoLayer | - AudioLayer | - DetachedAudioLayer | - ImageLayer | - ImageOverlayLayer | - TitleLayer | - SubtitleLayer | - TitleBackgroundLayer | - NewsTitleLayer | - SlideInTextLayer | - FillColorLayer | - PauseLayer | - RadialGradientLayer | - LinearGradientLayer | - RainbowColorsLayer | - CanvasLayer | - FabricLayer | - GlLayer | - EditlyBannerLayer; - - /** - * Special layers that can be used f.e. in the 'title-background' layer. - */ - type BackgroundLayer = - RadialGradientLayer | - LinearGradientLayer | - FillColorLayer; - - interface Clip { - - /** - * List of layers within the current clip that will be overlaid in their natural order (final layer on top).
- */ - layers: Layer[] | Layer; - - /** - * Clip duration. - * If unset, the clip duration will be that of the first video layer. - * Defaults to `defaults.duration`. - */ - duration?: number; - - /** - * Specify transition at the end of this clip. - * Defaults to `defaults.transition`. - * Set to `null` to disable transitions. - */ - transition?: Transition | null; - - } - - interface DefaultLayerOptions { - - /** - * Set default font (`.ttf`). - * Defaults to system font. - */ - fontPath?: string; - - /** - * Set any layer parameter that all layers will inherit. - */ - [key: string]: any; - - } - - type DefaultLayerTypeOptions = { - - /** - * Set any layer parameter that all layers of the same type (specified in key) will inherit. - */ - [P in LayerType]?: Partial<Omit<Extract<Layer, { type: P }>, 'type'>>; - - } - - interface DefaultTransitionOptions extends Transition { - } - - interface DefaultOptions { - - /** - * Set default clip duration for clips that don't have an own duration (in seconds). - * - * @default 4 - */ - duration?: number; - - /** - * An object describing the default layer options. - */ - layer?: DefaultLayerOptions; - - /** - * Defaults for each individual layer types. - */ - layerType?: DefaultLayerTypeOptions; - - /** - * An object describing the default transition. - * Set to `null` to disable transitions. - */ - transition?: DefaultTransitionOptions | null; - - } - - /** - * You can enable audio normalization of the final output audio. - * This is useful if you want to achieve Audio Ducking (e.g. automatically lower volume of all other tracks when voice-over speaks). - * - * @see [Dynaudnorm]{@link https://ffmpeg.org/ffmpeg-filters.html#dynaudnorm} - * @see [Example of audio ducking]{@link https://github.com/mifi/editly/blob/master/examples/audio2.json5} - */ - interface AudioNormalizationOptions { - - /** - * Enable audio normalization? - * - * @default false - * @see [Audio normalization]{@link https://github.com/mifi/editly#audio-normalization} - */ - enable?: boolean; - - /** - * Audio normalization gauss size. - * - * @default 5 - * @see [Audio normalization]{@link https://github.com/mifi/editly#audio-normalization} - */ - gaussSize?: number; - - /** - * Audio normalization max gain. - * - * @default 30 - * @see [Audio normalization]{@link https://github.com/mifi/editly#audio-normalization} - */ - maxGain?: number; - - } - - interface Config { - - /** - * Output path (`.mp4` or `.mkv`, can also be a `.gif`). - */ - outPath: string; - - /** - * List of clip objects that will be played in sequence. - * Each clip can have one or more layers. - * - * @default [] - */ - clips: Clip[]; - - /** - * Width which all media will be converted to. - * - * @default 640 - */ - width?: number; - - /** - * Height which all media will be converted to. - * Decides height based on `width` and aspect ratio of the first video by default. - */ - height?: number; - - /** - * FPS which all videos will be converted to. - * Defaults to first video's FPS or `25`. - */ - fps?: number; - - /** - * Specify custom output codec/format arguments for ffmpeg. - * Automatically adds codec options (normally `h264`) by default. - * - * @see [Example]{@link https://github.com/mifi/editly/blob/master/examples/customOutputArgs.json5} - */ - customOutputArgs?: string[]; - - /** - * Allow remote URLs as paths. - * - * @default false - */ - allowRemoteRequests?: boolean; - - /** - * Fast mode (low resolution and FPS, useful for getting a quick preview ⏩).
- * - * @default false - */ - fast?: boolean; - - /** - * An object describing default options for clips and layers. - */ - defaults?: DefaultOptions; - - /** - * List of arbitrary audio tracks. - * - * @default [] - * @see [Audio tracks]{@link https://github.com/mifi/editly#arbitrary-audio-tracks} - */ - audioTracks?: AudioTrack[]; - - /** - * Set an audio track for the whole video.. - * - * @see [Audio tracks]{@link https://github.com/mifi/editly#arbitrary-audio-tracks} - */ - audioFilePath?: string; - - /** - * Background Volume - * - * @see [Audio tracks]{@link https://github.com/mifi/editly#arbitrary-audio-tracks} - */ - backgroundAudioVolume?: string | number; - - /** - * Loop the audio track if it is shorter than video? - * - * @default false - */ - loopAudio?: boolean; - - /** - * Keep source audio from `clips`? - * - * @default false - */ - keepSourceAudio?: boolean; - - /** - * Volume of audio from `clips` relative to `audioTracks`. - * - * @default 1 - * @see [Audio tracks]{@link https://github.com/mifi/editly#arbitrary-audio-tracks} - */ - clipsAudioVolume?: number | string; - - /** - * Adjust output [volume]{@link http://ffmpeg.org/ffmpeg-filters.html#volume} (final stage). - * - * @default 1 - * @see [Example]{@link https://github.com/mifi/editly/blob/master/examples/audio-volume.json5} - * @example - * 0.5 - * @example - * '10db' - */ - outputVolume?: number | string; - - /** - * Audio normalization. - */ - audioNorm?: AudioNormalizationOptions; - - /** - * WARNING: Undocumented feature! - */ - ffmpegPath?: string; - - /** - * WARNING: Undocumented feature! - */ - ffprobePath?: string; - - /** - * WARNING: Undocumented feature! - */ - enableFfmpegLog?: boolean; - - /** - * WARNING: Undocumented feature! - */ - verbose?: boolean; - - /** - * WARNING: Undocumented feature! - */ - logTimes?: boolean; - - /** - * WARNING: Undocumented feature! - */ - keepTmp?: boolean; - - } - - interface RenderSingleFrameConfig extends Config { - - /** - * Output path (`.mp4` or `.mkv`, can also be a `.gif`). - */ - outPath: string; - - /** - * Timestamp to render. - */ - time?: number; - - } - - /** - * WARNING: Undocumented feature! - * Pure function to get a frame at a certain time. - * - * @param config - Config. - */ - function renderSingleFrame(config: RenderSingleFrameConfig): Promise<void>; -} - -export = Editly; diff --git a/package.json b/package.json index 05bc100b..152fe62d 100644 --- a/package.json +++ b/package.json @@ -2,8 +2,11 @@ "name": "editly", "description": "Simple, sexy, declarative video editing", "version": "0.14.2", - "main": "index.js", - "types": "index.d.ts", + "module": "./dist/index.js", + "types": "dist/index.d.ts", + "exports": { + ".": "./dist/index.js" + }, "author": "Mikael Finstad ", "contributors": [ "Patrick Connolly (https://github.com/patcon)", @@ -12,6 +15,11 @@ "type": "module", "license": "MIT", "dependencies": { + "@types/fs-extra": "^11.0.4", + "@types/gl": "^6.0.5", + "@types/gl-shader": "^4.2.5", + "@types/lodash-es": "^4.17.12", + "@types/ndarray": "^1.0.14", "canvas": "^2.11.2", "compare-versions": "^6.1.0", "execa": "^6.1.0", @@ -33,7 +41,8 @@ "p-map": "^7.0.2" }, "scripts": { - "test": "node test.js", + "prepare": "pkgroll --clean-dist --sourcemap", + "test": "tsx test/index.ts", "lint": "eslint ."
}, "repository": { @@ -41,13 +50,15 @@ "url": "git+https://github.com/mifi/editly.git" }, "bin": { - "editly": "cli.js" + "editly": "dist/cli.js" }, "devDependencies": { - "@types/fabric": "^5.2.4", - "eslint": "^8.22.0", - "eslint-config-airbnb-base": "^15.0.0", - "eslint-plugin-import": "^2.29.1", - "eslint-plugin-import-exports-imports-resolver": "^1.0.1" + "@eslint/js": "^9.18.0", + "@tsconfig/node-lts": "^22.0.1", + "eslint": "^9.18.0", + "pkgroll": "^2.6.1", + "tsx": "^4.19.2", + "typescript": "^5.7.3", + "typescript-eslint": "^8.20.0" } } diff --git a/src/BoxBlur.d.ts b/src/BoxBlur.d.ts new file mode 100644 index 00000000..84ab2193 --- /dev/null +++ b/src/BoxBlur.d.ts @@ -0,0 +1,10 @@ +import type { CanvasRenderingContext2D } from "canvas"; + +declare function boxBlurImage( + context: CanvasRenderingContext2D, + width: number, + height: number, + radius: number, + blurAlphaChannel: boolean, + iterations: number +): void; diff --git a/BoxBlur.js b/src/BoxBlur.js similarity index 100% rename from BoxBlur.js rename to src/BoxBlur.js diff --git a/audio.js b/src/audio.ts similarity index 78% rename from audio.js rename to src/audio.ts index b99057eb..b98a4516 100644 --- a/audio.js +++ b/src/audio.ts @@ -1,17 +1,33 @@ import pMap from 'p-map'; import { join, basename, resolve } from 'path'; import { execa } from 'execa'; -import flatMap from 'lodash-es/flatMap.js'; +import { flatMap } from 'lodash-es'; import { getFfmpegCommonArgs, getCutFromArgs } from './ffmpeg.js'; import { readFileStreams } from './util.js'; -export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }) => { - async function createMixedAudioClips({ clips, keepSourceAudio }) { +import type { AudioLayer, AudioNormalizationOptions, AudioTrack, Clip, Config, Transition, VideoLayer } from './types.js' + +export type AudioOptions = { + ffmpegPath: string; + ffprobePath: string; + enableFfmpegLog: boolean; + verbose: boolean; + tmpDir: string; +} + +export type EditAudioOptions = Pick<Config, 'keepSourceAudio' | 'clips' | 'clipsAudioVolume' | 'audioNorm' | 'outputVolume'> & { + arbitraryAudio: AudioTrack[] +}; + +type LayerWithAudio = (AudioLayer | VideoLayer) & { speedFactor: number }; + +export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }: AudioOptions) => { + async function createMixedAudioClips({ clips, keepSourceAudio }: { clips: Clip[], keepSourceAudio?: boolean }) { return pMap(clips, async (clip, i) => { const { duration, layers, transition } = clip; - async function runInner() { + async function runInner(): Promise<{ clipAudioPath: string, silent: boolean }> { const clipAudioPath = join(tmpDir, `clip${i}-audio.flac`); async function createSilence() { @@ -20,7 +36,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }) = '-f', 'lavfi', '-i', 'anullsrc=channel_layout=stereo:sample_rate=44100', '-sample_fmt', 's32', '-ar', '48000', - '-t', duration, + '-t', duration!.toString(), '-c:a', 'flac', '-y', clipAudioPath, @@ -33,10 +49,11 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }) = // Has user enabled keep source audio? if (!keepSourceAudio) return createSilence(); + // TODO:[ts]: Layers is always an array once config is parsed.
Fix this in types const audioLayers = layers.filter(({ type, start, stop }) => ( ['audio', 'video'].includes(type) // TODO: We don't support audio for start/stop layers - && !start && stop == null)); + && !start && stop == null)) as LayerWithAudio[]; if (audioLayers.length === 0) return createSilence(); @@ -60,13 +77,13 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }) = atempoFilter = `atempo=${atempo}`; } - const cutToArg = (cutTo - cutFrom) * speedFactor; + const cutToArg = (cutTo! - cutFrom!) * speedFactor; const args = [ ...getFfmpegCommonArgs({ enableFfmpegLog }), ...getCutFromArgs({ cutFrom }), '-i', path, - '-t', cutToArg, + '-t', cutToArg!.toString(), '-sample_fmt', 's32', '-ar', '48000', '-map', 'a:0', '-c:a', 'flac', @@ -78,10 +95,10 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }) = // console.log(args); await execa(ffmpegPath, args); - return { + return [ layerAudioPath, audioLayer, - }; + ]; } catch (err) { if (verbose) console.error('Cannot extract audio from video', path, err); // Fall back to silence @@ -89,17 +106,17 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }) = } }, { concurrency: 4 }); - const processedAudioLayers = processedAudioLayersRaw.filter((p) => p); + const processedAudioLayers = processedAudioLayersRaw.filter((r): r is [string, LayerWithAudio] => r !== undefined); if (processedAudioLayers.length < 1) return createSilence(); - if (processedAudioLayers.length === 1) return { clipAudioPath: processedAudioLayers[0].layerAudioPath }; + if (processedAudioLayers.length === 1) return { clipAudioPath: processedAudioLayers[0][0], silent: false }; // Merge/mix all layers' audio - const weights = processedAudioLayers.map(({ audioLayer }) => (audioLayer.mixVolume != null ? audioLayer.mixVolume : 1)); + const weights = processedAudioLayers.map(([, { mixVolume }]) => mixVolume ?? 1); const args = [ ...getFfmpegCommonArgs({ enableFfmpegLog }), - ...flatMap(processedAudioLayers, ({ layerAudioPath }) => ['-i', layerAudioPath]), + ...flatMap(processedAudioLayers, ([layerAudioPath]) => ['-i', layerAudioPath]), '-filter_complex', `amix=inputs=${processedAudioLayers.length}:duration=longest:weights=${weights.join(' ')}`, '-c:a', 'flac', '-y', @@ -107,7 +124,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }) = ]; await execa(ffmpegPath, args); - return { clipAudioPath }; + return { clipAudioPath, silent: false }; } const { clipAudioPath, silent } = await runInner(); @@ -120,7 +137,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }) = }, { concurrency: 4 }); } - async function crossFadeConcatClipAudio(clipAudio) { + async function crossFadeConcatClipAudio(clipAudio: { path: string, transition?: Transition | null }[]) { if (clipAudio.length < 2) { return clipAudio[0].path; } @@ -134,7 +151,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }) = const outStream = `[concat${i}]`; const epsilon = 0.0001; // If duration is 0, ffmpeg seems to default to 1 sec instead, hence epsilon. - let ret = `${inStream}[${i + 1}:a]acrossfade=d=${Math.max(epsilon, transition.duration)}:c1=${transition.audioOutCurve || 'tri'}:c2=${transition.audioInCurve || 'tri'}`; + let ret = `${inStream}[${i + 1}:a]acrossfade=d=${Math.max(epsilon, transition?.duration ?? 0)}:c1=${transition?.audioOutCurve ?? 'tri'}:c2=${transition?.audioInCurve ?? 
'tri'}`; inStream = outStream; @@ -156,7 +173,8 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }) = return outPath; } - async function mixArbitraryAudio({ streams, audioNorm, outputVolume }) { + // FIXME[ts]: parseConfig sets `loop` on arbitrary audio tracks. Should that be part of the `AudioTrack` interface? + async function mixArbitraryAudio({ streams, audioNorm, outputVolume }: { streams: (AudioTrack & { loop?: number })[], audioNorm?: AudioNormalizationOptions, outputVolume?: number | string }) { let maxGain = 30; let gaussSize = 5; if (audioNorm) { @@ -175,14 +193,14 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }) = const volumeArg = outputVolume != null ? `,volume=${outputVolume}` : ''; const audioNormArg = enableAudioNorm ? `,dynaudnorm=g=${gaussSize}:maxgain=${maxGain}` : ''; - filterComplex += `;${streams.map((s, i) => `[a${i}]`).join('')}amix=inputs=${streams.length}:duration=first:dropout_transition=0:weights=${streams.map((s) => (s.mixVolume != null ? s.mixVolume : 1)).join(' ')}${audioNormArg}${volumeArg}`; + filterComplex += `;${streams.map((_, i) => `[a${i}]`).join('')}amix=inputs=${streams.length}:duration=first:dropout_transition=0:weights=${streams.map((s) => (s.mixVolume != null ? s.mixVolume : 1)).join(' ')}${audioNormArg}${volumeArg}`; const mixedAudioPath = join(tmpDir, 'audio-mixed.flac'); const args = [ ...getFfmpegCommonArgs({ enableFfmpegLog }), ...(flatMap(streams, ({ path, loop }) => ([ - '-stream_loop', (loop || 0), + '-stream_loop', (loop || 0).toString(), '-i', path, ]))), '-vn', @@ -199,7 +217,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }) = return mixedAudioPath; } - async function editAudio({ keepSourceAudio, clips, arbitraryAudio, clipsAudioVolume, audioNorm, outputVolume }) { + async function editAudio({ keepSourceAudio, clips, arbitraryAudio, clipsAudioVolume, audioNorm, outputVolume }: EditAudioOptions) { // We need clips to process audio, because we need to know duration if (clips.length === 0) return undefined; @@ -217,7 +235,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }) = // Merge & fade the clip audio files const concatedClipAudioPath = await crossFadeConcatClipAudio(clipAudio); - const streams = [ + const streams: AudioTrack[] = [ // The first stream is required, as it determines the length of the output audio. 
// All other streams will be truncated to its length { path: concatedClipAudioPath, mixVolume: clipsAudioVolume }, diff --git a/cli.js b/src/cli.ts similarity index 86% rename from cli.js rename to src/cli.ts index e33eaac1..515bf020 100644 --- a/cli.js +++ b/src/cli.ts @@ -6,7 +6,7 @@ import pMap from 'p-map'; import JSON5 from 'json5'; import assert from 'assert'; -import Editly from './index.js'; +import Editly, { Config, Layer } from './index.js'; // See also readme const cli = meow(` @@ -53,22 +53,26 @@ const cli = meow(` keepSourceAudio: { type: 'boolean' }, allowRemoteRequests: { type: 'boolean' }, fast: { type: 'boolean', alias: 'f' }, + transitionName: { type: 'string' }, transitionDuration: { type: 'number' }, clipDuration: { type: 'number' }, width: { type: 'number' }, height: { type: 'number' }, fps: { type: 'number' }, + fontPath: { type: 'string' }, loopAudio: { type: 'boolean' }, outputVolume: { type: 'string' }, + json: { type: 'string' }, + out: { type: 'string' }, + audioFilePath: { type: 'string' }, }, }); (async () => { let { json } = cli.flags; - // eslint-disable-next-line prefer-destructuring if (cli.input.length === 1 && /\.(json|json5|js)$/.test(cli.input[0].toLowerCase())) json = cli.input[0]; - let params = { + let params: Partial<Config> = { defaults: {}, }; @@ -78,7 +82,7 @@ const cli = meow(` const clipsIn = cli.input; if (clipsIn.length < 1) cli.showHelp(); - const clips = await pMap(clipsIn, async (clip) => { + const clips: Layer[] = await pMap(clipsIn, async (clip) => { let match = clip.match(/^title:(.+)$/); if (match) return { type: 'title-background', text: match[1] }; @@ -91,7 +95,7 @@ const cli = meow(` cli.showHelp(); } - const { mime } = fileType; + const mime = fileType!.mime; if (mime.startsWith('video')) return { type: 'video', path: clip }; if (mime.startsWith('image')) return { type: 'image', path: clip }; @@ -107,15 +111,15 @@ const cli = meow(` const { verbose, transitionName, transitionDuration, clipDuration, width, height, fps, audioFilePath, fontPath, fast, out: outPath, keepSourceAudio, loopAudio, outputVolume, allowRemoteRequests } = cli.flags; if (transitionName || transitionDuration != null) { - params.defaults.transition = {}; - if (transitionName) params.defaults.transition.name = transitionName; - if (transitionDuration) params.defaults.transition.duration = transitionDuration; + params.defaults!.transition = {}; + if (transitionName) params.defaults!.transition!.name = transitionName; + if (transitionDuration) params.defaults!.transition!.duration = transitionDuration; } - if (clipDuration) params.defaults.duration = clipDuration; + if (clipDuration) params.defaults!.duration = clipDuration; if (fontPath) { - params.defaults.layer = { + params.defaults!.layer = { fontPath, }; } @@ -137,7 +141,7 @@ const cli = meow(` if (!params.outPath) params.outPath = './editly-out.mp4'; - await Editly(params); + await Editly(params as Config); })().catch((err) => { console.error('Caught error', err); process.exitCode = 1; diff --git a/colors.js b/src/colors.ts similarity index 98% rename from colors.js rename to src/colors.ts index 81e1b8f4..5c625a52 100644 --- a/colors.js +++ b/src/colors.ts @@ -178,7 +178,7 @@ export function getRandomColor(colors = allColors) { return { remainingColors, color: colors[index] || allColors[0] }; } -export function getRandomColors(num) { +export function getRandomColors(num: number) { let colors = allColors; const out = []; for (let i = 0; i < Math.min(num, allColors.length); i += 1) { diff --git a/src/ffmpeg.ts
b/src/ffmpeg.ts new file mode 100644 index 00000000..323a218f --- /dev/null +++ b/src/ffmpeg.ts @@ -0,0 +1,37 @@ +import fsExtra from 'fs-extra'; +import { execa } from 'execa'; +import assert from 'assert'; +import { compareVersions } from 'compare-versions'; + +export function getFfmpegCommonArgs({ enableFfmpegLog }: { enableFfmpegLog?: boolean }) { + return enableFfmpegLog ? [] : ['-hide_banner', '-loglevel', 'error']; +} + +export function getCutFromArgs({ cutFrom }: { cutFrom?: number }) { + return cutFrom ? ['-ss', cutFrom.toString()] : []; +} + +export function getCutToArgs({ cutTo, cutFrom, speedFactor }: { cutTo?: number; cutFrom?: number; speedFactor: number }) { + return cutTo ? ['-t', ((cutTo - (cutFrom ?? 0)) * speedFactor).toString()] : []; +} + +export async function createConcatFile(segments: string[], concatFilePath: string) { + // https://superuser.com/questions/787064/filename-quoting-in-ffmpeg-concat + await fsExtra.writeFile(concatFilePath, segments.map((seg) => `file '${seg.replace(/'/g, "'\\''")}'`).join('\n')); +} + +export async function testFf(exePath: string, name: string) { + const minRequiredVersion = '4.3.1'; + + try { + const { stdout } = await execa(exePath, ['-version']); + const firstLine = stdout.split('\n')[0]; + const match = firstLine.match(`${name} version ([0-9.]+)`); + assert(match, 'Unknown version string'); + const versionStr = match[1]; + console.log(`${name} version ${versionStr}`); + assert(compareVersions(versionStr, minRequiredVersion) >= 0, 'Version is outdated'); + } catch (err) { + console.error(`WARNING: ${name}:`, err); + } +} diff --git a/glTransitions.js b/src/glTransitions.ts similarity index 80% rename from glTransitions.js rename to src/glTransitions.ts index 1a6b0c88..0387b308 100644 --- a/glTransitions.js +++ b/src/glTransitions.ts @@ -4,18 +4,27 @@ import createBuffer from 'gl-buffer'; import glTransitions from 'gl-transitions'; import glTransition from 'gl-transition'; import createTexture from 'gl-texture2d'; +import { TransitionParams } from './types.js'; const { default: createTransition } = glTransition; -export default ({ width, height, channels }) => { +export type RunTransitionOptions = { + fromFrame: Buffer; + toFrame: Buffer; + progress: number; + transitionName?: string; + transitionParams?: TransitionParams; +} + +export default ({ width, height, channels }: { width: number, height: number, channels: number }) => { const gl = GL(width, height); if (!gl) { throw new Error('gl returned null, this probably means that some dependencies are not installed.
See README.'); } - function runTransitionOnFrame({ fromFrame, toFrame, progress, transitionName, transitionParams = {} }) { - function convertFrame(buf) { + function runTransitionOnFrame({ fromFrame, toFrame, progress, transitionName, transitionParams = {} }: RunTransitionOptions) { + function convertFrame(buf: Buffer) { // @see https://github.com/stackgl/gl-texture2d/issues/16 return ndarray(buf, [width, height, channels], [channels, width * channels, 1]); } @@ -32,9 +41,9 @@ export default ({ width, height, channels }) => { try { const resizeMode = 'stretch'; - const transitionSource = glTransitions.find((t) => t.name.toLowerCase() === transitionName.toLowerCase()); + const transitionSource = glTransitions.find((t) => t.name.toLowerCase() === transitionName?.toLowerCase()); - transition = createTransition(gl, transitionSource, { resizeMode }); + transition = createTransition(gl, transitionSource!, { resizeMode }); gl.clear(gl.COLOR_BUFFER_BIT); diff --git a/index.js b/src/index.ts similarity index 86% rename from index.js rename to src/index.ts index 9a6df3e4..71fdd151 100644 --- a/index.js +++ b/src/index.ts @@ -1,4 +1,4 @@ -import { execa } from 'execa'; +import { execa, ExecaChildProcess } from 'execa'; import assert from 'assert'; import { join, dirname } from 'path'; import JSON5 from 'json5'; @@ -9,20 +9,28 @@ import { testFf } from './ffmpeg.js'; import { parseFps, multipleOf2, assertFileValid, checkTransition } from './util.js'; import { createFabricCanvas, rgbaToFabricImage } from './sources/fabric.js'; import { createFrameSource } from './sources/frameSource.js'; -import parseConfig from './parseConfig.js'; -import GlTransitions from './glTransitions.js'; +import parseConfig, { ProcessedClip } from './parseConfig.js'; +import GlTransitions, { type RunTransitionOptions } from './glTransitions.js'; import Audio from './audio.js'; +import type { Config, RenderSingleFrameConfig } from './types.js'; const channels = 4; -async function Editly(config = {}) { +export type * from './types.js'; + +/** + * Edit and render videos. + * + * @param config - Config. + */ +async function Editly(config: Config): Promise<void> { const { // Testing options: enableFfmpegLog = false, verbose = false, logTimes = false, keepTmp = false, - fast, + fast = false, outPath, clips: clipsIn, @@ -87,15 +95,15 @@ async function Editly(config = {}) { return false; })); - let width; - let height; + let width: number; + let height: number; let desiredWidth; if (requestedWidth) desiredWidth = requestedWidth; else if (isGif) desiredWidth = 320; - const roundDimension = (val) => (isGif ? Math.round(val) : multipleOf2(val)); + const roundDimension = (val: number) => (isGif ? Math.round(val) : multipleOf2(val)); if (firstVideoWidth && firstVideoHeight) { if (desiredWidth) { @@ -138,8 +146,8 @@ async function Editly(config = {}) { height = Math.max(2, height); } - let fps; - let framerateStr; + let fps: number; + let framerateStr: string; if (fast) { fps = 15; @@ -151,7 +159,7 @@ async function Editly(config = {}) { fps = 10; framerateStr = String(fps); } else if (firstVideoFramerateStr) { - fps = parseFps(firstVideoFramerateStr); + fps = parseFps(firstVideoFramerateStr) ??
25; framerateStr = firstVideoFramerateStr; } else { fps = 25; @@ -170,7 +178,7 @@ async function Editly(config = {}) { const { runTransitionOnFrame: runGlTransitionOnFrame } = GlTransitions({ width, height, channels }); - function runTransitionOnFrame({ fromFrame, toFrame, progress, transitionName, transitionParams }) { + function runTransitionOnFrame({ fromFrame, toFrame, progress, transitionName, transitionParams }: RunTransitionOptions) { // A dummy transition can be used to have an audio transition without a video transition // (Note: You will lose a portion from both clips due to overlap) if (transitionName === 'dummy') return progress > 0.5 ? toFrame : fromFrame; @@ -186,7 +194,7 @@ async function Editly(config = {}) { // https://superuser.com/questions/556029/how-do-i-convert-a-video-to-gif-using-ffmpeg-with-reasonable-quality const videoOutputArgs = isGif ? [ '-vf', `format=rgb24,fps=${fps},scale=${width}:${height}:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse`, - '-loop', 0, + '-loop', '0', ] : [ '-vf', 'format=yuv420p', '-vcodec', 'libx264', @@ -226,7 +234,7 @@ async function Editly(config = {}) { return execa(ffmpegPath, args, { encoding: null, buffer: false, stdin: 'pipe', stdout: process.stdout, stderr: process.stderr }); } - let outProcess; + let outProcess: ExecaChildProcess<Buffer> | undefined = undefined; let outProcessExitCode; let frameSource1; @@ -244,7 +252,7 @@ async function Editly(config = {}) { const getTransitionFromClip = () => clips[transitionFromClipId]; const getTransitionToClip = () => clips[getTransitionToClipId()]; - const getSource = async (clip, clipIndex) => createFrameSource({ clip, clipIndex, width, height, channels, verbose, logTimes, ffmpegPath, ffprobePath, enableFfmpegLog, framerateStr }); + const getSource = async (clip: ProcessedClip, clipIndex: number) => createFrameSource({ clip, clipIndex, width, height, channels, verbose, logTimes, ffmpegPath, ffprobePath, enableFfmpegLog, framerateStr }); const getTransitionFromSource = async () => getSource(getTransitionFromClip(), transitionFromClipId); const getTransitionToSource = async () => (getTransitionToClip() && getSource(getTransitionToClip(), getTransitionToClipId())); @@ -268,8 +276,7 @@ async function Editly(config = {}) { frameSource1 = await getTransitionFromSource(); frameSource2 = await getTransitionToSource(); - // eslint-disable-next-line no-constant-condition - while (true) { + while (!outProcessError) { const transitionToClip = getTransitionToClip(); const transitionFromClip = getTransitionFromClip(); const fromClipNumFrames = Math.round(transitionFromClip.duration * fps); @@ -279,9 +286,9 @@ async function Editly(config = {}) { const fromClipTime = transitionFromClip.duration * fromClipProgress; const toClipTime = transitionToClip && transitionToClip.duration * toClipProgress; - const currentTransition = transitionFromClip.transition; + const currentTransition = transitionFromClip.transition!; - const transitionNumFrames = Math.round(currentTransition.duration * fps); + const transitionNumFrames = Math.round(currentTransition.duration! * fps); // Each clip has two transitions, make sure we leave enough room: const transitionNumFramesSafe = Math.floor(Math.min(Math.min(fromClipNumFrames, toClipNumFrames != null ?
toClipNumFrames : Number.MAX_SAFE_INTEGER) / 2, transitionNumFrames)); @@ -315,7 +322,6 @@ async function Editly(config = {}) { fromClipFrameAt = transitionLastFrameIndex; toClipFrameAt = 0; - // eslint-disable-next-line no-continue continue; } @@ -341,7 +347,7 @@ async function Editly(config = {}) { const easedProgress = currentTransition.easingFunction(progress); if (logTimes) console.time('runTransitionOnFrame'); - outFrameData = runTransitionOnFrame({ fromFrame: frameSource1Data, toFrame: frameSource2Data, progress: easedProgress, transitionName: currentTransition.name, transitionParams: currentTransition.params }); + outFrameData = runTransitionOnFrame({ fromFrame: frameSource1Data!, toFrame: frameSource2Data, progress: easedProgress, transitionName: currentTransition.name, transitionParams: currentTransition.params }); if (logTimes) console.timeEnd('runTransitionOnFrame'); } else { console.warn('Got no frame data from transitionToClip!'); @@ -364,7 +370,7 @@ async function Editly(config = {}) { if (logTimes) console.time('outProcess.write'); // If we don't wait, then we get EINVAL when dealing with high resolution files (big writes) - if (!nullOutput) await new Promise((r) => outProcess.stdin.write(outFrameData, r)); + if (!nullOutput) await new Promise((r) => outProcess?.stdin?.write(outFrameData, r)); if (logTimes) console.timeEnd('outProcess.write'); @@ -375,9 +381,9 @@ async function Editly(config = {}) { if (isInTransition) toClipFrameAt += 1; } // End while loop - outProcess.stdin.end(); + outProcess.stdin?.end(); } catch (err) { - outProcess.kill(); + outProcess?.kill(); throw err; } finally { if (verbose) console.log('Cleanup'); @@ -389,7 +395,8 @@ async function Editly(config = {}) { if (verbose) console.log('Waiting for output ffmpeg process to finish'); await outProcess; } catch (err) { - if (outProcessExitCode !== 0 && !err.killed) throw err; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (outProcessExitCode !== 0 && !(err as any).killed) throw err; } } finally { if (!keepTmp) await fsExtra.remove(tmpDir); @@ -400,24 +407,30 @@ async function Editly(config = {}) { console.log(outPath); } -// Pure function to get a frame at a certain time -// TODO I think this does not respect transition durations -async function renderSingleFrame({ - time = 0, - defaults, - width = 800, - height = 600, - clips: clipsIn, - - verbose, - logTimes, - enableFfmpegLog, - allowRemoteRequests, - ffprobePath = 'ffprobe', - ffmpegPath = 'ffmpeg', - outPath = `${Math.floor(Math.random() * 1e12)}.png`, -}) { - const clips = await parseConfig({ defaults, clips: clipsIn, arbitraryAudio: [], allowRemoteRequests, ffprobePath }); +/** + * Pure function to get a frame at a certain time. + * TODO: I think this does not respect transition durations + * + * @param config - Config. 
+ */ +export async function renderSingleFrame(config: RenderSingleFrameConfig): Promise<void> { + const { + time = 0, + defaults = {}, + width = 800, + height = 600, + clips: clipsIn, + + verbose, + logTimes, + enableFfmpegLog, + allowRemoteRequests, + ffprobePath = 'ffprobe', + ffmpegPath = 'ffmpeg', + outPath = `${Math.floor(Math.random() * 1e12)}.png`, + } = config; + + const { clips } = await parseConfig({ defaults, clips: clipsIn, arbitraryAudio: [], allowRemoteRequests, ffprobePath }); let clipStartTime = 0; const clip = clips.find((c) => { if (clipStartTime <= time && clipStartTime + c.duration > time) return true; diff --git a/parseConfig.js b/src/parseConfig.ts similarity index 64% rename from parseConfig.js rename to src/parseConfig.ts index cbdf5811..6b00aacd 100644 --- a/parseConfig.js +++ b/src/parseConfig.ts @@ -10,19 +10,25 @@ import { assertFileValid, checkTransition, } from './util.js'; -import { registerFont } from './sources/fabric.js'; -import { calcTransition } from './transitions.js'; +import { registerFont } from 'canvas'; +import { calcTransition, type CalculatedTransition } from './transitions.js'; +import type { AudioTrack, CanvasLayer, EditlyBannerLayer, FabricLayer, GlLayer, ImageLayer, ImageOverlayLayer, Layer, LinearGradientLayer, NewsTitleLayer, SlideInTextLayer, SubtitleLayer, TitleBackgroundLayer, TitleLayer, DefaultOptions, Clip, VideoLayer } from './types.js'; + +export type ProcessedClip = { + layers: Layer[]; + duration: number; + transition: CalculatedTransition; +} -const dirname = fileURLToPath(new URL('.', import.meta.url)); +const dirname = fileURLToPath(new URL('..', import.meta.url)); // Cache -const loadedFonts = []; +const loadedFonts: string[] = []; -async function validateArbitraryAudio(audio, allowRemoteRequests) { +async function validateArbitraryAudio(audio: AudioTrack[] | undefined, allowRemoteRequests?: boolean) { assert(audio === undefined || Array.isArray(audio)); if (audio) { - // eslint-disable-next-line no-restricted-syntax for (const { path, cutFrom, cutTo, start } of audio) { await assertFileValid(path, allowRemoteRequests); @@ -34,7 +40,18 @@ async function validateArbitraryAudio(audio, allowRemoteRequests) { } } -export default async function parseConfig({ defaults: defaultsIn = {}, clips, arbitraryAudio: arbitraryAudioIn, backgroundAudioPath, backgroundAudioVolume, loopAudio, allowRemoteRequests, ffprobePath }) { +type ParseConfigOptions = { + defaults: DefaultOptions; + clips: Clip[]; + backgroundAudioVolume?: string | number; + backgroundAudioPath?: string; + loopAudio?: boolean; + allowRemoteRequests?: boolean; + ffprobePath: string; + arbitraryAudio: AudioTrack[]; +}; + +export default async function parseConfig({ defaults: defaultsIn = {}, clips, arbitraryAudio: arbitraryAudioIn, backgroundAudioPath, backgroundAudioVolume, loopAudio, allowRemoteRequests, ffprobePath }: ParseConfigOptions) { const defaults = { duration: 4, ...defaultsIn, @@ -47,55 +64,61 @@ export default async function parseConfig({ defaults: defaultsIn = {}, clips, ar }, }; - async function handleLayer(layer) { - const { type, ...restLayer } = layer; - + async function handleLayer(layer: Layer): Promise<Layer | Layer[]> { // https://github.com/mifi/editly/issues/39 - if (['image', 'image-overlay'].includes(type)) { - await assertFileValid(restLayer.path, allowRemoteRequests); - } else if (type === 'gl') { - await assertFileValid(restLayer.fragmentPath, allowRemoteRequests); + if (layer.type === 'image' || layer.type === 'image-overlay') { + await assertFileValid((layer as
(ImageOverlayLayer | ImageLayer)).path, allowRemoteRequests); + } else if (layer.type === 'gl') { + await assertFileValid(layer.fragmentPath, allowRemoteRequests); } - if (['fabric', 'canvas'].includes(type)) assert(typeof layer.func === 'function', '"func" must be a function'); + if (['fabric', 'canvas'].includes(layer.type)) { + assert(typeof (layer as FabricLayer | CanvasLayer).func === 'function', '"func" must be a function'); + } - if (['image', 'image-overlay', 'fabric', 'canvas', 'gl', 'radial-gradient', 'linear-gradient', 'fill-color'].includes(type)) return layer; + if (['image', 'image-overlay', 'fabric', 'canvas', 'gl', 'radial-gradient', 'linear-gradient', 'fill-color'].includes(layer.type)) { + return layer; + } // TODO if random-background radial-gradient linear etc - if (type === 'pause') return handleLayer({ ...restLayer, type: 'fill-color' }); + if (layer.type === 'pause') { + return handleLayer({ ...layer, type: 'fill-color' }); + } - if (type === 'rainbow-colors') return handleLayer({ type: 'gl', fragmentPath: join(dirname, 'shaders/rainbow-colors.frag') }); + if (layer.type === 'rainbow-colors') { + return handleLayer({ type: 'gl', fragmentPath: join(dirname, 'shaders/rainbow-colors.frag') } as GlLayer); + } - if (type === 'editly-banner') { - const { fontPath } = layer; + if (layer.type === 'editly-banner') { + const { fontPath } = layer as EditlyBannerLayer; return [ - await handleLayer({ type: 'linear-gradient' }), - await handleLayer({ fontPath, type: 'title', text: 'Made with\nEDITLY\nmifi.no' }), - ]; + await handleLayer({ type: 'linear-gradient' } as LinearGradientLayer), + await handleLayer({ type: 'title', text: 'Made with\nEDITLY\nmifi.no', fontPath } as TitleLayer), + ].flat(); } // For convenience - if (type === 'title-background') { - const { text, textColor, background, fontFamily, fontPath } = layer; + if (layer.type === 'title-background') { + const { text, textColor, background, fontFamily, fontPath } = layer as TitleBackgroundLayer; const outLayers = []; if (background) { if (background.type === 'radial-gradient') outLayers.push(await handleLayer({ type: 'radial-gradient', colors: background.colors })); else if (background.type === 'linear-gradient') outLayers.push(await handleLayer({ type: 'linear-gradient', colors: background.colors })); else if (background.color) outLayers.push(await handleLayer({ type: 'fill-color', color: background.color })); } else { - const backgroundTypes = ['radial-gradient', 'linear-gradient', 'fill-color']; + const backgroundTypes: ('radial-gradient' | 'linear-gradient' | 'fill-color')[] = ['radial-gradient', 'linear-gradient', 'fill-color']; const randomType = backgroundTypes[Math.floor(Math.random() * backgroundTypes.length)]; outLayers.push(await handleLayer({ type: randomType })); } outLayers.push(await handleLayer({ type: 'title', fontFamily, fontPath, text, textColor })); - return outLayers; + return outLayers.flat(); } - if (['title', 'subtitle', 'news-title', 'slide-in-text'].includes(type)) { - assert(layer.text, 'Please specify a text'); + if (['title', 'subtitle', 'news-title', 'slide-in-text'].includes(layer.type)) { + const { fontPath, ...rest } = layer as TitleLayer | SubtitleLayer | NewsTitleLayer | SlideInTextLayer; + assert(rest.text, 'Please specify a text'); - let { fontFamily } = layer; - const { fontPath, ...rest } = layer; + let { fontFamily } = rest; if (fontPath) { fontFamily = Buffer.from(basename(fontPath)).toString('base64'); if (!loadedFonts.includes(fontFamily)) { @@ -106,12 +129,12 @@ export 
default async function parseConfig({ defaults: defaultsIn = {}, clips, ar return { ...rest, fontFamily }; } - throw new Error(`Invalid layer type ${type}`); + throw new Error(`Invalid layer type ${layer.type}`); } - const detachedAudioByClip = {}; + const detachedAudioByClip: Record<number, AudioTrack[]> = {}; - let clipsOut = await pMap(clips, async (clip, clipIndex) => { + let clipsOut: ProcessedClip[] = await pMap(clips, async (clip, clipIndex) => { assert(typeof clip === 'object', '"clips" must contain objects with one or more layers'); const { transition: userTransition, duration: userClipDuration, layers: layersIn } = clip; @@ -128,17 +151,16 @@ export default async function parseConfig({ defaults: defaultsIn = {}, clips, ar const userClipDurationOrDefault = userClipDuration || defaults.duration; if (videoLayers.length === 0) assert(userClipDurationOrDefault, `Duration parameter is required for videoless clip ${clipIndex}`); - const transition = calcTransition(defaults, userTransition, clipIndex === clips.length - 1); + const transition = calcTransition(defaults.transition, userTransition, clipIndex === clips.length - 1); - let layersOut = flatMap(await pMap(layers, async (layerIn) => { + let layersOut = flatMap(await pMap(layers, async <T extends Layer>(layerIn: T) => { const globalLayerDefaults = defaults.layer || {}; const thisLayerDefaults = (defaults.layerType || {})[layerIn.type]; - const layer = { ...globalLayerDefaults, ...thisLayerDefaults, ...layerIn }; - const { type, path } = layer; + const layer: T = { ...globalLayerDefaults, ...thisLayerDefaults, ...layerIn }; - if (type === 'video') { - const { duration: fileDuration, width: widthIn, height: heightIn, framerateStr, rotation } = await readVideoFileInfo(ffprobePath, path); + if (layer.type === 'video') { + const { duration: fileDuration, width: widthIn, height: heightIn, framerateStr, rotation } = await readVideoFileInfo(ffprobePath, layer.path); let { cutFrom, cutTo } = layer; if (!cutFrom) cutFrom = 0; cutFrom = Math.max(cutFrom, 0); @@ -149,41 +171,41 @@ export default async function parseConfig({ defaults: defaultsIn = {}, clips, ar cutTo = Math.min(cutTo, fileDuration); assert(cutFrom < cutTo, 'cutFrom must be lower than cutTo'); - const inputDuration = cutTo - cutFrom; + const layerDuration = cutTo - cutFrom; - const isRotated = [-90, 90, 270, -270].includes(rotation); + const isRotated = rotation && [-90, 90, 270, -270].includes(rotation); const inputWidth = isRotated ? heightIn : widthIn; const inputHeight = isRotated ?
widthIn : heightIn; - return { ...layer, cutFrom, cutTo, inputDuration, framerateStr, inputWidth, inputHeight }; + return { ...layer, cutFrom, cutTo, layerDuration, framerateStr, inputWidth, inputHeight } as T; } // Audio is handled later - if (['audio', 'detached-audio'].includes(type)) return layer; + if (['audio', 'detached-audio'].includes(layer.type)) return layer; return handleLayer(layer); }, { concurrency: 1 })); let clipDuration = userClipDurationOrDefault; - const firstVideoLayer = layersOut.find((layer) => layer.type === 'video'); - if (firstVideoLayer && !userClipDuration) clipDuration = firstVideoLayer.inputDuration; + const firstVideoLayer = layersOut.find((layer): layer is VideoLayer => layer.type === 'video'); + if (firstVideoLayer && !userClipDuration) clipDuration = firstVideoLayer.layerDuration!; assert(clipDuration); // We need to map again, because for audio, we need to know the correct clipDuration - layersOut = await pMap(layersOut, async (layerIn) => { - const { type, path, stop, start = 0 } = layerIn; + layersOut = (await pMap(layersOut, async <T extends Layer>(layerIn: T) => { + if (!layerIn.start) layerIn.start = 0; // This feature allows the user to show another layer overlayed (or replacing) parts of the lower layers (start - stop) - const layerDuration = ((stop || clipDuration) - start); - assert(layerDuration > 0 && layerDuration <= clipDuration, `Invalid start ${start} or stop ${stop} (${clipDuration})`); + const layerDuration = ((layerIn.stop || clipDuration) - layerIn.start); + assert(layerDuration > 0 && layerDuration <= clipDuration, `Invalid start ${layerIn.start} or stop ${layerIn.stop} (${clipDuration})`); // TODO Also need to handle video layers (speedFactor etc) // TODO handle audio in case of start/stop - const layer = { ...layerIn, start, layerDuration }; + const layer: T = { ...layerIn, layerDuration }; - if (type === 'audio') { - const { duration: fileDuration } = await readAudioFileInfo(ffprobePath, path); + if (layer.type === 'audio') { + const { duration: fileDuration } = await readAudioFileInfo(ffprobePath, layer.path); let { cutFrom, cutTo } = layer; // console.log({ cutFrom, cutTo, fileDuration, clipDuration }); @@ -197,22 +219,20 @@ export default async function parseConfig({ defaults: defaultsIn = {}, clips, ar cutTo = Math.min(cutTo, fileDuration); assert(cutFrom < cutTo, 'cutFrom must be lower than cutTo'); - const inputDuration = cutTo - cutFrom; + const layerDuration = cutTo - cutFrom; - const speedFactor = clipDuration / inputDuration; + const speedFactor = clipDuration / layerDuration; return { ...layer, cutFrom, cutTo, speedFactor }; } - if (type === 'video') { - const { inputDuration } = layer; - + if (layer.type === 'video') { let speedFactor; // If user explicitly specified duration for clip, it means that should be the output duration of the video if (userClipDuration) { // Later we will speed up or slow down video using this factor - speedFactor = userClipDuration / inputDuration; + speedFactor = userClipDuration / layerDuration; } else { speedFactor = 1; } @@ -222,15 +242,14 @@ export default async function parseConfig({ defaults: defaultsIn = {}, clips, ar // These audio tracks are detached from the clips (can run over multiple clips) // This is useful so we can have audio start relative to their parent clip's start time - if (type === 'detached-audio') { - const { cutFrom, cutTo, mixVolume } = layer; + if (layer.type === 'detached-audio') { if (!detachedAudioByClip[clipIndex]) detachedAudioByClip[clipIndex] = []; -
detachedAudioByClip[clipIndex].push({ path, cutFrom, cutTo, mixVolume, start }); + detachedAudioByClip[clipIndex].push(layer); return undefined; // Will be filtered out } return layer; - }); + })).filter((l) => l !== undefined); // Filter out deleted layers layersOut = layersOut.filter((l) => l); @@ -243,7 +262,7 @@ export default async function parseConfig({ defaults: defaultsIn = {}, clips, ar }, { concurrency: 1 }); let totalClipDuration = 0; - const clipDetachedAudio = []; + const clipDetachedAudio: AudioTrack[] = []; // Need to map again because now we know all clip durations, and we can adjust transitions so they are safe clipsOut = await pMap(clipsOut, async (clip, i) => { @@ -255,11 +274,10 @@ export default async function parseConfig({ defaults: defaultsIn = {}, clips, ar let safeTransitionDuration = 0; if (nextClip) { // Each clip can have two transitions, make sure we leave enough room: - safeTransitionDuration = Math.min(clip.duration / 2, nextClip.duration / 2, clip.transition.duration); + safeTransitionDuration = Math.min(clip.duration / 2, nextClip.duration / 2, clip.transition!.duration!); } // We now know all clip durations so we can calculate the offset for detached audio tracks - // eslint-disable-next-line no-restricted-syntax for (const { start, ...rest } of (detachedAudioByClip[i] || [])) { clipDetachedAudio.push({ ...rest, start: totalClipDuration + (start || 0) }); } diff --git a/sources/fabric.js b/src/sources/fabric.ts similarity index 64% rename from sources/fabric.js rename to src/sources/fabric.ts index 9b19a00b..4af4ba59 100644 --- a/sources/fabric.js +++ b/src/sources/fabric.ts @@ -1,22 +1,15 @@ import * as fabric from 'fabric/node'; -import { createCanvas, ImageData } from 'canvas'; +import { type CanvasRenderingContext2D, createCanvas, ImageData } from 'canvas'; import { boxBlurImage } from '../BoxBlur.js'; +import type { CreateFrameSourceOptions, FrameSource, CanvasLayer, CustomFabricFunctionCallbacks, Layer, OptionalPromise } from '../types.js'; -export { registerFont } from 'canvas'; +export type FabricFrameSourceOptions<T extends Layer> = CreateFrameSourceOptions<T> & { fabric: typeof fabric }; +export type FabricFrameSourceCallback<T extends Layer> = (options: FabricFrameSourceOptions<T>) => OptionalPromise<CustomFabricFunctionCallbacks>; // Fabric is used as a fundament for compositing layers in editly -export function canvasToRgba(ctx) { - // const bgra = canvas.toBuffer('raw'); - - /* const rgba = Buffer.allocUnsafe(bgra.length); - for (let i = 0; i < bgra.length; i += 4) { - rgba[i + 0] = bgra[i + 2]; - rgba[i + 1] = bgra[i + 1]; - rgba[i + 2] = bgra[i + 0]; - rgba[i + 3] = bgra[i + 3]; - } */ - +export function canvasToRgba(ctx: CanvasRenderingContext2D) { // We cannot use toBuffer('raw') because it returns pre-multiplied alpha data (a different format) // https://gamedev.stackexchange.com/questions/138813/whats-the-difference-between-alpha-and-premulalpha // https://github.com/Automattic/node-canvas#image-pixel-formats-experimental @@ -24,7 +17,7 @@ export function canvasToRgba(ctx) { return Buffer.from(imageData.data); } -export function fabricCanvasToRgba(fabricCanvas) { +export function fabricCanvasToRgba(fabricCanvas: fabric.StaticCanvas) { const internalCanvas = fabricCanvas.getNodeCanvas(); const ctx = internalCanvas.getContext('2d'); @@ -34,11 +27,11 @@ export function fabricCanvasToRgba(fabricCanvas) { return canvasToRgba(ctx); } -export function createFabricCanvas({ width, height }) { +export function createFabricCanvas({ width, height }: { width: number, height:
number }) { return new fabric.StaticCanvas(null, { width, height }); } -export async function renderFabricCanvas(canvas) { +export async function renderFabricCanvas(canvas: fabric.StaticCanvas) { // console.time('canvas.renderAll'); canvas.renderAll(); // console.timeEnd('canvas.renderAll'); @@ -48,7 +41,7 @@ export async function renderFabricCanvas(canvas) { return rgba; } -export function toUint8ClampedArray(buffer) { +export function toUint8ClampedArray(buffer: Buffer) { // return Uint8ClampedArray.from(buffer); // Some people are finding that manual copying is orders of magnitude faster than Uint8ClampedArray.from // Since I'm getting similar times for both methods, then why not: @@ -59,12 +52,13 @@ export function toUint8ClampedArray(buffer) { return data; } -export async function rgbaToFabricImage({ width, height, rgba }) { +export async function rgbaToFabricImage({ width, height, rgba }: { width: number, height: number, rgba: Buffer }) { const canvas = createCanvas(width, height); // FIXME: Fabric tries to add a class to this, but DOM is not defined. Because node? // https://github.com/fabricjs/fabric.js/issues/10032 - canvas.classList = new Set(); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (canvas as any).classList = new Set(); const ctx = canvas.getContext('2d'); // https://developer.mozilla.org/en-US/docs/Web/API/ImageData/ImageData @@ -74,10 +68,11 @@ export async function rgbaToFabricImage({ width, height, rgba }) { return new fabric.FabricImage(canvas); } -export async function createFabricFrameSource(func, { width, height, ...rest }) { - const onInit = async () => func(({ width, height, fabric, ...rest })); - - const { onRender = () => {}, onClose = () => {} } = await onInit() || {}; +export async function createFabricFrameSource<T extends Layer>( + func: FabricFrameSourceCallback<T>, + options: CreateFrameSourceOptions<T> +): Promise<FrameSource> { + const { onRender = () => { }, onClose = () => { } } = await func({ fabric, ...options }) || {}; return { readNextFrame: onRender, @@ -85,13 +80,13 @@ export async function createFabricFrameSource(func, { width, height, ...rest }) }; } -export async function createCustomCanvasFrameSource({ width, height, params }) { +export async function createCustomCanvasFrameSource({ width, height, params }: Pick<CreateFrameSourceOptions<CanvasLayer>, "width" | "height" | "params">): Promise<FrameSource> { const canvas = createCanvas(width, height); const context = canvas.getContext('2d'); const { onClose, onRender } = await params.func(({ width, height, canvas })); - async function readNextFrame(progress) { + async function readNextFrame(progress: number) { context.clearRect(0, 0, canvas.width, canvas.height); await onRender(progress); // require('fs').writeFileSync(`${new Date().getTime()}.png`, canvas.toBuffer('image/png')); @@ -105,9 +100,14 @@ export async function createCustomCanvasFrameSource({ width, height, params }) { close: onClose, }; } +export type BlurImageOptions = { + mutableImg: fabric.FabricImage, + width: number, + height: number, +} -export async function blurImage({ mutableImg, width, height }) { - mutableImg.setOptions({ scaleX: width / mutableImg.width, scaleY: height / mutableImg.height }); +export async function blurImage({ mutableImg, width, height }: BlurImageOptions) { + mutableImg.set({ scaleX: width / mutableImg.width, scaleY: height / mutableImg.height }); const canvas = mutableImg.toCanvasElement(); const ctx = canvas.getContext('2d'); diff --git a/sources/fabric/fabricFrameSources.js b/src/sources/fabric/fabricFrameSources.ts similarity index 79% rename from
sources/fabric/fabricFrameSources.js rename to src/sources/fabric/fabricFrameSources.ts index cc70da9c..9f75cd83 100644 --- a/sources/fabric/fabricFrameSources.js +++ b/src/sources/fabric/fabricFrameSources.ts @@ -4,15 +4,16 @@ import fileUrl from 'file-url'; import { getRandomGradient, getRandomColors } from '../../colors.js'; import { easeOutExpo, easeInOutCubic } from '../../transitions.js'; import { getPositionProps, getFrameByKeyFrames, isUrl } from '../../util.js'; -import { blurImage } from '../fabric.js'; +import { blurImage, type FabricFrameSourceOptions } from '../fabric.js'; +import type { FabricLayer, FillColorLayer, ImageLayer, ImageOverlayLayer, KenBurns, LinearGradientLayer, NewsTitleLayer, RadialGradientLayer, SlideInTextLayer, SubtitleLayer, TitleLayer } from '../../types.js'; // http://fabricjs.com/kitchensink const defaultFontFamily = 'sans-serif'; -const loadImage = (pathOrUrl) => fabric.util.loadImage(isUrl(pathOrUrl) ? pathOrUrl : fileUrl(pathOrUrl)); +const loadImage = (pathOrUrl: string) => fabric.util.loadImage(isUrl(pathOrUrl) ? pathOrUrl : fileUrl(pathOrUrl)); -function getZoomParams({ progress, zoomDirection, zoomAmount }) { +function getZoomParams({ progress, zoomDirection, zoomAmount = 0.1 }: KenBurns & { progress: number }) { let scaleFactor = 1; if (zoomDirection === 'left' || zoomDirection === 'right') return 1.3 + zoomAmount; if (zoomDirection === 'in') scaleFactor = (1 + zoomAmount * progress); @@ -20,7 +21,7 @@ return scaleFactor; } -function getTranslationParams({ progress, zoomDirection, zoomAmount }) { +function getTranslationParams({ progress, zoomDirection, zoomAmount = 0.1 }: KenBurns & { progress: number }) { let translation = 0; const range = zoomAmount * 1000; @@ -30,7 +31,7 @@ return translation; } -export async function imageFrameSource({ verbose, params, width, height }) { +export async function imageFrameSource({ verbose, params, width, height }: FabricFrameSourceOptions<ImageLayer>) { const { path, zoomDirection = 'in', zoomAmount = 0.1, resizeMode = 'contain-blur' } = params; if (verbose) console.log('Loading', path); @@ -44,7 +45,7 @@ top: height / 2, }); - let blurredImg; + let blurredImg: fabric.FabricImage; // Blurred version if (resizeMode === 'contain-blur') { // If we dispose mutableImg, seems to cause issues with the rendering of blurredImg @@ -53,7 +54,7 @@ blurredImg = await blurImage({ mutableImg, width, height }); } - async function onRender(progress, canvas) { + async function onRender(progress: number, canvas: fabric.StaticCanvas) { const img = createImg(); const scaleFactor = getZoomParams({ progress, zoomDirection, zoomAmount }); @@ -77,7 +78,7 @@ img.scaleToHeight(height * scaleFactor); } } else if (resizeMode === 'stretch') { - img.setOptions({ scaleX: (width / img.width) * scaleFactor, scaleY: (height / img.height) * scaleFactor }); + img.set({ scaleX: (width / img.width) * scaleFactor, scaleY: (height / img.height) * scaleFactor }); } if (blurredImg) canvas.add(blurredImg); @@ -92,12 +93,12 @@ return { onRender, onClose }; } -export async function fillColorFrameSource({ params, width, height
}) { +export async function fillColorFrameSource({ params, width, height }: FabricFrameSourceOptions<FillColorLayer>) { const { color } = params; const randomColor = getRandomColors(1)[0]; - async function onRender(progress, canvas) { + async function onRender(_: number, canvas: fabric.StaticCanvas) { const rect = new fabric.Rect({ left: 0, right: 0, @@ -111,17 +112,17 @@ export async function fillColorFrameSource({ params, width, height }) { return { onRender }; } -function getRekt(width, height) { +function getRekt(width: number, height: number) { // width and height with room to rotate return new fabric.Rect({ originX: 'center', originY: 'center', left: width / 2, top: height / 2, width: width * 2, height: height * 2 }); } -export async function radialGradientFrameSource({ width, height, params }) { +export async function radialGradientFrameSource({ width, height, params }: FabricFrameSourceOptions<RadialGradientLayer>) { const { colors: inColors } = params; const randomColors = getRandomGradient(); - async function onRender(progress, canvas) { + async function onRender(progress: number, canvas: fabric.StaticCanvas) { // console.log('progress', progress); const max = Math.max(width, height); @@ -158,13 +159,13 @@ export async function radialGradientFrameSource({ width, height, params }) { return { onRender }; } -export async function linearGradientFrameSource({ width, height, params }) { +export async function linearGradientFrameSource({ width, height, params }: FabricFrameSourceOptions<LinearGradientLayer>) { const { colors: inColors } = params; const randomColors = getRandomGradient(); const colors = inColors && inColors.length === 2 ? inColors : randomColors; - async function onRender(progress, canvas) { + async function onRender(progress: number, canvas: fabric.StaticCanvas) { const rect = getRekt(width, height); rect.set('fill', new fabric.Gradient({ @@ -187,9 +188,9 @@ return { onRender }; } -export async function subtitleFrameSource({ width, height, params }) { +export async function subtitleFrameSource({ width, height, params }: FabricFrameSourceOptions<SubtitleLayer>) { const { text, textColor = '#ffffff', backgroundColor = 'rgba(0,0,0,0.3)', fontFamily = defaultFontFamily, delay = 0, speed = 1 } = params; - async function onRender(progress, canvas) { + async function onRender(progress: number, canvas: fabric.StaticCanvas) { const easedProgress = easeOutExpo(Math.max(0, Math.min((progress - delay) * speed, 1))); const min = Math.min(width, height); @@ -226,7 +227,7 @@ return { onRender }; } -export async function imageOverlayFrameSource({ params, width, height }) { +export async function imageOverlayFrameSource({ params, width, height }: FabricFrameSourceOptions<ImageOverlayLayer>) { const { path, position, width: relWidth, height: relHeight, zoomDirection, zoomAmount = 0.1 } = params; const imgData = await loadImage(path); @@ -240,7 +241,7 @@ top, }); - async function onRender(progress, canvas) { + async function onRender(progress: number, canvas: fabric.StaticCanvas) { const scaleFactor = getZoomParams({ progress, zoomDirection, zoomAmount }); const translationParams = getTranslationParams({ progress, zoomDirection, zoomAmount }); @@ -261,10 +262,10 @@ return { onRender }; } -export async function titleFrameSource({ width, height, params }) { +export async function
titleFrameSource({ width, height, params }: FabricFrameSourceOptions<TitleLayer>) { const { text, textColor = '#ffffff', fontFamily = defaultFontFamily, position = 'center', zoomDirection = 'in', zoomAmount = 0.2 } = params; - async function onRender(progress, canvas) { + async function onRender(progress: number, canvas: fabric.StaticCanvas) { // console.log('progress', progress); const min = Math.min(width, height); @@ -284,7 +285,7 @@ }); // We need the text as an image in order to scale it - const textImage = textBox.cloneAsImage(); + const textImage = textBox.cloneAsImage({}); const { left, top, originX, originY } = getPositionProps({ position, width, height }); @@ -302,10 +303,10 @@ return { onRender }; } -export async function newsTitleFrameSource({ width, height, params }) { +export async function newsTitleFrameSource({ width, height, params }: FabricFrameSourceOptions<NewsTitleLayer>) { const { text, textColor = '#ffffff', backgroundColor = '#d02a42', fontFamily = defaultFontFamily, delay = 0, speed = 1 } = params; - async function onRender(progress, canvas) { + async function onRender(progress: number, canvas: fabric.StaticCanvas) { const min = Math.min(width, height); const fontSize = Math.round(min * 0.05); @@ -345,7 +346,7 @@ return { onRender }; } -async function getFadedObject({ object, progress }) { +async function getFadedObject<T extends fabric.FabricObject>({ object, progress }: { object: T, progress: number }) { const rect = new fabric.Rect({ left: 0, width: object.width, @@ -366,10 +367,10 @@ ], })); - const gradientMaskImg = rect.cloneAsImage(); - const fadedImage = object.cloneAsImage(); + const gradientMaskImg = rect.cloneAsImage({}); + const fadedImage = object.cloneAsImage({}); - fadedImage.filters.push(new fabric.FabricImage.filters.BlendImage({ + fadedImage.filters.push(new fabric.filters.BlendImage({ image: gradientMaskImg, mode: 'multiply', })); @@ -379,14 +380,20 @@ return fadedImage; } -export async function slideInTextFrameSource({ width, height, params: { position, text, fontSize = 0.05, charSpacing = 0.1, color = '#ffffff', fontFamily = defaultFontFamily } = {} }) { - async function onRender(progress, canvas) { +export async function slideInTextFrameSource({ width, height, params }: FabricFrameSourceOptions<SlideInTextLayer>) { + const { position, text, fontSize = 0.05, charSpacing = 0.1, textColor = '#ffffff', color = undefined, fontFamily = defaultFontFamily } = params; + + if (color) { + console.warn('slide-in-text: color is deprecated, use textColor.'); + } + + async function onRender(progress: number, canvas: fabric.StaticCanvas) { const fontSizeAbs = Math.round(width * fontSize); const { left, top, originX, originY } = getPositionProps({ position, width, height }); const textBox = new fabric.FabricText(text, { - fill: color, + fill: color ??
textColor, fontFamily, fontSize: fontSizeAbs, charSpacing: width * charSpacing, @@ -400,7 +407,7 @@ export async function slideInTextFrameSource({ width, height, params: { position ], progress); const fadedObject = await getFadedObject({ object: textBox, progress: easeInOutCubic(textSlide) }); - fadedObject.setOptions({ + fadedObject.set({ originX, originY, top, @@ -414,6 +421,6 @@ export async function slideInTextFrameSource({ width, height, params: { position return { onRender }; } -export async function customFabricFrameSource({ canvas, width, height, params }) { - return params.func(({ width, height, fabric, canvas, params })); +export async function customFabricFrameSource({ width, height, fabric, params }: FabricFrameSourceOptions<FabricLayer>) { + return params.func(({ width, height, fabric, params })); } diff --git a/sources/frameSource.js b/src/sources/frameSource.ts similarity index 71% rename from sources/frameSource.js rename to src/sources/frameSource.ts index 8a13607e..2de0652d 100644 --- a/sources/frameSource.js +++ b/src/sources/frameSource.ts @@ -7,6 +7,7 @@ import { createFabricFrameSource, createFabricCanvas, renderFabricCanvas, + type FabricFrameSourceCallback, } from './fabric.js'; import { customFabricFrameSource, @@ -22,8 +23,12 @@ import { } from './fabric/fabricFrameSources.js'; import createVideoFrameSource from './videoFrameSource.js'; import createGlFrameSource from './glFrameSource.js'; +import type { CreateFrameSource, CreateFrameSourceOptions, DebugOptions } from '../types.js'; +import { ProcessedClip } from '../parseConfig.js'; -const fabricFrameSources = { +// FIXME[ts] +// eslint-disable-next-line @typescript-eslint/no-explicit-any +const fabricFrameSources: Record<string, FabricFrameSourceCallback<any>> = { fabric: customFabricFrameSource, image: imageFrameSource, 'image-overlay': imageOverlayFrameSource, @@ -36,7 +41,26 @@ 'slide-in-text': slideInTextFrameSource, }; -export async function createFrameSource({ clip, clipIndex, width, height, channels, verbose, logTimes, ffmpegPath, ffprobePath, enableFfmpegLog, framerateStr }) { +// FIXME[ts] +// eslint-disable-next-line @typescript-eslint/no-explicit-any +const frameSources: Record<string, CreateFrameSource<any>> = { + video: createVideoFrameSource, + gl: createGlFrameSource, + canvas: createCustomCanvasFrameSource, +}; + +type FrameSourceOptions = DebugOptions & { + clip: ProcessedClip; + clipIndex: number; + ffmpegPath: string; + ffprobePath: string; + width: number, + height: number, + channels: number, + framerateStr: string, +} + +export async function createFrameSource({ clip, clipIndex, width, height, channels, verbose, logTimes, ffmpegPath, ffprobePath, enableFfmpegLog, framerateStr }: FrameSourceOptions) { const { layers, duration } = clip; const visualLayers = layers.filter((layer) => layer.type !== 'audio'); @@ -45,15 +69,13 @@ const { type, ...params } = layer; if (verbose) console.log('createFrameSource', type, 'clip', clipIndex, 'layer', layerIndex); - let createFrameSourceFunc; + let createFrameSourceFunc: CreateFrameSource<any>; if (fabricFrameSources[type]) { - createFrameSourceFunc = async (opts) => createFabricFrameSource(fabricFrameSources[type], opts); + // FIXME[TS] + // eslint-disable-next-line @typescript-eslint/no-explicit-any + createFrameSourceFunc = async (opts: CreateFrameSourceOptions<any>) => createFabricFrameSource(fabricFrameSources[type], opts); } else { - createFrameSourceFunc = { - video: createVideoFrameSource, - gl: createGlFrameSource, - canvas:
createCustomCanvasFrameSource, - }[type]; + createFrameSourceFunc = frameSources[type]; } assert(createFrameSourceFunc, `Invalid type ${type}`); @@ -62,14 +84,14 @@ return { layer, frameSource }; }, { concurrency: 1 }); - async function readNextFrame({ time }) { + async function readNextFrame({ time }: { time: number }) { const canvas = createFabricCanvas({ width, height }); - // eslint-disable-next-line no-restricted-syntax + for (const { frameSource, layer } of layerFrameSources) { // console.log({ start: layer.start, stop: layer.stop, layerDuration: layer.layerDuration, time }); - const offsetTime = time - layer.start; - const offsetProgress = offsetTime / layer.layerDuration; + const offsetTime = time - (layer?.start ?? 0); + const offsetProgress = offsetTime / layer.layerDuration!; // console.log({ offsetProgress }); const shouldDrawLayer = offsetProgress >= 0 && offsetProgress <= 1; @@ -102,7 +124,7 @@ } async function close() { - await pMap(layerFrameSources, async ({ frameSource }) => frameSource.close()); + await pMap(layerFrameSources, async ({ frameSource }) => frameSource.close?.()); } return { diff --git a/sources/glFrameSource.js b/src/sources/glFrameSource.ts similarity index 71% rename from sources/glFrameSource.js rename to src/sources/glFrameSource.ts index 48e81847..07fa4a03 100644 --- a/sources/glFrameSource.js +++ b/src/sources/glFrameSource.ts @@ -1,10 +1,11 @@ import GL from 'gl'; import createShader from 'gl-shader'; -import fsExtra from 'fs-extra'; +import { readFile } from 'node:fs/promises'; +import type { GlLayer, CreateFrameSourceOptions, FrameSource } from '../types.js'; // I have no idea what I'm doing but it works ¯\_(ツ)_/¯ -export default async function createGlFrameSource({ width, height, channels, params }) { +export default async function createGlFrameSource({ width, height, channels, params }: CreateFrameSourceOptions<GlLayer>): Promise<FrameSource> { const gl = GL(width, height); const defaultVertexSrc = ` @@ -13,24 +14,30 @@ export default async function createGlFrameSource({ width, height, channels, par gl_Position = vec4(position, 0.0, 1.0 ); } `; - const { vertexPath, fragmentPath, vertexSrc: vertexSrcIn, fragmentSrc: fragmentSrcIn, speed = 1 } = params; + const { + vertexPath, + fragmentPath, + vertexSrc: vertexSrcIn, + fragmentSrc: fragmentSrcIn, + speed = 1 + } = params; let fragmentSrc = fragmentSrcIn; let vertexSrc = vertexSrcIn; - if (fragmentPath) fragmentSrc = await fsExtra.readFile(fragmentPath); - if (vertexPath) vertexSrc = await fsExtra.readFile(vertexPath); + if (fragmentPath) fragmentSrc = (await readFile(fragmentPath)).toString(); + if (vertexPath) vertexSrc = (await readFile(vertexPath)).toString(); if (!vertexSrc) vertexSrc = defaultVertexSrc; - const shader = createShader(gl, vertexSrc, fragmentSrc); + const shader = createShader(gl, vertexSrc, fragmentSrc ??
''); const buffer = gl.createBuffer(); gl.bindBuffer(gl.ARRAY_BUFFER, buffer); // https://blog.mayflower.de/4584-Playing-around-with-pixel-shaders-in-WebGL.html gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([-1, -1, 1, -1, 1, 1, -1, 1]), gl.STATIC_DRAW); - async function readNextFrame(progress) { + async function readNextFrame(progress: number) { shader.bind(); shader.attributes.position.pointer(); @@ -56,6 +63,6 @@ export default async function createGlFrameSource({ width, height, channels, par return { readNextFrame, - close: () => {}, + close: () => { }, }; } diff --git a/sources/videoFrameSource.js b/src/sources/videoFrameSource.ts similarity index 88% rename from sources/videoFrameSource.js rename to src/sources/videoFrameSource.ts index 57b8bdb3..c57f348b 100644 --- a/sources/videoFrameSource.js +++ b/src/sources/videoFrameSource.ts @@ -8,8 +8,9 @@ import { rgbaToFabricImage, blurImage, } from './fabric.js'; +import type { CreateFrameSourceOptions, VideoLayer } from '../types.js'; -export default async ({ width: canvasWidth, height: canvasHeight, channels, framerateStr, verbose, logTimes, ffmpegPath, ffprobePath, enableFfmpegLog, params }) => { +export default async ({ width: canvasWidth, height: canvasHeight, channels, framerateStr, verbose, logTimes, ffmpegPath, ffprobePath, enableFfmpegLog, params }: CreateFrameSourceOptions<VideoLayer>) => { const { path, cutFrom, cutTo, resizeMode = 'contain-blur', speedFactor, inputWidth, inputHeight, width: requestedWidthRel, height: requestedHeightRel, left: leftRel = 0, top: topRel = 0, originX = 'left', originY = 'top', fabricImagePostProcessing = null } = params; const requestedWidth = requestedWidthRel ? Math.round(requestedWidthRel * canvasWidth) : canvasWidth; @@ -18,9 +19,9 @@ const left = leftRel * canvasWidth; const top = topRel * canvasHeight; - const ratioW = requestedWidth / inputWidth; - const ratioH = requestedHeight / inputHeight; - const inputAspectRatio = inputWidth / inputHeight; + const ratioW = requestedWidth / inputWidth!; + const ratioH = requestedHeight / inputHeight!; + const inputAspectRatio = inputWidth! / inputHeight!; let targetWidth = requestedWidth; let targetHeight = requestedHeight; @@ -77,8 +78,8 @@ // https://superuser.com/a/1116905/658247 let inputCodec; - if (firstVideoStream.codec_name === 'vp8') inputCodec = 'libvpx'; - else if (firstVideoStream.codec_name === 'vp9') inputCodec = 'libvpx-vp9'; + if (firstVideoStream?.codec_name === 'vp8') inputCodec = 'libvpx'; + else if (firstVideoStream?.codec_name === 'vp9') inputCodec = 'libvpx-vp9'; // http://zulko.github.io/blog/2013/09/27/read-and-write-video-frames-in-python-using-ffmpeg/ // Testing: ffmpeg -i 'vid.mov' -t 1 -vcodec rawvideo -pix_fmt rgba -f image2pipe - | ffmpeg -f rawvideo -vcodec rawvideo -pix_fmt rgba -s 2166x1650 -i - -vf format=yuv420p -vcodec libx264 -y out.mp4 @@ -86,9 +87,9 @@ const args = [ ...getFfmpegCommonArgs({ enableFfmpegLog }), ...(inputCodec ? ['-vcodec', inputCodec] : []), - ...(cutFrom ? ['-ss', cutFrom] : []), + ...(cutFrom ? ['-ss', cutFrom.toString()] : []), '-i', path, - ...(cutTo ? ['-t', (cutTo - cutFrom) * speedFactor] : []), + ...(cutTo ? ['-t', ((cutTo - cutFrom!)
* speedFactor!).toString()] : []), '-vf', `${ptsFilter}fps=${framerateStr},${scaleFilter}`, '-map', 'v:0', '-vcodec', 'rawvideo', @@ -100,9 +101,9 @@ export default async ({ width: canvasWidth, height: canvasHeight, channels, fram const ps = execa(ffmpegPath, args, { encoding: null, buffer: false, stdin: 'ignore', stdout: 'pipe', stderr: process.stderr }); - const stream = ps.stdout; + const stream = ps.stdout!; - let timeout; + let timeout: NodeJS.Timeout; let ended = false; stream.once('end', () => { @@ -124,8 +125,8 @@ export default async ({ width: canvasWidth, height: canvasHeight, channels, fram return null; } - async function readNextFrame(progress, canvas, time) { - const rgba = await new Promise((resolve, reject) => { + async function readNextFrame(progress: number, canvas: fabric.StaticCanvas, time: number) { + const rgba = await new Promise | void>((resolve, reject) => { const frame = getNextFrame(); if (frame) { resolve(frame); @@ -145,13 +146,12 @@ export default async ({ width: canvasWidth, height: canvasHeight, channels, fram function cleanup() { stream.pause(); - // eslint-disable-next-line no-use-before-define stream.removeListener('data', handleChunk); stream.removeListener('end', onEnd); stream.removeListener('error', reject); } - function handleChunk(chunk) { + function handleChunk(chunk: Buffer) { const nCopied = Math.min(buf.length - length, chunk.length); chunk.copy(buf, length, 0, nCopied); length += nCopied; @@ -197,7 +197,7 @@ export default async ({ width: canvasWidth, height: canvasHeight, channels, fram const img = await rgbaToFabricImage({ width: targetWidth, height: targetHeight, rgba }); if (logTimes) console.timeEnd('rgbaToFabricImage'); - img.setOptions({ + img.set({ originX, originY, }); @@ -211,15 +211,15 @@ export default async ({ width: canvasWidth, height: canvasHeight, channels, fram centerOffsetY = (dirY * (requestedHeight - targetHeight)) / 2; } - img.setOptions({ + img.set({ left: left + centerOffsetX, top: top + centerOffsetY, }); if (resizeMode === 'contain-blur') { - const mutableImg = img.cloneAsImage(); + const mutableImg = img.cloneAsImage({}); const blurredImg = await blurImage({ mutableImg, width: requestedWidth, height: requestedHeight }); - blurredImg.setOptions({ + blurredImg.set({ left, top, originX, diff --git a/src/transitions.ts b/src/transitions.ts new file mode 100644 index 00000000..db674c45 --- /dev/null +++ b/src/transitions.ts @@ -0,0 +1,68 @@ +import assert from 'assert'; +import type { Transition } from './types.js'; + +export type EasingFunction = (progress: number) => number; + +export type CalculatedTransition = Transition & { + duration: number; + easingFunction: EasingFunction; +} + +const randomTransitionsSet = ['fade', 'fadegrayscale', 'directionalwarp', 'crosswarp', 'dreamyzoom', 'burn', 'crosszoom', 'simplezoom', 'linearblur', 'directional-left', 'directional-right', 'directional-up', 'directional-down']; + +function getRandomTransition() { + return randomTransitionsSet[Math.floor(Math.random() * randomTransitionsSet.length)]; +} + +// https://easings.net/ + +export function easeOutExpo(x: number) { + return x === 1 ? 1 : 1 - (2 ** (-10 * x)); +} + +export function easeInOutCubic(x: number) { + return x < 0.5 ? 
4 * x * x * x : 1 - ((-2 * x + 2) ** 3) / 2; +} + +export function linear(x: number) { + return x; +} + +function getTransitionEasingFunction(easing: string | null | undefined, transitionName?: string): EasingFunction { + if (easing !== null) { + // FIXME[TS]: `easing` always appears to be null or undefined, so this never gets called + if (easing) return { easeOutExpo }[easing] || linear; + if (transitionName === 'directional') return easeOutExpo; + } + return linear; +} + +const TransitionAliases: Record<string, Partial<Transition>> = { + 'directional-left': { name: 'directional', params: { direction: [1, 0] } }, + 'directional-right': { name: 'directional', params: { direction: [-1, 0] } }, + 'directional-down': { name: 'directional', params: { direction: [0, 1] } }, + 'directional-up': { name: 'directional', params: { direction: [0, -1] } }, +} + +export function calcTransition(defaults: Transition | null | undefined, transition: Transition | null | undefined, isLastClip: boolean): CalculatedTransition { + if (transition === null || isLastClip) return { duration: 0, easingFunction: linear }; + + let transitionOrDefault: Transition = { ...defaults, ...transition }; + + assert(!transitionOrDefault.duration || transitionOrDefault.name, 'Please specify transition name or set duration to 0'); + + if (transitionOrDefault.name === 'random' && transitionOrDefault.duration) { + transitionOrDefault = { ...transitionOrDefault, name: getRandomTransition() }; + } + + const aliasedTransition = transitionOrDefault.name ? TransitionAliases[transitionOrDefault.name] : undefined; + if (aliasedTransition) { + transitionOrDefault = { ...transitionOrDefault, ...aliasedTransition }; + } + + return { + ...transitionOrDefault, + duration: transitionOrDefault.duration || 0, + easingFunction: getTransitionEasingFunction(transitionOrDefault.easing, transitionOrDefault.name), + }; +} diff --git a/src/types.ts b/src/types.ts new file mode 100644 index 00000000..2b3ddd1a --- /dev/null +++ b/src/types.ts @@ -0,0 +1,1131 @@ +// TODO[ts]: Move these elsewhere + +import type * as Fabric from 'fabric/node'; +import type { Canvas } from "canvas"; + +/** Little utility */ +export type OptionalPromise<T> = Promise<T> | T; + +export type OriginX = Fabric.TOriginX; + +export type OriginY = Fabric.TOriginY; + +/** + * How to fit image to screen. Can be one of: + * - `'contain'` - All the video will be contained within the frame and letterboxed. + * - `'contain-blur'` - Like contain, but with a blurred copy as the letterbox. + * - `'cover'` - Video will be cropped to cover the whole screen (aspect ratio preserved). + * - `'stretch'` - Video will be stretched to cover the whole screen (aspect ratio ignored). + * + * @default 'contain-blur' + * @see [Example 'image.json5']{@link https://github.com/mifi/editly/blob/master/examples/image.json5} + * @see [Example 'videos.json5']{@link https://github.com/mifi/editly/blob/master/examples/videos.json5} + */ +export type ResizeMode = + 'contain' | + 'contain-blur' | + 'cover' | + 'stretch'; + +/** + * An object, where `{ x: 0, y: 0 }` is the upper left corner of the screen and `{ x: 1, y: 1 }` is the lower right corner. + */ +export interface PositionObject { + + /** + * X-position relative to video width. + */ + x: number; + + /** + * Y-position relative to video height. + */ + y: number; + + /** + * X-anchor position of the object. + */ + originX?: OriginX; + + /** + * Y-anchor position of the object. + */ + originY?: OriginY; + +} + +/** + * Certain layers support the position parameter.
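+ * For example, `position: 'center'`, or `position: { x: 0.5, y: 0.9, originX: 'center', originY: 'bottom' }` to anchor a layer's bottom-center point near the bottom of the screen.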
+ * + * @see [Position parameter]{@link https://github.com/mifi/editly#position-parameter} + * @see [Example 'position.json5']{@link https://github.com/mifi/editly/blob/master/examples/position.json5} + */ +export type Position = + 'top' | + 'top-left' | + 'top-right' | + 'center' | + 'center-left' | + 'center-right' | + 'bottom' | + 'bottom-left' | + 'bottom-right' | + PositionObject; + +/** + * @see [Curve types]{@link https://trac.ffmpeg.org/wiki/AfadeCurves} + */ +export type CurveType = + 'tri' | + 'qsin' | + 'hsin' | + 'esin' | + 'log' | + 'ipar' | + 'qua' | + 'cub' | + 'squ' | + 'cbr' | + 'par' | + 'exp' | + 'iqsin' | + 'ihsin' | + 'dese' | + 'desi' | + 'losi' | + 'nofade' | + string; + +/** + * @see [Transition types]{@link https://github.com/mifi/editly#transition-types} + */ +export type TransitionType = + 'directional-left' | + 'directional-right' | + 'directional-up' | + 'directional-down' | + 'random' | + 'dummy' | + string; + +/** + * WARNING: Undocumented feature! + */ +export type GLTextureLike = { + bind: (unit: number) => number, + shape: [number, number], +}; + +/** + * WARNING: Undocumented feature! + */ +export interface TransitionParams { + + /** + * WARNING: Undocumented feature! + */ + [key: string]: number | boolean | GLTextureLike | number[]; + +} + +export interface Transition { + /** + * Transition duration. + * + * @default 0.5 + */ + duration?: number; + + /** + * Transition type. + * + * @default 'random' + * @see [Transition types]{@link https://github.com/mifi/editly#transition-types} + */ + name?: TransitionType; + + /** + * [Fade out curve]{@link https://trac.ffmpeg.org/wiki/AfadeCurves} in audio cross fades. + * + * @default 'tri' + */ + audioOutCurve?: CurveType; + + /** + * [Fade in curve]{@link https://trac.ffmpeg.org/wiki/AfadeCurves} in audio cross fades. + * + * @default 'tri' + */ + audioInCurve?: CurveType; + + /** + * WARNING: Undocumented feature! + */ + easing?: string | null; + + /** + * WARNING: Undocumented feature! + */ + params?: TransitionParams; + +} + +/** + * @see [Arbitrary audio tracks]{@link https://github.com/mifi/editly#arbitrary-audio-tracks} + */ +export interface AudioTrack { + + /** + * File path for this track. + */ + path: string; + + /** + * Relative volume for this track. + * + * @default 1 + */ + mixVolume?: number | string; + + /** + * Time value to cut source file from (in seconds). + * + * @default 0 + */ + cutFrom?: number; + + /** + * Time value to cut source file to (in seconds). + */ + cutTo?: number; + + /** + * How many seconds into video to start this audio track. + * + * @default 0 + */ + start?: number; + +} + +/** + * @see [Ken Burns parameters]{@link https://github.com/mifi/editly#ken-burns-parameters} + */ +export interface KenBurns { + + /** + * Zoom direction for Ken Burns effect. + * Use `null` to disable. + */ + zoomDirection?: 'in' | 'out' | 'left' | `right` | null; + + /** + * Zoom amount for Ken Burns effect. + * + * @default 0.1 + */ + zoomAmount?: number; + +} + +export type LayerType = + 'video' | + 'audio' | + 'detached-audio' | + 'image' | + 'image-overlay' | + 'title' | + 'subtitle' | + 'title-background' | + 'news-title' | + 'slide-in-text' | + 'fill-color' | + 'pause' | + 'radial-gradient' | + 'linear-gradient' | + 'rainbow-colors' | + 'canvas' | + 'fabric' | + 'gl' | + 'editly-banner'; + +export interface BaseLayer { + + /** + * Layer type. + */ + type: LayerType; + + /** + * What time into the clip should this layer start (in seconds). 
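+ * For example, `{ start: 1, stop: 3 }` shows the layer only between 1 and 3 seconds into the clip.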
+ */ + start?: number; + + /** + * What time into the clip should this layer stop (in seconds). + */ + stop?: number; + + /** + * FIXME[ts]: This is used internally and should be removed after some refactoring. + * @private + */ + layerDuration?: number; +} + +export interface TextLayer extends BaseLayer { + /** + * Subtitle text to show. + */ + text: string; + + /** + * Text color. + * Defaults to '#ffffff'. + */ + textColor?: string; + + /** + * Set font (`.ttf`). + * Defaults to system font. + */ + fontPath?: string; + + /** + * WARNING: Undocumented feature! + * The font family to use. Must already be registered using `fontPath`. + * If `fontPath` is also provided, this will be ignored. + */ + fontFamily?: string; +} + +export interface VideoPostProcessingFunctionArgs { + canvas: Fabric.StaticCanvas; + image: Fabric.FabricImage; + fabric: typeof Fabric, + progress: number; + time: number; +} + +/** + * For video layers, if parent `clip.duration` is specified, the video will be slowed/sped-up to match `clip.duration`. + * If `cutFrom`/`cutTo` is set, the resulting segment (`cutTo`-`cutFrom`) will be slowed/sped-up to fit `clip.duration`. + * If the layer has audio, it will be kept (and mixed with other audio layers if present). + */ +export interface VideoLayer extends BaseLayer { + + /** + * Layer type. + */ + type: 'video'; + + /** + * Path to video file. + */ + path: string; + + /** + * How to fit video to screen. + * + * @default 'contain-blur' + * @see [Resize modes]{@link https://github.com/mifi/editly#resize-modes} + */ + resizeMode?: ResizeMode; + + /** + * Time value to cut from (in seconds). + * + * @default 0 + */ + cutFrom?: number; + + /** + * Time value to cut to (in seconds). + * Defaults to *end of video*. + */ + cutTo?: number; + + /** + * Width relative to screen width. + * Must be between 0 and 1. + * + * @default 1 + */ + width?: number; + + /** + * Height relative to screen height. + * Must be between 0 and 1. + * + * @default 1 + */ + height?: number; + + /** + * X-position relative to screen width. + * Must be between 0 and 1. + * + * @default 0 + */ + left?: number; + + /** + * Y-position relative to screen height. + * Must be between 0 and 1. + * + * @default 0 + */ + top?: number; + + /** + * X-anchor. + * + * @default 'left' + */ + originX?: OriginX; + + /** + * Y-anchor. + * + * @default 'top' + */ + originY?: OriginY; + + /** + * Relative volume when mixing this video's audio track with others. + * + * @default 1 + */ + mixVolume?: number | string; + + /** + * Post-processing function after calling rgbaToFabricImage but before adding it to StaticCanvas. + */ + fabricImagePostProcessing?: (data: VideoPostProcessingFunctionArgs) => Promise; + + // FIXME[TS]: Used internally, but should be removed after refactoring + framerateStr?: string; + inputWidth?: number; + inputHeight?: number; + speedFactor?: number; +} + +/** + * Audio layers will be mixed together. + * If `cutFrom`/`cutTo` is set, the resulting segment (`cutTo`-`cutFrom`) will be slowed/sped-up to fit `clip.duration`. + * The slow down/speed-up operation is limited to values between `0.5x` and `100x`. + */ +export interface AudioLayer extends BaseLayer { + + /** + * Layer type. + */ + type: 'audio'; + + /** + * Path to audio file. + */ + path: string; + + /** + * Time value to cut from (in seconds). + * + * @default 0 + */ + cutFrom?: number; + + /** + * Time value to cut to (in seconds). + * Defaults to `clip.duration`. 
+ */ + cutTo?: number; + + /** + * Relative volume when mixing this audio track with others. + * + * @default 1 + */ + mixVolume?: number | string; + +} + +/** + * This is a special case of `audioTracks` that makes it easier to start the audio relative to clips start times, + * without having to calculate global start times. + * + * This layer has the exact same properties as [`audioTracks`]{@link https://github.com/mifi/editly#arbitrary-audio-tracks}, + * except `start` time is relative to the clip's start. + */ +export interface DetachedAudioLayer extends BaseLayer, AudioTrack { + + /** + * Layer type. + */ + type: 'detached-audio'; + +} + +/** + * Full screen image. + */ +export interface ImageLayer extends BaseLayer, KenBurns { + + /** + * Layer type. + */ + type: 'image'; + + /** + * Path to image file. + */ + path: string; + + /** + * How to fit image to screen. + */ + resizeMode?: ResizeMode; + + /** + * WARNING: Undocumented feature! + */ + duration?: number; + +} + +/** + * Image overlay with a custom position and size on the screen. + */ +export interface ImageOverlayLayer extends BaseLayer, KenBurns { + + /** + * Layer type. + */ + type: 'image-overlay'; + + /** + * Path to image file. + */ + path: string; + + /** + * Position. + */ + position?: Position; + + /** + * Width (from 0 to 1) where 1 is screen width. + */ + width?: number; + + /** + * Height (from 0 to 1) where 1 is screen height. + */ + height?: number; + +} + +export interface TitleLayer extends TextLayer, KenBurns { + + /** + * Layer type. + */ + type: 'title'; + + /** + * Position. + */ + position?: Position; + +} + +export interface SubtitleLayer extends TextLayer { + + /** + * Layer type. + */ + type: 'subtitle'; + + /** + * WARNING: Undocumented feature! + */ + backgroundColor?: string; + + delay: number; + speed: number; +} + +/** + * Title with background. + */ +export interface TitleBackgroundLayer extends TextLayer { + + /** + * Layer type. + */ + type: 'title-background'; + + /** + * Background layer. + * Defaults to random background. + */ + background?: BackgroundLayer; + +} + +export interface NewsTitleLayer extends TextLayer { + + /** + * Layer type. + */ + type: 'news-title'; + + /** + * Background color. + * Defaults to '#d02a42'. + */ + backgroundColor?: string; + + /** + * Position. + */ + position?: Position; + + delay: number; + speed: number; +} + +export interface SlideInTextLayer extends TextLayer { + + /** + * Layer type. + */ + type: 'slide-in-text'; + + /** + * Font size. + */ + fontSize?: number; + + /** + * Char spacing. + */ + charSpacing?: number; + + /** + * Color. + * @deprecated use `fontColor` instead. + */ + color?: string; + + /** + * Position. + */ + position?: Position; + +} + +export interface FillColorLayer extends BaseLayer { + + /** + * Layer type. + */ + type: 'fill-color'; + + /** + * Color to fill background. + * Defaults to random color. + */ + color?: string; + +} + +export interface PauseLayer extends BaseLayer { + + /** + * Layer type. + */ + type: 'pause'; + + /** + * Color to fill background. + * Defaults to random color. + */ + color?: string; + +} + +export interface RadialGradientLayer extends BaseLayer { + + /** + * Layer type. + */ + type: 'radial-gradient'; + + /** + * Array of two colors. + * Defaults to random colors. + */ + colors?: [string, string]; + +} + +export interface LinearGradientLayer extends BaseLayer { + + /** + * Layer type. + */ + type: 'linear-gradient'; + + /** + * Array of two colors. + * Defaults to random colors. 
+ */ + colors?: [string, string]; + +} + +export interface RainbowColorsLayer extends BaseLayer { + + /** + * Layer type. + */ + type: 'rainbow-colors'; + +} + +export interface CustomFabricFunctionCallbacks { + onRender: (progress: number, canvas: Fabric.StaticCanvas) => OptionalPromise; + onClose?: () => OptionalPromise; +} + +export interface CustomCanvasFunctionArgs { + width: number; + height: number; + canvas: Canvas; +} + +export interface CustomCanvasFunctionCallbacks { + onRender: (progress: number) => OptionalPromise; + onClose?: () => OptionalPromise; +} + +export type CustomCanvasFunction = (args: CustomCanvasFunctionArgs) => OptionalPromise; + +export interface CanvasLayer extends BaseLayer { + + /** + * Layer type. + */ + type: 'canvas'; + + /** + * Custom JavaScript function. + */ + func: CustomCanvasFunction; + +} + +export interface CustomFabricFunctionArgs { + width: number; + height: number; + fabric: typeof Fabric; + params: unknown; +} + +export type CustomFabricFunction = (args: CustomFabricFunctionArgs) => OptionalPromise; + +export interface FabricLayer extends BaseLayer { + + /** + * Layer type. + */ + type: 'fabric'; + + /** + * Custom JavaScript function. + */ + func: CustomFabricFunction; + +} + +export interface GlLayer extends BaseLayer { + + /** + * Layer type. + */ + type: 'gl'; + + /** + * Fragment path (`.frag` file) + */ + fragmentPath: string; + + /** + * Vertex path (`.vert` file). + */ + vertexPath?: string; + + /** + * WARNING: Undocumented feature! + */ + speed?: number; + + vertexSrc?: string; + fragmentSrc?: string; +} + +/** + * WARNING: Undocumented feature! + */ +export interface EditlyBannerLayer extends BaseLayer { + + /** + * Layer type. + */ + type: 'editly-banner'; + + /** + * Set font (`.ttf`). + * Defaults to system font. + */ + fontPath?: string; +} + +/** + * @see [Examples]{@link https://github.com/mifi/editly/tree/master/examples} + * @see [Example 'commonFeatures.json5']{@link https://github.com/mifi/editly/blob/master/examples/commonFeatures.json5} + */ +export type Layer = + VideoLayer | + AudioLayer | + DetachedAudioLayer | + ImageLayer | + ImageOverlayLayer | + TitleLayer | + SubtitleLayer | + TitleBackgroundLayer | + NewsTitleLayer | + SlideInTextLayer | + FillColorLayer | + PauseLayer | + RadialGradientLayer | + LinearGradientLayer | + RainbowColorsLayer | + CanvasLayer | + FabricLayer | + GlLayer | + EditlyBannerLayer; + +/** + * Special layers that can be used f.e. in the 'title-background' layer. + */ +export type BackgroundLayer = + RadialGradientLayer | + LinearGradientLayer | + FillColorLayer; + +export interface Clip { + + /** + * List of layers within the current clip that will be overlaid in their natural order (final layer on top). + */ + layers: Layer[]; + + /** + * Clip duration. + * If unset, the clip duration will be that of the first video layer. + * Defaults to `defaults.duration`. + */ + duration?: number; + + /** + * Specify transition at the end of this clip. + * Defaults to `defaults.transition`. + * Set to `null` to disable transitions. + */ + transition?: Transition | null; + +} + +export interface DefaultLayerOptions { + + /** + * Set default font (`.ttf`). + * Defaults to system font. + */ + fontPath?: string; + + /** + * Set any layer parameter that all layers will inherit. + */ + // FIXME[ts]: Define a type for this + [key: string]: unknown; + +} + +export type DefaultLayerTypeOptions = { + + /** + * Set any layer parameter that all layers of the same type (specified in key) will inherit. 
+   */
+  [P in LayerType]?: Partial<Omit<Extract<Layer, { type: P }>, 'type'>>;
+
+}
+
+export interface DefaultOptions {
+
+  /**
+   * Set default clip duration for clips that don't have their own duration (in seconds).
+   *
+   * @default 4
+   */
+  duration?: number;
+
+  /**
+   * An object describing the default layer options.
+   */
+  layer?: DefaultLayerOptions;
+
+  /**
+   * Defaults for each individual layer type.
+   */
+  layerType?: DefaultLayerTypeOptions;
+
+  /**
+   * An object describing the default transition.
+   * Set to `null` to disable transitions.
+   */
+  transition?: Transition | null;
+
+}
+
+/**
+ * You can enable audio normalization of the final output audio.
+ * This is useful if you want to achieve audio ducking (e.g. automatically lowering the volume of all other tracks when a voice-over speaks).
+ *
+ * @see [Dynaudnorm]{@link https://ffmpeg.org/ffmpeg-filters.html#dynaudnorm}
+ * @see [Example of audio ducking]{@link https://github.com/mifi/editly/blob/master/examples/audio2.json5}
+ */
+export interface AudioNormalizationOptions {
+
+  /**
+   * Enable audio normalization?
+   *
+   * @default false
+   * @see [Audio normalization]{@link https://github.com/mifi/editly#audio-normalization}
+   */
+  enable?: boolean;
+
+  /**
+   * Audio normalization gauss size.
+   *
+   * @default 5
+   * @see [Audio normalization]{@link https://github.com/mifi/editly#audio-normalization}
+   */
+  gaussSize?: number;
+
+  /**
+   * Audio normalization max gain.
+   *
+   * @default 30
+   * @see [Audio normalization]{@link https://github.com/mifi/editly#audio-normalization}
+   */
+  maxGain?: number;
+
+}
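+
+// For illustration, a config fragment enabling normalization with the documented
+// default tuning (values mirror the @default annotations above):
+// audioNorm: { enable: true, gaussSize: 5, maxGain: 30 }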
+
+export interface DebugOptions {
+  enableFfmpegLog?: boolean;
+  verbose?: boolean;
+  logTimes?: boolean;
+}
+
+export interface Config extends DebugOptions {
+  /**
+   * Output path (`.mp4` or `.mkv`, can also be a `.gif`).
+   */
+  outPath: string;
+
+  /**
+   * List of clip objects that will be played in sequence.
+   * Each clip can have one or more layers.
+   *
+   * @default []
+   */
+  clips: Clip[];
+
+  /**
+   * Width which all media will be converted to.
+   *
+   * @default 640
+   */
+  width?: number;
+
+  /**
+   * Height which all media will be converted to.
+   * Decides height based on `width` and aspect ratio of the first video by default.
+   */
+  height?: number;
+
+  /**
+   * FPS which all videos will be converted to.
+   * Defaults to first video's FPS or `25`.
+   */
+  fps?: number;
+
+  /**
+   * Specify custom output codec/format arguments for ffmpeg.
+   * Automatically adds codec options (normally `h264`) by default.
+   *
+   * @see [Example]{@link https://github.com/mifi/editly/blob/master/examples/customOutputArgs.json5}
+   */
+  customOutputArgs?: string[];
+
+  /**
+   * Allow remote URLs as paths.
+   *
+   * @default false
+   */
+  allowRemoteRequests?: boolean;
+
+  /**
+   * Fast mode (low resolution and FPS, useful for getting a quick preview ⏩).
+   *
+   * @default false
+   */
+  fast?: boolean;
+
+  /**
+   * An object describing default options for clips and layers.
+   */
+  defaults?: DefaultOptions;
+
+  /**
+   * List of arbitrary audio tracks.
+   *
+   * @default []
+   * @see [Audio tracks]{@link https://github.com/mifi/editly#arbitrary-audio-tracks}
+   */
+  audioTracks?: AudioTrack[];
+
+  /**
+   * Set an audio track for the whole video.
+   *
+   * @see [Audio tracks]{@link https://github.com/mifi/editly#arbitrary-audio-tracks}
+   */
+  audioFilePath?: string;
+
+  /**
+   * Background audio volume.
+   *
+   * @see [Audio tracks]{@link https://github.com/mifi/editly#arbitrary-audio-tracks}
+   */
+  backgroundAudioVolume?: string | number;
+
+  /**
+   * Loop the audio track if it is shorter than the video?
+   *
+   * @default false
+   */
+  loopAudio?: boolean;
+
+  /**
+   * Keep source audio from `clips`?
+   *
+   * @default false
+   */
+  keepSourceAudio?: boolean;
+
+  /**
+   * Volume of audio from `clips` relative to `audioTracks`.
+   *
+   * @default 1
+   * @see [Audio tracks]{@link https://github.com/mifi/editly#arbitrary-audio-tracks}
+   */
+  clipsAudioVolume?: number | string;
+
+  /**
+   * Adjust output [volume]{@link http://ffmpeg.org/ffmpeg-filters.html#volume} (final stage).
+   *
+   * @default 1
+   * @see [Example]{@link https://github.com/mifi/editly/blob/master/examples/audio-volume.json5}
+   * @example
+   * 0.5
+   * @example
+   * '10db'
+   */
+  outputVolume?: number | string;
+
+  /**
+   * Audio normalization.
+   */
+  audioNorm?: AudioNormalizationOptions;
+
+  /**
+   * WARNING: Undocumented feature!
+   */
+  ffmpegPath?: string;
+
+  /**
+   * WARNING: Undocumented feature!
+   */
+  ffprobePath?: string;
+
+  /**
+   * WARNING: Undocumented feature!
+   */
+  keepTmp?: boolean;
+}
+
+export interface RenderSingleFrameConfig extends Config {
+
+  /**
+   * Output path of the single rendered frame (e.g. a `.png` image).
+   */
+  outPath: string;
+
+  /**
+   * Timestamp to render.
+   */
+  time?: number;
+
+}
+
+// Internal types
+
+export type Stream = {
+  codec_type: string;
+  codec_name: string;
+  r_frame_rate: string;
+  width?: number;
+  height?: number;
+  tags?: {
+    rotate: string;
+  };
+  side_data_list?: {
+    rotation: string;
+  }[];
+};
+
+export type Keyframe = {
+  t: number;
+  props: { [key: string]: number };
+};
+
+export interface FrameSource {
+  readNextFrame(progress: number, canvas: Fabric.StaticCanvas, offsetTime: number): OptionalPromise<void>;
+  close?(): OptionalPromise<void>;
+}
+
+export type CreateFrameSourceOptions<T> = DebugOptions & {
+  ffmpegPath: string;
+  ffprobePath: string;
+  width: number;
+  height: number;
+  duration: number;
+  channels: number;
+  framerateStr: string;
+  params: Omit<T, 'type'>;
+};
+
+export type CreateFrameSource<T> = (options: CreateFrameSourceOptions<T>) => Promise<FrameSource>;
diff --git a/src/types/gl-buffer.d.ts b/src/types/gl-buffer.d.ts
new file mode 100644
index 00000000..517a313d
--- /dev/null
+++ b/src/types/gl-buffer.d.ts
@@ -0,0 +1,3 @@
+declare module 'gl-buffer' {
+  export default function createBuffer(gl: WebGLRenderingContext, data: number[], target: number, usage: number): WebGLBuffer;
+}
diff --git a/src/types/gl-texture2d.d.ts b/src/types/gl-texture2d.d.ts
new file mode 100644
index 00000000..a0edb0fc
--- /dev/null
+++ b/src/types/gl-texture2d.d.ts
@@ -0,0 +1,8 @@
+declare module 'gl-texture2d' {
+  import ndarray from 'ndarray';
+
+  // There are other overloads for this function, but we only care about this one.
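+  // A hypothetical usage sketch (identifiers invented for illustration):
+  //   const pixels = ndarray(new Uint8Array(width * height * 4), [width, height, 4]);
+  //   const texture = createTexture(gl, pixels);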
+  function createTexture(gl: WebGLRenderingContext, data: ndarray): WebGLTexture;
+
+  export default createTexture;
+}
diff --git a/src/types/gl-transition.d.ts b/src/types/gl-transition.d.ts
new file mode 100644
index 00000000..63138b97
--- /dev/null
+++ b/src/types/gl-transition.d.ts
@@ -0,0 +1,58 @@
+declare module 'gl-transition' {
+  type TransitionObjectLike = {
+    glsl: string,
+    defaultParams: { [key: string]: unknown },
+    paramsTypes: { [key: string]: string },
+  };
+
+  type GLTextureLike = {
+    bind: (unit: number) => number,
+    shape: [number, number],
+  };
+
+  type Options = {
+    resizeMode?: "cover" | "contain" | "stretch",
+  };
+
+  function createTransition(
+    gl: WebGLRenderingContext,
+    transition: TransitionObjectLike,
+    options?: Options // defaults to {}
+  ): {
+    // renders one frame of the transition (up to you to run the animation loop the way you want)
+    draw: (
+      progress: number,
+      from: GLTextureLike,
+      to: GLTextureLike,
+      width?: number, // defaults to gl.drawingBufferWidth
+      height?: number, // defaults to gl.drawingBufferHeight
+      params?: { [key: string]: number | number[] | boolean | GLTextureLike } // defaults to {}
+    ) => void,
+    // dispose and destroy all objects created by the function call.
+    dispose: () => void,
+  };
+
+  export default createTransition;
+}
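+
+// A hypothetical usage sketch (assumes a WebGL context `gl` and two GLTextureLike textures exist):
+//   const transition = createTransition(gl, someTransitionObject);
+//   transition.draw(progress, fromTexture, toTexture, gl.drawingBufferWidth, gl.drawingBufferHeight);
+//   transition.dispose();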
diff --git a/src/types/gl-transitions.d.ts b/src/types/gl-transitions.d.ts
new file mode 100644
index 00000000..e739d2a7
--- /dev/null
+++ b/src/types/gl-transitions.d.ts
@@ -0,0 +1,15 @@
+declare module 'gl-transitions' {
+  type GlTransition = {
+    name: string,
+    author: string,
+    license: string,
+    glsl: string,
+    defaultParams: { [key: string]: unknown },
+    paramsTypes: { [key: string]: string },
+    createdAt: string,
+    updatedAt: string,
+  }
+
+  const _default: GlTransition[];
+  export default _default;
+}
diff --git a/src/util.ts b/src/util.ts
new file mode 100644
index 00000000..54f44a52
--- /dev/null
+++ b/src/util.ts
@@ -0,0 +1,183 @@
+import { execa } from 'execa';
+import assert from 'assert';
+import { sortBy } from 'lodash-es';
+import { pathExists } from 'fs-extra';
+
+import type { Keyframe, Position, PositionObject, Stream, Transition } from './types.js';
+import type { TOriginX, TOriginY } from 'fabric';
+
+export function parseFps(fps?: string) {
+  const match = typeof fps === 'string' && fps.match(/^([0-9]+)\/([0-9]+)$/);
+  if (match) {
+    const num = parseInt(match[1], 10);
+    const den = parseInt(match[2], 10);
+    if (den > 0) return num / den;
+  }
+  return undefined;
+}
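+
+// Illustration: ffprobe reports frame rates as fractions, e.g.
+//   parseFps('30000/1001') // ≈ 29.97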
+
+export async function readDuration(ffprobePath: string, p: string) {
+  const { stdout } = await execa(ffprobePath, ['-v', 'error', '-show_entries', 'format=duration', '-of', 'default=noprint_wrappers=1:nokey=1', p]);
+  const parsed = parseFloat(stdout);
+  assert(!Number.isNaN(parsed));
+  return parsed;
+}
+
+export async function readFileStreams(ffprobePath: string, p: string) {
+  const { stdout } = await execa(ffprobePath, [
+    '-show_entries', 'stream', '-of', 'json', p,
+  ]);
+  return JSON.parse(stdout).streams as Stream[];
+}
+
+export async function readVideoFileInfo(ffprobePath: string, p: string) {
+  const streams = await readFileStreams(ffprobePath, p);
+  const stream = streams.find((s) => s.codec_type === 'video'); // TODO
+
+  if (!stream) {
+    throw new Error(`Could not find a video stream in ${p}`);
+  }
+
+  const duration = await readDuration(ffprobePath, p);
+
+  let rotation = parseInt(stream.tags?.rotate ?? '', 10);
+
+  // If we can't find rotation, try side_data_list
+  if (Number.isNaN(rotation) && stream.side_data_list?.[0]?.rotation) {
+    rotation = parseInt(stream.side_data_list[0].rotation, 10);
+  }
+
+  return {
+    // numFrames: parseInt(stream.nb_frames, 10),
+    duration,
+    width: stream.width, // TODO coded_width?
+    height: stream.height,
+    framerateStr: stream.r_frame_rate,
+    rotation: !Number.isNaN(rotation) ? rotation : undefined,
+  };
+}
+
+export async function readAudioFileInfo(ffprobePath: string, p: string) {
+  const duration = await readDuration(ffprobePath, p);
+
+  return { duration };
+}
+
+export function toArrayInteger(buffer: Buffer) {
+  if (buffer.length > 0) {
+    const data = new Uint8ClampedArray(buffer.length);
+    for (let i = 0; i < buffer.length; i += 1) {
+      data[i] = buffer[i];
+    }
+    return data;
+  }
+  return [];
+}
+
+// x264 requires multiple of 2
+export const multipleOf2 = (x: number) => Math.round(x / 2) * 2;
+
+export function getPositionProps({ position, width, height }: { position?: Position | PositionObject, width: number, height: number }) {
+  let originY: TOriginY = 'center';
+  let originX: TOriginX = 'center';
+  let top = height / 2;
+  let left = width / 2;
+  const margin = 0.05;
+
+  if (typeof position === 'string') {
+    if (position === 'top') {
+      originY = 'top';
+      top = height * margin;
+    } else if (position === 'bottom') {
+      originY = 'bottom';
+      top = height * (1 - margin);
+    } else if (position === 'center') {
+      originY = 'center';
+      top = height / 2;
+    } else if (position === 'top-left') {
+      originX = 'left';
+      originY = 'top';
+      left = width * margin;
+      top = height * margin;
+    } else if (position === 'top-right') {
+      originX = 'right';
+      originY = 'top';
+      left = width * (1 - margin);
+      top = height * margin;
+    } else if (position === 'center-left') {
+      originX = 'left';
+      originY = 'center';
+      left = width * margin;
+      top = height / 2;
+    } else if (position === 'center-right') {
+      originX = 'right';
+      originY = 'center';
+      left = width * (1 - margin);
+      top = height / 2;
+    } else if (position === 'bottom-left') {
+      originX = 'left';
+      originY = 'bottom';
+      left = width * margin;
+      top = height * (1 - margin);
+    } else if (position === 'bottom-right') {
+      originX = 'right';
+      originY = 'bottom';
+      left = width * (1 - margin);
+      top = height * (1 - margin);
+    }
+  } else {
+    if (position?.x != null) {
+      originX = position.originX || 'left';
+      left = width * position.x;
+    }
+    if (position?.y != null) {
+      originY = position.originY || 'top';
+      top = height * position.y;
+    }
+  }
+
+  return { originX, originY, top, left };
+}
+
+export function getFrameByKeyFrames(keyframes: Keyframe[], progress: number) {
+  if (keyframes.length < 2) throw new Error('Keyframes must be at least 2');
+  const sortedKeyframes = sortBy(keyframes, 't');
+
+  // TODO check that max is 1
+  // TODO check that all keyframes have all props
+  // TODO make smarter so user doesn't need to replicate non-changing props
+
+  const invalidKeyframe = sortedKeyframes.find((k, i) => {
+    if (i === 0) return false;
+    return k.t === sortedKeyframes[i - 1].t;
+  });
+  if (invalidKeyframe) throw new Error('Invalid keyframe');
+
+  let prevKeyframe = [...sortedKeyframes].reverse().find((k) => k.t < progress);
+  if (!prevKeyframe) prevKeyframe = sortedKeyframes[0];
+
+  let nextKeyframe = sortedKeyframes.find((k) => k.t >= progress);
+  if (!nextKeyframe) nextKeyframe = sortedKeyframes[sortedKeyframes.length - 1];
+
+  if (nextKeyframe.t === prevKeyframe.t) return prevKeyframe.props;
+
+  const interProgress = (progress - prevKeyframe.t) / (nextKeyframe.t - prevKeyframe.t);
+  return Object.fromEntries(Object.entries(prevKeyframe.props).map(([propName, prevVal]) => ([propName, prevVal + ((nextKeyframe.props[propName] - prevVal) * interProgress)])));
+}
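+
+// Illustration with hypothetical values: halfway (progress 0.5) between
+// { t: 0, props: { left: 0 } } and { t: 1, props: { left: 100 } } interpolates to { left: 50 }:
+//   getFrameByKeyFrames([{ t: 0, props: { left: 0 } }, { t: 1, props: { left: 100 } }], 0.5)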
+
+export const isUrl = (path: string) => /^https?:\/\//.test(path);
+
+export const assertFileValid = async (path: string, allowRemoteRequests?: boolean) => {
+  if (isUrl(path)) {
+    assert(allowRemoteRequests, 'Remote requests are not allowed');
+    return;
+  }
+  assert(await pathExists(path), `File does not exist ${path}`);
+};
+
+// See #16
+export function checkTransition(transition?: Transition | null) {
+  assert(transition == null || typeof transition === 'object', 'Transition must be an object');
+}
diff --git a/test.js b/test/index.ts
similarity index 88%
rename from test.js
rename to test/index.ts
index 80f68ac6..34e1d29b 100644
--- a/test.js
+++ b/test/index.ts
@@ -1,8 +1,9 @@
 import { execa } from 'execa';
 
 // todo use jest
-await execa('node', [
-  'cli.js',
+await execa('npx', [
+  'tsx',
+  'src/cli.ts',
   '--allow-remote-requests',
   "title:'My video'",
   'https://raw.githubusercontent.com/mifi/editly-assets/main/overlay.svg',
diff --git a/transitions.js b/transitions.js
deleted file mode 100644
index 6c8a64cb..00000000
--- a/transitions.js
+++ /dev/null
@@ -1,62 +0,0 @@
-import assert from 'assert';
-
-const randomTransitionsSet = ['fade', 'fadegrayscale', 'directionalwarp', 'crosswarp', 'dreamyzoom', 'burn', 'crosszoom', 'simplezoom', 'linearblur', 'directional-left', 'directional-right', 'directional-up', 'directional-down'];
-
-function getRandomTransition() {
-  return randomTransitionsSet[Math.floor(Math.random() * randomTransitionsSet.length)];
-}
-
-// https://easings.net/
-
-export function easeOutExpo(x) {
-  return x === 1 ? 1 : 1 - (2 ** (-10 * x));
-}
-
-export function easeInOutCubic(x) {
-  return x < 0.5 ? 4 * x * x * x : 1 - ((-2 * x + 2) ** 3) / 2;
-}
-
-function getTransitionEasingFunction(easing, transitionName) {
-  if (easing !== null) {
-    if (easing) return { easeOutExpo }[easing];
-    if (transitionName === 'directional') return easeOutExpo;
-  }
-  return (progress) => progress;
-}
-
-export function calcTransition(defaults, transition, isLastClip) {
-  if (transition === null || isLastClip) return { duration: 0 };
-
-  const getTransitionDefault = (key) => (defaults.transition ? defaults.transition[key] : undefined);
-
-  let transitionOrDefault = {
-    name: (transition && transition.name) || getTransitionDefault('name'),
-    duration: (transition && transition.duration != null) ? transition.duration : getTransitionDefault('duration'),
-    params: (transition && transition.params) || getTransitionDefault('params'),
-    easing: (transition && transition.easing !== undefined) ? transition.easing : getTransitionDefault('easing'),
-    audioOutCurve: (transition && transition.audioOutCurve) || getTransitionDefault('audioOutCurve'),
-    audioInCurve: (transition && transition.audioInCurve) || getTransitionDefault('audioInCurve'),
-  };
-
-  assert(!transitionOrDefault.duration || transitionOrDefault.name, 'Please specify transition name or set duration to 0');
-
-  if (transitionOrDefault.name === 'random' && transitionOrDefault.duration) {
-    transitionOrDefault = { ...transitionOrDefault, name: getRandomTransition() };
-  }
-
-  const aliasedTransition = {
-    'directional-left': { name: 'directional', params: { direction: [1, 0] } },
-    'directional-right': { name: 'directional', params: { direction: [-1, 0] } },
-    'directional-down': { name: 'directional', params: { direction: [0, 1] } },
-    'directional-up': { name: 'directional', params: { direction: [0, -1] } },
-  }[transitionOrDefault.name];
-  if (aliasedTransition) {
-    transitionOrDefault = { ...transitionOrDefault, ...aliasedTransition };
-  }
-
-  return {
-    ...transitionOrDefault,
-    duration: transitionOrDefault.duration || 0,
-    easingFunction: getTransitionEasingFunction(transitionOrDefault.easing, transitionOrDefault.name),
-  };
-}
diff --git a/tsconfig.json b/tsconfig.json
new file mode 100644
index 00000000..d2f61b7d
--- /dev/null
+++ b/tsconfig.json
@@ -0,0 +1,12 @@
+{
+  "extends": "@tsconfig/node-lts/tsconfig.json",
+  "compilerOptions": {
+    "noEmit": true,
+
+    /* Linting */
+    "noUnusedLocals": true,
+    "noUnusedParameters": true,
+    "noFallthroughCasesInSwitch": true,
+    "noUncheckedSideEffectImports": true
+  }
+}
diff --git a/util.js b/util.js
deleted file mode 100644
index ea72fb06..00000000
--- a/util.js
+++ /dev/null
@@ -1,172 +0,0 @@
-import { execa } from 'execa';
-import assert from 'assert';
-import sortBy from 'lodash-es/sortBy.js';
-import fsExtra from 'fs-extra';
-
-export function parseFps(fps) {
-  const match = typeof fps === 'string' && fps.match(/^([0-9]+)\/([0-9]+)$/);
-  if (match) {
-    const num = parseInt(match[1], 10);
-    const den = parseInt(match[2], 10);
-    if (den > 0) return num / den;
-  }
-  return undefined;
-}
-
-export async function readDuration(ffprobePath, p) {
-  const { stdout } = await execa(ffprobePath, ['-v', 'error', '-show_entries', 'format=duration', '-of', 'default=noprint_wrappers=1:nokey=1', p]);
-  const parsed = parseFloat(stdout);
-  assert(!Number.isNaN(parsed));
-  return parsed;
-}
-
-export async function readFileStreams(ffprobePath, p) {
-  const { stdout } = await execa(ffprobePath, [
-    '-show_entries', 'stream', '-of', 'json', p,
-  ]);
-  const json = JSON.parse(stdout);
-  return json.streams;
-}
-
-export async function readVideoFileInfo(ffprobePath, p) {
-  const streams = await readFileStreams(ffprobePath, p);
-  const stream = streams.find((s) => s.codec_type === 'video'); // TODO
-
-  const duration = await readDuration(ffprobePath, p);
-
-  let rotation = parseInt(stream.tags && stream.tags.rotate, 10);
-
-  // If we can't find rotation, try side_data_list
-  if (Number.isNaN(rotation) && Array.isArray(stream.side_data_list) && stream.side_data_list[0] && stream.side_data_list[0].rotation) {
-    rotation = parseInt(stream.side_data_list[0].rotation, 10);
-  }
-
-  return {
-    // numFrames: parseInt(stream.nb_frames, 10),
-    duration,
-    width: stream.width, // TODO coded_width?
-    height: stream.height,
-    framerateStr: stream.r_frame_rate,
-    rotation: !Number.isNaN(rotation) ? rotation : undefined,
-  };
-}
-
-export async function readAudioFileInfo(ffprobePath, p) {
-  const duration = await readDuration(ffprobePath, p);
-
-  return { duration };
-}
-
-export function toArrayInteger(buffer) {
-  if (buffer.length > 0) {
-    const data = new Uint8ClampedArray(buffer.length);
-    for (let i = 0; i < buffer.length; i += 1) {
-      data[i] = buffer[i];
-    }
-    return data;
-  }
-  return [];
-}
-
-// x264 requires multiple of 2
-export const multipleOf2 = (x) => Math.round(x / 2) * 2;
-
-export function getPositionProps({ position, width, height }) {
-  let originY = 'center';
-  let originX = 'center';
-  let top = height / 2;
-  let left = width / 2;
-
-  const margin = 0.05;
-  if (position === 'top') {
-    originY = 'top';
-    top = height * margin;
-  } else if (position === 'bottom') {
-    originY = 'bottom';
-    top = height * (1 - margin);
-  } else if (position === 'center') {
-    originY = 'center';
-    top = height / 2;
-  } else if (position === 'top-left') {
-    originX = 'left';
-    originY = 'top';
-    left = width * margin;
-    top = height * margin;
-  } else if (position === 'top-right') {
-    originX = 'right';
-    originY = 'top';
-    left = width * (1 - margin);
-    top = height * margin;
-  } else if (position === 'center-left') {
-    originX = 'left';
-    originY = 'center';
-    left = width * margin;
-    top = height / 2;
-  } else if (position === 'center-right') {
-    originX = 'right';
-    originY = 'center';
-    left = width * (1 - margin);
-    top = height / 2;
-  } else if (position === 'bottom-left') {
-    originX = 'left';
-    originY = 'bottom';
-    left = width * margin;
-    top = height * (1 - margin);
-  } else if (position === 'bottom-right') {
-    originX = 'right';
-    originY = 'bottom';
-    left = width * (1 - margin);
-    top = height * (1 - margin);
-  }
-
-  if (position && position.x != null) {
-    originX = position.originX || 'left';
-    left = width * position.x;
-  }
-  if (position && position.y != null) {
-    originY = position.originY || 'top';
-    top = height * position.y;
-  }
-
-  return { originX, originY, top, left };
-}
-
-export function getFrameByKeyFrames(keyframes, progress) {
-  if (keyframes.length < 2) throw new Error('Keyframes must be at least 2');
-  const sortedKeyframes = sortBy(keyframes, 't');
-
-  // TODO check that max is 1
-  // TODO check that all keyframes have all props
-  // TODO make smarter so user doesn't need to replicate non-changing props
-
-  const invalidKeyframe = sortedKeyframes.find((k, i) => {
-    if (i === 0) return false;
-    return k.t === sortedKeyframes[i - 1].t;
-  });
-  if (invalidKeyframe) throw new Error('Invalid keyframe');
-
-  let prevKeyframe = [...sortedKeyframes].reverse().find((k) => k.t < progress);
-  // eslint-disable-next-line prefer-destructuring
-  if (!prevKeyframe) prevKeyframe = sortedKeyframes[0];
-
-  let nextKeyframe = sortedKeyframes.find((k) => k.t >= progress);
-  if (!nextKeyframe) nextKeyframe = sortedKeyframes[sortedKeyframes.length - 1];
-
-  if (nextKeyframe.t === prevKeyframe.t) return prevKeyframe.props;
-
-  const interProgress = (progress - prevKeyframe.t) / (nextKeyframe.t - prevKeyframe.t);
-  return Object.fromEntries(Object.entries(prevKeyframe.props).map(([propName, prevVal]) => ([propName, prevVal + ((nextKeyframe.props[propName] - prevVal) * interProgress)])));
-}
-
-export const isUrl = (path) => /^https?:\/\//.test(path);
-
-export const assertFileValid = async (path, allowRemoteRequests) => {
-  if (isUrl(path)) {
-    assert(allowRemoteRequests, 'Remote requests are not allowed');
-    return;
-  }
-  assert(await fsExtra.pathExists(path), `File does not exist ${path}`);
-};
-
-// See #16
-export const checkTransition = (transition) => assert(transition == null || typeof transition === 'object', 'Transition must be an object');