diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md index 4d100cce..551e3cda 100644 --- a/.github/ISSUE_TEMPLATE.md +++ b/.github/ISSUE_TEMPLATE.md @@ -1,3 +1,3 @@ - [ ] I have tried with the newest version of editly: `npm i -g editly` or `npm i editly@latest` - [ ] I have tried ffmpeg newest stable version -- [ ] I have searched for existing issues \ No newline at end of file +- [ ] I have searched for existing issues diff --git a/.husky/pre-commit b/.husky/pre-commit new file mode 100644 index 00000000..2312dc58 --- /dev/null +++ b/.husky/pre-commit @@ -0,0 +1 @@ +npx lint-staged diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 00000000..ab308b55 --- /dev/null +++ b/.prettierignore @@ -0,0 +1,2 @@ +src/BoxBlur.js +shaders/rainbow-colors.frag diff --git a/.prettierrc b/.prettierrc new file mode 100644 index 00000000..a7cc412a --- /dev/null +++ b/.prettierrc @@ -0,0 +1,5 @@ +{ + "$schema": "https://json.schemastore.org/prettierrc", + "printWidth": 100, + "plugins": ["prettier-plugin-organize-imports"] +} diff --git a/README.md b/README.md index 9ce0522e..b133c805 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,7 @@ This GIF / YouTube was created with this command: "editly [commonFeatures.json5] - Declarative API with fun defaults - Create colorful videos with random colors generated from aesthetically pleasing palettes and random effects - Supports any input size, e.g. 4K video and DSLR photos -- Can output to any dimensions and aspect ratio, e.g. *Instagram post* (1:1), *Instagram story* (9:16), *YouTube* (16:9), or any other dimensions you like. +- Can output to any dimensions and aspect ratio, e.g. _Instagram post_ (1:1), _Instagram story_ (9:16), _YouTube_ (16:9), or any other dimensions you like. - Content is scaled and letterboxed automatically, even if the input aspect ratio is not the same and the framerate will be converted. - Speed up / slow down videos automatically to match the `cutFrom`/`cutTo` segment length with each clip's `duration` - Overlay text and subtitles on videos, images or backgrounds @@ -76,7 +76,7 @@ editly \ --audio-file-path /path/to/music.mp3 ``` -Or create an MP4 (or GIF) from a JSON or JSON5 edit spec *(JSON5 is just a more user friendly JSON format)*: +Or create an MP4 (or GIF) from a JSON or JSON5 edit spec _(JSON5 is just a more user friendly JSON format)_: ```sh editly my-spec.json5 --fast --keep-source-audio --out output.gif @@ -87,15 +87,15 @@ For examples of how to make a JSON edit spec, see below or [examples](https://gi Without `--fast`, it will default to using the **width**, **height** and **frame rate** from the **first** input video. **All other clips will be converted to these dimensions.** You can of course override any or all of these parameters. - **TIP:** Use this tool in conjunction with [LosslessCut](https://github.com/mifi/lossless-cut) -- **TIP:** If you need catchy music for your video, have a look at [this YouTube](https://www.youtube.com/channel/UCht8qITGkBvXKsR1Byln-wA) or the [YouTube audio library](https://www.youtube.com/audiolibrary/music?nv=1). Then use [youtube-dl](https://github.com/ytdl-org/youtube-dl) to download the video, and then point `--audio-file-path` at the video file. *Be sure to respect their license!* +- **TIP:** If you need catchy music for your video, have a look at [this YouTube](https://www.youtube.com/channel/UCht8qITGkBvXKsR1Byln-wA) or the [YouTube audio library](https://www.youtube.com/audiolibrary/music?nv=1). 
Then use [youtube-dl](https://github.com/ytdl-org/youtube-dl) to download the video, and then point `--audio-file-path` at the video file. _Be sure to respect their license!_ ## JavaScript library ```js -import editly from 'editly'; +import editly from "editly"; // See editSpec documentation -await editly(editSpec) +await editly(editSpec); ``` ## Edit spec @@ -172,39 +172,39 @@ Edit specs are JavaScript / JSON objects describing the whole edit operation wit ### Parameters -| Parameter | CLI equivalent | Description | Default | | -|-|-|-|-|-| -| `outPath` | `--out` | Output path (mp4, mkv), can also be a `.gif` | | | -| `width` | `--width` | Width which all media will be converted to | `640` | | -| `height` | `--height` | Height which all media will be converted to | auto based on `width` and aspect ratio of **first video** | | -| `fps` | `--fps` | FPS which all videos will be converted to | First video FPS or `25` | | -| `customOutputArgs` | | Specify custom output codec/format arguments for ffmpeg (See [example](https://github.com/mifi/editly/blob/master/examples/customOutputArgs.json5)) | auto (h264) | | -| `allowRemoteRequests` | `--allow-remote-requests` | Allow remote URLs as paths | `false` | | -| `fast` | `--fast`, `-f` | Fast mode (low resolution and FPS, useful for getting a quick preview ⏩) | `false` | | -| `defaults.layer.fontPath` | `--font-path` | Set default font to a .ttf | System font | | -| `defaults.layer.*` | | Set any layer parameter that all layers will inherit | | | -| `defaults.duration` | `--clip-duration` | Set default clip duration for clips that don't have an own duration | `4` | sec | -| `defaults.transition` | | An object `{ name, duration }` describing the default transition. Set to **null** to disable transitions | | | -| `defaults.transition.duration` | `--transition-duration` | Default transition duration | `0.5` | sec | -| `defaults.transition.name` | `--transition-name` | Default transition type. See [Transition types](#transition-types) | `random` | | -| `defaults.transition.audioOutCurve` | | Default [fade out curve](https://trac.ffmpeg.org/wiki/AfadeCurves) in audio cross fades | `tri` | | -| `defaults.transition.audioInCurve` | | Default [fade in curve](https://trac.ffmpeg.org/wiki/AfadeCurves) in audio cross fades | `tri` | | -| `clips[]` | | List of clip objects that will be played in sequence. Each clip can have one or more layers. | | | -| `clips[].duration` | | Clip duration. See `defaults.duration`. If unset, the clip duration will be that of the **first video layer**. | `defaults.duration` | | -| `clips[].transition` | | Specify transition at the **end** of this clip. See `defaults.transition` | `defaults.transition` | | -| `clips[].layers[]` | | List of layers within the current clip that will be overlaid in their natural order (final layer on top) | | | -| `clips[].layers[].type` | | Layer type, see below | | | -| `clips[].layers[].start` | | What time into the clip should this layer start | | sec | -| `clips[].layers[].stop` | | What time into the clip should this layer stop | | sec | -| `audioTracks[]` | | List of arbitrary audio tracks. See [audio tracks](#arbitrary-audio-tracks). | `[]` | | -| `audioFilePath` | `--audio-file-path` | Set an audio track for the whole video. See also [audio tracks](#arbitrary-audio-tracks) | | | -| `loopAudio` | `--loop-audio` | Loop the audio track if it is shorter than video? | `false` | | -| `keepSourceAudio` | `--keep-source-audio` | Keep source audio from `clips`? 
| `false` | | -| `clipsAudioVolume` | | Volume of audio from `clips` relative to `audioTracks`. See [audio tracks](#arbitrary-audio-tracks). | `1` | | -| `outputVolume` | `--output-volume` | Adjust output [volume](http://ffmpeg.org/ffmpeg-filters.html#volume) (final stage). See [example](https://github.com/mifi/editly/blob/master/examples/audio-volume.json5) | `1` | e.g. `0.5` or `10dB` | -| `audioNorm.enable` | | Enable audio normalization? See [audio normalization](#audio-normalization). | `false` | | -| `audioNorm.gaussSize` | | Audio normalization gauss size. See [audio normalization](#audio-normalization). | `5` | | -| `audioNorm.maxGain` | | Audio normalization max gain. See [audio normalization](#audio-normalization). | `30` | | +| Parameter | CLI equivalent | Description | Default | | +| ----------------------------------- | ------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------- | -------------------- | +| `outPath` | `--out` | Output path (mp4, mkv), can also be a `.gif` | | | +| `width` | `--width` | Width which all media will be converted to | `640` | | +| `height` | `--height` | Height which all media will be converted to | auto based on `width` and aspect ratio of **first video** | | +| `fps` | `--fps` | FPS which all videos will be converted to | First video FPS or `25` | | +| `customOutputArgs` | | Specify custom output codec/format arguments for ffmpeg (See [example](https://github.com/mifi/editly/blob/master/examples/customOutputArgs.json5)) | auto (h264) | | +| `allowRemoteRequests` | `--allow-remote-requests` | Allow remote URLs as paths | `false` | | +| `fast` | `--fast`, `-f` | Fast mode (low resolution and FPS, useful for getting a quick preview ⏩) | `false` | | +| `defaults.layer.fontPath` | `--font-path` | Set default font to a .ttf | System font | | +| `defaults.layer.*` | | Set any layer parameter that all layers will inherit | | | +| `defaults.duration` | `--clip-duration` | Set default clip duration for clips that don't have an own duration | `4` | sec | +| `defaults.transition` | | An object `{ name, duration }` describing the default transition. Set to **null** to disable transitions | | | +| `defaults.transition.duration` | `--transition-duration` | Default transition duration | `0.5` | sec | +| `defaults.transition.name` | `--transition-name` | Default transition type. See [Transition types](#transition-types) | `random` | | +| `defaults.transition.audioOutCurve` | | Default [fade out curve](https://trac.ffmpeg.org/wiki/AfadeCurves) in audio cross fades | `tri` | | +| `defaults.transition.audioInCurve` | | Default [fade in curve](https://trac.ffmpeg.org/wiki/AfadeCurves) in audio cross fades | `tri` | | +| `clips[]` | | List of clip objects that will be played in sequence. Each clip can have one or more layers. | | | +| `clips[].duration` | | Clip duration. See `defaults.duration`. If unset, the clip duration will be that of the **first video layer**. | `defaults.duration` | | +| `clips[].transition` | | Specify transition at the **end** of this clip. 
See `defaults.transition` | `defaults.transition` | | +| `clips[].layers[]` | | List of layers within the current clip that will be overlaid in their natural order (final layer on top) | | | +| `clips[].layers[].type` | | Layer type, see below | | | +| `clips[].layers[].start` | | What time into the clip should this layer start | | sec | +| `clips[].layers[].stop` | | What time into the clip should this layer stop | | sec | +| `audioTracks[]` | | List of arbitrary audio tracks. See [audio tracks](#arbitrary-audio-tracks). | `[]` | | +| `audioFilePath` | `--audio-file-path` | Set an audio track for the whole video. See also [audio tracks](#arbitrary-audio-tracks) | | | +| `loopAudio` | `--loop-audio` | Loop the audio track if it is shorter than video? | `false` | | +| `keepSourceAudio` | `--keep-source-audio` | Keep source audio from `clips`? | `false` | | +| `clipsAudioVolume` | | Volume of audio from `clips` relative to `audioTracks`. See [audio tracks](#arbitrary-audio-tracks). | `1` | | +| `outputVolume` | `--output-volume` | Adjust output [volume](http://ffmpeg.org/ffmpeg-filters.html#volume) (final stage). See [example](https://github.com/mifi/editly/blob/master/examples/audio-volume.json5) | `1` | e.g. `0.5` or `10dB` | +| `audioNorm.enable` | | Enable audio normalization? See [audio normalization](#audio-normalization). | `false` | | +| `audioNorm.gaussSize` | | Audio normalization gauss size. See [audio normalization](#audio-normalization). | `5` | | +| `audioNorm.maxGain` | | Audio normalization max gain. See [audio normalization](#audio-normalization). | `30` | | ### Transition types @@ -218,31 +218,30 @@ See [examples](https://github.com/mifi/editly/tree/master/examples) and [commonF For video layers, if parent `clip.duration` is specified, the video will be slowed/sped-up to match `clip.duration`. If `cutFrom`/`cutTo` is set, the resulting segment (`cutTo`-`cutFrom`) will be slowed/sped-up to fit `clip.duration`. If the layer has audio, it will be kept (and mixed with other audio layers if present.) 
-| Parameter | Description | Default | | -|-|-|-|-| -| `path` | Path to video file | | | -| `resizeMode` | See [Resize modes](#resize-modes) | | | -| `cutFrom` | Time value to cut from | `0` | sec | -| `cutTo` | Time value to cut to | *end of video* | sec | -| `width` | Width relative to screen width | `1` | `0` to `1` | -| `height` | Height relative to screen height | `1` | `0` to `1` | -| `left` | X-position relative to screen width | `0` | `0` to `1` | -| `top` | Y-position relative to screen height | `0` | `0` to `1` | -| `originX` | X anchor | `left` | `left` or `right` | -| `originY` | Y anchor | `top` | `top` or `bottom` | -| `mixVolume` | Relative volume when mixing this video's audio track with others | `1` | | - +| Parameter | Description | Default | | +| ------------ | ---------------------------------------------------------------- | -------------- | ----------------- | +| `path` | Path to video file | | | +| `resizeMode` | See [Resize modes](#resize-modes) | | | +| `cutFrom` | Time value to cut from | `0` | sec | +| `cutTo` | Time value to cut to | _end of video_ | sec | +| `width` | Width relative to screen width | `1` | `0` to `1` | +| `height` | Height relative to screen height | `1` | `0` to `1` | +| `left` | X-position relative to screen width | `0` | `0` to `1` | +| `top` | Y-position relative to screen height | `0` | `0` to `1` | +| `originX` | X anchor | `left` | `left` or `right` | +| `originY` | Y anchor | `top` | `top` or `bottom` | +| `mixVolume` | Relative volume when mixing this video's audio track with others | `1` | | #### Layer type 'audio' Audio layers will be mixed together. If `cutFrom`/`cutTo` is set, the resulting segment (`cutTo`-`cutFrom`) will be slowed/sped-up to fit `clip.duration`. The slow down/speed-up operation is limited to values between `0.5x` and `100x`. -| Parameter | Description | Default | | -|-|-|-|-| -| `path` | Path to audio file | | | -| `cutFrom` | Time value to cut from | `0` | sec | -| `cutTo` | Time value to cut to | `clip.duration` | sec | -| `mixVolume` | Relative volume when mixing this audio track with others | `1` | | +| Parameter | Description | Default | | +| ----------- | -------------------------------------------------------- | --------------- | --- | +| `path` | Path to audio file | | | +| `cutFrom` | Time value to cut from | `0` | sec | +| `cutTo` | Time value to cut to | `clip.duration` | sec | +| `mixVolume` | Relative volume when mixing this audio track with others | `1` | | #### Layer type 'detached-audio' @@ -256,10 +255,10 @@ This is a special case of `audioTracks` that makes it easier to start the audio Full screen image -| Parameter | Description | Default | | -|-|-|-|-| -| `path` | Path to image file | | | -| `resizeMode` | See [Resize modes](#resize-modes) | | | +| Parameter | Description | Default | | +| ------------ | --------------------------------- | ------- | --- | +| `path` | Path to image file | | | +| `resizeMode` | See [Resize modes](#resize-modes) | | | See also See [Ken Burns parameters](#ken-burns-parameters). @@ -267,16 +266,17 @@ See also See [Ken Burns parameters](#ken-burns-parameters). Image overlay with a custom position and size on the screen. NOTE: If you want to use animated GIFs use `video` instead. 
-| Parameter | Description | Default | | -|-|-|-|-| -| `path` | Path to image file | | | -| `position` | See [Position parameter](#position-parameter) | | | -| `width` | Width (from 0 to 1) where 1 is screen width | | | -| `height` | Height (from 0 to 1) where 1 is screen height | | | +| Parameter | Description | Default | | +| ---------- | --------------------------------------------- | ------- | --- | +| `path` | Path to image file | | | +| `position` | See [Position parameter](#position-parameter) | | | +| `width` | Width (from 0 to 1) where 1 is screen width | | | +| `height` | Height (from 0 to 1) where 1 is screen height | | | See also [Ken Burns parameters](#ken-burns-parameters). #### Layer type 'title' + - `fontPath` - See `defaults.layer.fontPath` - `text` - Title text to show, keep it short - `textColor` - default `#ffffff` @@ -285,6 +285,7 @@ See also [Ken Burns parameters](#ken-burns-parameters). See also [Ken Burns parameters](#ken-burns-parameters) #### Layer type 'subtitle' + - `fontPath` - See `defaults.layer.fontPath` - `text` - Subtitle text to show - `textColor` - default `#ffffff` @@ -299,6 +300,7 @@ Title with background - `fontPath` - See type `title` #### Layer type 'news-title' + - `fontPath` - See `defaults.layer.fontPath` - `text` - `textColor` - default `#ffffff` @@ -306,6 +308,7 @@ Title with background - `position` - See [Position parameter](#position-parameter) #### Layer type 'slide-in-text' + - `fontPath` - See `defaults.layer.fontPath` - `text` - `fontSize` @@ -314,12 +317,15 @@ Title with background - `position` - See [Position parameter](#position-parameter) #### Layer type 'fill-color', 'pause' + - `color` - Color to fill background, default: randomize #### Layer type 'radial-gradient' + - `colors` - Array of two colors, default: randomize #### Layer type 'linear-gradient' + - `colors` - Array of two colors, default: randomize #### Layer type 'rainbow-colors' @@ -349,13 +355,13 @@ Loads a GLSL shader. See [gl.json5](examples/gl.json5) and [rainbow-colors.frag] `audioTracks` property can optionally contain a list of objects which specify audio tracks that can be started at arbitrary times in the final video. These tracks will be mixed together (`mixVolume` specifying a relative number for how loud each track is compared to the other tracks). Because audio from `clips` will be mixed separately from `audioTracks`, `clipsAudioVolume` specifies the volume of the **combined** audio from `clips` relative to the volume of **each of the audio tracks** from `audioTracks`. 
-| Parameter | Description | Default | | -|-|-|-|-| -| `audioTracks[].path` | File path for this track | | | -| `audioTracks[].mixVolume` | Relative volume for this track | `1` | | -| `audioTracks[].cutFrom` | Time value to cut source file **from** | `0` | sec | -| `audioTracks[].cutTo` | Time value to cut source file **to** | | sec | -| `audioTracks[].start` | How many seconds into video to start this audio track | `0` | sec | +| Parameter | Description | Default | | +| ------------------------- | ----------------------------------------------------- | ------- | --- | +| `audioTracks[].path` | File path for this track | | | +| `audioTracks[].mixVolume` | Relative volume for this track | `1` | | +| `audioTracks[].cutFrom` | Time value to cut source file **from** | `0` | sec | +| `audioTracks[].cutTo` | Time value to cut source file **to** | | sec | +| `audioTracks[].start` | How many seconds into video to start this audio track | `0` | sec | The difference between `audioTracks` and **Layer type 'audio'** is that `audioTracks` will continue to play across multiple `clips` and can start and stop whenever needed. @@ -374,6 +380,7 @@ You can enable audio normalization of the final output audio. This is useful if ### Resize modes `resizeMode` - How to fit image to screen. Can be one of: + - `contain` - All the video will be contained within the frame and letterboxed - `contain-blur` - Like `contain`, but with a blurred copy as the letterbox - `cover` - Video be cropped to cover the whole screen (aspect ratio preserved) @@ -382,6 +389,7 @@ You can enable audio normalization of the final output audio. This is useful if Default `contain-blur`. See: + - [image.json5](examples/image.json5) - [videos.json5](examples/videos.json5) @@ -390,17 +398,18 @@ See: Certain layers support the position parameter `position` can be one of either: - - `top`, `bottom` `center`, `top-left`, `top-right`, `center-left`, `center-right`, `bottom-left`, `bottom-right` - - An object `{ x, y, originX = 'left', originY = 'top' }`, where `{ x: 0, y: 0 }` is the upper left corner of the screen, and `{ x: 1, y: 1 }` is the lower right corner, `x` is relative to video width, `y` to video height. `originX` and `originY` are optional, and specify the position's origin (anchor position) of the object. + +- `top`, `bottom` `center`, `top-left`, `top-right`, `center-left`, `center-right`, `bottom-left`, `bottom-right` +- An object `{ x, y, originX = 'left', originY = 'top' }`, where `{ x: 0, y: 0 }` is the upper left corner of the screen, and `{ x: 1, y: 1 }` is the lower right corner, `x` is relative to video width, `y` to video height. `originX` and `originY` are optional, and specify the position's origin (anchor position) of the object. 
See [position.json5](examples/position.json5) ### Ken Burns parameters -| Parameter | Description | Default | | -|-|-|-|-| -| `zoomDirection` | Zoom direction for Ken Burns effect: `in`, `out`, `left`, `right` or `null` to disable | | | -| `zoomAmount` | Zoom amount for Ken Burns effect | `0.1` | | +| Parameter | Description | Default | | +| --------------- | -------------------------------------------------------------------------------------- | ------- | --- | +| `zoomDirection` | Zoom direction for Ken Burns effect: `in`, `out`, `left`, `right` or `null` to disable | | | +| `zoomAmount` | Zoom amount for Ken Burns effect | `0.1` | | ## Docker diff --git a/eslint.config.mjs b/eslint.config.mjs index b3749a7e..584570fb 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -1,10 +1,12 @@ // @ts-check -import eslint from '@eslint/js'; -import tseslint from 'typescript-eslint'; +import eslint from "@eslint/js"; +import eslintConfigPrettier from "eslint-config-prettier"; +import tseslint from "typescript-eslint"; export default tseslint.config( eslint.configs.recommended, tseslint.configs.recommended, + eslintConfigPrettier, { ignores: ["dist/"] }, ); diff --git a/examples/README.md b/examples/README.md index 667c17d6..dc035af1 100644 --- a/examples/README.md +++ b/examples/README.md @@ -63,7 +63,6 @@ git clone https://github.com/mifi/editly-assets.git assets - [audio1.json5](https://github.com/mifi/editly/blob/master/examples/audio1.json5) - [audio2.json5](https://github.com/mifi/editly/blob/master/examples/audio2.json5) - ## Custom HTML5 canvas Javascript ![](https://github.com/mifi/gifs/raw/master/customCanvas.gif) diff --git a/examples/alpha.json5 b/examples/alpha.json5 index adf004a6..dccf4741 100644 --- a/examples/alpha.json5 +++ b/examples/alpha.json5 @@ -1,14 +1,25 @@ { // enableFfmpegLog: true, - outPath: './alpha.mp4', + outPath: "./alpha.mp4", clips: [ - { duration: 2, layers: [ - { type: 'video', path: './assets/lofoten.mp4', cutFrom: 0.4, cutTo: 2 }, - { type: 'video', path: './assets/dancer1.webm', resizeMode: 'contain', cutFrom: 0, cutTo: 6 }, - ] }, - { layers: [ - { type: 'video', path: './assets/lofoten.mp4', cutFrom: 0.4, cutTo: 2 }, - { type: 'video', path: './assets/dancer1.webm', resizeMode: 'contain' }, - ] }, + { + duration: 2, + layers: [ + { type: "video", path: "./assets/lofoten.mp4", cutFrom: 0.4, cutTo: 2 }, + { + type: "video", + path: "./assets/dancer1.webm", + resizeMode: "contain", + cutFrom: 0, + cutTo: 6, + }, + ], + }, + { + layers: [ + { type: "video", path: "./assets/lofoten.mp4", cutFrom: 0.4, cutTo: 2 }, + { type: "video", path: "./assets/dancer1.webm", resizeMode: "contain" }, + ], + }, ], } diff --git a/examples/audio-transition.json5 b/examples/audio-transition.json5 index 873683d8..64265c27 100644 --- a/examples/audio-transition.json5 +++ b/examples/audio-transition.json5 @@ -1,48 +1,75 @@ { // enableFfmpegLog: true, - outPath: './audio-transition.mp4', + outPath: "./audio-transition.mp4", keepSourceAudio: true, defaults: { duration: 3, - transition: { duration: 1, name: 'directional' }, - layer: { fontPath: './assets/Patua_One/PatuaOne-Regular.ttf' }, + transition: { duration: 1, name: "directional" }, + layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, }, clips: [ - { layers: [ - { type: 'title-background', text: 'Default transition' }, - { type: 'audio', path: './assets/sample1.m4a' } - ] }, - { transition: { duration: 0.2 }, layers: [ - { type: 'title-background', text: 'Fast transition' }, - { type: 'audio', path: 
'./assets/sample2.m4a' } - ] }, - { transition: { duration: 0 }, layers: [ - { type: 'title-background', text: 'No transition' }, - { type: 'audio', path: './assets/sample1.m4a' } - ] }, - { transition: { audioInCurve: 'exp', audioOutCurve: 'exp' }, layers: [ - { type: 'title-background', text: 'Exp curve' }, - { type: 'audio', path: './assets/sample2.m4a' } - ] }, - { transition: { name: 'dummy' }, layers: [ - { type: 'title-background', text: 'Dummy' }, - { type: 'audio', path: './assets/sample1.m4a' } - ] }, - { transition: { duration: 2 }, layers: [ - { type: 'title-background', text: 'Too short' }, - { type: 'audio', path: './assets/sample2.m4a' } - ] }, - { duration: 1, transition: { duration: 2 }, layers: [ - { type: 'title-background', text: 'Too short' }, - { type: 'audio', path: './assets/sample2.m4a' } - ] }, - { duration: 1, transition: { duration: 2 }, layers: [ - { type: 'title-background', text: 'Too short' }, - { type: 'audio', path: './assets/sample2.m4a' } - ] }, - { layers: [ - { type: 'title-background', text: 'THE END' }, - { type: 'audio', path: './assets/sample2.m4a' } - ] }, + { + layers: [ + { type: "title-background", text: "Default transition" }, + { type: "audio", path: "./assets/sample1.m4a" }, + ], + }, + { + transition: { duration: 0.2 }, + layers: [ + { type: "title-background", text: "Fast transition" }, + { type: "audio", path: "./assets/sample2.m4a" }, + ], + }, + { + transition: { duration: 0 }, + layers: [ + { type: "title-background", text: "No transition" }, + { type: "audio", path: "./assets/sample1.m4a" }, + ], + }, + { + transition: { audioInCurve: "exp", audioOutCurve: "exp" }, + layers: [ + { type: "title-background", text: "Exp curve" }, + { type: "audio", path: "./assets/sample2.m4a" }, + ], + }, + { + transition: { name: "dummy" }, + layers: [ + { type: "title-background", text: "Dummy" }, + { type: "audio", path: "./assets/sample1.m4a" }, + ], + }, + { + transition: { duration: 2 }, + layers: [ + { type: "title-background", text: "Too short" }, + { type: "audio", path: "./assets/sample2.m4a" }, + ], + }, + { + duration: 1, + transition: { duration: 2 }, + layers: [ + { type: "title-background", text: "Too short" }, + { type: "audio", path: "./assets/sample2.m4a" }, + ], + }, + { + duration: 1, + transition: { duration: 2 }, + layers: [ + { type: "title-background", text: "Too short" }, + { type: "audio", path: "./assets/sample2.m4a" }, + ], + }, + { + layers: [ + { type: "title-background", text: "THE END" }, + { type: "audio", path: "./assets/sample2.m4a" }, + ], + }, ], } diff --git a/examples/audio-volume.json5 b/examples/audio-volume.json5 index 032d5267..61288a54 100644 --- a/examples/audio-volume.json5 +++ b/examples/audio-volume.json5 @@ -1,11 +1,13 @@ { - outPath: './audio-volume.mp4', - width: 200, height: 200, - clips: [ - { duration: 2, layers: [{ type: 'title-background', text: 'Audio output volume' }] }, - ], + outPath: "./audio-volume.mp4", + width: 200, + height: 200, + clips: [{ duration: 2, layers: [{ type: "title-background", text: "Audio output volume" }] }], audioTracks: [ - { path: './assets/High [NCS Release] - JPB (No Copyright Music)-R8ZRCXy5vhA.m4a', cutFrom: 18 }, + { + path: "./assets/High [NCS Release] - JPB (No Copyright Music)-R8ZRCXy5vhA.m4a", + cutFrom: 18, + }, ], - outputVolume: '-10dB', -} \ No newline at end of file + outputVolume: "-10dB", +} diff --git a/examples/audio1.json5 b/examples/audio1.json5 index 40c392b6..f97ada2c 100644 --- a/examples/audio1.json5 +++ b/examples/audio1.json5 @@ -1,26 +1,57 @@ { 
// enableFfmpegLog: true, - outPath: './audio1.mp4', + outPath: "./audio1.mp4", keepSourceAudio: true, defaults: { transition: null, - layer: { fontPath: './assets/Patua_One/PatuaOne-Regular.ttf' }, + layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, }, clips: [ - { duration: 0.5, layers: [{ type: 'video', path: './assets/lofoten.mp4', cutFrom: 0.4, cutTo: 2 }] }, + { + duration: 0.5, + layers: [{ type: "video", path: "./assets/lofoten.mp4", cutFrom: 0.4, cutTo: 2 }], + }, - { layers: [ - { type: 'title-background', text: 'test' }, - { type: 'audio', path: './assets/High [NCS Release] - JPB (No Copyright Music)-R8ZRCXy5vhA.m4a', cutFrom: 2, cutTo: 5 }] }, + { + layers: [ + { type: "title-background", text: "test" }, + { + type: "audio", + path: "./assets/High [NCS Release] - JPB (No Copyright Music)-R8ZRCXy5vhA.m4a", + cutFrom: 2, + cutTo: 5, + }, + ], + }, - { layers: [ - { type: 'video', path: './assets/lofoten.mp4', cutFrom: 0, cutTo: 2, mixVolume: 0 }, - { type: 'audio', path: './assets/High [NCS Release] - JPB (No Copyright Music)-R8ZRCXy5vhA.m4a', mixVolume: 0.1 }] }, + { + layers: [ + { type: "video", path: "./assets/lofoten.mp4", cutFrom: 0, cutTo: 2, mixVolume: 0 }, + { + type: "audio", + path: "./assets/High [NCS Release] - JPB (No Copyright Music)-R8ZRCXy5vhA.m4a", + mixVolume: 0.1, + }, + ], + }, - { duration: 2, layers: [ - { type: 'video', path: './assets/lofoten.mp4', cutFrom: 0.4, cutTo: 2 }, - { type: 'audio', path: './assets/High [NCS Release] - JPB (No Copyright Music)-R8ZRCXy5vhA.m4a', cutFrom: 2, cutTo: 3, mixVolume: 0.5 }] }, + { + duration: 2, + layers: [ + { type: "video", path: "./assets/lofoten.mp4", cutFrom: 0.4, cutTo: 2 }, + { + type: "audio", + path: "./assets/High [NCS Release] - JPB (No Copyright Music)-R8ZRCXy5vhA.m4a", + cutFrom: 2, + cutTo: 3, + mixVolume: 0.5, + }, + ], + }, - { duration: 1.8, layers: [{ type: 'video', path: './assets/lofoten.mp4', cutFrom: 1, cutTo: 2 }] }, + { + duration: 1.8, + layers: [{ type: "video", path: "./assets/lofoten.mp4", cutFrom: 1, cutTo: 2 }], + }, ], } diff --git a/examples/audio2.json5 b/examples/audio2.json5 index bd8613cc..f61fae24 100644 --- a/examples/audio2.json5 +++ b/examples/audio2.json5 @@ -1,19 +1,23 @@ { // enableFfmpegLog: true, - outPath: './audio2.mp4', - width: 200, height: 200, + outPath: "./audio2.mp4", + width: 200, + height: 200, defaults: { - layer: { fontPath: './assets/Patua_One/PatuaOne-Regular.ttf' }, + layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, }, clips: [ - { layers: [{ type: 'video', path: './assets/lofoten.mp4', cutFrom: 1, cutTo: 2 }] }, - { duration: 15, layers: { type: 'title-background', text: 'Audio track' } }, + { layers: [{ type: "video", path: "./assets/lofoten.mp4", cutFrom: 1, cutTo: 2 }] }, + { duration: 15, layers: { type: "title-background", text: "Audio track" } }, ], audioNorm: { enable: true, gaussSize: 3, maxGain: 100 }, clipsAudioVolume: 50, audioTracks: [ - { path: './assets/High [NCS Release] - JPB (No Copyright Music)-R8ZRCXy5vhA.m4a', cutFrom: 18 }, - { path: './assets/winxp.mp3', mixVolume: 10, cutFrom: 1, cutTo: 2, start: 2 }, - { path: './assets/Julen_ribas.m4a', mixVolume: 50, cutTo: 7, start: 5 }, + { + path: "./assets/High [NCS Release] - JPB (No Copyright Music)-R8ZRCXy5vhA.m4a", + cutFrom: 18, + }, + { path: "./assets/winxp.mp3", mixVolume: 10, cutFrom: 1, cutTo: 2, start: 2 }, + { path: "./assets/Julen_ribas.m4a", mixVolume: 50, cutTo: 7, start: 5 }, ], } diff --git a/examples/audio3.json5 b/examples/audio3.json5 index 
600ec410..d1269439 100644 --- a/examples/audio3.json5 +++ b/examples/audio3.json5 @@ -1,18 +1,45 @@ { - outPath: './audio3.mp4', - width: 200, height: 200, + outPath: "./audio3.mp4", + width: 200, + height: 200, defaults: { - layer: { fontPath: './assets/Patua_One/PatuaOne-Regular.ttf' }, + layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, }, clips: [ - { layers: [{ type: 'video', path: './assets/lofoten.mp4', cutTo: 2 }, { type: 'title', text: 'Arbitrary audio' }] }, - { duration: 3, layers: [{ type: 'title-background', text: 'Voice starts in 1 sec' }, { type: 'detached-audio', path: './assets/Julen_ribas.m4a', mixVolume: 50, cutFrom: 2, start: 1 }] }, - { duration: 1, layers: [{ type: 'title-background', text: 'Voice continues over clip 2' }] }, - { duration: 3, layers: [{ type: 'title-background', text: 'Voice continues over clip 3' }] }, - { duration: 2, layers: [{ type: 'title-background', text: 'XP sound starts' }, { type: 'detached-audio', path: './assets/winxp.mp3', mixVolume: 10, cutFrom: 0.5 }] }, + { + layers: [ + { type: "video", path: "./assets/lofoten.mp4", cutTo: 2 }, + { type: "title", text: "Arbitrary audio" }, + ], + }, + { + duration: 3, + layers: [ + { type: "title-background", text: "Voice starts in 1 sec" }, + { + type: "detached-audio", + path: "./assets/Julen_ribas.m4a", + mixVolume: 50, + cutFrom: 2, + start: 1, + }, + ], + }, + { duration: 1, layers: [{ type: "title-background", text: "Voice continues over clip 2" }] }, + { duration: 3, layers: [{ type: "title-background", text: "Voice continues over clip 3" }] }, + { + duration: 2, + layers: [ + { type: "title-background", text: "XP sound starts" }, + { type: "detached-audio", path: "./assets/winxp.mp3", mixVolume: 10, cutFrom: 0.5 }, + ], + }, ], audioNorm: { enable: true, gaussSize: 3, maxGain: 100 }, audioTracks: [ - { path: './assets/High [NCS Release] - JPB (No Copyright Music)-R8ZRCXy5vhA.m4a', cutFrom: 18 }, + { + path: "./assets/High [NCS Release] - JPB (No Copyright Music)-R8ZRCXy5vhA.m4a", + cutFrom: 18, + }, ], } diff --git a/examples/audioLoop.json5 b/examples/audioLoop.json5 index 158868fb..d7bd4a98 100644 --- a/examples/audioLoop.json5 +++ b/examples/audioLoop.json5 @@ -1,14 +1,13 @@ { - outPath: './audioLoop.mp4', - width: 200, height: 200, - audioFilePath: './assets/winxp.mp3', + outPath: "./audioLoop.mp4", + width: 200, + height: 200, + audioFilePath: "./assets/winxp.mp3", loopAudio: true, // Should properly cut off and not crash with EPIPE if loopAudio=false and audio duration is shorter than total duration // loopAudio: false, defaults: { - layer: { fontPath: './assets/Patua_One/PatuaOne-Regular.ttf' }, + layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, }, - clips: [ - { duration: 10, layers: [{ type: 'title-background', text: 'Looping audio!' }] }, - ], + clips: [{ duration: 10, layers: [{ type: "title-background", text: "Looping audio!" 
}] }], } diff --git a/examples/commonFeatures.json5 b/examples/commonFeatures.json5 index cfc28f5c..382f341b 100644 --- a/examples/commonFeatures.json5 +++ b/examples/commonFeatures.json5 @@ -1,47 +1,162 @@ { // width: 2166, height: 1650, fps: 30, - width: 720, height: 1280, fps: 30, - outPath: './commonFeatures.mp4', + width: 720, + height: 1280, + fps: 30, + outPath: "./commonFeatures.mp4", // outPath: './commonFeatures.gif', - audioFilePath: './assets/High [NCS Release] - JPB (No Copyright Music)-R8ZRCXy5vhA.m4a', + audioFilePath: "./assets/High [NCS Release] - JPB (No Copyright Music)-R8ZRCXy5vhA.m4a", defaults: { - transition: { name: 'random' }, - layer: { fontPath: './assets/Patua_One/PatuaOne-Regular.ttf' }, + transition: { name: "random" }, + layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, }, clips: [ - { duration: 3, transition: { name: 'directional-left' }, layers: [{ type: 'title-background', text: 'EDITLY\nVideo editing framework', background: { type: 'linear-gradient', colors: ['#02aab0', '#00cdac'] } }] }, - { duration: 4, transition: { name: 'dreamyzoom' }, layers: [{ type: 'title-background', text: 'Multi-line text with animated linear or radial gradients', background: { type: 'radial-gradient' } }] }, - { duration: 3, transition: { name: 'directional-right' }, layers: [{ type: 'rainbow-colors' }, { type: 'title', text: 'Colorful backgrounds' }] }, - { duration: 3, layers: [{ type: 'pause' }, { type: 'title', text: 'and separators' }] }, + { + duration: 3, + transition: { name: "directional-left" }, + layers: [ + { + type: "title-background", + text: "EDITLY\nVideo editing framework", + background: { type: "linear-gradient", colors: ["#02aab0", "#00cdac"] }, + }, + ], + }, + { + duration: 4, + transition: { name: "dreamyzoom" }, + layers: [ + { + type: "title-background", + text: "Multi-line text with animated linear or radial gradients", + background: { type: "radial-gradient" }, + }, + ], + }, + { + duration: 3, + transition: { name: "directional-right" }, + layers: [{ type: "rainbow-colors" }, { type: "title", text: "Colorful backgrounds" }], + }, + { duration: 3, layers: [{ type: "pause" }, { type: "title", text: "and separators" }] }, - { duration: 3, transition: { name: 'fadegrayscale' }, layers: [{ type: 'title-background', text: 'Image slideshows with Ken Burns effect', background: { type: 'linear-gradient' } }] }, - { duration: 2.5, transition: { name: 'directionalWarp' }, layers: [{ type: 'image', path: './assets/vertical.jpg', zoomDirection: 'out' }] }, - { duration: 3, transition: { name: 'dreamyzoom' }, layers: [{ type: 'image', path: './assets/img1.jpg', duration: 2.5, zoomDirection: 'in' }, { type: 'subtitle', text: 'Indonesia has many spectacular locations. Here is the volcano Kelimutu, which has three lakes in its core, some days with three different colors!' 
}, { type: 'title', position: 'top', text: 'With text' }] }, - { duration: 3, transition: { name: 'colorphase' }, layers: [{ type: 'image', path: './assets/img2.jpg', zoomDirection: 'out' }, { type: 'subtitle', text: 'Komodo national park is the only home of the endangered Komodo dragons' }] }, - { duration: 2.5, transition: { name: 'simplezoom' }, layers: [{ type: 'image', path: './assets/img3.jpg', zoomDirection: 'in' }] }, + { + duration: 3, + transition: { name: "fadegrayscale" }, + layers: [ + { + type: "title-background", + text: "Image slideshows with Ken Burns effect", + background: { type: "linear-gradient" }, + }, + ], + }, + { + duration: 2.5, + transition: { name: "directionalWarp" }, + layers: [{ type: "image", path: "./assets/vertical.jpg", zoomDirection: "out" }], + }, + { + duration: 3, + transition: { name: "dreamyzoom" }, + layers: [ + { type: "image", path: "./assets/img1.jpg", duration: 2.5, zoomDirection: "in" }, + { + type: "subtitle", + text: "Indonesia has many spectacular locations. Here is the volcano Kelimutu, which has three lakes in its core, some days with three different colors!", + }, + { type: "title", position: "top", text: "With text" }, + ], + }, + { + duration: 3, + transition: { name: "colorphase" }, + layers: [ + { type: "image", path: "./assets/img2.jpg", zoomDirection: "out" }, + { + type: "subtitle", + text: "Komodo national park is the only home of the endangered Komodo dragons", + }, + ], + }, + { + duration: 2.5, + transition: { name: "simplezoom" }, + layers: [{ type: "image", path: "./assets/img3.jpg", zoomDirection: "in" }], + }, - { duration: 1.5, transition: { name: 'crosszoom', duration: 0.3 }, layers: [{ type: 'video', path: 'assets/kohlipe1.mp4', cutTo: 58 }, { type: 'title', text: 'Videos' }] }, - { duration: 3, transition: { name: 'fade' }, layers: [{ type: 'video', path: 'assets/kohlipe1.mp4', cutFrom: 58 }] }, - { transition: { name: 'fade' }, layers: [{ type: 'video', path: 'assets/kohlipe2.mp4', cutTo: 2.5 }] }, - { duration: 1.5, layers: [{ type: 'video', path: 'assets/kohlipe3.mp4', cutFrom: 3, cutTo: 30 }] }, + { + duration: 1.5, + transition: { name: "crosszoom", duration: 0.3 }, + layers: [ + { type: "video", path: "assets/kohlipe1.mp4", cutTo: 58 }, + { type: "title", text: "Videos" }, + ], + }, + { + duration: 3, + transition: { name: "fade" }, + layers: [{ type: "video", path: "assets/kohlipe1.mp4", cutFrom: 58 }], + }, + { + transition: { name: "fade" }, + layers: [{ type: "video", path: "assets/kohlipe2.mp4", cutTo: 2.5 }], + }, + { + duration: 1.5, + layers: [{ type: "video", path: "assets/kohlipe3.mp4", cutFrom: 3, cutTo: 30 }], + }, - { duration: 3, transition: { name: 'crosszoom' }, layers: [{ type: 'gl', fragmentPath: './assets/shaders/3l23Rh.frag' }, { type: 'title', text: 'OpenGL\nshaders' }] }, - { duration: 3, layers: [{ type: 'gl', fragmentPath: './assets/shaders/MdXyzX.frag' }] }, - { duration: 3, layers: [{ type: 'gl', fragmentPath: './assets/shaders/30daysofshade_010.frag' }] }, - { duration: 3, layers: [{ type: 'gl', fragmentPath: './assets/shaders/wd2yDm.frag', speed: 5 }] }, + { + duration: 3, + transition: { name: "crosszoom" }, + layers: [ + { type: "gl", fragmentPath: "./assets/shaders/3l23Rh.frag" }, + { type: "title", text: "OpenGL\nshaders" }, + ], + }, + { duration: 3, layers: [{ type: "gl", fragmentPath: "./assets/shaders/MdXyzX.frag" }] }, + { + duration: 3, + layers: [{ type: "gl", fragmentPath: "./assets/shaders/30daysofshade_010.frag" }], + }, + { + duration: 3, + layers: [{ type: "gl", 
fragmentPath: "./assets/shaders/wd2yDm.frag", speed: 5 }], + }, - { duration: 3, layers: [ - { type: 'image', path: './assets/91083241_573589476840991_4224678072281051330_n.jpg' }, - { type: 'news-title', text: 'BREAKING NEWS' }, - { type: 'subtitle', text: 'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur.', backgroundColor: 'rgba(0,0,0,0.5)' } - ] }, + { + duration: 3, + layers: [ + { type: "image", path: "./assets/91083241_573589476840991_4224678072281051330_n.jpg" }, + { type: "news-title", text: "BREAKING NEWS" }, + { + type: "subtitle", + text: "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur.", + backgroundColor: "rgba(0,0,0,0.5)", + }, + ], + }, - { duration: 3, layers: [ - { type: 'rainbow-colors' }, - { type: 'video', path: './assets/tungestolen.mp4', resizeMode: 'contain', width: 0.4, height: 0.4, top: 0.05, left: 0.95, originY: 'top', originX: 'right' }, - { type: 'title', position: 'bottom', text: 'Picture-in-Picture' } - ] }, + { + duration: 3, + layers: [ + { type: "rainbow-colors" }, + { + type: "video", + path: "./assets/tungestolen.mp4", + resizeMode: "contain", + width: 0.4, + height: 0.4, + top: 0.05, + left: 0.95, + originY: "top", + originX: "right", + }, + { type: "title", position: "bottom", text: "Picture-in-Picture" }, + ], + }, - { duration: 3, layers: [{ type: 'editly-banner' }] }, + { duration: 3, layers: [{ type: "editly-banner" }] }, ], } diff --git a/examples/contain-blur.json5 b/examples/contain-blur.json5 index 13e2d552..6c4ed10f 100644 --- a/examples/contain-blur.json5 +++ b/examples/contain-blur.json5 @@ -1,11 +1,19 @@ { - width: 3000, height: 2000, fps: 15, - outPath: './contain-blur.mp4', + width: 3000, + height: 2000, + fps: 15, + outPath: "./contain-blur.mp4", defaults: { transition: null, }, clips: [ - { duration: 0.3, layers: [{ type: 'image', path: './assets/vertical.jpg', zoomDirection: null }] }, - { duration: 0.5, layers: [{ type: 'video', path: './assets/IMG_1884.MOV', cutFrom: 0, cutTo: 2 }] }, + { + duration: 0.3, + layers: [{ type: "image", path: "./assets/vertical.jpg", zoomDirection: null }], + }, + { + duration: 0.5, + layers: [{ type: "video", path: "./assets/IMG_1884.MOV", cutFrom: 0, cutTo: 2 }], + }, ], -} \ No newline at end of file +} diff --git a/examples/customCanvas.ts b/examples/customCanvas.ts index 6ed0e9a9..b83a030d 100644 --- a/examples/customCanvas.ts +++ b/examples/customCanvas.ts @@ -1,40 +1,37 @@ -import editly from 'editly'; -import type { CustomCanvasFunctionArgs, CustomCanvasFunctionCallbacks } from 'editly'; +import type { CustomCanvasFunctionArgs, CustomCanvasFunctionCallbacks } from "editly"; +import editly from "editly"; function func({ canvas }: CustomCanvasFunctionArgs): CustomCanvasFunctionCallbacks { return { async onRender(progress) { - const context = canvas.getContext('2d'); + const context = canvas.getContext("2d"); const centerX = canvas.width / 2; const centerY = canvas.height / 2; const radius = 40 * (1 + progress * 0.5); context.beginPath(); 
context.arc(centerX, centerY, radius, 0, 2 * Math.PI, false); - context.fillStyle = 'hsl(350, 100%, 37%)'; + context.fillStyle = "hsl(350, 100%, 37%)"; context.fill(); context.lineWidth = 5; - context.strokeStyle = '#ffffff'; + context.strokeStyle = "#ffffff"; context.stroke(); }, onClose() { // Cleanup if you initialized anything - } + }, }; } await editly({ // fast: true, // outPath: './customCanvas.mp4', - outPath: './customCanvas.gif', + outPath: "./customCanvas.gif", clips: [ { duration: 2, - layers: [ - { type: 'rainbow-colors' }, - { type: 'canvas', func }, - ] + layers: [{ type: "rainbow-colors" }, { type: "canvas", func }], }, ], }); diff --git a/examples/customFabric.ts b/examples/customFabric.ts index 58228cfc..b01117f3 100644 --- a/examples/customFabric.ts +++ b/examples/customFabric.ts @@ -1,23 +1,23 @@ -import editly from 'editly'; -import type { CustomFabricFunctionArgs, CustomFabricFunctionCallbacks } from 'editly'; -import { registerFont } from 'canvas'; +import { registerFont } from "canvas"; +import type { CustomFabricFunctionArgs, CustomFabricFunctionCallbacks } from "editly"; +import editly from "editly"; -registerFont('./assets/Patua_One/PatuaOne-Regular.ttf', { family: 'Patua One' }); +registerFont("./assets/Patua_One/PatuaOne-Regular.ttf", { family: "Patua One" }); function func({ width, height, fabric }: CustomFabricFunctionArgs): CustomFabricFunctionCallbacks { return { async onRender(progress, canvas) { - canvas.backgroundColor = 'hsl(33, 100%, 50%)'; + canvas.backgroundColor = "hsl(33, 100%, 50%)"; const text = new fabric.FabricText(`PROGRESS\n${Math.floor(progress * 100)}%`, { - originX: 'center', - originY: 'center', + originX: "center", + originY: "center", left: width / 2, top: (height / 2) * (1 + (progress * 0.1 - 0.05)), fontSize: 20, - fontFamily: 'Patua One', - textAlign: 'center', - fill: 'white', + fontFamily: "Patua One", + textAlign: "center", + fill: "white", }); canvas.add(text); @@ -25,15 +25,13 @@ function func({ width, height, fabric }: CustomFabricFunctionArgs): CustomFabric onClose() { // Cleanup if you initialized anything - } + }, }; } await editly({ // fast: true, - outPath: './customFabric.gif', + outPath: "./customFabric.gif", // outPath: './customFabric.mp4', - clips: [ - { duration: 2, layers: [{ type: 'fabric', func }] }, - ], + clips: [{ duration: 2, layers: [{ type: "fabric", func }] }], }); diff --git a/examples/customOutputArgs.json5 b/examples/customOutputArgs.json5 index 8c472624..04efd51f 100644 --- a/examples/customOutputArgs.json5 +++ b/examples/customOutputArgs.json5 @@ -1,7 +1,5 @@ { - outPath: './customOutputArgs.webp', - clips: [ - { duration: 2, layers: [{ type: 'title-background', text: 'Custom output args' }] }, - ], - customOutputArgs: ['-compression_level', '5', '-qscale', '60', '-vcodec', 'libwebp'], -} \ No newline at end of file + outPath: "./customOutputArgs.webp", + clips: [{ duration: 2, layers: [{ type: "title-background", text: "Custom output args" }] }], + customOutputArgs: ["-compression_level", "5", "-qscale", "60", "-vcodec", "libwebp"], +} diff --git a/examples/fabricImagePostProcessing.ts b/examples/fabricImagePostProcessing.ts index 2af3dc6c..22f0aa0f 100644 --- a/examples/fabricImagePostProcessing.ts +++ b/examples/fabricImagePostProcessing.ts @@ -1,42 +1,45 @@ -import editly from 'editly'; +import editly from "editly"; // See https://github.com/mifi/editly/pull/222 await editly({ - outPath: './fabricImagePostProcessing.mp4', - clips: [{ - duration: 4, - layers: [ - { type: 'video', path: 
'./assets/kohlipe1.mp4', cutFrom: 0, cutTo: 4 }, - { - type: 'video', - path: './assets/kohlipe2.mp4', - cutFrom: 0, - cutTo: 4, - resizeMode: 'cover', - originX: 'center', - originY: 'center', - left: 0.5, - top: 0.5, - width: 0.5, - height: 0.5, - fabricImagePostProcessing: async ({ image, fabric, canvas }) => { - const circleArgs: ConstructorParameters[0] = { - radius: Math.min(image.width, image.height) * 0.4, - originX: 'center', - originY: 'center', - stroke: 'white', - strokeWidth: 22, - }; - image.set({ clipPath: new fabric.Circle(circleArgs) }); - canvas.add(new fabric.Circle({ - ...circleArgs, - left: image.getCenterPoint().x, - top: image.getCenterPoint().y, - })); + outPath: "./fabricImagePostProcessing.mp4", + clips: [ + { + duration: 4, + layers: [ + { type: "video", path: "./assets/kohlipe1.mp4", cutFrom: 0, cutTo: 4 }, + { + type: "video", + path: "./assets/kohlipe2.mp4", + cutFrom: 0, + cutTo: 4, + resizeMode: "cover", + originX: "center", + originY: "center", + left: 0.5, + top: 0.5, + width: 0.5, + height: 0.5, + fabricImagePostProcessing: async ({ image, fabric, canvas }) => { + const circleArgs: ConstructorParameters[0] = { + radius: Math.min(image.width, image.height) * 0.4, + originX: "center", + originY: "center", + stroke: "white", + strokeWidth: 22, + }; + image.set({ clipPath: new fabric.Circle(circleArgs) }); + canvas.add( + new fabric.Circle({ + ...circleArgs, + left: image.getCenterPoint().x, + top: image.getCenterPoint().y, + }), + ); + }, }, - }, - ] - }, + ], + }, ], }); diff --git a/examples/gl.json5 b/examples/gl.json5 index 9186a21a..6cee1543 100644 --- a/examples/gl.json5 +++ b/examples/gl.json5 @@ -1,10 +1,23 @@ { - outPath: './gl.mp4', + outPath: "./gl.mp4", clips: [ - { transition: null, duration: 3, layers: [{ type: 'gl', fragmentPath: './assets/shaders/3l23Rh.frag' }] }, - { duration: 3, layers: [{ type: 'gl', fragmentPath: './assets/shaders/MdXyzX.frag' }] }, - { duration: 3, layers: [{ type: 'gl', fragmentPath: './assets/shaders/30daysofshade_010.frag', speed: 1 }] }, - { duration: 3, layers: [{ type: 'gl', fragmentPath: './assets/shaders/rainbow-background.frag' }] }, - { duration: 3, layers: [{ type: 'gl', fragmentPath: './assets/shaders/wd2yDm.frag', speed: 5 }] }, + { + transition: null, + duration: 3, + layers: [{ type: "gl", fragmentPath: "./assets/shaders/3l23Rh.frag" }], + }, + { duration: 3, layers: [{ type: "gl", fragmentPath: "./assets/shaders/MdXyzX.frag" }] }, + { + duration: 3, + layers: [{ type: "gl", fragmentPath: "./assets/shaders/30daysofshade_010.frag", speed: 1 }], + }, + { + duration: 3, + layers: [{ type: "gl", fragmentPath: "./assets/shaders/rainbow-background.frag" }], + }, + { + duration: 3, + layers: [{ type: "gl", fragmentPath: "./assets/shaders/wd2yDm.frag", speed: 5 }], + }, ], -} \ No newline at end of file +} diff --git a/examples/gradients.json5 b/examples/gradients.json5 index 676e2cd2..00d5f3ae 100644 --- a/examples/gradients.json5 +++ b/examples/gradients.json5 @@ -1,12 +1,12 @@ { - outPath: './gradients.mp4', + outPath: "./gradients.mp4", defaults: { - transition: { name: 'linearblur', duration: 0.1 }, + transition: { name: "linearblur", duration: 0.1 }, }, clips: [ - { duration: 1, layers: [{ type: 'linear-gradient', colors: ['#02aab0', '#00cdac'] }] }, - { duration: 1, layers: [{ type: 'radial-gradient', colors: ['#b002aa', '#ac00cd'] }] }, - { duration: 1, layers: [{ type: 'linear-gradient' }] }, - { duration: 1, layers: [{ type: 'radial-gradient' }] }, + { duration: 1, layers: [{ type: "linear-gradient", 
colors: ["#02aab0", "#00cdac"] }] }, + { duration: 1, layers: [{ type: "radial-gradient", colors: ["#b002aa", "#ac00cd"] }] }, + { duration: 1, layers: [{ type: "linear-gradient" }] }, + { duration: 1, layers: [{ type: "radial-gradient" }] }, ], } diff --git a/examples/image.json5 b/examples/image.json5 index d040e26a..44097b98 100644 --- a/examples/image.json5 +++ b/examples/image.json5 @@ -1,19 +1,29 @@ { width: 600, height: 300, - outPath: './image.mp4', + outPath: "./image.mp4", defaults: { transition: null, duration: 0.2, }, clips: [ - { layers: [{ type: 'image', path: './assets/pano.jpg' }] }, - { layers: [{ type: 'image', path: './assets/vertical.jpg' }] }, - { layers: [{ type: 'fill-color', color: 'white' }, { type: 'image', path: './assets/pano.jpg', resizeMode: 'contain' }] }, - { layers: [{ type: 'fill-color', color: 'white' }, { type: 'image', path: './assets/vertical.jpg', resizeMode: 'contain' }] }, - { layers: [{ type: 'image', path: './assets/pano.jpg', resizeMode: 'cover' }] }, - { layers: [{ type: 'image', path: './assets/vertical.jpg', resizeMode: 'cover' }] }, - { layers: [{ type: 'image', path: './assets/pano.jpg', resizeMode: 'stretch' }] }, - { layers: [{ type: 'image', path: './assets/vertical.jpg', resizeMode: 'stretch' }] }, + { layers: [{ type: "image", path: "./assets/pano.jpg" }] }, + { layers: [{ type: "image", path: "./assets/vertical.jpg" }] }, + { + layers: [ + { type: "fill-color", color: "white" }, + { type: "image", path: "./assets/pano.jpg", resizeMode: "contain" }, + ], + }, + { + layers: [ + { type: "fill-color", color: "white" }, + { type: "image", path: "./assets/vertical.jpg", resizeMode: "contain" }, + ], + }, + { layers: [{ type: "image", path: "./assets/pano.jpg", resizeMode: "cover" }] }, + { layers: [{ type: "image", path: "./assets/vertical.jpg", resizeMode: "cover" }] }, + { layers: [{ type: "image", path: "./assets/pano.jpg", resizeMode: "stretch" }] }, + { layers: [{ type: "image", path: "./assets/vertical.jpg", resizeMode: "stretch" }] }, ], } diff --git a/examples/imageOverlay.json5 b/examples/imageOverlay.json5 index f8f7d271..c0cccb39 100644 --- a/examples/imageOverlay.json5 +++ b/examples/imageOverlay.json5 @@ -1,12 +1,33 @@ { - outPath: './imageOverlay.mp4', + outPath: "./imageOverlay.mp4", clips: [ - { layers: [ - { type: 'video', path: './assets/changi.mp4', cutTo: 2 }, - { type: 'image-overlay', path: './assets/overlay.svg', width: 0.2, position: { x: 0.95, y: 0.03, originX: 'right' } }, - { type: 'image-overlay', path: './assets/emoji.png', stop: 0.5, zoomDirection: 'in' }, - { type: 'image-overlay', path: './assets/emoji2.svg', position: 'top', start: 0.7, stop: 1.5, width: 0.2 }, - { type: 'image-overlay', path: './assets/emoji2.svg', position: 'bottom', start: 0.7, stop: 1.5, height: 0.2 }, - ] }, + { + layers: [ + { type: "video", path: "./assets/changi.mp4", cutTo: 2 }, + { + type: "image-overlay", + path: "./assets/overlay.svg", + width: 0.2, + position: { x: 0.95, y: 0.03, originX: "right" }, + }, + { type: "image-overlay", path: "./assets/emoji.png", stop: 0.5, zoomDirection: "in" }, + { + type: "image-overlay", + path: "./assets/emoji2.svg", + position: "top", + start: 0.7, + stop: 1.5, + width: 0.2, + }, + { + type: "image-overlay", + path: "./assets/emoji2.svg", + position: "bottom", + start: 0.7, + stop: 1.5, + height: 0.2, + }, + ], + }, ], } diff --git a/examples/kenBurns.json5 b/examples/kenBurns.json5 index e2cc8339..4402744b 100644 --- a/examples/kenBurns.json5 +++ b/examples/kenBurns.json5 @@ -1,11 +1,11 @@ { - 
outPath: './kenBurns.mp4', + outPath: "./kenBurns.mp4", defaults: { - transition: { name: 'fade' }, + transition: { name: "fade" }, }, clips: [ - { duration: 3, layers: [{ type: 'image', path: './assets/img2.jpg', zoomDirection: 'out' }] }, - { duration: 3, layers: [{ type: 'image', path: './assets/img3.jpg', zoomDirection: 'in' }] }, - { duration: 3, layers: [{ type: 'image', path: './assets/img1.jpg', zoomDirection: null }] }, + { duration: 3, layers: [{ type: "image", path: "./assets/img2.jpg", zoomDirection: "out" }] }, + { duration: 3, layers: [{ type: "image", path: "./assets/img3.jpg", zoomDirection: "in" }] }, + { duration: 3, layers: [{ type: "image", path: "./assets/img1.jpg", zoomDirection: null }] }, ], } diff --git a/examples/mosaic.json5 b/examples/mosaic.json5 index 1ec9b249..c795bd70 100644 --- a/examples/mosaic.json5 +++ b/examples/mosaic.json5 @@ -1,21 +1,67 @@ { // width: 200, height: 500, - width: 500, height: 500, - outPath: './mosaic.mp4', + width: 500, + height: 500, + outPath: "./mosaic.mp4", defaults: { transition: { duration: 0 }, - layer: { fontPath: './assets/Patua_One/PatuaOne-Regular.ttf' }, + layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, layerType: { video: { width: 0.4, height: 0.4 }, - } + }, }, clips: [ - { duration: 2, layers: [ - { type: 'video', path: './assets/palawan.mp4', cutFrom: 0, cutTo: 2, resizeMode: 'cover', top: 0.5, left: 0.5, originY: 'center', originX: 'center' }, - { type: 'video', path: './assets/palawan.mp4', cutFrom: 0, cutTo: 2, resizeMode: 'contain' }, - { type: 'video', path: './assets/palawan.mp4', cutFrom: 0, cutTo: 2, resizeMode: 'contain-blur', left: 1, originX: 'right' }, - { type: 'video', path: './assets/IMG_1884.MOV', cutFrom: 0, cutTo: 2, resizeMode: 'contain-blur', left: 1, top: 1, originX: 'right', originY: 'bottom' }, - { type: 'video', path: './assets/palawan.mp4', cutFrom: 0, cutTo: 2, resizeMode: 'stretch', top: 1, originY: 'bottom' }, - ] }, + { + duration: 2, + layers: [ + { + type: "video", + path: "./assets/palawan.mp4", + cutFrom: 0, + cutTo: 2, + resizeMode: "cover", + top: 0.5, + left: 0.5, + originY: "center", + originX: "center", + }, + { + type: "video", + path: "./assets/palawan.mp4", + cutFrom: 0, + cutTo: 2, + resizeMode: "contain", + }, + { + type: "video", + path: "./assets/palawan.mp4", + cutFrom: 0, + cutTo: 2, + resizeMode: "contain-blur", + left: 1, + originX: "right", + }, + { + type: "video", + path: "./assets/IMG_1884.MOV", + cutFrom: 0, + cutTo: 2, + resizeMode: "contain-blur", + left: 1, + top: 1, + originX: "right", + originY: "bottom", + }, + { + type: "video", + path: "./assets/palawan.mp4", + cutFrom: 0, + cutTo: 2, + resizeMode: "stretch", + top: 1, + originY: "bottom", + }, + ], + }, ], -} \ No newline at end of file +} diff --git a/examples/newsTitle.json5 b/examples/newsTitle.json5 index 36af020d..9cc535ab 100644 --- a/examples/newsTitle.json5 +++ b/examples/newsTitle.json5 @@ -1,15 +1,22 @@ { width: 900, height: 1600, - outPath: './newsTitle.mp4', + outPath: "./newsTitle.mp4", defaults: { - layer: { fontPath: './assets/Patua_One/PatuaOne-Regular.ttf' }, + layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, }, clips: [ - { duration: 10, layers: [ - { type: 'image', path: './assets/91083241_573589476840991_4224678072281051330_n.jpg' }, - { type: 'news-title', text: 'BREAKING NEWS' }, - { type: 'subtitle', text: 'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. 
Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur.', backgroundColor: 'rgba(0,0,0,0.5)' } - ] }, + { + duration: 10, + layers: [ + { type: "image", path: "./assets/91083241_573589476840991_4224678072281051330_n.jpg" }, + { type: "news-title", text: "BREAKING NEWS" }, + { + type: "subtitle", + text: "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur.", + backgroundColor: "rgba(0,0,0,0.5)", + }, + ], + }, ], -} \ No newline at end of file +} diff --git a/examples/ph.json5 b/examples/ph.json5 index e5cc5093..244051af 100644 --- a/examples/ph.json5 +++ b/examples/ph.json5 @@ -1,18 +1,37 @@ { - width: 240, height: 240, fps: 14, - outPath: './ph.gif', + width: 240, + height: 240, + fps: 14, + outPath: "./ph.gif", defaults: { transition: { duration: 0.4 }, - layer: { fontPath: './assets/Patua_One/PatuaOne-Regular.ttf' }, + layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, }, clips: [ - { duration: 1, transition: { name: 'directionalWarp' }, layers: [{ type: 'image', path: './assets/vertical.jpg', zoomDirection: 'out' }, { type: 'title', text: 'EDITLY' }] }, - { duration: 1.5, transition: { name: 'dreamyzoom' }, layers: [{ type: 'image', path: './assets/img1.jpg', duration: 2.5, zoomDirection: 'in' }, { type: 'title', position: 'bottom', text: 'Video editing API' }] }, + { + duration: 1, + transition: { name: "directionalWarp" }, + layers: [ + { type: "image", path: "./assets/vertical.jpg", zoomDirection: "out" }, + { type: "title", text: "EDITLY" }, + ], + }, + { + duration: 1.5, + transition: { name: "dreamyzoom" }, + layers: [ + { type: "image", path: "./assets/img1.jpg", duration: 2.5, zoomDirection: "in" }, + { type: "title", position: "bottom", text: "Video editing API" }, + ], + }, - { duration: 2, layers: [ - { type: 'image', path: './assets/91083241_573589476840991_4224678072281051330_n.jpg' }, - { type: 'news-title', text: 'EDITLY' }, - { type: 'subtitle', text: 'Get it from npm', backgroundColor: 'rgba(0,0,0,0.5)' } - ] }, + { + duration: 2, + layers: [ + { type: "image", path: "./assets/91083241_573589476840991_4224678072281051330_n.jpg" }, + { type: "news-title", text: "EDITLY" }, + { type: "subtitle", text: "Get it from npm", backgroundColor: "rgba(0,0,0,0.5)" }, + ], + }, ], } diff --git a/examples/pip.json5 b/examples/pip.json5 index 9d0043ac..8820d9b6 100644 --- a/examples/pip.json5 +++ b/examples/pip.json5 @@ -1,15 +1,40 @@ { - outPath: './pip.mp4', - width: 1280, height: 720, fps: 30, + outPath: "./pip.mp4", + width: 1280, + height: 720, + fps: 30, defaults: { - layer: { fontPath: './assets/Patua_One/PatuaOne-Regular.ttf' }, + layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, }, clips: [ - { duration: 4, layers: [ - { type: 'rainbow-colors' }, - { type: 'video', path: './assets/tungestolen.mp4', resizeMode: 'cover', width: 0.3, height: 0.4, top: 0.05, left: 0.95, originY: 'top', originX: 'right' }, - { type: 'video', path: './assets/tungestolen.mp4', resizeMode: 'cover', width: 0.4, height: 0.2, top: 0.05, left: 0.05, originY: 'top', originX: 'left' }, - { type: 'title', position: 'bottom', text: 
'Picture-in-Picture' } - ] }, + { + duration: 4, + layers: [ + { type: "rainbow-colors" }, + { + type: "video", + path: "./assets/tungestolen.mp4", + resizeMode: "cover", + width: 0.3, + height: 0.4, + top: 0.05, + left: 0.95, + originY: "top", + originX: "right", + }, + { + type: "video", + path: "./assets/tungestolen.mp4", + resizeMode: "cover", + width: 0.4, + height: 0.2, + top: 0.05, + left: 0.05, + originY: "top", + originX: "left", + }, + { type: "title", position: "bottom", text: "Picture-in-Picture" }, + ], + }, ], -} \ No newline at end of file +} diff --git a/examples/position.json5 b/examples/position.json5 index 3151905b..a6039252 100644 --- a/examples/position.json5 +++ b/examples/position.json5 @@ -1,23 +1,30 @@ { - outPath: './position.mp4', + outPath: "./position.mp4", defaults: { layerType: { - 'image-overlay': { width: 0.1 }, + "image-overlay": { width: 0.1 }, }, }, clips: [ - { layers: [ - { type: 'rainbow-colors' }, - { type: 'image-overlay', path: './assets/emoji2.svg', position: 'top' }, - { type: 'image-overlay', path: './assets/emoji2.svg', position: 'center' }, - { type: 'image-overlay', path: './assets/emoji2.svg', position: 'bottom' }, - { type: 'image-overlay', path: './assets/emoji2.svg', position: 'top-left' }, - { type: 'image-overlay', path: './assets/emoji2.svg', position: 'top-right' }, - { type: 'image-overlay', path: './assets/emoji2.svg', position: 'center-left' }, - { type: 'image-overlay', path: './assets/emoji2.svg', position: 'center-right' }, - { type: 'image-overlay', path: './assets/emoji2.svg', position: 'bottom-left' }, - { type: 'image-overlay', path: './assets/emoji2.svg', position: 'bottom-right' }, - { type: 'image-overlay', path: './assets/emoji.png', width: 0.06, position: { originX: 'center', originY: 'center', x: 0.75, y: 0.75 } }, - ] }, + { + layers: [ + { type: "rainbow-colors" }, + { type: "image-overlay", path: "./assets/emoji2.svg", position: "top" }, + { type: "image-overlay", path: "./assets/emoji2.svg", position: "center" }, + { type: "image-overlay", path: "./assets/emoji2.svg", position: "bottom" }, + { type: "image-overlay", path: "./assets/emoji2.svg", position: "top-left" }, + { type: "image-overlay", path: "./assets/emoji2.svg", position: "top-right" }, + { type: "image-overlay", path: "./assets/emoji2.svg", position: "center-left" }, + { type: "image-overlay", path: "./assets/emoji2.svg", position: "center-right" }, + { type: "image-overlay", path: "./assets/emoji2.svg", position: "bottom-left" }, + { type: "image-overlay", path: "./assets/emoji2.svg", position: "bottom-right" }, + { + type: "image-overlay", + path: "./assets/emoji.png", + width: 0.06, + position: { originX: "center", originY: "center", x: 0.75, y: 0.75 }, + }, + ], + }, ], } diff --git a/examples/remote.json5 b/examples/remote.json5 index 4f8a46b1..f6e55bcf 100644 --- a/examples/remote.json5 +++ b/examples/remote.json5 @@ -1,10 +1,10 @@ { - outPath: './remote.mp4', + outPath: "./remote.mp4", allowRemoteRequests: true, - audioFilePath: './assets/High [NCS Release] - JPB (No Copyright Music)-R8ZRCXy5vhA.m4a', + audioFilePath: "./assets/High [NCS Release] - JPB (No Copyright Music)-R8ZRCXy5vhA.m4a", clips: [ - { layers: [{ type: 'image', path: 'https://picsum.photos/400/400' }] }, - { layers: [{ type: 'image', path: 'https://picsum.photos/200/400' }] }, - { layers: [{ type: 'image', path: 'https://picsum.photos/400/200' }] }, + { layers: [{ type: "image", path: "https://picsum.photos/400/400" }] }, + { layers: [{ type: "image", path: 
"https://picsum.photos/200/400" }] }, + { layers: [{ type: "image", path: "https://picsum.photos/400/200" }] }, ], -} \ No newline at end of file +} diff --git a/examples/renderSingleFrame.ts b/examples/renderSingleFrame.ts index 193b6cf7..e4bf2893 100644 --- a/examples/renderSingleFrame.ts +++ b/examples/renderSingleFrame.ts @@ -1,9 +1,9 @@ -import JSON from 'json5'; -import fsExtra from 'fs-extra'; -import { renderSingleFrame } from 'editly'; +import { renderSingleFrame } from "editly"; +import fsExtra from "fs-extra"; +import JSON from "json5"; await renderSingleFrame({ time: 0, - clips: JSON.parse(await fsExtra.readFile('./videos.json5', 'utf-8')).clips, - outPath: 'renderSingleFrame.png' + clips: JSON.parse(await fsExtra.readFile("./videos.json5", "utf-8")).clips, + outPath: "renderSingleFrame.png", }); diff --git a/examples/single.json5 b/examples/single.json5 index 740bc452..86987b2d 100644 --- a/examples/single.json5 +++ b/examples/single.json5 @@ -1,8 +1,6 @@ { // This is a test of a single clip to make sure that it works - outPath: './single.mp4', + outPath: "./single.mp4", keepSourceAudio: true, - clips: [ - { layers: [{ type: 'video', path: './assets/lofoten.mp4', cutFrom: 0, cutTo: 2 }] }, - ], + clips: [{ layers: [{ type: "video", path: "./assets/lofoten.mp4", cutFrom: 0, cutTo: 2 }] }], } diff --git a/examples/smartFit.json5 b/examples/smartFit.json5 index ba809811..3abfde38 100644 --- a/examples/smartFit.json5 +++ b/examples/smartFit.json5 @@ -1,13 +1,33 @@ { // enableFfmpegLog: true, - outPath: './smartFit.mp4', + outPath: "./smartFit.mp4", defaults: { transition: null, - layer: { backgroundColor: 'white' }, + layer: { backgroundColor: "white" }, }, clips: [ - { layers: [{ type: 'video', path: './assets/changi.mp4', cutFrom: 0.4, cutTo: 2 }] }, - { layers: [{ type: 'video', path: './assets/changi.mp4', cutFrom: 0.4, cutTo: 2, resizeMode: 'contain' }] }, - { layers: [{ type: 'video', path: './assets/changi.mp4', cutFrom: 0.4, cutTo: 2, resizeMode: 'stretch' }] }, + { layers: [{ type: "video", path: "./assets/changi.mp4", cutFrom: 0.4, cutTo: 2 }] }, + { + layers: [ + { + type: "video", + path: "./assets/changi.mp4", + cutFrom: 0.4, + cutTo: 2, + resizeMode: "contain", + }, + ], + }, + { + layers: [ + { + type: "video", + path: "./assets/changi.mp4", + cutFrom: 0.4, + cutTo: 2, + resizeMode: "stretch", + }, + ], + }, ], } diff --git a/examples/speedTest.json5 b/examples/speedTest.json5 index c05bb142..c8e1efb7 100644 --- a/examples/speedTest.json5 +++ b/examples/speedTest.json5 @@ -1,13 +1,40 @@ { - outPath: './speedTest.mp4', + outPath: "./speedTest.mp4", defaults: { transition: null, - layer: { fontPath: './assets/Patua_One/PatuaOne-Regular.ttf' }, + layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, }, clips: [ - { duration: 2, layers: [{ type: 'title-background', text: 'Speed up or slow down video', background: { type: 'radial-gradient' } }] }, - { duration: 2, layers: [{ type: 'video', path: './assets/changi.mp4', cutFrom: 0, cutTo: 2 }, { type: 'title', text: 'Same speed' }] }, - { duration: 1, layers: [{ type: 'video', path: './assets/changi.mp4', cutFrom: 0, cutTo: 4 }, { type: 'title', text: '4x' }] }, - { duration: 2, layers: [{ type: 'video', path: './assets/changi.mp4', cutFrom: 0, cutTo: 1 }, { type: 'title', text: '1/2x' }] }, + { + duration: 2, + layers: [ + { + type: "title-background", + text: "Speed up or slow down video", + background: { type: "radial-gradient" }, + }, + ], + }, + { + duration: 2, + layers: [ + { type: "video", path: 
"./assets/changi.mp4", cutFrom: 0, cutTo: 2 }, + { type: "title", text: "Same speed" }, + ], + }, + { + duration: 1, + layers: [ + { type: "video", path: "./assets/changi.mp4", cutFrom: 0, cutTo: 4 }, + { type: "title", text: "4x" }, + ], + }, + { + duration: 2, + layers: [ + { type: "video", path: "./assets/changi.mp4", cutFrom: 0, cutTo: 1 }, + { type: "title", text: "1/2x" }, + ], + }, ], -} \ No newline at end of file +} diff --git a/examples/subtitle.json5 b/examples/subtitle.json5 index 5641fe33..c8c77b34 100644 --- a/examples/subtitle.json5 +++ b/examples/subtitle.json5 @@ -1,18 +1,32 @@ { - outPath: './subtitle.mp4', + outPath: "./subtitle.mp4", defaults: { - layer: { fontPath: './assets/Patua_One/PatuaOne-Regular.ttf' }, - layerType: { 'fill-color': { color: '#00aa00' } } + layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, + layerType: { "fill-color": { color: "#00aa00" } }, }, clips: [ - { duration: 2, layers: [ - { type: 'rainbow-colors' }, - { type: 'subtitle', text: 'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident.' }, - { type: 'title', position: 'top', text: 'Subtitles' }, - ] }, - { duration: 2, layers: [ - { type: 'fill-color' }, - { type: 'title', position: { x: 0, y: 1, originY: 'bottom' }, text: 'Custom position', zoomDirection: null }, - ] }, + { + duration: 2, + layers: [ + { type: "rainbow-colors" }, + { + type: "subtitle", + text: "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. 
Excepteur sint occaecat cupidatat non proident.", + }, + { type: "title", position: "top", text: "Subtitles" }, + ], + }, + { + duration: 2, + layers: [ + { type: "fill-color" }, + { + type: "title", + position: { x: 0, y: 1, originY: "bottom" }, + text: "Custom position", + zoomDirection: null, + }, + ], + }, ], -} \ No newline at end of file +} diff --git a/examples/timeoutTest.json5 b/examples/timeoutTest.json5 index 8b007864..d82c7ba8 100644 --- a/examples/timeoutTest.json5 +++ b/examples/timeoutTest.json5 @@ -1,7 +1,15 @@ { - outPath: './timeoutTest.mp4', + outPath: "./timeoutTest.mp4", clips: [ - { duration: 1.5, transition: { name: 'crosszoom', duration: 0.3 }, layers: [{ type: 'video', path: './assets/tungestolen.mp4', cutTo: 58 }] }, - { duration: 3, transition: { name: 'fade' }, layers: [{ type: 'video', path: './assets/tungestolen.mp4', cutFrom: 0 }] }, + { + duration: 1.5, + transition: { name: "crosszoom", duration: 0.3 }, + layers: [{ type: "video", path: "./assets/tungestolen.mp4", cutTo: 58 }], + }, + { + duration: 3, + transition: { name: "fade" }, + layers: [{ type: "video", path: "./assets/tungestolen.mp4", cutFrom: 0 }], + }, ], } diff --git a/examples/transitionEasing.json5 b/examples/transitionEasing.json5 index be161874..658fb640 100644 --- a/examples/transitionEasing.json5 +++ b/examples/transitionEasing.json5 @@ -1,13 +1,22 @@ { fast: true, - outPath: './transitionEasing.mp4', + outPath: "./transitionEasing.mp4", defaults: { duration: 2, }, clips: [ - { transition: { name: 'directional', duration: 0.5 }, layers: [{ type: 'video', path: 'assets/changi.mp4', cutTo: 2 }] }, - { transition: { name: 'directional', duration: 0.5, params: { direction: [1, 0] } }, layers: [{ type: 'video', path: 'assets/lofoten.mp4', cutTo: 2 }] }, - { transition: { name: 'directional', duration: 0.5, easing: null }, layers: [{ type: 'video', path: 'assets/lofoten.mp4', cutTo: 2 }] }, - { layers: [{ type: 'pause' }] }, + { + transition: { name: "directional", duration: 0.5 }, + layers: [{ type: "video", path: "assets/changi.mp4", cutTo: 2 }], + }, + { + transition: { name: "directional", duration: 0.5, params: { direction: [1, 0] } }, + layers: [{ type: "video", path: "assets/lofoten.mp4", cutTo: 2 }], + }, + { + transition: { name: "directional", duration: 0.5, easing: null }, + layers: [{ type: "video", path: "assets/lofoten.mp4", cutTo: 2 }], + }, + { layers: [{ type: "pause" }] }, ], } diff --git a/examples/transparentGradient.json5 b/examples/transparentGradient.json5 index e1c9cb90..309c2af1 100644 --- a/examples/transparentGradient.json5 +++ b/examples/transparentGradient.json5 @@ -1,7 +1,13 @@ { // fast: true, - outPath: './transparentGradient.mp4', + outPath: "./transparentGradient.mp4", clips: [ - { duration: 0.1, layers: [{ type: 'fill-color', color: 'green' }, { type: 'linear-gradient', colors: ['#ffffffff', '#ffffff00'] }] }, + { + duration: 0.1, + layers: [ + { type: "fill-color", color: "green" }, + { type: "linear-gradient", colors: ["#ffffffff", "#ffffff00"] }, + ], + }, ], } diff --git a/examples/videos.json5 b/examples/videos.json5 index 560f14cd..e1e86ab6 100644 --- a/examples/videos.json5 +++ b/examples/videos.json5 @@ -1,16 +1,75 @@ { - width: 600, height: 800, - outPath: './videos.mp4', + width: 600, + height: 800, + outPath: "./videos.mp4", defaults: { transition: { duration: 0 }, - layer: { fontPath: './assets/Patua_One/PatuaOne-Regular.ttf' }, + layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, }, clips: [ - { duration: 2, layers: [{ type: 
'title-background', text: 'Editly can handle all formats and sizes with different fits', background: { type: 'radial-gradient' } }] }, - { layers: [{ type: 'video', path: './assets/palawan.mp4', cutFrom: 0, cutTo: 2, resizeMode: 'contain' }, { type: 'title', text: 'Contain' }] }, - { layers: [{ type: 'video', path: './assets/palawan.mp4', cutFrom: 0, cutTo: 2, resizeMode: 'contain-blur' }, { type: 'title', text: 'Contain (blur)' }] }, - { layers: [{ type: 'video', path: './assets/IMG_1884.MOV', cutFrom: 0, cutTo: 2, resizeMode: 'contain-blur' }, { type: 'title', text: 'Contain\n(blur, vertical)' }] }, - { layers: [{ type: 'video', path: './assets/palawan.mp4', cutFrom: 0, cutTo: 2, resizeMode: 'stretch' }, { type: 'title', text: 'Stretch' }] }, - { layers: [{ type: 'video', path: './assets/palawan.mp4', cutFrom: 0, cutTo: 2, resizeMode: 'cover' }, { type: 'title', text: 'Cover' }] }, + { + duration: 2, + layers: [ + { + type: "title-background", + text: "Editly can handle all formats and sizes with different fits", + background: { type: "radial-gradient" }, + }, + ], + }, + { + layers: [ + { + type: "video", + path: "./assets/palawan.mp4", + cutFrom: 0, + cutTo: 2, + resizeMode: "contain", + }, + { type: "title", text: "Contain" }, + ], + }, + { + layers: [ + { + type: "video", + path: "./assets/palawan.mp4", + cutFrom: 0, + cutTo: 2, + resizeMode: "contain-blur", + }, + { type: "title", text: "Contain (blur)" }, + ], + }, + { + layers: [ + { + type: "video", + path: "./assets/IMG_1884.MOV", + cutFrom: 0, + cutTo: 2, + resizeMode: "contain-blur", + }, + { type: "title", text: "Contain\n(blur, vertical)" }, + ], + }, + { + layers: [ + { + type: "video", + path: "./assets/palawan.mp4", + cutFrom: 0, + cutTo: 2, + resizeMode: "stretch", + }, + { type: "title", text: "Stretch" }, + ], + }, + { + layers: [ + { type: "video", path: "./assets/palawan.mp4", cutFrom: 0, cutTo: 2, resizeMode: "cover" }, + { type: "title", text: "Cover" }, + ], + }, ], -} \ No newline at end of file +} diff --git a/examples/videos2.json5 b/examples/videos2.json5 index 1e9ff260..9550d313 100644 --- a/examples/videos2.json5 +++ b/examples/videos2.json5 @@ -1,15 +1,20 @@ { // verbose: true, // enableFfmpegLog: true, - outPath: './video2.mp4', + outPath: "./video2.mp4", defaults: { transition: { - name: 'linearblur', + name: "linearblur", }, - layer: { fontPath: './assets/Patua_One/PatuaOne-Regular.ttf' }, + layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, }, clips: [ - { layers: [{ type: 'video', path: './assets/changi.mp4', cutFrom: 0, cutTo: 2 }, { type: 'title', text: 'Video 1' }] }, - { layers: [{ type: 'video', path: './assets/IMG_1884.MOV', cutFrom: 0, cutTo: 2 }] }, + { + layers: [ + { type: "video", path: "./assets/changi.mp4", cutFrom: 0, cutTo: 2 }, + { type: "title", text: "Video 1" }, + ], + }, + { layers: [{ type: "video", path: "./assets/IMG_1884.MOV", cutFrom: 0, cutTo: 2 }] }, ], } diff --git a/examples/vignette.json5 b/examples/vignette.json5 index 28eef8d7..6d210daa 100644 --- a/examples/vignette.json5 +++ b/examples/vignette.json5 @@ -1,9 +1,16 @@ { - outPath: './vignette.mp4', + outPath: "./vignette.mp4", clips: [ - { layers: [ - { type: 'video', path: './assets/tungestolen.mp4', cutTo: 2 }, - { type: 'image', path: './assets/vignette.png', resizeMode: 'stretch', zoomDirection: null }, - ] }, + { + layers: [ + { type: "video", path: "./assets/tungestolen.mp4", cutTo: 2 }, + { + type: "image", + path: "./assets/vignette.png", + resizeMode: "stretch", + zoomDirection: null, + }, + ], + 
}, ], } diff --git a/examples/visibleFromUntil.json5 b/examples/visibleFromUntil.json5 index e2bfe781..bf3da30e 100644 --- a/examples/visibleFromUntil.json5 +++ b/examples/visibleFromUntil.json5 @@ -1,21 +1,37 @@ { // enableFfmpegLog: true, - outPath: './visibleFromUntil.mp4', + outPath: "./visibleFromUntil.mp4", defaults: { - layer: { fontPath: './assets/Patua_One/PatuaOne-Regular.ttf' }, + layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, }, clips: [ - { duration: 2, layers: [ - { type: 'video', path: './assets/lofoten.mp4', cutFrom: 0.4, cutTo: 2 }, - { type: 'video', path: './assets/dancer1.webm', resizeMode: 'contain', cutFrom: 0, cutTo: 6, start: 0.5, stop: 1 }, - ] }, - { duration: 2, layers: [ - { type: 'video', path: './assets/lofoten.mp4', cutFrom: 0.5, cutTo: 3.5 }, - { type: 'news-title', text: 'Hei', start: 0.5, stop: 1 }, - ] }, - { layers: [ - { type: 'video', path: './assets/lofoten.mp4', cutFrom: 0, cutTo: 4 }, - { type: 'video', path: './assets/changi.mp4', cutFrom: 0, cutTo: 1, start: 1, stop: 2 }, - ] }, + { + duration: 2, + layers: [ + { type: "video", path: "./assets/lofoten.mp4", cutFrom: 0.4, cutTo: 2 }, + { + type: "video", + path: "./assets/dancer1.webm", + resizeMode: "contain", + cutFrom: 0, + cutTo: 6, + start: 0.5, + stop: 1, + }, + ], + }, + { + duration: 2, + layers: [ + { type: "video", path: "./assets/lofoten.mp4", cutFrom: 0.5, cutTo: 3.5 }, + { type: "news-title", text: "Hei", start: 0.5, stop: 1 }, + ], + }, + { + layers: [ + { type: "video", path: "./assets/lofoten.mp4", cutFrom: 0, cutTo: 4 }, + { type: "video", path: "./assets/changi.mp4", cutFrom: 0, cutTo: 1, start: 1, stop: 2 }, + ], + }, ], } diff --git a/package.json b/package.json index 7fb808ce..53e61af7 100644 --- a/package.json +++ b/package.json @@ -44,7 +44,8 @@ "build": "pkgroll --clean-dist --sourcemap", "prepublishOnly": "npm run build", "test": "vitest", - "lint": "eslint ." + "lint": "eslint .", + "prepare": "husky || true" }, "repository": { "type": "git", @@ -57,10 +58,19 @@ "@eslint/js": "^9.18.0", "@tsconfig/node-lts": "^22.0.1", "eslint": "^9.18.0", + "eslint-config-prettier": "^10.0.1", + "husky": "^9.1.7", + "lint-staged": "^15.4.3", "pkgroll": "^2.6.1", + "prettier": "3.4.2", + "prettier-plugin-organize-imports": "4.1.0", "tsx": "^4.19.2", "typescript": "^5.7.3", "typescript-eslint": "^8.20.0", "vitest": "^3.0.4" + }, + "lint-staged": { + "**/*.{js,ts,tsx}": "eslint --fix", + "**/*": "prettier --write --ignore-unknown" } } diff --git a/src/BoxBlur.d.ts b/src/BoxBlur.d.ts index 84ab2193..c4aef708 100644 --- a/src/BoxBlur.d.ts +++ b/src/BoxBlur.d.ts @@ -6,5 +6,5 @@ declare function boxBlurImage( height: number, radius: number, blurAlphaChannel: boolean, - iterations: number + iterations: number, ); diff --git a/src/api/defineFrameSource.ts b/src/api/defineFrameSource.ts index 7f130554..1e708e39 100644 --- a/src/api/defineFrameSource.ts +++ b/src/api/defineFrameSource.ts @@ -1,26 +1,29 @@ -import type { BaseLayer, OptionalPromise } from "../types.js"; -import type { DebugOptions } from "../configuration.js"; import type { StaticCanvas } from "fabric/node"; +import type { DebugOptions } from "../configuration.js"; +import type { BaseLayer, OptionalPromise } from "../types.js"; /** * A public API for defining new frame sources. 
*/ -export function defineFrameSource(type: T["type"], setup: FrameSourceSetupFunction): FrameSourceFactory { +export function defineFrameSource( + type: T["type"], + setup: FrameSourceSetupFunction, +): FrameSourceFactory { return { type, async setup(options: CreateFrameSourceOptions) { return new FrameSource(options, await setup(options)); - } - } + }, + }; } export type CreateFrameSourceOptions = DebugOptions & { - width: number, - height: number, - duration: number, - channels: number, - framerateStr: string, - params: T, + width: number; + height: number; + duration: number; + channels: number; + framerateStr: string; + params: T; }; export interface FrameSourceFactory { @@ -29,11 +32,17 @@ export interface FrameSourceFactory { } export interface FrameSourceImplementation { - readNextFrame(progress: number, canvas: StaticCanvas, offsetTime: number): OptionalPromise; + readNextFrame( + progress: number, + canvas: StaticCanvas, + offsetTime: number, + ): OptionalPromise; close?(): OptionalPromise; } -export type FrameSourceSetupFunction = (fn: CreateFrameSourceOptions) => Promise; +export type FrameSourceSetupFunction = ( + fn: CreateFrameSourceOptions, +) => Promise; export class FrameSource { options: CreateFrameSourceOptions; diff --git a/src/api/index.ts b/src/api/index.ts index 6e11ee56..07d0dabd 100644 --- a/src/api/index.ts +++ b/src/api/index.ts @@ -1,2 +1,2 @@ -export type * from './defineFrameSource.js'; -export { defineFrameSource } from './defineFrameSource.js'; +export type * from "./defineFrameSource.js"; +export { defineFrameSource } from "./defineFrameSource.js"; diff --git a/src/audio.ts b/src/audio.ts index 358b7874..78f18bc4 100644 --- a/src/audio.ts +++ b/src/audio.ts @@ -1,168 +1,221 @@ -import pMap from 'p-map'; -import { join, basename, resolve } from 'path'; -import { flatMap } from 'lodash-es'; - -import { getCutFromArgs, ffmpeg } from './ffmpeg.js'; -import { readFileStreams } from './ffmpeg.js'; - -import type { AudioLayer, AudioNormalizationOptions, AudioTrack, Clip, Transition, VideoLayer } from './types.js' -import type { Configuration } from './configuration.js' +import { flatMap } from "lodash-es"; +import pMap from "p-map"; +import { basename, join, resolve } from "path"; +import type { Configuration } from "./configuration.js"; +import { ffmpeg, getCutFromArgs, readFileStreams } from "./ffmpeg.js"; +import type { + AudioLayer, + AudioNormalizationOptions, + AudioTrack, + Clip, + Transition, + VideoLayer, +} from "./types.js"; export type AudioOptions = { verbose: boolean; tmpDir: string; -} +}; -export type EditAudioOptions = Pick & { - arbitraryAudio: AudioTrack[] +export type EditAudioOptions = Pick< + Configuration, + "keepSourceAudio" | "clips" | "clipsAudioVolume" | "audioNorm" | "outputVolume" +> & { + arbitraryAudio: AudioTrack[]; }; type LayerWithAudio = (AudioLayer | VideoLayer) & { speedFactor: number }; export default ({ verbose, tmpDir }: AudioOptions) => { - async function createMixedAudioClips({ clips, keepSourceAudio }: { clips: Clip[], keepSourceAudio?: boolean }) { - return pMap(clips, async (clip, i) => { - const { duration, layers, transition } = clip; - - async function runInner(): Promise<{ clipAudioPath: string, silent: boolean }> { - const clipAudioPath = join(tmpDir, `clip${i}-audio.flac`); - - async function createSilence() { - if (verbose) console.log('create silence', duration); - const args = [ - '-nostdin', - '-f', 'lavfi', '-i', 'anullsrc=channel_layout=stereo:sample_rate=44100', - '-sample_fmt', 's32', - '-ar', '48000', - 
'-t', duration!.toString(), - '-c:a', 'flac', - '-y', - clipAudioPath, - ]; - await ffmpeg(args); - - return { silent: true, clipAudioPath }; - } - - // Has user enabled keep source audio? - if (!keepSourceAudio) return createSilence(); - - // TODO:[ts]: Layers is always an array once config is parsed. Fix this in types - const audioLayers = layers.filter(({ type, start, stop }) => ( - ['audio', 'video'].includes(type) - // TODO: We don't support audio for start/stop layers - && !start && stop == null)) as LayerWithAudio[]; - - if (audioLayers.length === 0) return createSilence(); - - const processedAudioLayersRaw = await pMap(audioLayers, async (audioLayer, j) => { - const { path, cutFrom, cutTo, speedFactor } = audioLayer; - - const streams = await readFileStreams(path); - if (!streams.some((s) => s.codec_type === 'audio')) return undefined; - - const layerAudioPath = join(tmpDir, `clip${i}-layer${j}-audio.flac`); - - try { - let atempoFilter; - if (Math.abs(speedFactor - 1) > 0.01) { - if (verbose) console.log('audio speedFactor', speedFactor); - const atempo = (1 / speedFactor); - if (!(atempo >= 0.5 && atempo <= 100)) { // Required range by ffmpeg - console.warn(`Audio speed ${atempo} is outside accepted range, using silence (clip ${i})`); - return undefined; - } - atempoFilter = `atempo=${atempo}`; - } - - const cutToArg = (cutTo! - cutFrom!) * speedFactor; - + async function createMixedAudioClips({ + clips, + keepSourceAudio, + }: { + clips: Clip[]; + keepSourceAudio?: boolean; + }) { + return pMap( + clips, + async (clip, i) => { + const { duration, layers, transition } = clip; + + async function runInner(): Promise<{ clipAudioPath: string; silent: boolean }> { + const clipAudioPath = join(tmpDir, `clip${i}-audio.flac`); + + async function createSilence() { + if (verbose) console.log("create silence", duration); const args = [ - '-nostdin', - ...getCutFromArgs({ cutFrom }), - '-i', path, - '-t', cutToArg!.toString(), - '-sample_fmt', 's32', - '-ar', '48000', - '-map', 'a:0', '-c:a', 'flac', - ...(atempoFilter ? ['-filter:a', atempoFilter] : []), - '-y', - layerAudioPath, + "-nostdin", + "-f", + "lavfi", + "-i", + "anullsrc=channel_layout=stereo:sample_rate=44100", + "-sample_fmt", + "s32", + "-ar", + "48000", + "-t", + duration!.toString(), + "-c:a", + "flac", + "-y", + clipAudioPath, ]; - await ffmpeg(args); - return [ - layerAudioPath, - audioLayer, - ]; - } catch (err) { - if (verbose) console.error('Cannot extract audio from video', path, err); - // Fall back to silence - return undefined; + return { silent: true, clipAudioPath }; } - }, { concurrency: 4 }); - const processedAudioLayers = processedAudioLayersRaw.filter((r): r is [string, LayerWithAudio] => r !== undefined); + // Has user enabled keep source audio? + if (!keepSourceAudio) return createSilence(); + + // TODO:[ts]: Layers is always an array once config is parsed. 
Fix this in types + const audioLayers = layers.filter( + ({ type, start, stop }) => + ["audio", "video"].includes(type) && + // TODO: We don't support audio for start/stop layers + !start && + stop == null, + ) as LayerWithAudio[]; + + if (audioLayers.length === 0) return createSilence(); + + const processedAudioLayersRaw = await pMap( + audioLayers, + async (audioLayer, j) => { + const { path, cutFrom, cutTo, speedFactor } = audioLayer; + + const streams = await readFileStreams(path); + if (!streams.some((s) => s.codec_type === "audio")) return undefined; + + const layerAudioPath = join(tmpDir, `clip${i}-layer${j}-audio.flac`); + + try { + let atempoFilter; + if (Math.abs(speedFactor - 1) > 0.01) { + if (verbose) console.log("audio speedFactor", speedFactor); + const atempo = 1 / speedFactor; + if (!(atempo >= 0.5 && atempo <= 100)) { + // Required range by ffmpeg + console.warn( + `Audio speed ${atempo} is outside accepted range, using silence (clip ${i})`, + ); + return undefined; + } + atempoFilter = `atempo=${atempo}`; + } + + const cutToArg = (cutTo! - cutFrom!) * speedFactor; + + const args = [ + "-nostdin", + ...getCutFromArgs({ cutFrom }), + "-i", + path, + "-t", + cutToArg!.toString(), + "-sample_fmt", + "s32", + "-ar", + "48000", + "-map", + "a:0", + "-c:a", + "flac", + ...(atempoFilter ? ["-filter:a", atempoFilter] : []), + "-y", + layerAudioPath, + ]; + + await ffmpeg(args); + + return [layerAudioPath, audioLayer]; + } catch (err) { + if (verbose) console.error("Cannot extract audio from video", path, err); + // Fall back to silence + return undefined; + } + }, + { concurrency: 4 }, + ); - if (processedAudioLayers.length < 1) return createSilence(); + const processedAudioLayers = processedAudioLayersRaw.filter( + (r): r is [string, LayerWithAudio] => r !== undefined, + ); - if (processedAudioLayers.length === 1) return { clipAudioPath: processedAudioLayers[0][0], silent: false }; + if (processedAudioLayers.length < 1) return createSilence(); - // Merge/mix all layers' audio - const weights = processedAudioLayers.map(([, { mixVolume }]) => mixVolume ?? 1); - const args = [ - '-nostdin', - ...flatMap(processedAudioLayers, ([layerAudioPath]) => ['-i', layerAudioPath]), - '-filter_complex', `amix=inputs=${processedAudioLayers.length}:duration=longest:weights=${weights.join(' ')}`, - '-c:a', 'flac', - '-y', - clipAudioPath, - ]; + if (processedAudioLayers.length === 1) + return { clipAudioPath: processedAudioLayers[0][0], silent: false }; + + // Merge/mix all layers' audio + const weights = processedAudioLayers.map(([, { mixVolume }]) => mixVolume ?? 
1); + const args = [ + "-nostdin", + ...flatMap(processedAudioLayers, ([layerAudioPath]) => ["-i", layerAudioPath]), + "-filter_complex", + `amix=inputs=${processedAudioLayers.length}:duration=longest:weights=${weights.join(" ")}`, + "-c:a", + "flac", + "-y", + clipAudioPath, + ]; - await ffmpeg(args); - return { clipAudioPath, silent: false }; - } + await ffmpeg(args); + return { clipAudioPath, silent: false }; + } - const { clipAudioPath, silent } = await runInner(); + const { clipAudioPath, silent } = await runInner(); - return { - path: resolve(clipAudioPath), // https://superuser.com/a/853262/658247 - transition, - silent, - }; - }, { concurrency: 4 }); + return { + path: resolve(clipAudioPath), // https://superuser.com/a/853262/658247 + transition, + silent, + }; + }, + { concurrency: 4 }, + ); } - async function crossFadeConcatClipAudio(clipAudio: { path: string, transition?: Transition | null }[]) { + async function crossFadeConcatClipAudio( + clipAudio: { path: string; transition?: Transition | null }[], + ) { if (clipAudio.length < 2) { return clipAudio[0].path; } - const outPath = join(tmpDir, 'audio-concat.flac'); + const outPath = join(tmpDir, "audio-concat.flac"); - if (verbose) console.log('Combining audio', clipAudio.map(({ path }) => basename(path))); + if (verbose) + console.log( + "Combining audio", + clipAudio.map(({ path }) => basename(path)), + ); - let inStream = '[0:a]'; - const filterGraph = clipAudio.slice(0, -1).map(({ transition }, i) => { - const outStream = `[concat${i}]`; + let inStream = "[0:a]"; + const filterGraph = clipAudio + .slice(0, -1) + .map(({ transition }, i) => { + const outStream = `[concat${i}]`; - const epsilon = 0.0001; // If duration is 0, ffmpeg seems to default to 1 sec instead, hence epsilon. - let ret = `${inStream}[${i + 1}:a]acrossfade=d=${Math.max(epsilon, transition?.duration ?? 0)}:c1=${transition?.audioOutCurve ?? 'tri'}:c2=${transition?.audioInCurve ?? 'tri'}`; + const epsilon = 0.0001; // If duration is 0, ffmpeg seems to default to 1 sec instead, hence epsilon. + let ret = `${inStream}[${i + 1}:a]acrossfade=d=${Math.max(epsilon, transition?.duration ?? 0)}:c1=${transition?.audioOutCurve ?? "tri"}:c2=${transition?.audioInCurve ?? "tri"}`; - inStream = outStream; + inStream = outStream; - if (i < clipAudio.length - 2) ret += outStream; - return ret; - }).join(','); + if (i < clipAudio.length - 2) ret += outStream; + return ret; + }) + .join(","); const args = [ - '-nostdin', - ...(flatMap(clipAudio, ({ path }) => ['-i', path])), - '-filter_complex', + "-nostdin", + ...flatMap(clipAudio, ({ path }) => ["-i", path]), + "-filter_complex", filterGraph, - '-c', 'flac', - '-y', + "-c", + "flac", + "-y", outPath, ]; await ffmpeg(args); @@ -171,7 +224,15 @@ export default ({ verbose, tmpDir }: AudioOptions) => { } // FIXME[ts]: parseConfig sets `loop` on arbitrary audio tracks. Should that be part of the `AudioTrack` interface? 
- async function mixArbitraryAudio({ streams, audioNorm, outputVolume }: { streams: (AudioTrack & { loop?: number })[], audioNorm?: AudioNormalizationOptions, outputVolume?: number | string }) { + async function mixArbitraryAudio({ + streams, + audioNorm, + outputVolume, + }: { + streams: (AudioTrack & { loop?: number })[]; + audioNorm?: AudioNormalizationOptions; + outputVolume?: number | string; + }) { let maxGain = 30; let gaussSize = 5; if (audioNorm) { @@ -181,29 +242,30 @@ export default ({ verbose, tmpDir }: AudioOptions) => { const enableAudioNorm = audioNorm && audioNorm.enable; // https://stackoverflow.com/questions/35509147/ffmpeg-amix-filter-volume-issue-with-inputs-of-different-duration - let filterComplex = streams.map(({ start, cutFrom, cutTo }, i) => { - const cutToArg = (cutTo != null ? `:end=${cutTo}` : ''); - const apadArg = i > 0 ? ',apad' : ''; // Don't pad the first track (audio from video clips with correct duration) + let filterComplex = streams + .map(({ start, cutFrom, cutTo }, i) => { + const cutToArg = cutTo != null ? `:end=${cutTo}` : ""; + const apadArg = i > 0 ? ",apad" : ""; // Don't pad the first track (audio from video clips with correct duration) - return `[${i}:a]atrim=start=${cutFrom || 0}${cutToArg},adelay=delays=${Math.floor((start || 0) * 1000)}:all=1${apadArg}[a${i}]`; - }).join(';'); + return `[${i}:a]atrim=start=${cutFrom || 0}${cutToArg},adelay=delays=${Math.floor((start || 0) * 1000)}:all=1${apadArg}[a${i}]`; + }) + .join(";"); - const volumeArg = outputVolume != null ? `,volume=${outputVolume}` : ''; - const audioNormArg = enableAudioNorm ? `,dynaudnorm=g=${gaussSize}:maxgain=${maxGain}` : ''; - filterComplex += `;${streams.map((_, i) => `[a${i}]`).join('')}amix=inputs=${streams.length}:duration=first:dropout_transition=0:weights=${streams.map((s) => (s.mixVolume != null ? s.mixVolume : 1)).join(' ')}${audioNormArg}${volumeArg}`; + const volumeArg = outputVolume != null ? `,volume=${outputVolume}` : ""; + const audioNormArg = enableAudioNorm ? `,dynaudnorm=g=${gaussSize}:maxgain=${maxGain}` : ""; + filterComplex += `;${streams.map((_, i) => `[a${i}]`).join("")}amix=inputs=${streams.length}:duration=first:dropout_transition=0:weights=${streams.map((s) => (s.mixVolume != null ? 
s.mixVolume : 1)).join(" ")}${audioNormArg}${volumeArg}`; - const mixedAudioPath = join(tmpDir, 'audio-mixed.flac'); + const mixedAudioPath = join(tmpDir, "audio-mixed.flac"); const args = [ - '-nostdin', - ...(flatMap(streams, ({ path, loop }) => ([ - '-stream_loop', (loop || 0).toString(), - '-i', path, - ]))), - '-vn', - '-filter_complex', filterComplex, - '-c:a', 'flac', - '-y', + "-nostdin", + ...flatMap(streams, ({ path, loop }) => ["-stream_loop", (loop || 0).toString(), "-i", path]), + "-vn", + "-filter_complex", + filterComplex, + "-c:a", + "flac", + "-y", mixedAudioPath, ]; @@ -212,14 +274,21 @@ export default ({ verbose, tmpDir }: AudioOptions) => { return mixedAudioPath; } - async function editAudio({ keepSourceAudio, clips, arbitraryAudio, clipsAudioVolume, audioNorm, outputVolume }: EditAudioOptions) { + async function editAudio({ + keepSourceAudio, + clips, + arbitraryAudio, + clipsAudioVolume, + audioNorm, + outputVolume, + }: EditAudioOptions) { // We need clips to process audio, because we need to know duration if (clips.length === 0) return undefined; // No need to process audio if none of these are satisfied if (!(keepSourceAudio || arbitraryAudio.length > 0)) return undefined; - console.log('Extracting audio/silence from all clips'); + console.log("Extracting audio/silence from all clips"); // Mix audio from each clip as separate files (or silent audio of appropriate length for clips with no audio) const clipAudio = await createMixedAudioClips({ clips, keepSourceAudio }); @@ -238,7 +307,7 @@ export default ({ verbose, tmpDir }: AudioOptions) => { ...arbitraryAudio, ]; - console.log('Mixing clip audio with arbitrary audio'); + console.log("Mixing clip audio with arbitrary audio"); if (streams.length < 2) return concatedClipAudioPath; diff --git a/src/cli.ts b/src/cli.ts index 0f775cf9..41db0639 100755 --- a/src/cli.ts +++ b/src/cli.ts @@ -1,16 +1,16 @@ #!/usr/bin/env node -import meow from 'meow'; -import { readFileSync } from 'fs'; -import { fileTypeFromFile } from 'file-type'; -import pMap from 'p-map'; -import JSON5 from 'json5'; -import assert from 'assert'; - -import Editly, { Layer } from './index.js'; -import { ConfigurationOptions } from './configuration.js'; +import assert from "assert"; +import { fileTypeFromFile } from "file-type"; +import { readFileSync } from "fs"; +import JSON5 from "json5"; +import meow from "meow"; +import pMap from "p-map"; +import { ConfigurationOptions } from "./configuration.js"; +import Editly, { Layer } from "./index.js"; // See also readme -const cli = meow(` +const cli = meow( + ` Usage $ editly CLIP1 [CLIP2 [CLIP3 ...]] where each CLIP can be one of the following: @@ -47,69 +47,92 @@ const cli = meow(` Examples $ editly title:'My video' clip1.mov clip2.mov title:'My slideshow' img1.jpg img2.jpg title:'THE END' --audio-file-path /path/to/music.mp3 --font-path /path/to/my-favorite-font.ttf $ editly my-editly.json5 --out output.gif -`, { - importMeta: import.meta, - flags: { - verbose: { type: 'boolean', alias: 'v' }, - keepSourceAudio: { type: 'boolean' }, - allowRemoteRequests: { type: 'boolean' }, - fast: { type: 'boolean', alias: 'f' }, - transitionName: { type: 'string' }, - transitionDuration: { type: 'number' }, - clipDuration: { type: 'number' }, - width: { type: 'number' }, - height: { type: 'number' }, - fps: { type: 'number' }, - fontPath: { type: 'string' }, - loopAudio: { type: 'boolean' }, - outputVolume: { type: 'string' }, - json: { type: 'string' }, - out: { type: 'string' }, - audioFilePath: { type: 'string' }, 
+`, + { + importMeta: import.meta, + flags: { + verbose: { type: "boolean", alias: "v" }, + keepSourceAudio: { type: "boolean" }, + allowRemoteRequests: { type: "boolean" }, + fast: { type: "boolean", alias: "f" }, + transitionName: { type: "string" }, + transitionDuration: { type: "number" }, + clipDuration: { type: "number" }, + width: { type: "number" }, + height: { type: "number" }, + fps: { type: "number" }, + fontPath: { type: "string" }, + loopAudio: { type: "boolean" }, + outputVolume: { type: "string" }, + json: { type: "string" }, + out: { type: "string" }, + audioFilePath: { type: "string" }, + }, }, -}); +); (async () => { let { json } = cli.flags; - if (cli.input.length === 1 && /\.(json|json5|js)$/.test(cli.input[0].toLowerCase())) json = cli.input[0]; + if (cli.input.length === 1 && /\.(json|json5|js)$/.test(cli.input[0].toLowerCase())) + json = cli.input[0]; let params: Partial = { defaults: {}, }; if (json) { - params = JSON5.parse(readFileSync(json, 'utf-8')); + params = JSON5.parse(readFileSync(json, "utf-8")); } else { const clipsIn = cli.input; if (clipsIn.length < 1) cli.showHelp(); - const clips: Layer[] = await pMap(clipsIn, async (clip) => { - let match = clip.match(/^title:(.+)$/); - if (match) return { type: 'title-background', text: match[1] }; + const clips: Layer[] = await pMap( + clipsIn, + async (clip) => { + let match = clip.match(/^title:(.+)$/); + if (match) return { type: "title-background", text: match[1] }; - match = clip.match(/^https?:\/\/.*\.(jpg|jpeg|png|webp|gif|svg)$/); // todo improve - if (match) return { type: 'image', path: clip }; + match = clip.match(/^https?:\/\/.*\.(jpg|jpeg|png|webp|gif|svg)$/); // todo improve + if (match) return { type: "image", path: clip }; - const fileType = await fileTypeFromFile(clip); - if (!fileType) { - console.error('Invalid file for clip', clip); - cli.showHelp(); - } + const fileType = await fileTypeFromFile(clip); + if (!fileType) { + console.error("Invalid file for clip", clip); + cli.showHelp(); + } - const mime = fileType!.mime; + const mime = fileType!.mime; - if (mime.startsWith('video')) return { type: 'video', path: clip }; - if (mime.startsWith('image')) return { type: 'image', path: clip }; + if (mime.startsWith("video")) return { type: "video", path: clip }; + if (mime.startsWith("image")) return { type: "image", path: clip }; - throw new Error(`Unrecognized clip or file type "${clip}"`); - }, { concurrency: 1 }); + throw new Error(`Unrecognized clip or file type "${clip}"`); + }, + { concurrency: 1 }, + ); - assert(clips.length > 0, 'No clips specified'); + assert(clips.length > 0, "No clips specified"); params.clips = clips.map((clip) => ({ layers: [clip] })); } - const { verbose, transitionName, transitionDuration, clipDuration, width, height, fps, audioFilePath, fontPath, fast, out: outPath, keepSourceAudio, loopAudio, outputVolume, allowRemoteRequests } = cli.flags; + const { + verbose, + transitionName, + transitionDuration, + clipDuration, + width, + height, + fps, + audioFilePath, + fontPath, + fast, + out: outPath, + keepSourceAudio, + loopAudio, + outputVolume, + allowRemoteRequests, + } = cli.flags; if (transitionName || transitionDuration != null) { params.defaults!.transition = {}; @@ -140,10 +163,10 @@ const cli = meow(` if (params.verbose) console.log(JSON5.stringify(params, null, 2)); - if (!params.outPath) params.outPath = './editly-out.mp4'; + if (!params.outPath) params.outPath = "./editly-out.mp4"; await Editly(params as ConfigurationOptions); })().catch((err) => { - 
console.error('Caught error', err); + console.error("Caught error", err); process.exitCode = 1; }); diff --git a/src/colors.ts b/src/colors.ts index 5c625a52..e16257ef 100644 --- a/src/colors.ts +++ b/src/colors.ts @@ -2,162 +2,63 @@ // https://stackoverflow.com/a/4382138/6519037 const allColors = [ - 'hsl(42, 100%, 50%)', - 'hsl(310, 34%, 37%)', - 'hsl(24, 100%, 50%)', - 'hsl(211, 38%, 74%)', - 'hsl(350, 100%, 37%)', - 'hsl(35, 52%, 59%)', - 'hsl(22, 11%, 45%)', - 'hsl(145, 100%, 24%)', - 'hsl(348, 87%, 71%)', - 'hsl(203, 100%, 27%)', - 'hsl(11, 100%, 68%)', - 'hsl(265, 37%, 34%)', - 'hsl(33, 100%, 50%)', - 'hsl(342, 63%, 42%)', - 'hsl(49, 100%, 47%)', - 'hsl(5, 81%, 27%)', - 'hsl(68, 100%, 33%)', - 'hsl(26, 61%, 21%)', - 'hsl(10, 88%, 51%)', - 'hsl(84, 33%, 12%)', + "hsl(42, 100%, 50%)", + "hsl(310, 34%, 37%)", + "hsl(24, 100%, 50%)", + "hsl(211, 38%, 74%)", + "hsl(350, 100%, 37%)", + "hsl(35, 52%, 59%)", + "hsl(22, 11%, 45%)", + "hsl(145, 100%, 24%)", + "hsl(348, 87%, 71%)", + "hsl(203, 100%, 27%)", + "hsl(11, 100%, 68%)", + "hsl(265, 37%, 34%)", + "hsl(33, 100%, 50%)", + "hsl(342, 63%, 42%)", + "hsl(49, 100%, 47%)", + "hsl(5, 81%, 27%)", + "hsl(68, 100%, 33%)", + "hsl(26, 61%, 21%)", + "hsl(10, 88%, 51%)", + "hsl(84, 33%, 12%)", ]; // https://digitalsynopsis.com/design/beautiful-color-ui-gradients-backgrounds/ const gradientColors = [ - [ - '#ff9aac', - '#ffa875', - ], - [ - '#cc2b5e', - '#753a88', - ], - [ - '#42275a', - '#734b6d', - ], - [ - '#bdc3c7', - '#2c3e50', - ], - [ - '#de6262', - '#ffb88c', - ], - [ - '#eb3349', - '#f45c43', - ], - [ - '#dd5e89', - '#f7bb97', - ], - [ - '#56ab2f', - '#a8e063', - ], - [ - '#614385', - '#516395', - ], - [ - '#eecda3', - '#ef629f', - ], - [ - '#eacda3', - '#d6ae7b', - ], - [ - '#02aab0', - '#00cdac', - ], - [ - '#d66d75', - '#e29587', - ], - [ - '#000428', - '#004e92', - ], - [ - '#ddd6f3', - '#faaca8', - ], - [ - '#7b4397', - '#dc2430', - ], - [ - '#43cea2', - '#185a9d', - ], - [ - '#ba5370', - '#f4e2d8', - ], - [ - '#ff512f', - '#dd2476', - ], - [ - '#4568dc', - '#b06ab3', - ], - [ - '#ec6f66', - '#f3a183', - ], - [ - '#ffd89b', - '#19547b', - ], - [ - '#3a1c71', - '#d76d77', - ], - [ - '#4ca1af', - '#c4e0e5', - ], - [ - '#ff5f6d', - '#ffc371', - ], - [ - '#36d1dc', - '#5b86e5', - ], - [ - '#c33764', - '#1d2671', - ], - [ - '#141e30', - '#243b55', - ], - [ - '#ff7e5f', - '#feb47b', - ], - [ - '#ed4264', - '#ffedbc', - ], - [ - '#2b5876', - '#4e4376', - ], - [ - '#ff9966', - '#ff5e62', - ], - [ - '#aa076b', - '#61045f', - ], + ["#ff9aac", "#ffa875"], + ["#cc2b5e", "#753a88"], + ["#42275a", "#734b6d"], + ["#bdc3c7", "#2c3e50"], + ["#de6262", "#ffb88c"], + ["#eb3349", "#f45c43"], + ["#dd5e89", "#f7bb97"], + ["#56ab2f", "#a8e063"], + ["#614385", "#516395"], + ["#eecda3", "#ef629f"], + ["#eacda3", "#d6ae7b"], + ["#02aab0", "#00cdac"], + ["#d66d75", "#e29587"], + ["#000428", "#004e92"], + ["#ddd6f3", "#faaca8"], + ["#7b4397", "#dc2430"], + ["#43cea2", "#185a9d"], + ["#ba5370", "#f4e2d8"], + ["#ff512f", "#dd2476"], + ["#4568dc", "#b06ab3"], + ["#ec6f66", "#f3a183"], + ["#ffd89b", "#19547b"], + ["#3a1c71", "#d76d77"], + ["#4ca1af", "#c4e0e5"], + ["#ff5f6d", "#ffc371"], + ["#36d1dc", "#5b86e5"], + ["#c33764", "#1d2671"], + ["#141e30", "#243b55"], + ["#ff7e5f", "#feb47b"], + ["#ed4264", "#ffedbc"], + ["#2b5876", "#4e4376"], + ["#ff9966", "#ff5e62"], + ["#aa076b", "#61045f"], ]; /* const lightGradients = [ diff --git a/src/configuration.ts b/src/configuration.ts index 27636b0e..fbd45adf 100644 --- a/src/configuration.ts +++ b/src/configuration.ts @@ 
-1,20 +1,20 @@ -import { AudioNormalizationOptions, AudioTrack, Clip, DefaultOptions } from "./types.js"; -import { dirname, join } from "path"; import assert from "assert"; +import { merge } from "lodash-es"; import { nanoid } from "nanoid"; -import { merge } from "lodash-es" +import { dirname, join } from "path"; import { expandLayerAliases } from "./sources/index.js"; +import type { AudioNormalizationOptions, AudioTrack, Clip, DefaultOptions } from "./types.js"; export type DebugOptions = { verbose?: boolean; logTimes?: boolean; -} +}; export type FfmpegConfig = { ffmpegPath?: string; ffprobePath?: string; enableFfmpegLog?: boolean; -} +}; export type ConfigurationOptions = { /** @@ -141,17 +141,21 @@ export type ConfigurationOptions = { * WARNING: Undocumented feature! */ keepTmp?: boolean; -} & DebugOptions & FfmpegConfig; +} & DebugOptions & + FfmpegConfig; -export type LayerSourceConfig = Pick; +export type LayerSourceConfig = Pick< + Configuration, + "verbose" | "allowRemoteRequests" | "logTimes" | "tmpDir" +>; const globalDefaults = { duration: 4, transition: { duration: 0.5, - name: 'random', - audioOutCurve: 'tri', - audioInCurve: 'tri', + name: "random", + audioOutCurve: "tri", + audioInCurve: "tri", }, }; @@ -188,52 +192,66 @@ export class Configuration { ffprobePath: string; constructor(input: ConfigurationOptions) { - assert(input.outPath, 'Please provide an output path'); - assert(Array.isArray(input.clips) && input.clips.length > 0, 'Please provide at least 1 clip'); - assert(!input.customOutputArgs || Array.isArray(input.customOutputArgs), 'customOutputArgs must be an array of arguments'); + assert(input.outPath, "Please provide an output path"); + assert(Array.isArray(input.clips) && input.clips.length > 0, "Please provide at least 1 clip"); + assert( + !input.customOutputArgs || Array.isArray(input.customOutputArgs), + "customOutputArgs must be an array of arguments", + ); this.outPath = input.outPath; - this.width = input.width - this.height = input.height - this.fps = input.fps - this.audioFilePath = input.audioFilePath - this.backgroundAudioVolume = input.backgroundAudioVolume - this.loopAudio = input.loopAudio - this.clipsAudioVolume = input.clipsAudioVolume ?? 1 - this.audioTracks = input.audioTracks ?? [] - this.keepSourceAudio = input.keepSourceAudio - this.allowRemoteRequests = input.allowRemoteRequests ?? false - this.audioNorm = input.audioNorm - this.outputVolume = input.outputVolume - this.customOutputArgs = input.customOutputArgs + this.width = input.width; + this.height = input.height; + this.fps = input.fps; + this.audioFilePath = input.audioFilePath; + this.backgroundAudioVolume = input.backgroundAudioVolume; + this.loopAudio = input.loopAudio; + this.clipsAudioVolume = input.clipsAudioVolume ?? 1; + this.audioTracks = input.audioTracks ?? []; + this.keepSourceAudio = input.keepSourceAudio; + this.allowRemoteRequests = input.allowRemoteRequests ?? 
false; + this.audioNorm = input.audioNorm; + this.outputVolume = input.outputVolume; + this.customOutputArgs = input.customOutputArgs; this.defaults = merge({}, globalDefaults, input.defaults); - this.clips = input.clips.map(clip => { - const { transition, duration } = merge({}, this.defaults, clip) - let { layers } = clip + this.clips = input.clips.map((clip) => { + const { transition, duration } = merge({}, this.defaults, clip); + let { layers } = clip; if (layers && !Array.isArray(layers)) layers = [layers]; // Allow single layer for convenience - assert(Array.isArray(layers) && layers.length > 0, 'clip.layers must be an array with at least one layer.'); - assert(transition == null || typeof transition === 'object', 'Transition must be an object'); - - layers = layers.map(expandLayerAliases).flat().map(layer => { - assert(layer.type, 'All "layers" must have a type'); - return merge({}, this.defaults.layer ?? {}, this.defaults.layerType?.[layer.type] ?? {}, layer) - }); + assert( + Array.isArray(layers) && layers.length > 0, + "clip.layers must be an array with at least one layer.", + ); + assert(transition == null || typeof transition === "object", "Transition must be an object"); + + layers = layers + .map(expandLayerAliases) + .flat() + .map((layer) => { + assert(layer.type, 'All "layers" must have a type'); + return merge( + {}, + this.defaults.layer ?? {}, + this.defaults.layerType?.[layer.type] ?? {}, + layer, + ); + }); return { transition, duration, layers }; }); // Testing options: - this.verbose = input.verbose ?? false - this.enableFfmpegLog = input.enableFfmpegLog ?? this.verbose - this.logTimes = input.logTimes ?? false - this.keepTmp = input.keepTmp ?? false - this.fast = input.fast ?? false + this.verbose = input.verbose ?? false; + this.enableFfmpegLog = input.enableFfmpegLog ?? this.verbose; + this.logTimes = input.logTimes ?? false; + this.keepTmp = input.keepTmp ?? false; + this.fast = input.fast ?? false; - this.defaults = input.defaults ?? {} - this.ffmpegPath = input.ffmpegPath ?? 'ffmpeg' - this.ffprobePath = input.ffprobePath ?? 'ffprobe' + this.defaults = input.defaults ?? {}; + this.ffmpegPath = input.ffmpegPath ?? "ffmpeg"; + this.ffprobePath = input.ffprobePath ?? "ffprobe"; this.tmpDir = join(this.outDir, `editly-tmp-${nanoid()}`); } @@ -243,6 +261,6 @@ export class Configuration { } get isGif() { - return this.outPath.toLowerCase().endsWith('.gif'); + return this.outPath.toLowerCase().endsWith(".gif"); } } diff --git a/src/ffmpeg.ts b/src/ffmpeg.ts index 12470994..f7b363ad 100644 --- a/src/ffmpeg.ts +++ b/src/ffmpeg.ts @@ -1,8 +1,8 @@ -import fsExtra from 'fs-extra'; -import { execa, type Options } from 'execa'; -import assert from 'assert'; -import { compareVersions } from 'compare-versions'; -import { FfmpegConfig } from './configuration.js'; +import assert from "assert"; +import { compareVersions } from "compare-versions"; +import { execa, type Options } from "execa"; +import fsExtra from "fs-extra"; +import { FfmpegConfig } from "./configuration.js"; export type Stream = { codec_type: string; @@ -19,42 +19,50 @@ export type Stream = { }; const config: Required = { - ffmpegPath: 'ffmpeg', - ffprobePath: 'ffprobe', + ffmpegPath: "ffmpeg", + ffprobePath: "ffprobe", enableFfmpegLog: false, -} +}; export function getFfmpegCommonArgs() { - return [ - '-hide_banner', - ...(config.enableFfmpegLog ? [] : ['-loglevel', 'error']), - ]; + return ["-hide_banner", ...(config.enableFfmpegLog ? 
[] : ["-loglevel", "error"])]; } export function getCutFromArgs({ cutFrom }: { cutFrom?: number }) { - return cutFrom ? ['-ss', cutFrom.toString()] : []; + return cutFrom ? ["-ss", cutFrom.toString()] : []; } -export function getCutToArgs({ cutTo, cutFrom, speedFactor }: { cutTo?: number; cutFrom?: number; speedFactor: number }) { - return cutFrom && cutTo ? ['-t', (cutTo - cutFrom) * speedFactor] : []; +export function getCutToArgs({ + cutTo, + cutFrom, + speedFactor, +}: { + cutTo?: number; + cutFrom?: number; + speedFactor: number; +}) { + return cutFrom && cutTo ? ["-t", (cutTo - cutFrom) * speedFactor] : []; } export async function createConcatFile(segments: string[], concatFilePath: string) { // https://superuser.com/questions/787064/filename-quoting-in-ffmpeg-concat - await fsExtra.writeFile(concatFilePath, segments.map((seg) => `file '${seg.replace(/'/g, "'\\''")}'`).join('\n')); + await fsExtra.writeFile( + concatFilePath, + segments.map((seg) => `file '${seg.replace(/'/g, "'\\''")}'`).join("\n"), + ); } export async function testFf(exePath: string, name: string) { - const minRequiredVersion = '4.3.1'; + const minRequiredVersion = "4.3.1"; try { - const { stdout } = await execa(exePath, ['-version']); - const firstLine = stdout.split('\n')[0]; + const { stdout } = await execa(exePath, ["-version"]); + const firstLine = stdout.split("\n")[0]; const match = firstLine.match(`${name} version ([0-9.]+)`); - assert(match, 'Unknown version string'); + assert(match, "Unknown version string"); const versionStr = match[1]; console.log(`${name} version ${versionStr}`); - assert(compareVersions(versionStr, minRequiredVersion), 'Version is outdated'); + assert(compareVersions(versionStr, minRequiredVersion), "Version is outdated"); } catch (err) { console.error(`WARNING: ${name}:`, err); } @@ -62,12 +70,12 @@ export async function testFf(exePath: string, name: string) { export async function configureFf(params: Partial) { Object.assign(config, params); - await testFf(config.ffmpegPath, 'ffmpeg'); - await testFf(config.ffprobePath, 'ffprobe'); + await testFf(config.ffmpegPath, "ffmpeg"); + await testFf(config.ffprobePath, "ffprobe"); } export function ffmpeg(args: string[], options?: Options) { - if (config.enableFfmpegLog) console.log(`$ ${config.ffmpegPath} ${args.join(' ')}`); + if (config.enableFfmpegLog) console.log(`$ ${config.ffmpegPath} ${args.join(" ")}`); return execa(config.ffmpegPath, [...getFfmpegCommonArgs(), ...args], options); } @@ -76,7 +84,7 @@ export function ffprobe(args: string[]) { } export function parseFps(fps?: string) { - const match = typeof fps === 'string' && fps.match(/^([0-9]+)\/([0-9]+)$/); + const match = typeof fps === "string" && fps.match(/^([0-9]+)\/([0-9]+)$/); if (match) { const num = parseInt(match[1], 10); const den = parseInt(match[2], 10); @@ -86,21 +94,28 @@ export function parseFps(fps?: string) { } export async function readDuration(p: string) { - const { stdout } = await ffprobe(['-v', 'error', '-show_entries', 'format=duration', '-of', 'default=noprint_wrappers=1:nokey=1', p]); + const { stdout } = await ffprobe([ + "-v", + "error", + "-show_entries", + "format=duration", + "-of", + "default=noprint_wrappers=1:nokey=1", + p, + ]); const parsed = parseFloat(stdout); assert(!Number.isNaN(parsed)); return parsed; } export async function readFileStreams(p: string) { - const { stdout } = await ffprobe(['-show_entries', 'stream', '-of', 'json', p]); + const { stdout } = await ffprobe(["-show_entries", "stream", "-of", "json", p]); return 
JSON.parse(stdout).streams as Stream[]; } - export async function readVideoFileInfo(p: string) { const streams = await readFileStreams(p); - const stream = streams.find((s) => s.codec_type === 'video'); // TODO + const stream = streams.find((s) => s.codec_type === "video"); // TODO if (!stream) { throw new Error(`Could not find a video stream in ${p}`); @@ -108,7 +123,7 @@ export async function readVideoFileInfo(p: string) { const duration = await readDuration(p); - let rotation = parseInt(stream.tags?.rotate ?? '', 10); + let rotation = parseInt(stream.tags?.rotate ?? "", 10); // If we can't find rotation, try side_data_list if (Number.isNaN(rotation) && stream.side_data_list?.[0]?.rotation) { diff --git a/src/frameSource.ts b/src/frameSource.ts index b8107651..90c39542 100644 --- a/src/frameSource.ts +++ b/src/frameSource.ts @@ -1,37 +1,59 @@ -import pMap from 'p-map'; - -import { rgbaToFabricImage, createFabricCanvas, renderFabricCanvas } from './sources/fabric.js'; -import type { DebugOptions } from './configuration.js'; -import type { ProcessedClip } from './parseConfig.js'; -import { createLayerSource } from './sources/index.js'; +import pMap from "p-map"; +import type { DebugOptions } from "./configuration.js"; +import type { ProcessedClip } from "./parseConfig.js"; +import { createFabricCanvas, renderFabricCanvas, rgbaToFabricImage } from "./sources/fabric.js"; +import { createLayerSource } from "./sources/index.js"; type FrameSourceOptions = DebugOptions & { clip: ProcessedClip; clipIndex: number; - width: number, - height: number, - channels: number, - framerateStr: string, -} + width: number; + height: number; + channels: number; + framerateStr: string; +}; -export async function createFrameSource({ clip, clipIndex, width, height, channels, verbose, logTimes, framerateStr }: FrameSourceOptions) { +export async function createFrameSource({ + clip, + clipIndex, + width, + height, + channels, + verbose, + logTimes, + framerateStr, +}: FrameSourceOptions) { const { layers, duration } = clip; - const visualLayers = layers.filter((layer) => layer.type !== 'audio'); + const visualLayers = layers.filter((layer) => layer.type !== "audio"); - const layerFrameSources = await pMap(visualLayers, async (layer, layerIndex) => { - if (verbose) console.log('createFrameSource', layer.type, 'clip', clipIndex, 'layer', layerIndex); - const options = { width, height, duration, channels, verbose, logTimes, framerateStr, params: layer }; - return createLayerSource(options) - }, { concurrency: 1 }); + const layerFrameSources = await pMap( + visualLayers, + async (layer, layerIndex) => { + if (verbose) + console.log("createFrameSource", layer.type, "clip", clipIndex, "layer", layerIndex); + const options = { + width, + height, + duration, + channels, + verbose, + logTimes, + framerateStr, + params: layer, + }; + return createLayerSource(options); + }, + { concurrency: 1 }, + ); async function readNextFrame({ time }: { time: number }) { const canvas = createFabricCanvas({ width, height }); for (const frameSource of layerFrameSources) { - if (logTimes) console.time('frameSource.readNextFrame'); + if (logTimes) console.time("frameSource.readNextFrame"); const rgba = await frameSource.readNextFrame(time, canvas); - if (logTimes) console.timeEnd('frameSource.readNextFrame'); + if (logTimes) console.timeEnd("frameSource.readNextFrame"); // Frame sources can either render to the provided canvas and return nothing // OR return an raw RGBA blob which will be drawn onto the canvas @@ -39,9 +61,9 @@ export async 
function createFrameSource({ clip, clipIndex, width, height, channe // Optimization: Don't need to draw to canvas if there's only one layer if (layerFrameSources.length === 1) return rgba; - if (logTimes) console.time('rgbaToFabricImage'); + if (logTimes) console.time("rgbaToFabricImage"); const img = await rgbaToFabricImage({ width, height, rgba }); - if (logTimes) console.timeEnd('rgbaToFabricImage'); + if (logTimes) console.timeEnd("rgbaToFabricImage"); canvas.add(img); } else { // Assume this frame source has drawn its content to the canvas @@ -49,14 +71,14 @@ export async function createFrameSource({ clip, clipIndex, width, height, channe } // if (verbose) console.time('Merge frames'); - if (logTimes) console.time('renderFabricCanvas'); + if (logTimes) console.time("renderFabricCanvas"); const rgba = await renderFabricCanvas(canvas); - if (logTimes) console.timeEnd('renderFabricCanvas'); + if (logTimes) console.timeEnd("renderFabricCanvas"); return rgba; } async function close() { - await pMap(layerFrameSources, frameSource => frameSource.close?.()); + await pMap(layerFrameSources, (frameSource) => frameSource.close?.()); } return { diff --git a/src/glTransitions.ts b/src/glTransitions.ts index 0387b308..1f22585b 100644 --- a/src/glTransitions.ts +++ b/src/glTransitions.ts @@ -1,10 +1,10 @@ -import GL from 'gl'; -import ndarray from 'ndarray'; -import createBuffer from 'gl-buffer'; -import glTransitions from 'gl-transitions'; -import glTransition from 'gl-transition'; -import createTexture from 'gl-texture2d'; -import { TransitionParams } from './types.js'; +import GL from "gl"; +import createBuffer from "gl-buffer"; +import createTexture from "gl-texture2d"; +import glTransition from "gl-transition"; +import glTransitions from "gl-transitions"; +import ndarray from "ndarray"; +import { TransitionParams } from "./types.js"; const { default: createTransition } = glTransition; @@ -14,34 +14,47 @@ export type RunTransitionOptions = { progress: number; transitionName?: string; transitionParams?: TransitionParams; -} +}; -export default ({ width, height, channels }: { width: number, height: number, channels: number }) => { +export default ({ + width, + height, + channels, +}: { + width: number; + height: number; + channels: number; +}) => { const gl = GL(width, height); if (!gl) { - throw new Error('gl returned null, this probably means that some dependencies are not installed. See README.'); + throw new Error( + "gl returned null, this probably means that some dependencies are not installed. 
See README.", + ); } - function runTransitionOnFrame({ fromFrame, toFrame, progress, transitionName, transitionParams = {} }: RunTransitionOptions) { + function runTransitionOnFrame({ + fromFrame, + toFrame, + progress, + transitionName, + transitionParams = {}, + }: RunTransitionOptions) { function convertFrame(buf: Buffer) { // @see https://github.com/stackgl/gl-texture2d/issues/16 return ndarray(buf, [width, height, channels], [channels, width * channels, 1]); } - const buffer = createBuffer( - gl, - [-1, -1, -1, 4, 4, -1], - gl.ARRAY_BUFFER, - gl.STATIC_DRAW, - ); + const buffer = createBuffer(gl, [-1, -1, -1, 4, 4, -1], gl.ARRAY_BUFFER, gl.STATIC_DRAW); let transition; try { - const resizeMode = 'stretch'; + const resizeMode = "stretch"; - const transitionSource = glTransitions.find((t) => t.name.toLowerCase() === transitionName?.toLowerCase()); + const transitionSource = glTransitions.find( + (t) => t.name.toLowerCase() === transitionName?.toLowerCase(), + ); transition = createTransition(gl, transitionSource!, { resizeMode }); @@ -60,7 +73,14 @@ export default ({ width, height, channels }: { width: number, height: number, ch textureTo.magFilter = gl.LINEAR; buffer.bind(); - transition.draw(progress, textureFrom, textureTo, gl.drawingBufferWidth, gl.drawingBufferHeight, transitionParams); + transition.draw( + progress, + textureFrom, + textureTo, + gl.drawingBufferWidth, + gl.drawingBufferHeight, + transitionParams, + ); textureFrom.dispose(); textureTo.dispose(); diff --git a/src/index.ts b/src/index.ts index 23782ab6..32d84681 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,21 +1,21 @@ -import { ExecaChildProcess } from 'execa'; -import assert from 'assert'; -import JSON5 from 'json5'; -import fsExtra from 'fs-extra'; - -import { configureFf, ffmpeg, parseFps } from './ffmpeg.js'; -import { multipleOf2, assertFileValid } from './util.js'; -import { createFabricCanvas, rgbaToFabricImage } from './sources/fabric.js'; -import { createFrameSource } from './frameSource.js'; -import parseConfig, { ProcessedClip } from './parseConfig.js'; -import GlTransitions, { type RunTransitionOptions } from './glTransitions.js'; -import Audio from './audio.js'; -import { Configuration, type ConfigurationOptions } from './configuration.js'; -import type { RenderSingleFrameConfig } from './types.js'; +import assert from "assert"; +import { ExecaChildProcess } from "execa"; +import fsExtra from "fs-extra"; +import JSON5 from "json5"; + +import Audio from "./audio.js"; +import { Configuration, type ConfigurationOptions } from "./configuration.js"; +import { configureFf, ffmpeg, parseFps } from "./ffmpeg.js"; +import { createFrameSource } from "./frameSource.js"; +import GlTransitions, { type RunTransitionOptions } from "./glTransitions.js"; +import parseConfig, { ProcessedClip } from "./parseConfig.js"; +import { createFabricCanvas, rgbaToFabricImage } from "./sources/fabric.js"; +import type { RenderSingleFrameConfig } from "./types.js"; +import { assertFileValid, multipleOf2 } from "./util.js"; const channels = 4; -export type * from './types.js'; +export type * from "./types.js"; /** * Edit and render videos. 
@@ -47,7 +47,7 @@ async function Editly(input: ConfigurationOptions): Promise { outputVolume, customOutputArgs, isGif, - tmpDir + tmpDir, } = config; await configureFf(config); @@ -56,31 +56,51 @@ async function Editly(input: ConfigurationOptions): Promise { if (verbose) console.log(JSON5.stringify(config, null, 2)); - const { clips, arbitraryAudio } = await parseConfig({ clips: clipsIn, arbitraryAudio: arbitraryAudioIn, backgroundAudioPath, backgroundAudioVolume, loopAudio, allowRemoteRequests }); - if (verbose) console.log('Calculated', JSON5.stringify({ clips, arbitraryAudio }, null, 2)); + const { clips, arbitraryAudio } = await parseConfig({ + clips: clipsIn, + arbitraryAudio: arbitraryAudioIn, + backgroundAudioPath, + backgroundAudioVolume, + loopAudio, + allowRemoteRequests, + }); + if (verbose) console.log("Calculated", JSON5.stringify({ clips, arbitraryAudio }, null, 2)); if (verbose) console.log({ tmpDir }); await fsExtra.mkdirp(tmpDir); const { editAudio } = Audio({ verbose, tmpDir }); - const audioFilePath = !isGif ? await editAudio({ keepSourceAudio, arbitraryAudio, clipsAudioVolume, clips, audioNorm, outputVolume }) : undefined; + const audioFilePath = !isGif + ? await editAudio({ + keepSourceAudio, + arbitraryAudio, + clipsAudioVolume, + clips, + audioNorm, + outputVolume, + }) + : undefined; // Try to detect parameters from first video let firstVideoWidth; let firstVideoHeight; let firstVideoFramerateStr; - clips.find((clip) => clip && clip.layers.find((layer) => { - if (layer.type === 'video') { - firstVideoWidth = layer.inputWidth; - firstVideoHeight = layer.inputHeight; - firstVideoFramerateStr = layer.framerateStr; + clips.find( + (clip) => + clip && + clip.layers.find((layer) => { + if (layer.type === "video") { + firstVideoWidth = layer.inputWidth; + firstVideoHeight = layer.inputHeight; + firstVideoFramerateStr = layer.framerateStr; - return true; - } - return false; - })); + return true; + } + return false; + }), + ); let width: number; let height: number; @@ -124,8 +144,8 @@ async function Editly(input: ConfigurationOptions): Promise { height = roundDimension(numPixelsEachDirection * Math.sqrt(1 / aspectRatio)); } - assert(width, 'Width not specified or detected'); - assert(height, 'Height not specified or detected'); + assert(width, "Width not specified or detected"); + assert(height, "Height not specified or detected"); if (!isGif) { // x264 requires multiple of 2, eg minimum 2 @@ -139,7 +159,7 @@ async function Editly(input: ConfigurationOptions): Promise { if (fast) { fps = 15; framerateStr = String(fps); - } else if (requestedFps && typeof requestedFps === 'number') { + } else if (requestedFps && typeof requestedFps === "number") { fps = requestedFps; framerateStr = String(requestedFps); } else if (isGif) { @@ -153,69 +173,109 @@ async function Editly(input: ConfigurationOptions): Promise { framerateStr = String(fps); } - assert(fps, 'FPS not specified or detected'); + assert(fps, "FPS not specified or detected"); console.log(`${width}x${height} ${fps}fps`); - const estimatedTotalFrames = fps * clips.reduce((acc, c, i) => { - let newAcc = acc + c.duration; - if (i !== clips.length - 1) newAcc -= c.transition.duration; - return newAcc; - }, 0); - - const { runTransitionOnFrame: runGlTransitionOnFrame } = GlTransitions({ width, height, channels }); + const estimatedTotalFrames = + fps * + clips.reduce((acc, c, i) => { + let newAcc = acc + c.duration; + if (i !== clips.length - 1) newAcc -= c.transition.duration; + return newAcc; + }, 0); + + const { 
runTransitionOnFrame: runGlTransitionOnFrame } = GlTransitions({ + width, + height, + channels, + }); - function runTransitionOnFrame({ fromFrame, toFrame, progress, transitionName, transitionParams }: RunTransitionOptions) { + function runTransitionOnFrame({ + fromFrame, + toFrame, + progress, + transitionName, + transitionParams, + }: RunTransitionOptions) { // A dummy transition can be used to have an audio transition without a video transition // (Note: You will lose a portion from both clips due to overlap) - if (transitionName === 'dummy') return progress > 0.5 ? toFrame : fromFrame; - return runGlTransitionOnFrame({ fromFrame, toFrame, progress, transitionName, transitionParams }); + if (transitionName === "dummy") return progress > 0.5 ? toFrame : fromFrame; + return runGlTransitionOnFrame({ + fromFrame, + toFrame, + progress, + transitionName, + transitionParams, + }); } function getOutputArgs() { if (customOutputArgs) { - assert(Array.isArray(customOutputArgs), 'customOutputArgs must be an array of arguments'); + assert(Array.isArray(customOutputArgs), "customOutputArgs must be an array of arguments"); return customOutputArgs; } // https://superuser.com/questions/556029/how-do-i-convert-a-video-to-gif-using-ffmpeg-with-reasonable-quality - const videoOutputArgs = isGif ? [ - '-vf', `format=rgb24,fps=${fps},scale=${width}:${height}:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse`, - '-loop', '0', - ] : [ - '-vf', 'format=yuv420p', - '-vcodec', 'libx264', - '-profile:v', 'high', - ...(fast ? ['-preset:v', 'ultrafast'] : ['-preset:v', 'medium']), - '-crf', '18', - - '-movflags', 'faststart', - ]; - - const audioOutputArgs = audioFilePath ? ['-acodec', 'aac', '-b:a', '128k'] : []; + const videoOutputArgs = isGif + ? [ + "-vf", + `format=rgb24,fps=${fps},scale=${width}:${height}:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse`, + "-loop", + "0", + ] + : [ + "-vf", + "format=yuv420p", + "-vcodec", + "libx264", + "-profile:v", + "high", + ...(fast ? ["-preset:v", "ultrafast"] : ["-preset:v", "medium"]), + "-crf", + "18", + + "-movflags", + "faststart", + ]; + + const audioOutputArgs = audioFilePath ? ["-acodec", "aac", "-b:a", "128k"] : []; return [...audioOutputArgs, ...videoOutputArgs]; } function startFfmpegWriterProcess() { const args = [ - '-f', 'rawvideo', - '-vcodec', 'rawvideo', - '-pix_fmt', 'rgba', - '-s', `${width}x${height}`, - '-r', framerateStr, - '-i', '-', - - ...(audioFilePath ? ['-i', audioFilePath] : []), - - ...(!isGif ? ['-map', '0:v:0'] : []), - ...(audioFilePath ? ['-map', '1:a:0'] : []), + "-f", + "rawvideo", + "-vcodec", + "rawvideo", + "-pix_fmt", + "rgba", + "-s", + `${width}x${height}`, + "-r", + framerateStr, + "-i", + "-", + + ...(audioFilePath ? ["-i", audioFilePath] : []), + + ...(!isGif ? ["-map", "0:v:0"] : []), + ...(audioFilePath ? 
["-map", "1:a:0"] : []), ...getOutputArgs(), - '-y', outPath, + "-y", + outPath, ]; - return ffmpeg(args, { encoding: null, buffer: false, stdin: 'pipe', stdout: process.stdout, stderr: process.stderr }); + return ffmpeg(args, { + encoding: null, + buffer: false, + stdin: "pipe", + stdout: process.stdout, + stderr: process.stderr, + }); } let outProcess: ExecaChildProcess> | undefined = undefined; @@ -236,9 +296,21 @@ async function Editly(input: ConfigurationOptions): Promise { const getTransitionFromClip = () => clips[transitionFromClipId]; const getTransitionToClip = () => clips[getTransitionToClipId()]; - const getSource = async (clip: ProcessedClip, clipIndex: number) => createFrameSource({ clip, clipIndex, width, height, channels, verbose, logTimes, framerateStr }); - const getTransitionFromSource = async () => getSource(getTransitionFromClip(), transitionFromClipId); - const getTransitionToSource = async () => (getTransitionToClip() && getSource(getTransitionToClip(), getTransitionToClipId())); + const getSource = async (clip: ProcessedClip, clipIndex: number) => + createFrameSource({ + clip, + clipIndex, + width, + height, + channels, + verbose, + logTimes, + framerateStr, + }); + const getTransitionFromSource = async () => + getSource(getTransitionFromClip(), transitionFromClipId); + const getTransitionToSource = async () => + getTransitionToClip() && getSource(getTransitionToClip(), getTransitionToClipId()); try { try { @@ -246,8 +318,8 @@ async function Editly(input: ConfigurationOptions): Promise { let outProcessError; - outProcess.on('exit', (code) => { - if (verbose) console.log('Output ffmpeg exited', code); + outProcess.on("exit", (code) => { + if (verbose) console.log("Output ffmpeg exited", code); outProcessExitCode = code; }); @@ -275,13 +347,22 @@ async function Editly(input: ConfigurationOptions): Promise { const transitionNumFrames = Math.round(currentTransition.duration! * fps); // Each clip has two transitions, make sure we leave enough room: - const transitionNumFramesSafe = Math.floor(Math.min(Math.min(fromClipNumFrames, toClipNumFrames != null ? toClipNumFrames : Number.MAX_SAFE_INTEGER) / 2, transitionNumFrames)); + const transitionNumFramesSafe = Math.floor( + Math.min( + Math.min( + fromClipNumFrames, + toClipNumFrames != null ? toClipNumFrames : Number.MAX_SAFE_INTEGER, + ) / 2, + transitionNumFrames, + ), + ); // How many frames into the transition are we? negative means not yet started const transitionFrameAt = fromClipFrameAt - (fromClipNumFrames - transitionNumFramesSafe); if (!verbose) { const percentDone = Math.floor(100 * (totalFramesWritten / estimatedTotalFrames)); - if (totalFramesWritten % 10 === 0) process.stdout.write(`${String(percentDone).padStart(3, ' ')}% `); + if (totalFramesWritten % 10 === 0) + process.stdout.write(`${String(percentDone).padStart(3, " ")}% `); } // console.log({ transitionFrameAt, transitionNumFramesSafe }) @@ -291,10 +372,12 @@ async function Editly(input: ConfigurationOptions): Promise { // Done with transition? 
if (transitionFrameAt >= transitionLastFrameIndex) { transitionFromClipId += 1; - console.log(`Done with transition, switching to next transitionFromClip (${transitionFromClipId})`); + console.log( + `Done with transition, switching to next transitionFromClip (${transitionFromClipId})`, + ); if (!getTransitionFromClip()) { - console.log('No more transitionFromClip, done'); + console.log("No more transitionFromClip, done"); break; } @@ -309,32 +392,39 @@ async function Editly(input: ConfigurationOptions): Promise { continue; } - if (logTimes) console.time('Read frameSource1'); + if (logTimes) console.time("Read frameSource1"); const newFrameSource1Data = await frameSource1.readNextFrame({ time: fromClipTime }); - if (logTimes) console.timeEnd('Read frameSource1'); + if (logTimes) console.timeEnd("Read frameSource1"); // If we got no data, use the old data // TODO maybe abort? if (newFrameSource1Data) frameSource1Data = newFrameSource1Data; - else console.warn('No frame data returned, using last frame'); + else console.warn("No frame data returned, using last frame"); - const isInTransition = frameSource2 && transitionNumFramesSafe > 0 && transitionFrameAt >= 0; + const isInTransition = + frameSource2 && transitionNumFramesSafe > 0 && transitionFrameAt >= 0; let outFrameData; if (isInTransition) { - if (logTimes) console.time('Read frameSource2'); + if (logTimes) console.time("Read frameSource2"); const frameSource2Data = await frameSource2.readNextFrame({ time: toClipTime }); - if (logTimes) console.timeEnd('Read frameSource2'); + if (logTimes) console.timeEnd("Read frameSource2"); if (frameSource2Data) { const progress = transitionFrameAt / transitionNumFramesSafe; const easedProgress = currentTransition.easingFunction(progress); - if (logTimes) console.time('runTransitionOnFrame'); - outFrameData = runTransitionOnFrame({ fromFrame: frameSource1Data!, toFrame: frameSource2Data, progress: easedProgress, transitionName: currentTransition.name, transitionParams: currentTransition.params }); - if (logTimes) console.timeEnd('runTransitionOnFrame'); + if (logTimes) console.time("runTransitionOnFrame"); + outFrameData = runTransitionOnFrame({ + fromFrame: frameSource1Data!, + toFrame: frameSource2Data, + progress: easedProgress, + transitionName: currentTransition.name, + transitionParams: currentTransition.params, + }); + if (logTimes) console.timeEnd("runTransitionOnFrame"); } else { - console.warn('Got no frame data from transitionToClip!'); + console.warn("Got no frame data from transitionToClip!"); // We have probably reached end of clip2 but transition is not complete. 
Just pass thru clip1 outFrameData = frameSource1Data; } @@ -344,19 +434,38 @@ async function Editly(input: ConfigurationOptions): Promise { } if (verbose) { - if (isInTransition) console.log('Writing frame:', totalFramesWritten, 'from clip', transitionFromClipId, `(frame ${fromClipFrameAt})`, 'to clip', getTransitionToClipId(), `(frame ${toClipFrameAt} / ${transitionNumFramesSafe})`, currentTransition.name, `${currentTransition.duration}s`); - else console.log('Writing frame:', totalFramesWritten, 'from clip', transitionFromClipId, `(frame ${fromClipFrameAt})`); + if (isInTransition) + console.log( + "Writing frame:", + totalFramesWritten, + "from clip", + transitionFromClipId, + `(frame ${fromClipFrameAt})`, + "to clip", + getTransitionToClipId(), + `(frame ${toClipFrameAt} / ${transitionNumFramesSafe})`, + currentTransition.name, + `${currentTransition.duration}s`, + ); + else + console.log( + "Writing frame:", + totalFramesWritten, + "from clip", + transitionFromClipId, + `(frame ${fromClipFrameAt})`, + ); // console.log(outFrameData.length / 1e6, 'MB'); } const nullOutput = false; - if (logTimes) console.time('outProcess.write'); + if (logTimes) console.time("outProcess.write"); // If we don't wait, then we get EINVAL when dealing with high resolution files (big writes) if (!nullOutput) await new Promise((r) => outProcess?.stdin?.write(outFrameData, r)); - if (logTimes) console.timeEnd('outProcess.write'); + if (logTimes) console.timeEnd("outProcess.write"); if (outProcessError) break; @@ -370,13 +479,13 @@ async function Editly(input: ConfigurationOptions): Promise { outProcess?.kill(); throw err; } finally { - if (verbose) console.log('Cleanup'); + if (verbose) console.log("Cleanup"); if (frameSource1) await frameSource1.close(); if (frameSource2) await frameSource2.close(); } try { - if (verbose) console.log('Waiting for output ffmpeg process to finish'); + if (verbose) console.log("Waiting for output ffmpeg process to finish"); await outProcess; } catch (err) { // eslint-disable-next-line @typescript-eslint/no-explicit-any @@ -387,7 +496,7 @@ async function Editly(input: ConfigurationOptions): Promise { } console.log(); - console.log('Done. Output file can be found at:'); + console.log("Done. Output file can be found at:"); console.log(outPath); } @@ -422,9 +531,18 @@ export async function renderSingleFrame(input: RenderSingleFrameConfig): Promise clipStartTime += c.duration; return false; }); - assert(clip, 'No clip found at requested time'); + assert(clip, "No clip found at requested time"); const clipIndex = clips.indexOf(clip); - const frameSource = await createFrameSource({ clip, clipIndex, width, height, channels, verbose, logTimes, framerateStr: '1' }); + const frameSource = await createFrameSource({ + clip, + clipIndex, + width, + height, + channels, + verbose, + logTimes, + framerateStr: "1", + }); const rgba = await frameSource.readNextFrame({ time: time - clipStartTime }); // TODO converting rgba to png can be done more easily? 
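// --- Editor's sketch (not part of the diff) -----------------------------------------
// The TODO above asks whether the RGBA -> PNG conversion in renderSingleFrame could be
// done more simply. Below is a minimal, untested illustration of one possible shortcut,
// assuming only the node-canvas APIs this diff already uses elsewhere (createCanvas,
// ImageData, toBuffer); the helper name rgbaToPng is hypothetical.
import { createCanvas, ImageData } from "canvas";

function rgbaToPng(rgba: Buffer, width: number, height: number): Buffer {
  const canvas = createCanvas(width, height);
  const ctx = canvas.getContext("2d");
  // Wrap the raw RGBA frame and paint it onto the canvas in a single call
  ctx.putImageData(new ImageData(new Uint8ClampedArray(rgba), width, height), 0, 0);
  // node-canvas encodes a PNG directly from the canvas contents
  return canvas.toBuffer("image/png");
}
// This would skip the fabric.StaticCanvas round-trip used below, at the cost of bypassing
// fabric's rendering pipeline; whether that trade-off is acceptable is left open, as in
// the original TODO.
// -------------------------------------------------------------------------------------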
@@ -433,7 +551,7 @@ export async function renderSingleFrame(input: RenderSingleFrameConfig): Promise canvas.add(fabricImage); canvas.renderAll(); const internalCanvas = canvas.getNodeCanvas(); - await fsExtra.writeFile(outPath, internalCanvas.toBuffer('image/png')); + await fsExtra.writeFile(outPath, internalCanvas.toBuffer("image/png")); canvas.clear(); canvas.dispose(); await frameSource.close(); diff --git a/src/parseConfig.ts b/src/parseConfig.ts index daf8dea7..e47f488a 100644 --- a/src/parseConfig.ts +++ b/src/parseConfig.ts @@ -1,23 +1,39 @@ -import pMap from 'p-map'; -import { basename } from 'path'; -import flatMap from 'lodash-es/flatMap.js'; -import assert from 'assert'; -import { assertFileValid } from './util.js'; -import { readVideoFileInfo, readDuration } from './ffmpeg.js'; -import { registerFont } from 'canvas'; -import { calcTransition, type CalculatedTransition } from './transitions.js'; -import type { AudioTrack, CanvasLayer, FabricLayer, ImageLayer, ImageOverlayLayer, Layer, NewsTitleLayer, SlideInTextLayer, SubtitleLayer, TitleLayer, Clip, VideoLayer } from './types.js'; +import assert from "assert"; +import { registerFont } from "canvas"; +import flatMap from "lodash-es/flatMap.js"; +import pMap from "p-map"; +import { basename } from "path"; +import { readDuration, readVideoFileInfo } from "./ffmpeg.js"; +import { calcTransition, type CalculatedTransition } from "./transitions.js"; +import type { + AudioTrack, + CanvasLayer, + Clip, + FabricLayer, + ImageLayer, + ImageOverlayLayer, + Layer, + NewsTitleLayer, + SlideInTextLayer, + SubtitleLayer, + TitleLayer, + VideoLayer, +} from "./types.js"; +import { assertFileValid } from "./util.js"; export type ProcessedClip = { layers: Layer[]; duration: number; transition: CalculatedTransition; -} +}; // Cache const loadedFonts: string[] = []; -async function validateArbitraryAudio(audio: AudioTrack[] | undefined, allowRemoteRequests?: boolean) { +async function validateArbitraryAudio( + audio: AudioTrack[] | undefined, + allowRemoteRequests?: boolean, +) { assert(audio === undefined || Array.isArray(audio)); if (audio) { @@ -41,32 +57,57 @@ type ParseConfigOptions = { arbitraryAudio: AudioTrack[]; }; -export default async function parseConfig({ clips, arbitraryAudio: arbitraryAudioIn, backgroundAudioPath, backgroundAudioVolume, loopAudio, allowRemoteRequests }: ParseConfigOptions) { +export default async function parseConfig({ + clips, + arbitraryAudio: arbitraryAudioIn, + backgroundAudioPath, + backgroundAudioVolume, + loopAudio, + allowRemoteRequests, +}: ParseConfigOptions) { async function handleLayer(layer: Layer): Promise { // https://github.com/mifi/editly/issues/39 - if (layer.type === 'image' || layer.type === 'image-overlay') { - await assertFileValid((layer as (ImageOverlayLayer | ImageLayer)).path, allowRemoteRequests); - } else if (layer.type === 'gl') { + if (layer.type === "image" || layer.type === "image-overlay") { + await assertFileValid((layer as ImageOverlayLayer | ImageLayer).path, allowRemoteRequests); + } else if (layer.type === "gl") { await assertFileValid(layer.fragmentPath, allowRemoteRequests); } - if (['fabric', 'canvas'].includes(layer.type)) { - assert(typeof (layer as FabricLayer | CanvasLayer).func === 'function', '"func" must be a function'); + if (["fabric", "canvas"].includes(layer.type)) { + assert( + typeof (layer as FabricLayer | CanvasLayer).func === "function", + '"func" must be a function', + ); } - if (['image', 'image-overlay', 'fabric', 'canvas', 'gl', 'radial-gradient', 
'linear-gradient', 'fill-color'].includes(layer.type)) { + if ( + [ + "image", + "image-overlay", + "fabric", + "canvas", + "gl", + "radial-gradient", + "linear-gradient", + "fill-color", + ].includes(layer.type) + ) { return layer; } - if (['title', 'subtitle', 'news-title', 'slide-in-text'].includes(layer.type)) { - const { fontPath, ...rest } = layer as TitleLayer | SubtitleLayer | NewsTitleLayer | SlideInTextLayer; - assert(rest.text, 'Please specify a text'); + if (["title", "subtitle", "news-title", "slide-in-text"].includes(layer.type)) { + const { fontPath, ...rest } = layer as + | TitleLayer + | SubtitleLayer + | NewsTitleLayer + | SlideInTextLayer; + assert(rest.text, "Please specify a text"); let { fontFamily } = rest; if (fontPath) { - fontFamily = Buffer.from(basename(fontPath)).toString('base64'); + fontFamily = Buffer.from(basename(fontPath)).toString("base64"); if (!loadedFonts.includes(fontFamily)) { - registerFont(fontPath, { family: fontFamily, weight: 'regular', style: 'normal' }); + registerFont(fontPath, { family: fontFamily, weight: "regular", style: "normal" }); loadedFonts.push(fontFamily); } } @@ -78,117 +119,149 @@ export default async function parseConfig({ clips, arbitraryAudio: arbitraryAudi const detachedAudioByClip: Record = {}; - let clipsOut: ProcessedClip[] = await pMap(clips, async (clip, clipIndex) => { - const { transition: userTransition, duration, layers } = clip; - - const videoLayers = layers.filter((layer) => layer.type === 'video'); - - if (videoLayers.length === 0) assert(duration, `Duration parameter is required for videoless clip ${clipIndex}`); - - const transition = calcTransition(userTransition, clipIndex === clips.length - 1); - - let layersOut = flatMap(await pMap(layers, async (layer: T) => { - if (layer.type === 'video') { - const { duration: fileDuration, width: widthIn, height: heightIn, framerateStr, rotation } = await readVideoFileInfo(layer.path); - let { cutFrom, cutTo } = layer; - if (!cutFrom) cutFrom = 0; - cutFrom = Math.max(cutFrom, 0); - cutFrom = Math.min(cutFrom, fileDuration); - - if (!cutTo) cutTo = fileDuration; - cutTo = Math.max(cutTo, cutFrom); - cutTo = Math.min(cutTo, fileDuration); - assert(cutFrom < cutTo, 'cutFrom must be lower than cutTo'); - - const layerDuration = cutTo - cutFrom; - - const isRotated = rotation && [-90, 90, 270, -270].includes(rotation); - const inputWidth = isRotated ? heightIn : widthIn; - const inputHeight = isRotated ? 
widthIn : heightIn; - - return { ...layer, cutFrom, cutTo, layerDuration, framerateStr, inputWidth, inputHeight } as T; - } - - // Audio is handled later - if (['audio', 'detached-audio'].includes(layer.type)) return layer; - - return handleLayer(layer); - }, { concurrency: 1 })); - - let clipDuration = duration; - - const firstVideoLayer = layersOut.find((layer): layer is VideoLayer => layer.type === 'video'); - if (firstVideoLayer && !duration) clipDuration = firstVideoLayer.layerDuration!; - assert(clipDuration); - - // We need to map again, because for audio, we need to know the correct clipDuration - layersOut = (await pMap(layersOut, async (layerIn: T) => { - if (!layerIn.start) layerIn.start = 0 - - // This feature allows the user to show another layer overlayed (or replacing) parts of the lower layers (start - stop) - const layerDuration = ((layerIn.stop || clipDuration) - layerIn.start); - assert(layerDuration > 0 && layerDuration <= clipDuration, `Invalid start ${layerIn.start} or stop ${layerIn.stop} (${clipDuration})`); - // TODO Also need to handle video layers (speedFactor etc) - // TODO handle audio in case of start/stop - - const layer: T = { ...layerIn, layerDuration }; - - if (layer.type === 'audio') { - const fileDuration = await readDuration(layer.path); - let { cutFrom, cutTo } = layer; - - // console.log({ cutFrom, cutTo, fileDuration, clipDuration }); - - if (!cutFrom) cutFrom = 0; - cutFrom = Math.max(cutFrom, 0); - cutFrom = Math.min(cutFrom, fileDuration); - - if (!cutTo) cutTo = cutFrom + clipDuration; - cutTo = Math.max(cutTo, cutFrom); - cutTo = Math.min(cutTo, fileDuration); - assert(cutFrom < cutTo, 'cutFrom must be lower than cutTo'); - - const layerDuration = cutTo - cutFrom; - - const speedFactor = clipDuration / layerDuration; - - return { ...layer, cutFrom, cutTo, speedFactor }; - } - - if (layer.type === 'video') { - let speedFactor; - - // If user explicitly specified duration for clip, it means that should be the output duration of the video - if (duration) { - // Later we will speed up or slow down video using this factor - speedFactor = duration / layerDuration; - } else { - speedFactor = 1; - } - - return { ...layer, speedFactor }; - } - - // These audio tracks are detached from the clips (can run over multiple clips) - // This is useful so we can have audio start relative to their parent clip's start time - if (layer.type === 'detached-audio') { - if (!detachedAudioByClip[clipIndex]) detachedAudioByClip[clipIndex] = []; - detachedAudioByClip[clipIndex].push(layer); - return undefined; // Will be filtered out - } - - return layer; - })).filter((l) => l !== undefined); - - // Filter out deleted layers - layersOut = layersOut.filter((l) => l); - - return { - transition, - duration: clipDuration, - layers: layersOut, - }; - }, { concurrency: 1 }); + let clipsOut: ProcessedClip[] = await pMap( + clips, + async (clip, clipIndex) => { + const { transition: userTransition, duration, layers } = clip; + + const videoLayers = layers.filter((layer) => layer.type === "video"); + + if (videoLayers.length === 0) + assert(duration, `Duration parameter is required for videoless clip ${clipIndex}`); + + const transition = calcTransition(userTransition, clipIndex === clips.length - 1); + + let layersOut = flatMap( + await pMap( + layers, + async (layer: T) => { + if (layer.type === "video") { + const { + duration: fileDuration, + width: widthIn, + height: heightIn, + framerateStr, + rotation, + } = await readVideoFileInfo(layer.path); + let { cutFrom, cutTo } = 
layer; + if (!cutFrom) cutFrom = 0; + cutFrom = Math.max(cutFrom, 0); + cutFrom = Math.min(cutFrom, fileDuration); + + if (!cutTo) cutTo = fileDuration; + cutTo = Math.max(cutTo, cutFrom); + cutTo = Math.min(cutTo, fileDuration); + assert(cutFrom < cutTo, "cutFrom must be lower than cutTo"); + + const layerDuration = cutTo - cutFrom; + + const isRotated = rotation && [-90, 90, 270, -270].includes(rotation); + const inputWidth = isRotated ? heightIn : widthIn; + const inputHeight = isRotated ? widthIn : heightIn; + + return { + ...layer, + cutFrom, + cutTo, + layerDuration, + framerateStr, + inputWidth, + inputHeight, + } as T; + } + + // Audio is handled later + if (["audio", "detached-audio"].includes(layer.type)) return layer; + + return handleLayer(layer); + }, + { concurrency: 1 }, + ), + ); + + let clipDuration = duration; + + const firstVideoLayer = layersOut.find( + (layer): layer is VideoLayer => layer.type === "video", + ); + if (firstVideoLayer && !duration) clipDuration = firstVideoLayer.layerDuration!; + assert(clipDuration); + + // We need to map again, because for audio, we need to know the correct clipDuration + layersOut = ( + await pMap(layersOut, async (layerIn: T) => { + if (!layerIn.start) layerIn.start = 0; + + // This feature allows the user to show another layer overlayed (or replacing) parts of the lower layers (start - stop) + const layerDuration = (layerIn.stop || clipDuration) - layerIn.start; + assert( + layerDuration > 0 && layerDuration <= clipDuration, + `Invalid start ${layerIn.start} or stop ${layerIn.stop} (${clipDuration})`, + ); + // TODO Also need to handle video layers (speedFactor etc) + // TODO handle audio in case of start/stop + + const layer: T = { ...layerIn, layerDuration }; + + if (layer.type === "audio") { + const fileDuration = await readDuration(layer.path); + let { cutFrom, cutTo } = layer; + + // console.log({ cutFrom, cutTo, fileDuration, clipDuration }); + + if (!cutFrom) cutFrom = 0; + cutFrom = Math.max(cutFrom, 0); + cutFrom = Math.min(cutFrom, fileDuration); + + if (!cutTo) cutTo = cutFrom + clipDuration; + cutTo = Math.max(cutTo, cutFrom); + cutTo = Math.min(cutTo, fileDuration); + assert(cutFrom < cutTo, "cutFrom must be lower than cutTo"); + + const layerDuration = cutTo - cutFrom; + + const speedFactor = clipDuration / layerDuration; + + return { ...layer, cutFrom, cutTo, speedFactor }; + } + + if (layer.type === "video") { + let speedFactor; + + // If user explicitly specified duration for clip, it means that should be the output duration of the video + if (duration) { + // Later we will speed up or slow down video using this factor + speedFactor = duration / layerDuration; + } else { + speedFactor = 1; + } + + return { ...layer, speedFactor }; + } + + // These audio tracks are detached from the clips (can run over multiple clips) + // This is useful so we can have audio start relative to their parent clip's start time + if (layer.type === "detached-audio") { + if (!detachedAudioByClip[clipIndex]) detachedAudioByClip[clipIndex] = []; + detachedAudioByClip[clipIndex].push(layer); + return undefined; // Will be filtered out + } + + return layer; + }) + ).filter((l) => l !== undefined); + + // Filter out deleted layers + layersOut = layersOut.filter((l) => l); + + return { + transition, + duration: clipDuration, + layers: layersOut, + }; + }, + { concurrency: 1 }, + ); let totalClipDuration = 0; const clipDetachedAudio: AudioTrack[] = []; @@ -203,11 +276,15 @@ export default async function parseConfig({ clips, arbitraryAudio: 
arbitraryAudi let safeTransitionDuration = 0; if (nextClip) { // Each clip can have two transitions, make sure we leave enough room: - safeTransitionDuration = Math.min(clip.duration / 2, nextClip.duration / 2, clip.transition!.duration!); + safeTransitionDuration = Math.min( + clip.duration / 2, + nextClip.duration / 2, + clip.transition!.duration!, + ); } // We now know all clip durations so we can calculate the offset for detached audio tracks - for (const { start, ...rest } of (detachedAudioByClip[i] || [])) { + for (const { start, ...rest } of detachedAudioByClip[i] || []) { clipDetachedAudio.push({ ...rest, start: totalClipDuration + (start || 0) }); } @@ -225,7 +302,15 @@ export default async function parseConfig({ clips, arbitraryAudio: arbitraryAudi // Audio can either come from `audioFilePath`, `audio` or from "detached" audio layers from clips const arbitraryAudio = [ // Background audio is treated just like arbitrary audio - ...(backgroundAudioPath ? [{ path: backgroundAudioPath, mixVolume: backgroundAudioVolume != null ? backgroundAudioVolume : 1, loop: loopAudio ? -1 : 0 }] : []), + ...(backgroundAudioPath + ? [ + { + path: backgroundAudioPath, + mixVolume: backgroundAudioVolume != null ? backgroundAudioVolume : 1, + loop: loopAudio ? -1 : 0, + }, + ] + : []), ...arbitraryAudioIn, ...clipDetachedAudio, ]; diff --git a/src/sources/canvas.ts b/src/sources/canvas.ts index 23ceab1a..9f9c32e5 100644 --- a/src/sources/canvas.ts +++ b/src/sources/canvas.ts @@ -1,13 +1,13 @@ -import { createCanvas } from 'canvas'; -import type { CanvasLayer } from '../types.js'; -import { canvasToRgba } from './fabric.js'; -import { defineFrameSource } from '../api/index.js'; +import { createCanvas } from "canvas"; +import { defineFrameSource } from "../api/index.js"; +import type { CanvasLayer } from "../types.js"; +import { canvasToRgba } from "./fabric.js"; -export default defineFrameSource('canvas', async ({ width, height, params }) => { +export default defineFrameSource("canvas", async ({ width, height, params }) => { const canvas = createCanvas(width, height); - const context = canvas.getContext('2d'); + const context = canvas.getContext("2d"); - const { onClose, onRender } = await params.func(({ width, height, canvas })); + const { onClose, onRender } = await params.func({ width, height, canvas }); async function readNextFrame(progress: number) { context.clearRect(0, 0, canvas.width, canvas.height); diff --git a/src/sources/fabric.ts b/src/sources/fabric.ts index 459c286d..3351fb55 100644 --- a/src/sources/fabric.ts +++ b/src/sources/fabric.ts @@ -1,8 +1,8 @@ -import * as fabric from 'fabric/node'; -import { type CanvasRenderingContext2D, createCanvas, ImageData } from 'canvas'; -import { boxBlurImage } from '../BoxBlur.js'; -import { defineFrameSource } from '../api/index.js'; -import type { FabricLayer } from '../types.js'; +import { type CanvasRenderingContext2D, createCanvas, ImageData } from "canvas"; +import * as fabric from "fabric/node"; +import { boxBlurImage } from "../BoxBlur.js"; +import { defineFrameSource } from "../api/index.js"; +import type { FabricLayer } from "../types.js"; // Fabric is used as a fundament for compositing layers in editly @@ -16,12 +16,12 @@ export function canvasToRgba(ctx: CanvasRenderingContext2D) { export function fabricCanvasToRgba(fabricCanvas: fabric.StaticCanvas) { const internalCanvas = fabricCanvas.getNodeCanvas(); - const ctx = internalCanvas.getContext('2d'); + const ctx = internalCanvas.getContext("2d"); return canvasToRgba(ctx); } -export 
function createFabricCanvas({ width, height }: { width: number, height: number }) { +export function createFabricCanvas({ width, height }: { width: number; height: number }) { return new fabric.StaticCanvas(null, { width, height }); } @@ -46,7 +46,15 @@ export function toUint8ClampedArray(buffer: Buffer) { return data; } -export async function rgbaToFabricImage({ width, height, rgba }: { width: number, height: number, rgba: Buffer }) { +export async function rgbaToFabricImage({ + width, + height, + rgba, +}: { + width: number; + height: number; + rgba: Buffer; +}) { const canvas = createCanvas(width, height); // FIXME: Fabric tries to add a class to this, but DOM is not defined. Because node? @@ -54,7 +62,7 @@ export async function rgbaToFabricImage({ width, height, rgba }: { width: number // eslint-disable-next-line @typescript-eslint/no-explicit-any (canvas as any).classList = new Set(); - const ctx = canvas.getContext('2d'); + const ctx = canvas.getContext("2d"); // https://developer.mozilla.org/en-US/docs/Web/API/ImageData/ImageData // https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D/putImageData ctx.putImageData(new ImageData(toUint8ClampedArray(rgba), width, height), 0, 0); @@ -63,29 +71,29 @@ export async function rgbaToFabricImage({ width, height, rgba }: { width: number } export type BlurImageOptions = { - mutableImg: fabric.FabricImage, - width: number, - height: number, -} + mutableImg: fabric.FabricImage; + width: number; + height: number; +}; export async function blurImage({ mutableImg, width, height }: BlurImageOptions) { mutableImg.set({ scaleX: width / mutableImg.width, scaleY: height / mutableImg.height }); const canvas = mutableImg.toCanvasElement(); - const ctx = canvas.getContext('2d'); + const ctx = canvas.getContext("2d"); const blurAmount = Math.min(100, Math.max(width, height) / 10); // More than 100 seems to cause issues const passes = 1; boxBlurImage(ctx, width, height, blurAmount, false, passes); return new fabric.FabricImage(canvas); -}// http://fabricjs.com/kitchensink +} // http://fabricjs.com/kitchensink -export default defineFrameSource('fabric', async ({ width, height, params }) => { - const { onRender, onClose } = await params.func(({ width, height, fabric, params })); +export default defineFrameSource("fabric", async ({ width, height, params }) => { + const { onRender, onClose } = await params.func({ width, height, fabric, params }); return { readNextFrame: onRender, - close: onClose - } -}) + close: onClose, + }; +}); diff --git a/src/sources/fill-color.ts b/src/sources/fill-color.ts index 51e7aa94..604bff57 100644 --- a/src/sources/fill-color.ts +++ b/src/sources/fill-color.ts @@ -1,23 +1,26 @@ -import { Rect } from 'fabric/node'; -import { getRandomColors } from '../colors.js'; -import type { FillColorLayer } from '../types.js'; -import { defineFrameSource } from '../api/index.js'; +import { Rect } from "fabric/node"; +import { defineFrameSource } from "../api/index.js"; +import { getRandomColors } from "../colors.js"; +import type { FillColorLayer } from "../types.js"; -export default defineFrameSource('fill-color', async ({ params, width, height }) => { - const { color } = params; +export default defineFrameSource( + "fill-color", + async ({ params, width, height }) => { + const { color } = params; - const randomColor = getRandomColors(1)[0]; + const randomColor = getRandomColors(1)[0]; - return { - async readNextFrame(_, canvas) { - const rect = new Rect({ - left: 0, - right: 0, - width, - height, - fill: color || 
randomColor, - }); - canvas.add(rect); - } - }; -}); + return { + async readNextFrame(_, canvas) { + const rect = new Rect({ + left: 0, + right: 0, + width, + height, + fill: color || randomColor, + }); + canvas.add(rect); + }, + }; + }, +); diff --git a/src/sources/gl.ts b/src/sources/gl.ts index e2231257..44feb5de 100644 --- a/src/sources/gl.ts +++ b/src/sources/gl.ts @@ -1,12 +1,12 @@ -import GL from 'gl'; -import createShader from 'gl-shader'; -import { readFile } from 'node:fs/promises'; -import type { GlLayer } from '../types.js'; -import { defineFrameSource } from '../api/index.js'; +import GL from "gl"; +import createShader from "gl-shader"; +import { readFile } from "node:fs/promises"; +import { defineFrameSource } from "../api/index.js"; +import type { GlLayer } from "../types.js"; // I have no idea what I'm doing but it works ¯\_(ツ)_/¯ -export default defineFrameSource('gl', async ({ width, height, channels, params }) => { +export default defineFrameSource("gl", async ({ width, height, channels, params }) => { const gl = GL(width, height); const defaultVertexSrc = ` @@ -20,7 +20,7 @@ export default defineFrameSource('gl', async ({ width, height, channels fragmentPath, vertexSrc: vertexSrcIn, fragmentSrc: fragmentSrcIn, - speed = 1 + speed = 1, } = params; let fragmentSrc = fragmentSrcIn; @@ -31,7 +31,7 @@ export default defineFrameSource('gl', async ({ width, height, channels if (!vertexSrc) vertexSrc = defaultVertexSrc; - const shader = createShader(gl, vertexSrc, fragmentSrc ?? ''); + const shader = createShader(gl, vertexSrc, fragmentSrc ?? ""); const buffer = gl.createBuffer(); gl.bindBuffer(gl.ARRAY_BUFFER, buffer); // https://blog.mayflower.de/4584-Playing-around-with-pixel-shaders-in-WebGL.html @@ -63,6 +63,6 @@ export default defineFrameSource('gl', async ({ width, height, channels } return { - readNextFrame + readNextFrame, }; }); diff --git a/src/sources/image-overlay.ts b/src/sources/image-overlay.ts index 09efedf3..88e61b22 100644 --- a/src/sources/image-overlay.ts +++ b/src/sources/image-overlay.ts @@ -1,33 +1,42 @@ -import * as fabric from 'fabric/node'; -import type { ImageOverlayLayer } from '../types.js'; -import { loadImage, getPositionProps, getZoomParams, getTranslationParams } from '../util.js'; -import { defineFrameSource } from '../api/index.js'; - -export default defineFrameSource('image-overlay', async ({ params, width, height }) => { - const { path, position, width: relWidth, height: relHeight, zoomDirection, zoomAmount = 0.1 } = params; - - const imgData = await loadImage(path); - - - const img = new fabric.FabricImage(imgData, getPositionProps({ position, width, height })); - - return { - async readNextFrame(progress, canvas) { - const scaleFactor = getZoomParams({ progress, zoomDirection, zoomAmount }); - - const translationParams = getTranslationParams({ progress, zoomDirection, zoomAmount }); - img.left = width / 2 + translationParams; - - if (relWidth != null) { - img.scaleToWidth(relWidth * width * scaleFactor); - } else if (relHeight != null) { - img.scaleToHeight(relHeight * height * scaleFactor); - } else { - // Default to screen width - img.scaleToWidth(width * scaleFactor); - } - - canvas.add(img); - } - }; -}); +import * as fabric from "fabric/node"; +import { defineFrameSource } from "../api/index.js"; +import type { ImageOverlayLayer } from "../types.js"; +import { getPositionProps, getTranslationParams, getZoomParams, loadImage } from "../util.js"; + +export default defineFrameSource( + "image-overlay", + async ({ params, width, height 
}) => { + const { + path, + position, + width: relWidth, + height: relHeight, + zoomDirection, + zoomAmount = 0.1, + } = params; + + const imgData = await loadImage(path); + + const img = new fabric.FabricImage(imgData, getPositionProps({ position, width, height })); + + return { + async readNextFrame(progress, canvas) { + const scaleFactor = getZoomParams({ progress, zoomDirection, zoomAmount }); + + const translationParams = getTranslationParams({ progress, zoomDirection, zoomAmount }); + img.left = width / 2 + translationParams; + + if (relWidth != null) { + img.scaleToWidth(relWidth * width * scaleFactor); + } else if (relHeight != null) { + img.scaleToHeight(relHeight * height * scaleFactor); + } else { + // Default to screen width + img.scaleToWidth(width * scaleFactor); + } + + canvas.add(img); + }, + }; + }, +); diff --git a/src/sources/image.ts b/src/sources/image.ts index 72dbf71e..109f6054 100644 --- a/src/sources/image.ts +++ b/src/sources/image.ts @@ -1,67 +1,74 @@ -import { FabricImage } from 'fabric/node'; -import { blurImage } from './fabric.js'; -import { getZoomParams, getTranslationParams, loadImage } from '../util.js'; -import { defineFrameSource } from '../api/index.js'; -import type { ImageLayer } from '../types.js'; +import { FabricImage } from "fabric/node"; +import { defineFrameSource } from "../api/index.js"; +import type { ImageLayer } from "../types.js"; +import { getTranslationParams, getZoomParams, loadImage } from "../util.js"; +import { blurImage } from "./fabric.js"; -export default defineFrameSource('image', async ({ verbose, params, width, height }) => { - const { path, zoomDirection = 'in', zoomAmount = 0.1, resizeMode = 'contain-blur' } = params; +export default defineFrameSource( + "image", + async ({ verbose, params, width, height }) => { + const { path, zoomDirection = "in", zoomAmount = 0.1, resizeMode = "contain-blur" } = params; - if (verbose) console.log('Loading', path); + if (verbose) console.log("Loading", path); - const imgData = await loadImage(path); + const imgData = await loadImage(path); - const createImg = () => new FabricImage(imgData, { - originX: 'center', - originY: 'center', - left: width / 2, - top: height / 2, - }); + const createImg = () => + new FabricImage(imgData, { + originX: "center", + originY: "center", + left: width / 2, + top: height / 2, + }); - let blurredImg: FabricImage; - // Blurred version - if (resizeMode === 'contain-blur') { - // If we dispose mutableImg, seems to cause issues with the rendering of blurredImg - const mutableImg = createImg(); - if (verbose) console.log('Blurring background'); - blurredImg = await blurImage({ mutableImg, width, height }); - } + let blurredImg: FabricImage; + // Blurred version + if (resizeMode === "contain-blur") { + // If we dispose mutableImg, seems to cause issues with the rendering of blurredImg + const mutableImg = createImg(); + if (verbose) console.log("Blurring background"); + blurredImg = await blurImage({ mutableImg, width, height }); + } - return { - async readNextFrame(progress, canvas) { - const img = createImg(); + return { + async readNextFrame(progress, canvas) { + const img = createImg(); - const scaleFactor = getZoomParams({ progress, zoomDirection, zoomAmount }); - const translationParams = getTranslationParams({ progress, zoomDirection, zoomAmount }); + const scaleFactor = getZoomParams({ progress, zoomDirection, zoomAmount }); + const translationParams = getTranslationParams({ progress, zoomDirection, zoomAmount }); - const ratioW = width / img.width; - 
const ratioH = height / img.height; + const ratioW = width / img.width; + const ratioH = height / img.height; - img.left = width / 2 + translationParams; + img.left = width / 2 + translationParams; - if (['contain', 'contain-blur'].includes(resizeMode)) { - if (ratioW > ratioH) { - img.scaleToHeight(height * scaleFactor); - } else { - img.scaleToWidth(width * scaleFactor); - } - } else if (resizeMode === 'cover') { - if (ratioW > ratioH) { - img.scaleToWidth(width * scaleFactor); - } else { - img.scaleToHeight(height * scaleFactor); + if (["contain", "contain-blur"].includes(resizeMode)) { + if (ratioW > ratioH) { + img.scaleToHeight(height * scaleFactor); + } else { + img.scaleToWidth(width * scaleFactor); + } + } else if (resizeMode === "cover") { + if (ratioW > ratioH) { + img.scaleToWidth(width * scaleFactor); + } else { + img.scaleToHeight(height * scaleFactor); + } + } else if (resizeMode === "stretch") { + img.set({ + scaleX: (width / img.width) * scaleFactor, + scaleY: (height / img.height) * scaleFactor, + }); } - } else if (resizeMode === 'stretch') { - img.set({ scaleX: (width / img.width) * scaleFactor, scaleY: (height / img.height) * scaleFactor }); - } - if (blurredImg) canvas.add(blurredImg); - canvas.add(img); - }, + if (blurredImg) canvas.add(blurredImg); + canvas.add(img); + }, - close() { - if (blurredImg) blurredImg.dispose(); - // imgData.dispose(); - } - }; -}); + close() { + if (blurredImg) blurredImg.dispose(); + // imgData.dispose(); + }, + }; + }, +); diff --git a/src/sources/index.ts b/src/sources/index.ts index 617c8f33..b9837f4f 100644 --- a/src/sources/index.ts +++ b/src/sources/index.ts @@ -1,25 +1,32 @@ -import assert from 'assert'; +import assert from "assert"; -import canvas from './canvas.js'; -import fabric from './fabric.js'; -import fillColor from './fill-color.js'; -import gl from './gl.js'; -import imageOverlay from './image-overlay.js'; -import image from './image.js'; -import linearGradient from './linear-gradient.js'; -import newsTitle from './news-title.js'; -import radialGradient from './radial-gradient.js'; -import slideInText from './slide-in-text.js'; -import subtitle from './subtitle.js'; -import title from './title.js'; -import video from './video.js'; -import { join } from 'path'; -import { fileURLToPath } from 'url'; +import { join } from "path"; +import { fileURLToPath } from "url"; +import canvas from "./canvas.js"; +import fabric from "./fabric.js"; +import fillColor from "./fill-color.js"; +import gl from "./gl.js"; +import imageOverlay from "./image-overlay.js"; +import image from "./image.js"; +import linearGradient from "./linear-gradient.js"; +import newsTitle from "./news-title.js"; +import radialGradient from "./radial-gradient.js"; +import slideInText from "./slide-in-text.js"; +import subtitle from "./subtitle.js"; +import title from "./title.js"; +import video from "./video.js"; -import type { CreateFrameSourceOptions, FrameSourceFactory } from '../api/index.js'; -import type { BaseLayer, FillColorLayer, GlLayer, Layer, LinearGradientLayer, TitleLayer } from '../types.js'; +import type { CreateFrameSourceOptions, FrameSourceFactory } from "../api/index.js"; +import type { + BaseLayer, + FillColorLayer, + GlLayer, + Layer, + LinearGradientLayer, + TitleLayer, +} from "../types.js"; -const dirname = fileURLToPath(new URL('..', import.meta.url)); +const dirname = fileURLToPath(new URL("..", import.meta.url)); const sources = [ canvas, @@ -39,40 +46,43 @@ const sources = [ export async function createLayerSource(options: 
CreateFrameSourceOptions) { const layer = options.params; - const source = sources.find(({ type }) => type == layer.type) as FrameSourceFactory | undefined; + const source = sources.find(({ type }) => type == layer.type) as + | FrameSourceFactory + | undefined; assert(source, `Invalid type ${layer.type}`); return await source.setup(options); } export function expandLayerAliases(params: Layer): Layer[] { - if (params.type === 'editly-banner') { + if (params.type === "editly-banner") { return [ - { type: 'linear-gradient' } as LinearGradientLayer, - { ...params, type: 'title', text: 'Made with\nEDITLY\nmifi.no' } as TitleLayer, + { type: "linear-gradient" } as LinearGradientLayer, + { ...params, type: "title", text: "Made with\nEDITLY\nmifi.no" } as TitleLayer, ]; } - if (params.type === 'title-background') { - const backgroundTypes: ('radial-gradient' | 'linear-gradient' | 'fill-color')[] = ['radial-gradient', 'linear-gradient', 'fill-color']; + if (params.type === "title-background") { + const backgroundTypes: ("radial-gradient" | "linear-gradient" | "fill-color")[] = [ + "radial-gradient", + "linear-gradient", + "fill-color", + ]; const { background = { type: backgroundTypes[Math.floor(Math.random() * backgroundTypes.length)] }, ...title } = params; - return [ - background, - { ...title, type: 'title' }, - ]; + return [background, { ...title, type: "title" }]; } // TODO if random-background radial-gradient linear etc - if (params.type === 'pause') { - return [{ ...params, type: 'fill-color' } as FillColorLayer]; + if (params.type === "pause") { + return [{ ...params, type: "fill-color" } as FillColorLayer]; } - if (params.type === 'rainbow-colors') { - return [{ type: 'gl', fragmentPath: join(dirname, 'shaders/rainbow-colors.frag') } as GlLayer]; + if (params.type === "rainbow-colors") { + return [{ type: "gl", fragmentPath: join(dirname, "shaders/rainbow-colors.frag") } as GlLayer]; } return [params]; } diff --git a/src/sources/linear-gradient.ts b/src/sources/linear-gradient.ts index 09c92b34..65650f32 100644 --- a/src/sources/linear-gradient.ts +++ b/src/sources/linear-gradient.ts @@ -1,32 +1,38 @@ -import { Gradient } from 'fabric/node'; -import { getRandomGradient } from '../colors.js'; -import type { LinearGradientLayer } from '../types.js'; -import { getRekt } from '../util.js'; -import { defineFrameSource } from '../api/index.js'; +import { Gradient } from "fabric/node"; +import { defineFrameSource } from "../api/index.js"; +import { getRandomGradient } from "../colors.js"; +import type { LinearGradientLayer } from "../types.js"; +import { getRekt } from "../util.js"; -export default defineFrameSource('linear-gradient', async ({ width, height, params }) => { - const { colors: inColors } = params; - const colors = inColors && inColors.length === 2 ? inColors : getRandomGradient(); +export default defineFrameSource( + "linear-gradient", + async ({ width, height, params }) => { + const { colors: inColors } = params; + const colors = inColors && inColors.length === 2 ? 
inColors : getRandomGradient(); - return { - async readNextFrame(progress, canvas) { - const rect = getRekt(width, height); + return { + async readNextFrame(progress, canvas) { + const rect = getRekt(width, height); - rect.set('fill', new Gradient({ - coords: { - x1: 0, - y1: 0, - x2: width, - y2: height, - }, - colorStops: [ - { offset: 0, color: colors[0] }, - { offset: 1, color: colors[1] }, - ], - })); + rect.set( + "fill", + new Gradient({ + coords: { + x1: 0, + y1: 0, + x2: width, + y2: height, + }, + colorStops: [ + { offset: 0, color: colors[0] }, + { offset: 1, color: colors[1] }, + ], + }), + ); - rect.rotate(progress * 30); - canvas.add(rect); - } - }; -}); + rect.rotate(progress * 30); + canvas.add(rect); + }, + }; + }, +); diff --git a/src/sources/news-title.ts b/src/sources/news-title.ts index 54730e54..34ba96d5 100644 --- a/src/sources/news-title.ts +++ b/src/sources/news-title.ts @@ -1,46 +1,62 @@ -import { Rect, FabricText } from 'fabric/node'; -import { easeOutExpo } from '../transitions.js'; -import type { NewsTitleLayer } from '../types.js'; -import { defaultFontFamily } from '../util.js'; -import { defineFrameSource } from '../api/index.js'; +import { FabricText, Rect } from "fabric/node"; +import { defineFrameSource } from "../api/index.js"; +import { easeOutExpo } from "../transitions.js"; +import type { NewsTitleLayer } from "../types.js"; +import { defaultFontFamily } from "../util.js"; -export default defineFrameSource('news-title', async ({ width, height, params }) => { - const { text, textColor = '#ffffff', backgroundColor = '#d02a42', fontFamily = defaultFontFamily, delay = 0, speed = 1 } = params; - const min = Math.min(width, height); - const fontSize = Math.round(min * 0.05); +export default defineFrameSource( + "news-title", + async ({ width, height, params }) => { + const { + text, + textColor = "#ffffff", + backgroundColor = "#d02a42", + fontFamily = defaultFontFamily, + delay = 0, + speed = 1, + } = params; + const min = Math.min(width, height); + const fontSize = Math.round(min * 0.05); - return { - async readNextFrame(progress, canvas) { - const easedBgProgress = easeOutExpo(Math.max(0, Math.min((progress - delay) * speed * 3, 1))); - const easedTextProgress = easeOutExpo(Math.max(0, Math.min((progress - delay - 0.02) * speed * 4, 1))); - const easedTextOpacityProgress = easeOutExpo(Math.max(0, Math.min((progress - delay - 0.07) * speed * 4, 1))); + return { + async readNextFrame(progress, canvas) { + const easedBgProgress = easeOutExpo( + Math.max(0, Math.min((progress - delay) * speed * 3, 1)), + ); + const easedTextProgress = easeOutExpo( + Math.max(0, Math.min((progress - delay - 0.02) * speed * 4, 1)), + ); + const easedTextOpacityProgress = easeOutExpo( + Math.max(0, Math.min((progress - delay - 0.07) * speed * 4, 1)), + ); - const top = height * 0.08; + const top = height * 0.08; - const paddingV = 0.07 * min; - const paddingH = 0.03 * min; + const paddingV = 0.07 * min; + const paddingH = 0.03 * min; - const textBox = new FabricText(text, { - top, - left: paddingV + (easedTextProgress - 1) * width, - fill: textColor, - opacity: easedTextOpacityProgress, - fontFamily, - fontSize, - charSpacing: width * 0.1, - }); + const textBox = new FabricText(text, { + top, + left: paddingV + (easedTextProgress - 1) * width, + fill: textColor, + opacity: easedTextOpacityProgress, + fontFamily, + fontSize, + charSpacing: width * 0.1, + }); - const bgWidth = textBox.width + (paddingV * 2); - const rect = new Rect({ - top: top - paddingH, - left: 
(easedBgProgress - 1) * bgWidth, - width: bgWidth, - height: textBox.height + (paddingH * 2), - fill: backgroundColor, - }); + const bgWidth = textBox.width + paddingV * 2; + const rect = new Rect({ + top: top - paddingH, + left: (easedBgProgress - 1) * bgWidth, + width: bgWidth, + height: textBox.height + paddingH * 2, + fill: backgroundColor, + }); - canvas.add(rect); - canvas.add(textBox); - } - }; -}); + canvas.add(rect); + canvas.add(textBox); + }, + }; + }, +); diff --git a/src/sources/radial-gradient.ts b/src/sources/radial-gradient.ts index fb35f77c..1bfd36b7 100644 --- a/src/sources/radial-gradient.ts +++ b/src/sources/radial-gradient.ts @@ -1,45 +1,50 @@ -import * as fabric from 'fabric/node'; -import { getRandomGradient } from '../colors.js'; -import type { RadialGradientLayer } from '../types.js'; -import { getRekt } from '../util.js'; -import { defineFrameSource } from '../api/index.js'; - -export default defineFrameSource('radial-gradient', async ({ width, height, params }) => { - const { colors: inColors } = params; - - const colors = inColors && inColors.length === 2 ? inColors : getRandomGradient(); - - return { - async readNextFrame(progress, canvas) { - // console.log('progress', progress); - const max = Math.max(width, height); - - - const r1 = 0; - const r2 = max * (1 + progress) * 0.6; - - const rect = getRekt(width, height); - - const cx = 0.5 * rect.width; - const cy = 0.5 * rect.height; - - rect.set('fill', new fabric.Gradient({ - type: 'radial', - coords: { - r1, - r2, - x1: cx, - y1: cy, - x2: cx, - y2: cy, - }, - colorStops: [ - { offset: 0, color: colors[0] }, - { offset: 1, color: colors[1] }, - ], - })); - - canvas.add(rect); - } - }; -}); +import * as fabric from "fabric/node"; +import { defineFrameSource } from "../api/index.js"; +import { getRandomGradient } from "../colors.js"; +import type { RadialGradientLayer } from "../types.js"; +import { getRekt } from "../util.js"; + +export default defineFrameSource( + "radial-gradient", + async ({ width, height, params }) => { + const { colors: inColors } = params; + + const colors = inColors && inColors.length === 2 ? 
inColors : getRandomGradient(); + + return { + async readNextFrame(progress, canvas) { + // console.log('progress', progress); + const max = Math.max(width, height); + + const r1 = 0; + const r2 = max * (1 + progress) * 0.6; + + const rect = getRekt(width, height); + + const cx = 0.5 * rect.width; + const cy = 0.5 * rect.height; + + rect.set( + "fill", + new fabric.Gradient({ + type: "radial", + coords: { + r1, + r2, + x1: cx, + y1: cy, + x2: cx, + y2: cy, + }, + colorStops: [ + { offset: 0, color: colors[0] }, + { offset: 1, color: colors[1] }, + ], + }), + ); + + canvas.add(rect); + }, + }; + }, +); diff --git a/src/sources/slide-in-text.ts b/src/sources/slide-in-text.ts index c936920f..67b7e409 100644 --- a/src/sources/slide-in-text.ts +++ b/src/sources/slide-in-text.ts @@ -1,51 +1,74 @@ -import * as fabric from 'fabric/node'; -import { easeInOutCubic } from '../transitions.js'; -import type { SlideInTextLayer } from '../types.js'; -import { getPositionProps, getFrameByKeyFrames, defaultFontFamily } from '../util.js'; -import { defineFrameSource } from '../api/index.js'; +import * as fabric from "fabric/node"; +import { defineFrameSource } from "../api/index.js"; +import { easeInOutCubic } from "../transitions.js"; +import type { SlideInTextLayer } from "../types.js"; +import { defaultFontFamily, getFrameByKeyFrames, getPositionProps } from "../util.js"; -export default defineFrameSource('slide-in-text', async ({ width, height, params }) => { - const { position, text, fontSize = 0.05, charSpacing = 0.1, textColor = '#ffffff', color = undefined, fontFamily = defaultFontFamily } = params; +export default defineFrameSource( + "slide-in-text", + async ({ width, height, params }) => { + const { + position, + text, + fontSize = 0.05, + charSpacing = 0.1, + textColor = "#ffffff", + color = undefined, + fontFamily = defaultFontFamily, + } = params; - if (color) { - console.warn('slide-in-text: color is deprecated, use textColor.'); - } + if (color) { + console.warn("slide-in-text: color is deprecated, use textColor."); + } - const fontSizeAbs = Math.round(width * fontSize); + const fontSizeAbs = Math.round(width * fontSize); - const { left, top, originX, originY } = getPositionProps({ position, width, height }); + const { left, top, originX, originY } = getPositionProps({ position, width, height }); - return { - async readNextFrame(progress, canvas) { - const textBox = new fabric.FabricText(text, { - fill: color ?? textColor, - fontFamily, - fontSize: fontSizeAbs, - charSpacing: width * charSpacing, - }); + return { + async readNextFrame(progress, canvas) { + const textBox = new fabric.FabricText(text, { + fill: color ?? 
textColor, + fontFamily, + fontSize: fontSizeAbs, + charSpacing: width * charSpacing, + }); - const { opacity, textSlide } = getFrameByKeyFrames([ - { t: 0.1, props: { opacity: 1, textSlide: 0 } }, - { t: 0.3, props: { opacity: 1, textSlide: 1 } }, - { t: 0.8, props: { opacity: 1, textSlide: 1 } }, - { t: 0.9, props: { opacity: 0, textSlide: 1 } }, - ], progress); + const { opacity, textSlide } = getFrameByKeyFrames( + [ + { t: 0.1, props: { opacity: 1, textSlide: 0 } }, + { t: 0.3, props: { opacity: 1, textSlide: 1 } }, + { t: 0.8, props: { opacity: 1, textSlide: 1 } }, + { t: 0.9, props: { opacity: 0, textSlide: 1 } }, + ], + progress, + ); - const fadedObject = await getFadedObject({ object: textBox, progress: easeInOutCubic(textSlide) }); - fadedObject.set({ - originX, - originY, - top, - left, - opacity, - }); + const fadedObject = await getFadedObject({ + object: textBox, + progress: easeInOutCubic(textSlide), + }); + fadedObject.set({ + originX, + originY, + top, + left, + opacity, + }); - canvas.add(fadedObject); - } - }; -}); + canvas.add(fadedObject); + }, + }; + }, +); -async function getFadedObject({ object, progress }: { object: T; progress: number; }) { +async function getFadedObject({ + object, + progress, +}: { + object: T; + progress: number; +}) { const rect = new fabric.Rect({ left: 0, width: object.width, @@ -53,26 +76,31 @@ async function getFadedObject({ object, progress top: 0, }); - rect.set('fill', new fabric.Gradient({ - coords: { - x1: 0, - y1: 0, - x2: object.width, - y2: 0, - }, - colorStops: [ - { offset: Math.max(0, (progress * (1 + 0.2)) - 0.2), color: 'rgba(255,255,255,1)' }, - { offset: Math.min(1, (progress * (1 + 0.2))), color: 'rgba(255,255,255,0)' }, - ], - })); + rect.set( + "fill", + new fabric.Gradient({ + coords: { + x1: 0, + y1: 0, + x2: object.width, + y2: 0, + }, + colorStops: [ + { offset: Math.max(0, progress * (1 + 0.2) - 0.2), color: "rgba(255,255,255,1)" }, + { offset: Math.min(1, progress * (1 + 0.2)), color: "rgba(255,255,255,0)" }, + ], + }), + ); const gradientMaskImg = rect.cloneAsImage({}); const fadedImage = object.cloneAsImage({}); - fadedImage.filters.push(new fabric.filters.BlendImage({ - image: gradientMaskImg, - mode: 'multiply', - })); + fadedImage.filters.push( + new fabric.filters.BlendImage({ + image: gradientMaskImg, + mode: "multiply", + }), + ); fadedImage.applyFilters(); diff --git a/src/sources/subtitle.ts b/src/sources/subtitle.ts index c55281af..c406a278 100644 --- a/src/sources/subtitle.ts +++ b/src/sources/subtitle.ts @@ -1,11 +1,18 @@ -import { Rect, Textbox } from 'fabric/node'; -import { easeOutExpo } from '../transitions.js'; -import type { SubtitleLayer } from '../types.js'; -import { defaultFontFamily } from '../util.js'; -import { defineFrameSource } from '../api/index.js'; +import { Rect, Textbox } from "fabric/node"; +import { defineFrameSource } from "../api/index.js"; +import { easeOutExpo } from "../transitions.js"; +import type { SubtitleLayer } from "../types.js"; +import { defaultFontFamily } from "../util.js"; -export default defineFrameSource('subtitle', async ({ width, height, params }) => { - const { text, textColor = '#ffffff', backgroundColor = 'rgba(0,0,0,0.3)', fontFamily = defaultFontFamily, delay = 0, speed = 1 } = params; +export default defineFrameSource("subtitle", async ({ width, height, params }) => { + const { + text, + textColor = "#ffffff", + backgroundColor = "rgba(0,0,0,0.3)", + fontFamily = defaultFontFamily, + delay = 0, + speed = 1, + } = params; return { async 
readNextFrame(progress, canvas) { @@ -19,11 +26,11 @@ export default defineFrameSource('subtitle', async ({ width, heig fontFamily, fontSize: min / 20, - textAlign: 'left', + textAlign: "left", width: width - padding * 2, - originX: 'center', - originY: 'bottom', - left: (width / 2) + (-1 + easedProgress) * padding, + originX: "center", + originY: "bottom", + left: width / 2 + (-1 + easedProgress) * padding, top: height - padding, opacity: easedProgress, }); @@ -33,13 +40,13 @@ export default defineFrameSource('subtitle', async ({ width, heig width, height: textBox.height + padding * 2, top: height, - originY: 'bottom', + originY: "bottom", fill: backgroundColor, opacity: easedProgress, }); canvas.add(rect); canvas.add(textBox); - } - } + }, + }; }); diff --git a/src/sources/title.ts b/src/sources/title.ts index 2c114308..d6298413 100644 --- a/src/sources/title.ts +++ b/src/sources/title.ts @@ -1,18 +1,29 @@ -import { Textbox } from 'fabric/node'; -import type { TitleLayer } from '../types.js'; -import { getPositionProps } from '../util.js'; -import { defaultFontFamily, getZoomParams, getTranslationParams } from '../util.js'; -import { defineFrameSource } from '../api//index.js'; +import { Textbox } from "fabric/node"; +import { defineFrameSource } from "../api//index.js"; +import type { TitleLayer } from "../types.js"; +import { + defaultFontFamily, + getPositionProps, + getTranslationParams, + getZoomParams, +} from "../util.js"; -export default defineFrameSource('title', async ({ width, height, params }) => { - const { text, textColor = '#ffffff', fontFamily = defaultFontFamily, position = 'center', zoomDirection = 'in', zoomAmount = 0.2 } = params; +export default defineFrameSource("title", async ({ width, height, params }) => { + const { + text, + textColor = "#ffffff", + fontFamily = defaultFontFamily, + position = "center", + zoomDirection = "in", + zoomAmount = 0.2, + } = params; const fontSize = Math.round(Math.min(width, height) * 0.1); const textBox = new Textbox(text, { fill: textColor, fontFamily, fontSize, - textAlign: 'center', + textAlign: "center", width: width * 0.8, }); @@ -36,6 +47,6 @@ export default defineFrameSource('title', async ({ width, height, pa }); canvas.add(textImage); - } + }, }; }); diff --git a/src/sources/video.ts b/src/sources/video.ts index 8f71703f..2168759b 100644 --- a/src/sources/video.ts +++ b/src/sources/video.ts @@ -1,15 +1,44 @@ -import assert from 'assert'; -import * as fabric from 'fabric/node'; -import { ffmpeg, readFileStreams } from '../ffmpeg.js'; -import { rgbaToFabricImage, blurImage } from './fabric.js'; -import { defineFrameSource } from '../api/index.js'; -import type { VideoLayer } from '../types.js'; - -export default defineFrameSource('video', async ({ width: canvasWidth, height: canvasHeight, channels, framerateStr, verbose, logTimes, params }) => { - const { path, cutFrom, cutTo, resizeMode = 'contain-blur', speedFactor, inputWidth, inputHeight, width: requestedWidthRel, height: requestedHeightRel, left: leftRel = 0, top: topRel = 0, originX = 'left', originY = 'top', fabricImagePostProcessing = null } = params; - - const requestedWidth = requestedWidthRel ? Math.round(requestedWidthRel * canvasWidth) : canvasWidth; - const requestedHeight = requestedHeightRel ? 
Math.round(requestedHeightRel * canvasHeight) : canvasHeight; +import assert from "assert"; +import * as fabric from "fabric/node"; +import { defineFrameSource } from "../api/index.js"; +import { ffmpeg, readFileStreams } from "../ffmpeg.js"; +import type { VideoLayer } from "../types.js"; +import { blurImage, rgbaToFabricImage } from "./fabric.js"; + +export default defineFrameSource("video", async (options) => { + const { + width: canvasWidth, + height: canvasHeight, + channels, + framerateStr, + verbose, + logTimes, + params, + } = options; + + const { + path, + cutFrom, + cutTo, + resizeMode = "contain-blur", + speedFactor, + inputWidth, + inputHeight, + width: requestedWidthRel, + height: requestedHeightRel, + left: leftRel = 0, + top: topRel = 0, + originX = "left", + originY = "top", + fabricImagePostProcessing = null, + } = params; + + const requestedWidth = requestedWidthRel + ? Math.round(requestedWidthRel * canvasWidth) + : canvasWidth; + const requestedHeight = requestedHeightRel + ? Math.round(requestedHeightRel * canvasHeight) + : canvasHeight; const left = leftRel * canvasWidth; const top = topRel * canvasHeight; @@ -22,7 +51,7 @@ export default defineFrameSource('video', async ({ width: canvasWidt let targetHeight = requestedHeight; let scaleFilter; - if (['contain', 'contain-blur'].includes(resizeMode)) { + if (["contain", "contain-blur"].includes(resizeMode)) { if (ratioW > ratioH) { targetHeight = requestedHeight; targetWidth = Math.round(requestedHeight * inputAspectRatio); @@ -32,7 +61,7 @@ export default defineFrameSource('video', async ({ width: canvasWidt } scaleFilter = `scale=${targetWidth}:${targetHeight}`; - } else if (resizeMode === 'cover') { + } else if (resizeMode === "cover") { let scaledWidth; let scaledHeight; @@ -46,15 +75,16 @@ export default defineFrameSource('video', async ({ width: canvasWidt // TODO improve performance by crop first, then scale? scaleFilter = `scale=${scaledWidth}:${scaledHeight},crop=${targetWidth}:${targetHeight}`; - } else { // 'stretch' + } else { + // 'stretch' scaleFilter = `scale=${targetWidth}:${targetHeight}`; } if (verbose) console.log(scaleFilter); - let ptsFilter = ''; + let ptsFilter = ""; if (speedFactor !== 1) { - if (verbose) console.log('speedFactor', speedFactor); + if (verbose) console.log("speedFactor", speedFactor); ptsFilter = `setpts=${speedFactor}*PTS,`; } @@ -69,40 +99,52 @@ export default defineFrameSource('video', async ({ width: canvasWidt // https://forum.unity.com/threads/settings-for-importing-a-video-with-an-alpha-channel.457657/ const streams = await readFileStreams(path); - const firstVideoStream = streams.find((s) => s.codec_type === 'video'); + const firstVideoStream = streams.find((s) => s.codec_type === "video"); // https://superuser.com/a/1116905/658247 let inputCodec; - if (firstVideoStream?.codec_name === 'vp8') inputCodec = 'libvpx'; - else if (firstVideoStream?.codec_name === 'vp9') inputCodec = 'libvpx-vp9'; + if (firstVideoStream?.codec_name === "vp8") inputCodec = "libvpx"; + else if (firstVideoStream?.codec_name === "vp9") inputCodec = "libvpx-vp9"; // http://zulko.github.io/blog/2013/09/27/read-and-write-video-frames-in-python-using-ffmpeg/ // Testing: ffmpeg -i 'vid.mov' -t 1 -vcodec rawvideo -pix_fmt rgba -f image2pipe - | ffmpeg -f rawvideo -vcodec rawvideo -pix_fmt rgba -s 2166x1650 -i - -vf format=yuv420p -vcodec libx264 -y out.mp4 // https://trac.ffmpeg.org/wiki/ChangingFrameRate const args = [ - '-nostdin', - ...(inputCodec ? ['-vcodec', inputCodec] : []), - ...(cutFrom ? 
['-ss', cutFrom.toString()] : []), - '-i', path, - ...(cutTo ? ['-t', ((cutTo - cutFrom!) * speedFactor!).toString()] : []), - '-vf', `${ptsFilter}fps=${framerateStr},${scaleFilter}`, - '-map', 'v:0', - '-vcodec', 'rawvideo', - '-pix_fmt', 'rgba', - '-f', 'image2pipe', - '-', + "-nostdin", + ...(inputCodec ? ["-vcodec", inputCodec] : []), + ...(cutFrom ? ["-ss", cutFrom.toString()] : []), + "-i", + path, + ...(cutTo ? ["-t", ((cutTo - cutFrom!) * speedFactor!).toString()] : []), + "-vf", + `${ptsFilter}fps=${framerateStr},${scaleFilter}`, + "-map", + "v:0", + "-vcodec", + "rawvideo", + "-pix_fmt", + "rgba", + "-f", + "image2pipe", + "-", ]; - const ps = ffmpeg(args, { encoding: null, buffer: false, stdin: 'ignore', stdout: 'pipe', stderr: process.stderr }); + const ps = ffmpeg(args, { + encoding: null, + buffer: false, + stdin: "ignore", + stdout: "pipe", + stderr: process.stderr, + }); const stream = ps.stdout!; let timeout: NodeJS.Timeout; let ended = false; - stream.once('end', () => { + stream.once("end", () => { clearTimeout(timeout); - if (verbose) console.log(path, 'ffmpeg video stream ended'); + if (verbose) console.log(path, "ffmpeg video stream ended"); ended = true; }); @@ -128,16 +170,16 @@ export default defineFrameSource('video', async ({ width: canvasWidt } if (ended) { - console.log(path, 'Tried to read next video frame after ffmpeg video stream ended'); + console.log(path, "Tried to read next video frame after ffmpeg video stream ended"); resolve(); return; } function cleanup() { stream.pause(); - stream.removeListener('data', handleChunk); - stream.removeListener('end', resolve); - stream.removeListener('error', reject); + stream.removeListener("data", handleChunk); + stream.removeListener("end", resolve); + stream.removeListener("error", reject); } function handleChunk(chunk: Buffer) { @@ -147,10 +189,10 @@ export default defineFrameSource('video', async ({ width: canvasWidt const out = getNextFrame(); const restLength = chunk.length - nCopied; if (restLength > 0) { - if (verbose) console.log('Left over data', nCopied, chunk.length, restLength); + if (verbose) console.log("Left over data", nCopied, chunk.length, restLength); // make sure the buffer can store all chunk data if (chunk.length > buf.length) { - if (verbose) console.log('resizing buffer', buf.length, chunk.length); + if (verbose) console.log("resizing buffer", buf.length, chunk.length); const newBuf = Buffer.allocUnsafe(chunk.length); buf.copy(newBuf, 0, 0, length); buf = newBuf; @@ -167,14 +209,14 @@ export default defineFrameSource('video', async ({ width: canvasWidt } timeout = setTimeout(() => { - console.warn('Timeout on read video frame'); + console.warn("Timeout on read video frame"); cleanup(); resolve(); }, 60000); - stream.on('data', handleChunk); - stream.on('end', resolve); - stream.on('error', reject); + stream.on("data", handleChunk); + stream.on("end", resolve); + stream.on("error", reject); stream.resume(); }); @@ -182,9 +224,9 @@ export default defineFrameSource('video', async ({ width: canvasWidt assert(rgba.length === frameByteSize); - if (logTimes) console.time('rgbaToFabricImage'); + if (logTimes) console.time("rgbaToFabricImage"); const img = await rgbaToFabricImage({ width: targetWidth, height: targetHeight, rgba }); - if (logTimes) console.timeEnd('rgbaToFabricImage'); + if (logTimes) console.timeEnd("rgbaToFabricImage"); img.set({ originX, @@ -193,9 +235,9 @@ export default defineFrameSource('video', async ({ width: canvasWidt let centerOffsetX = 0; let centerOffsetY = 0; - if 
(resizeMode === 'contain' || resizeMode === 'contain-blur') { - const dirX = originX === 'left' ? 1 : -1; - const dirY = originY === 'top' ? 1 : -1; + if (resizeMode === "contain" || resizeMode === "contain-blur") { + const dirX = originX === "left" ? 1 : -1; + const dirY = originY === "top" ? 1 : -1; centerOffsetX = (dirX * (requestedWidth - targetWidth)) / 2; centerOffsetY = (dirY * (requestedHeight - targetHeight)) / 2; } @@ -205,9 +247,13 @@ export default defineFrameSource('video', async ({ width: canvasWidt top: top + centerOffsetY, }); - if (resizeMode === 'contain-blur') { + if (resizeMode === "contain-blur") { const mutableImg = img.cloneAsImage({}); - const blurredImg = await blurImage({ mutableImg, width: requestedWidth, height: requestedHeight }); + const blurredImg = await blurImage({ + mutableImg, + width: requestedWidth, + height: requestedHeight, + }); blurredImg.set({ left, top, @@ -225,7 +271,7 @@ export default defineFrameSource('video', async ({ width: canvasWidt } const close = () => { - if (verbose) console.log('Close', path); + if (verbose) console.log("Close", path); ps.cancel(); }; diff --git a/src/transitions.ts b/src/transitions.ts index 979a2d8e..213970f9 100644 --- a/src/transitions.ts +++ b/src/transitions.ts @@ -1,14 +1,28 @@ -import assert from 'assert'; -import type { Transition } from './types.js'; +import assert from "assert"; +import type { Transition } from "./types.js"; export type EasingFunction = (progress: number) => number; export type CalculatedTransition = Transition & { duration: number; easingFunction: EasingFunction; -} +}; -const randomTransitionsSet = ['fade', 'fadegrayscale', 'directionalwarp', 'crosswarp', 'dreamyzoom', 'burn', 'crosszoom', 'simplezoom', 'linearblur', 'directional-left', 'directional-right', 'directional-up', 'directional-down']; +const randomTransitionsSet = [ + "fade", + "fadegrayscale", + "directionalwarp", + "crosswarp", + "dreamyzoom", + "burn", + "crosszoom", + "simplezoom", + "linearblur", + "directional-left", + "directional-right", + "directional-up", + "directional-down", +]; function getRandomTransition() { return randomTransitionsSet[Math.floor(Math.random() * randomTransitionsSet.length)]; @@ -17,39 +31,48 @@ function getRandomTransition() { // https://easings.net/ export function easeOutExpo(x: number) { - return x === 1 ? 1 : 1 - (2 ** (-10 * x)); + return x === 1 ? 1 : 1 - 2 ** (-10 * x); } export function easeInOutCubic(x: number) { - return x < 0.5 ? 4 * x * x * x : 1 - ((-2 * x + 2) ** 3) / 2; + return x < 0.5 ? 
4 * x * x * x : 1 - (-2 * x + 2) ** 3 / 2; } export function linear(x: number) { return x; } -function getTransitionEasingFunction(easing: string | null | undefined, transitionName?: string): EasingFunction { +function getTransitionEasingFunction( + easing: string | null | undefined, + transitionName?: string, +): EasingFunction { if (easing !== null) { // FIXME[TS]: `easing` always appears to be null or undefined, so this never gets called if (easing) return { easeOutExpo }[easing] || linear; - if (transitionName === 'directional') return easeOutExpo; + if (transitionName === "directional") return easeOutExpo; } return linear; } const TransitionAliases: Record> = { - 'directional-left': { name: 'directional', params: { direction: [1, 0] } }, - 'directional-right': { name: 'directional', params: { direction: [-1, 0] } }, - 'directional-down': { name: 'directional', params: { direction: [0, 1] } }, - 'directional-up': { name: 'directional', params: { direction: [0, -1] } }, -} + "directional-left": { name: "directional", params: { direction: [1, 0] } }, + "directional-right": { name: "directional", params: { direction: [-1, 0] } }, + "directional-down": { name: "directional", params: { direction: [0, 1] } }, + "directional-up": { name: "directional", params: { direction: [0, -1] } }, +}; -export function calcTransition(transition: Transition | null | undefined, isLastClip: boolean): CalculatedTransition { +export function calcTransition( + transition: Transition | null | undefined, + isLastClip: boolean, +): CalculatedTransition { if (!transition || isLastClip) return { duration: 0, easingFunction: linear }; - assert(!transition.duration || transition.name, 'Please specify transition name or set duration to 0'); + assert( + !transition.duration || transition.name, + "Please specify transition name or set duration to 0", + ); - if (transition.name === 'random' && transition.duration) { + if (transition.name === "random" && transition.duration) { transition = { ...transition, name: getRandomTransition() }; } diff --git a/src/types.ts b/src/types.ts index fd7c6c73..4d390b0c 100644 --- a/src/types.ts +++ b/src/types.ts @@ -1,8 +1,8 @@ // TODO[ts]: Move these elsewhere -import type * as Fabric from 'fabric/node'; -import type { Canvas } from "canvas" -import { ConfigurationOptions } from './configuration.js'; +import type { Canvas } from "canvas"; +import type * as Fabric from "fabric/node"; +import { ConfigurationOptions } from "./configuration.js"; /** Little utility */ export type OptionalPromise = Promise | T; @@ -22,17 +22,12 @@ export type OriginY = Fabric.TOriginY; * @see [Example 'image.json5']{@link https://github.com/mifi/editly/blob/master/examples/image.json5} * @see [Example 'videos.json5']{@link https://github.com/mifi/editly/blob/master/examples/videos.json5} */ -export type ResizeMode = - 'contain' | - 'contain-blur' | - 'cover' | - 'stretch'; +export type ResizeMode = "contain" | "contain-blur" | "cover" | "stretch"; /** * An object, where `{ x: 0, y: 0 }` is the upper left corner of the screen and `{ x: 1, y: 1 }` is the lower right corner. */ export interface PositionObject { - /** * X-position relative to video width. */ @@ -52,7 +47,6 @@ export interface PositionObject { * Y-anchor position of the object. 
*/ originY?: OriginY; - } /** @@ -62,71 +56,69 @@ export interface PositionObject { * @see [Example 'position.json5']{@link https://github.com/mifi/editly/blob/master/examples/position.json5} */ export type Position = - 'top' | - 'top-left' | - 'top-right' | - 'center' | - 'center-left' | - 'center-right' | - 'bottom' | - 'bottom-left' | - 'bottom-right' | - PositionObject; + | "top" + | "top-left" + | "top-right" + | "center" + | "center-left" + | "center-right" + | "bottom" + | "bottom-left" + | "bottom-right" + | PositionObject; /** * @see [Curve types]{@link https://trac.ffmpeg.org/wiki/AfadeCurves} */ export type CurveType = - 'tri' | - 'qsin' | - 'hsin' | - 'esin' | - 'log' | - 'ipar' | - 'qua' | - 'cub' | - 'squ' | - 'cbr' | - 'par' | - 'exp' | - 'iqsin' | - 'ihsin' | - 'dese' | - 'desi' | - 'losi' | - 'nofade' | - string; + | "tri" + | "qsin" + | "hsin" + | "esin" + | "log" + | "ipar" + | "qua" + | "cub" + | "squ" + | "cbr" + | "par" + | "exp" + | "iqsin" + | "ihsin" + | "dese" + | "desi" + | "losi" + | "nofade" + | string; /** * @see [Transition types]{@link https://github.com/mifi/editly#transition-types} */ export type TransitionType = - 'directional-left' | - 'directional-right' | - 'directional-up' | - 'directional-down' | - 'random' | - 'dummy' | - string; + | "directional-left" + | "directional-right" + | "directional-up" + | "directional-down" + | "random" + | "dummy" + | string; /** * WARNING: Undocumented feature! */ export type GLTextureLike = { - bind: (unit: number) => number, - shape: [number, number], + bind: (unit: number) => number; + shape: [number, number]; }; /** * WARNING: Undocumented feature! */ export interface TransitionParams { - /** * WARNING: Undocumented feature! */ [key: string]: number | boolean | GLTextureLike | number[]; - } export interface Transition { @@ -168,14 +160,12 @@ export interface Transition { * WARNING: Undocumented feature! */ params?: TransitionParams; - } /** * @see [Arbitrary audio tracks]{@link https://github.com/mifi/editly#arbitrary-audio-tracks} */ export interface AudioTrack { - /** * File path for this track. */ @@ -206,19 +196,17 @@ export interface AudioTrack { * @default 0 */ start?: number; - } /** * @see [Ken Burns parameters]{@link https://github.com/mifi/editly#ken-burns-parameters} */ export interface KenBurns { - /** * Zoom direction for Ken Burns effect. * Use `null` to disable. */ - zoomDirection?: 'in' | 'out' | 'left' | `right` | null; + zoomDirection?: "in" | "out" | "left" | `right` | null; /** * Zoom amount for Ken Burns effect. @@ -226,32 +214,30 @@ export interface KenBurns { * @default 0.1 */ zoomAmount?: number; - } export type LayerType = - 'video' | - 'audio' | - 'detached-audio' | - 'image' | - 'image-overlay' | - 'title' | - 'subtitle' | - 'title-background' | - 'news-title' | - 'slide-in-text' | - 'fill-color' | - 'pause' | - 'radial-gradient' | - 'linear-gradient' | - 'rainbow-colors' | - 'canvas' | - 'fabric' | - 'gl' | - 'editly-banner'; + | "video" + | "audio" + | "detached-audio" + | "image" + | "image-overlay" + | "title" + | "subtitle" + | "title-background" + | "news-title" + | "slide-in-text" + | "fill-color" + | "pause" + | "radial-gradient" + | "linear-gradient" + | "rainbow-colors" + | "canvas" + | "fabric" + | "gl" + | "editly-banner"; export interface BaseLayer { - /** * Layer type. 
*/ @@ -303,7 +289,7 @@ export interface TextLayer extends BaseLayer { export interface VideoPostProcessingFunctionArgs { canvas: Fabric.StaticCanvas; image: Fabric.FabricImage; - fabric: typeof Fabric, + fabric: typeof Fabric; progress: number; time: number; } @@ -314,11 +300,10 @@ export interface VideoPostProcessingFunctionArgs { * If the layer has audio, it will be kept (and mixed with other audio layers if present). */ export interface VideoLayer extends BaseLayer { - /** * Layer type. */ - type: 'video'; + type: "video"; /** * Path to video file. @@ -417,11 +402,10 @@ export interface VideoLayer extends BaseLayer { * The slow down/speed-up operation is limited to values between `0.5x` and `100x`. */ export interface AudioLayer extends BaseLayer { - /** * Layer type. */ - type: 'audio'; + type: "audio"; /** * Path to audio file. @@ -447,7 +431,6 @@ export interface AudioLayer extends BaseLayer { * @default 1 */ mixVolume?: number | string; - } /** @@ -458,23 +441,20 @@ export interface AudioLayer extends BaseLayer { * except `start` time is relative to the clip's start. */ export interface DetachedAudioLayer extends BaseLayer, AudioTrack { - /** * Layer type. */ - type: 'detached-audio'; - + type: "detached-audio"; } /** * Full screen image. */ export interface ImageLayer extends BaseLayer, KenBurns { - /** * Layer type. */ - type: 'image'; + type: "image"; /** * Path to image file. @@ -490,18 +470,16 @@ export interface ImageLayer extends BaseLayer, KenBurns { * WARNING: Undocumented feature! */ duration?: number; - } /** * Image overlay with a custom position and size on the screen. */ export interface ImageOverlayLayer extends BaseLayer, KenBurns { - /** * Layer type. */ - type: 'image-overlay'; + type: "image-overlay"; /** * Path to image file. @@ -522,29 +500,25 @@ export interface ImageOverlayLayer extends BaseLayer, KenBurns { * Height (from 0 to 1) where 1 is screen height. */ height?: number; - } export interface TitleLayer extends TextLayer, KenBurns { - /** * Layer type. */ - type: 'title'; + type: "title"; /** * Position. */ position?: Position; - } export interface SubtitleLayer extends TextLayer { - /** * Layer type. */ - type: 'subtitle'; + type: "subtitle"; /** * WARNING: Undocumented feature! @@ -559,26 +533,23 @@ export interface SubtitleLayer extends TextLayer { * Title with background. */ export interface TitleBackgroundLayer extends TextLayer { - /** * Layer type. */ - type: 'title-background'; + type: "title-background"; /** * Background layer. * Defaults to random background. */ background?: BackgroundLayer; - } export interface NewsTitleLayer extends TextLayer { - /** * Layer type. */ - type: 'news-title'; + type: "news-title"; /** * Background color. @@ -596,11 +567,10 @@ export interface NewsTitleLayer extends TextLayer { } export interface SlideInTextLayer extends TextLayer { - /** * Layer type. */ - type: 'slide-in-text'; + type: "slide-in-text"; /** * Font size. @@ -622,76 +592,65 @@ export interface SlideInTextLayer extends TextLayer { * Position. */ position?: Position; - } export interface FillColorLayer extends BaseLayer { - /** * Layer type. */ - type: 'fill-color'; + type: "fill-color"; /** * Color to fill background. * Defaults to random color. */ color?: string; - } export interface PauseLayer extends BaseLayer { - /** * Layer type. */ - type: 'pause'; + type: "pause"; /** * Color to fill background. * Defaults to random color. */ color?: string; - } export interface RadialGradientLayer extends BaseLayer { - /** * Layer type. 
*/ - type: 'radial-gradient'; + type: "radial-gradient"; /** * Array of two colors. * Defaults to random colors. */ colors?: [string, string]; - } export interface LinearGradientLayer extends BaseLayer { - /** * Layer type. */ - type: 'linear-gradient'; + type: "linear-gradient"; /** * Array of two colors. * Defaults to random colors. */ colors?: [string, string]; - } export interface RainbowColorsLayer extends BaseLayer { - /** * Layer type. */ - type: 'rainbow-colors'; - + type: "rainbow-colors"; } export interface CustomFabricFunctionCallbacks { @@ -710,20 +669,20 @@ export interface CustomCanvasFunctionCallbacks { onClose?: () => OptionalPromise; } -export type CustomCanvasFunction = (args: CustomCanvasFunctionArgs) => OptionalPromise; +export type CustomCanvasFunction = ( + args: CustomCanvasFunctionArgs, +) => OptionalPromise; export interface CanvasLayer extends BaseLayer { - /** * Layer type. */ - type: 'canvas'; + type: "canvas"; /** * Custom JavaScript function. */ func: CustomCanvasFunction; - } export interface CustomFabricFunctionArgs { @@ -733,28 +692,27 @@ export interface CustomFabricFunctionArgs { params: unknown; } -export type CustomFabricFunction = (args: CustomFabricFunctionArgs) => OptionalPromise; +export type CustomFabricFunction = ( + args: CustomFabricFunctionArgs, +) => OptionalPromise; export interface FabricLayer extends BaseLayer { - /** * Layer type. */ - type: 'fabric'; + type: "fabric"; /** * Custom JavaScript function. */ func: CustomFabricFunction; - } export interface GlLayer extends BaseLayer { - /** * Layer type. */ - type: 'gl'; + type: "gl"; /** * Fragment path (`.frag` file) @@ -779,11 +737,10 @@ export interface GlLayer extends BaseLayer { * WARNING: Undocumented feature! */ export interface EditlyBannerLayer extends BaseLayer { - /** * Layer type. */ - type: 'editly-banner'; + type: "editly-banner"; /** * Set font (`.ttf`). @@ -797,36 +754,32 @@ export interface EditlyBannerLayer extends BaseLayer { * @see [Example 'commonFeatures.json5']{@link https://github.com/mifi/editly/blob/master/examples/commonFeatures.json5} */ export type Layer = - VideoLayer | - AudioLayer | - DetachedAudioLayer | - ImageLayer | - ImageOverlayLayer | - TitleLayer | - SubtitleLayer | - TitleBackgroundLayer | - NewsTitleLayer | - SlideInTextLayer | - FillColorLayer | - PauseLayer | - RadialGradientLayer | - LinearGradientLayer | - RainbowColorsLayer | - CanvasLayer | - FabricLayer | - GlLayer | - EditlyBannerLayer; + | VideoLayer + | AudioLayer + | DetachedAudioLayer + | ImageLayer + | ImageOverlayLayer + | TitleLayer + | SubtitleLayer + | TitleBackgroundLayer + | NewsTitleLayer + | SlideInTextLayer + | FillColorLayer + | PauseLayer + | RadialGradientLayer + | LinearGradientLayer + | RainbowColorsLayer + | CanvasLayer + | FabricLayer + | GlLayer + | EditlyBannerLayer; /** * Special layers that can be used f.e. in the 'title-background' layer. */ -export type BackgroundLayer = - RadialGradientLayer | - LinearGradientLayer | - FillColorLayer; +export type BackgroundLayer = RadialGradientLayer | LinearGradientLayer | FillColorLayer; export interface Clip { - /** * List of layers within the current clip that will be overlaid in their natural order (final layer on top). */ @@ -845,11 +798,9 @@ export interface Clip { * Set to `null` to disable transitions. */ transition?: Transition | null; - } export interface DefaultLayerOptions { - /** * Set default font (`.ttf`). * Defaults to system font. 
@@ -861,20 +812,16 @@ export interface DefaultLayerOptions { */ // FIXME[ts]: Define a type for this [key: string]: unknown; - } export type DefaultLayerTypeOptions = { - /** * Set any layer parameter that all layers of the same type (specified in key) will inherit. */ - [P in LayerType]?: Partial, 'type'>>; - -} + [P in LayerType]?: Partial, "type">>; +}; export interface DefaultOptions { - /** * Set default clip duration for clips that don't have an own duration (in seconds). * @@ -897,7 +844,6 @@ export interface DefaultOptions { * Set to `null` to disable transitions. */ transition?: Transition | null; - } /** @@ -908,7 +854,6 @@ export interface DefaultOptions { * @see [Example of audio ducking]{@link https://github.com/mifi/editly/blob/master/examples/audio2.json5} */ export interface AudioNormalizationOptions { - /** * Enable audio normalization? * @@ -932,11 +877,9 @@ export interface AudioNormalizationOptions { * @see [Audio normalization]{@link https://github.com/mifi/editly#audio-normalization} */ maxGain?: number; - } export interface RenderSingleFrameConfig extends ConfigurationOptions { - /** * Output path (`.mp4` or `.mkv`, can also be a `.gif`). */ @@ -946,7 +889,6 @@ export interface RenderSingleFrameConfig extends ConfigurationOptions { * Timestamp to render. */ time?: number; - } // Internal types diff --git a/src/types/gl-buffer.d.ts b/src/types/gl-buffer.d.ts index 517a313d..f72ad897 100644 --- a/src/types/gl-buffer.d.ts +++ b/src/types/gl-buffer.d.ts @@ -1,3 +1,8 @@ -declare module 'gl-buffer' { - export default function createBuffer(gl: WebGLRenderingContext, data: number[], target: number, usage: number): WebGLBuffer; +declare module "gl-buffer" { + export default function createBuffer( + gl: WebGLRenderingContext, + data: number[], + target: number, + usage: number, + ): WebGLBuffer; } diff --git a/src/types/gl-texture2d.d.ts b/src/types/gl-texture2d.d.ts index a0edb0fc..ffb90d83 100644 --- a/src/types/gl-texture2d.d.ts +++ b/src/types/gl-texture2d.d.ts @@ -1,5 +1,5 @@ -declare module 'gl-texture2d' { - import ndarray from 'ndarray'; +declare module "gl-texture2d" { + import ndarray from "ndarray"; // There are other overloads for this function, but we only care about this one. 
declare function createTexture(gl: WebGLRenderingContext, data: ndarray): WebGLTexture; diff --git a/src/types/gl-transition.d.ts b/src/types/gl-transition.d.ts index 63138b97..8153244c 100644 --- a/src/types/gl-transition.d.ts +++ b/src/types/gl-transition.d.ts @@ -1,24 +1,23 @@ -declare module 'gl-transition' { +declare module "gl-transition" { type TransitionObjectLike = { - glsl: string, - defaultParams: { [key: string]: mixed }, - paramsTypes: { [key: string]: string }, + glsl: string; + defaultParams: { [key: string]: mixed }; + paramsTypes: { [key: string]: string }; }; - type GLTextureLike = { - bind: (unit: number) => number, - shape: [number, number], + bind: (unit: number) => number; + shape: [number, number]; }; type Options = { - resizeMode?: "cover" | "contain" | "stretch", + resizeMode?: "cover" | "contain" | "stretch"; }; declare function createTransition( gl: WebGLRenderingContext, transition: TransitionObjectLike, - options: Options = {} + options: Options = {}, ): { // renders one frame of the transition (up to you to run the animation loop the way you want) draw: ( @@ -27,10 +26,10 @@ declare module 'gl-transition' { to: GLTextureLike, width: number = gl.drawingBufferWidth, height: number = gl.drawingBufferHeight, - params: { [key: string]: number | number[] | boolean | GLTextureLike } = {} - ) => void, + params: { [key: string]: number | number[] | boolean | GLTextureLike } = {}, + ) => void; // dispose and destroy all objects created by the function call. - dispose: () => void, + dispose: () => void; }; export = { default: createTransition }; diff --git a/src/types/gl-transitions.d.ts b/src/types/gl-transitions.d.ts index e739d2a7..dea80f33 100644 --- a/src/types/gl-transitions.d.ts +++ b/src/types/gl-transitions.d.ts @@ -1,14 +1,14 @@ -declare module 'gl-transitions' { +declare module "gl-transitions" { type GlTransition = { - name: string, - author: string, - license: string, - glsl: string, - defaultParams: { [key: string]: mixed }, - paramsTypes: { [key: string]: string }, - createdAt: string, - updatedAt: string, - } + name: string; + author: string; + license: string; + glsl: string; + defaultParams: { [key: string]: mixed }; + paramsTypes: { [key: string]: string }; + createdAt: string; + updatedAt: string; + }; declare const _default: GlTransition[]; export default _default; diff --git a/src/util.ts b/src/util.ts index 8d7bc4c0..9aa54247 100644 --- a/src/util.ts +++ b/src/util.ts @@ -1,10 +1,10 @@ -import assert from 'assert'; -import { sortBy } from 'lodash-es'; -import { pathExists } from 'fs-extra'; -import * as fabric from 'fabric/node'; -import fileUrl from 'file-url'; -import type { KenBurns, Keyframe, Position, PositionObject } from './types.js'; -import type { TOriginX, TOriginY } from 'fabric'; +import assert from "assert"; +import type { TOriginX, TOriginY } from "fabric"; +import * as fabric from "fabric/node"; +import fileUrl from "file-url"; +import { pathExists } from "fs-extra"; +import { sortBy } from "lodash-es"; +import type { KenBurns, Keyframe, Position, PositionObject } from "./types.js"; export function toArrayInteger(buffer: Buffer) { if (buffer.length > 0) { @@ -20,61 +20,69 @@ export function toArrayInteger(buffer: Buffer) { // x264 requires multiple of 2 export const multipleOf2 = (x: number) => Math.round(x / 2) * 2; -export function getPositionProps({ position, width, height }: { position?: Position | PositionObject, width: number, height: number }) { - let originY: TOriginY = 'center'; - let originX: TOriginX = 'center'; +export 
function getPositionProps({ + position, + width, + height, +}: { + position?: Position | PositionObject; + width: number; + height: number; +}) { + let originY: TOriginY = "center"; + let originX: TOriginX = "center"; let top = height / 2; let left = width / 2; const margin = 0.05; - if (typeof position === 'string') { - if (position === 'top') { - originY = 'top'; + if (typeof position === "string") { + if (position === "top") { + originY = "top"; top = height * margin; - } else if (position === 'bottom') { - originY = 'bottom'; + } else if (position === "bottom") { + originY = "bottom"; top = height * (1 - margin); - } else if (position === 'center') { - originY = 'center'; + } else if (position === "center") { + originY = "center"; top = height / 2; - } else if (position === 'top-left') { - originX = 'left'; - originY = 'top'; + } else if (position === "top-left") { + originX = "left"; + originY = "top"; left = width * margin; top = height * margin; - } else if (position === 'top-right') { - originX = 'right'; - originY = 'top'; + } else if (position === "top-right") { + originX = "right"; + originY = "top"; left = width * (1 - margin); top = height * margin; - } else if (position === 'center-left') { - originX = 'left'; - originY = 'center'; + } else if (position === "center-left") { + originX = "left"; + originY = "center"; left = width * margin; top = height / 2; - } else if (position === 'center-right') { - originX = 'right'; - originY = 'center'; + } else if (position === "center-right") { + originX = "right"; + originY = "center"; left = width * (1 - margin); top = height / 2; - } else if (position === 'bottom-left') { - originX = 'left'; - originY = 'bottom'; + } else if (position === "bottom-left") { + originX = "left"; + originY = "bottom"; left = width * margin; top = height * (1 - margin); - } else if (position === 'bottom-right') { - originX = 'right'; - originY = 'bottom'; + } else if (position === "bottom-right") { + originX = "right"; + originY = "bottom"; left = width * (1 - margin); top = height * (1 - margin); } } else { if (position?.x != null) { - originX = position.originX || 'left'; + originX = position.originX || "left"; left = width * position.x; } if (position?.y != null) { - originY = position.originY || 'top'; + originY = position.originY || "top"; top = height * position.y; } } @@ -83,8 +91,8 @@ export function getPositionProps({ position, width, height }: { position?: Posit } export function getFrameByKeyFrames(keyframes: Keyframe[], progress: number) { - if (keyframes.length < 2) throw new Error('Keyframes must be at least 2'); - const sortedKeyframes = sortBy(keyframes, 't'); + if (keyframes.length < 2) throw new Error("Keyframes must be at least 2"); + const sortedKeyframes = sortBy(keyframes, "t"); // TODO check that max is 1 // TODO check that all keyframes have all props @@ -94,7 +102,7 @@ export function getFrameByKeyFrames(keyframes: Keyframe[], progress: number) { if (i === 0) return false; return k.t === sortedKeyframes[i - 1].t; }); - if (invalidKeyframe) throw new Error('Invalid keyframe'); + if (invalidKeyframe) throw new Error("Invalid keyframe"); let prevKeyframe = [...sortedKeyframes].reverse().find((k) => k.t < progress); if (!prevKeyframe) prevKeyframe = sortedKeyframes[0]; @@ -105,40 +113,62 @@ export function getFrameByKeyFrames(keyframes: Keyframe[], progress: number) { if (nextKeyframe.t === prevKeyframe.t) return prevKeyframe.props; const interProgress = (progress - prevKeyframe.t) / (nextKeyframe.t - prevKeyframe.t); - return 
Object.fromEntries(Object.entries(prevKeyframe.props).map(([propName, prevVal]) => ([propName, prevVal + ((nextKeyframe.props[propName] - prevVal) * interProgress)]))); + return Object.fromEntries( + Object.entries(prevKeyframe.props).map(([propName, prevVal]) => [ + propName, + prevVal + (nextKeyframe.props[propName] - prevVal) * interProgress, + ]), + ); } export const isUrl = (path: string) => /^https?:\/\//.test(path); export const assertFileValid = async (path: string, allowRemoteRequests?: boolean) => { if (isUrl(path)) { - assert(allowRemoteRequests, 'Remote requests are not allowed'); + assert(allowRemoteRequests, "Remote requests are not allowed"); return; } assert(await pathExists(path), `File does not exist ${path}`); }; -export const loadImage = (pathOrUrl: string) => fabric.util.loadImage(isUrl(pathOrUrl) ? pathOrUrl : fileUrl(pathOrUrl)); export const defaultFontFamily = 'sans-serif'; +export const loadImage = (pathOrUrl: string) => + fabric.util.loadImage(isUrl(pathOrUrl) ? pathOrUrl : fileUrl(pathOrUrl)); +export const defaultFontFamily = "sans-serif"; -export function getZoomParams({ progress, zoomDirection, zoomAmount = 0.1 }: KenBurns & { progress: number; }) { +export function getZoomParams({ + progress, + zoomDirection, + zoomAmount = 0.1, +}: KenBurns & { progress: number }) { let scaleFactor = 1; - if (zoomDirection === 'left' || zoomDirection === 'right') return 1.3 + zoomAmount; - if (zoomDirection === 'in') scaleFactor = (1 + zoomAmount * progress); - else if (zoomDirection === 'out') scaleFactor = (1 + zoomAmount * (1 - progress)); + if (zoomDirection === "left" || zoomDirection === "right") return 1.3 + zoomAmount; + if (zoomDirection === "in") scaleFactor = 1 + zoomAmount * progress; + else if (zoomDirection === "out") scaleFactor = 1 + zoomAmount * (1 - progress); return scaleFactor; } -export function getTranslationParams({ progress, zoomDirection, zoomAmount = 0.1 }: KenBurns & { progress: number; }) { +export function getTranslationParams({ + progress, + zoomDirection, + zoomAmount = 0.1, +}: KenBurns & { progress: number }) { let translation = 0; const range = zoomAmount * 1000; - if (zoomDirection === 'right') translation = (progress) * range - range / 2; - else if (zoomDirection === 'left') translation = -((progress) * range - range / 2); + if (zoomDirection === "right") translation = progress * range - range / 2; + else if (zoomDirection === "left") translation = -(progress * range - range / 2); return translation; } export function getRekt(width: number, height: number) { // width and height with room to rotate - return new fabric.Rect({ originX: 'center', originY: 'center', left: width / 2, top: height / 2, width: width * 2, height: height * 2 }); + return new fabric.Rect({ + originX: "center", + originY: "center", + left: width / 2, + top: height / 2, + width: width * 2, + height: height * 2, + }); } diff --git a/test/configuration.test.ts b/test/configuration.test.ts index a73a1e25..b0980579 100644 --- a/test/configuration.test.ts +++ b/test/configuration.test.ts @@ -1,59 +1,92 @@ -import { Configuration } from '../src/configuration.js'; -import { describe, test, expect } from 'vitest'; +import { describe, expect, test } from "vitest"; +import { Configuration } from "../src/configuration.js"; // eslint-disable-next-line @typescript-eslint/no-explicit-any type BadData = any; -describe('Configuration', () => { - const input = { outPath: 'test.mp4', clips: [{ layers: [{ type: "title", text: "Hello World" }] }] }; +describe("Configuration", () => { + 
const input = { + outPath: "test.mp4", + clips: [{ layers: [{ type: "title", text: "Hello World" }] }], + }; - test('requires outPath', () => { - expect(() => new Configuration({ ...input, outPath: undefined } as BadData)).toThrow('Please provide an output path') + test("requires outPath", () => { + expect(() => new Configuration({ ...input, outPath: undefined } as BadData)).toThrow( + "Please provide an output path", + ); }); - test('requires clip with at least one layer', () => { - expect(() => new Configuration({ ...input, clips: undefined } as BadData)).toThrow('Please provide at least 1 clip'); - expect(() => new Configuration({ ...input, clips: [] })).toThrow('Please provide at least 1 clip'); - expect(() => new Configuration({ ...input, clips: [{}] } as BadData)).toThrow(/clip.layers must be an array/); + test("requires clip with at least one layer", () => { + expect(() => new Configuration({ ...input, clips: undefined } as BadData)).toThrow( + "Please provide at least 1 clip", + ); + expect(() => new Configuration({ ...input, clips: [] })).toThrow( + "Please provide at least 1 clip", + ); + expect(() => new Configuration({ ...input, clips: [{}] } as BadData)).toThrow( + /clip.layers must be an array/, + ); }); - test('layers must have a type', () => { - expect(() => new Configuration({ ...input, clips: [{ layers: { title: "Nope" } }] } as BadData)).toThrow('All "layers" must have a type'); + test("layers must have a type", () => { + expect( + () => new Configuration({ ...input, clips: [{ layers: { title: "Nope" } }] } as BadData), + ).toThrow('All "layers" must have a type'); }); - test('allows single layer for backward compatibility', () => { - const config = new Configuration({ ...input, clips: [{ layers: input.clips[0].layers[0] }] } as BadData) + test("allows single layer for backward compatibility", () => { + const config = new Configuration({ + ...input, + clips: [{ layers: input.clips[0].layers[0] }], + } as BadData); expect(config.clips[0].layers.length).toBe(1); - }) + }); - test('customOutputArgs must be an array', () => { - expect(() => new Configuration({ ...input, customOutputArgs: 'test' } as BadData)).toThrow('customOutputArgs must be an array of arguments') - expect(new Configuration({ ...input, customOutputArgs: ['test'] } as BadData).customOutputArgs).toEqual(['test']) + test("customOutputArgs must be an array", () => { + expect(() => new Configuration({ ...input, customOutputArgs: "test" } as BadData)).toThrow( + "customOutputArgs must be an array of arguments", + ); + expect( + new Configuration({ ...input, customOutputArgs: ["test"] } as BadData).customOutputArgs, + ).toEqual(["test"]); }); describe("defaults", () => { - test('merges defaults on layers', () => { + test("merges defaults on layers", () => { const config = new Configuration({ ...input, clips: [ { layers: [{ type: "title", text: "Clip with duration" }], duration: 3 }, - { layers: [{ type: "title", text: "Clip with transition" }], transition: { duration: 1, name: 'random' } } + { + layers: [{ type: "title", text: "Clip with transition" }], + transition: { duration: 1, name: "random" }, + }, ], defaults: { duration: 5, transition: { duration: 0.5, - name: 'fade', - audioOutCurve: 'qsin', + name: "fade", + audioOutCurve: "qsin", }, - } + }, }); - expect(config.clips[0].duration).toBe(3) - expect(config.clips[0].transition!).toEqual({ duration: 0.5, name: 'fade', audioOutCurve: 'qsin', audioInCurve: 'tri' }) + expect(config.clips[0].duration).toBe(3); + expect(config.clips[0].transition!).toEqual({ + 
duration: 0.5, + name: "fade", + audioOutCurve: "qsin", + audioInCurve: "tri", + }); expect(config.clips[1].duration).toBe(5); - expect(config.clips[1].transition).toEqual({ duration: 1, name: 'random', audioOutCurve: 'qsin', audioInCurve: 'tri' }); + expect(config.clips[1].transition).toEqual({ + duration: 1, + name: "random", + audioOutCurve: "qsin", + audioInCurve: "tri", + }); }); - }) + }); }); diff --git a/test/integration.test.ts b/test/integration.test.ts index c4f9fac2..f6d0f1f1 100644 --- a/test/integration.test.ts +++ b/test/integration.test.ts @@ -1,19 +1,23 @@ -import { execa } from 'execa'; -import { test, expect } from 'vitest' -import { readDuration } from '../src/ffmpeg.js'; +import { execa } from "execa"; +import { expect, test } from "vitest"; +import { readDuration } from "../src/ffmpeg.js"; -test("works", async () => { - await execa('npx', [ - 'tsx', - 'src/cli.ts', - '--allow-remote-requests', - "title:'My video'", - 'https://raw.githubusercontent.com/mifi/editly-assets/main/overlay.svg', - "title:'THE END'", - '--fast', - '--audio-file-path', - 'https://github.com/mifi/editly-assets/raw/main/winxp.mp3', - ]); +test( + "works", + async () => { + await execa("npx", [ + "tsx", + "src/cli.ts", + "--allow-remote-requests", + "title:'My video'", + "https://raw.githubusercontent.com/mifi/editly-assets/main/overlay.svg", + "title:'THE END'", + "--fast", + "--audio-file-path", + "https://github.com/mifi/editly-assets/raw/main/winxp.mp3", + ]); - expect(await readDuration('editly-out.mp4')).toBe(11); -}, 60 * 1000); + expect(await readDuration("editly-out.mp4")).toBe(11); + }, + 60 * 1000, +);
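
Note (illustrative only, not part of the patch above): the reformatted `slide-in-text` and `src/util.ts` hunks show `getFrameByKeyFrames` driving the slide/fade animation by linearly interpolating numeric props between sorted keyframes. A minimal sketch of that behaviour, assuming the helper is imported from `src/util.js` in this repo layout and run with `tsx` as in the integration test:

```ts
// Illustrative sketch only — not part of this diff.
// Assumes the repo layout above; run with e.g. `npx tsx sketch.ts`.
import { getFrameByKeyFrames } from "./src/util.js";

// Same keyframes as the slide-in-text source: `t` is clip progress (0..1),
// numeric props are linearly interpolated between the surrounding keyframes.
const keyframes = [
  { t: 0.1, props: { opacity: 1, textSlide: 0 } },
  { t: 0.3, props: { opacity: 1, textSlide: 1 } },
  { t: 0.8, props: { opacity: 1, textSlide: 1 } },
  { t: 0.9, props: { opacity: 0, textSlide: 1 } },
];

// Progress 0.2 sits halfway between t=0.1 and t=0.3,
// so textSlide interpolates to 0.5 while opacity stays 1.
console.log(getFrameByKeyFrames(keyframes, 0.2)); // { opacity: 1, textSlide: 0.5 }
```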