parseConfig.js
import pMap from 'p-map';
import { basename, join } from 'path';
import flatMap from 'lodash-es/flatMap.js';
import assert from 'assert';
import { fileURLToPath } from 'url';
import {
  readVideoFileInfo,
  readAudioFileInfo,
  assertFileValid,
  checkTransition,
} from './util.js';
import { registerFont } from './sources/fabric.js';
import { calcTransition } from './transitions.js';
const dirname = fileURLToPath(new URL('.', import.meta.url));
// Cache of font families already registered with fabric, so each font file is only registered once
const loadedFonts = [];
async function validateArbitraryAudio(audio, allowRemoteRequests) {
  assert(audio === undefined || Array.isArray(audio));

  if (audio) {
    // eslint-disable-next-line no-restricted-syntax
    for (const { path, cutFrom, cutTo, start } of audio) {
      await assertFileValid(path, allowRemoteRequests);

      if (cutFrom != null && cutTo != null) assert(cutTo > cutFrom);
      if (cutFrom != null) assert(cutFrom >= 0);
      if (cutTo != null) assert(cutTo >= 0);
      assert(start == null || start >= 0, `Invalid "start" ${start}`);
    }
  }
}
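
// Illustrative example (hypothetical values) of an entry that passes the checks above:
//   { path: 'media/music.mp3', cutFrom: 0, cutTo: 10, start: 2 }
// i.e. take seconds 0-10 of music.mp3 and mix it in starting 2s into the output.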
export default async function parseConfig({
  defaults: defaultsIn = {},
  clips,
  arbitraryAudio: arbitraryAudioIn,
  backgroundAudioPath,
  backgroundAudioVolume,
  loopAudio,
  allowRemoteRequests,
  ffprobePath,
}) {
  const defaults = {
    duration: 4,
    ...defaultsIn,
    transition: defaultsIn.transition === null ? null : {
      duration: 0.5,
      name: 'random',
      audioOutCurve: 'tri',
      audioInCurve: 'tri',
      ...defaultsIn.transition,
    },
  };
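
  // For example (hypothetical input): defaults: { transition: { name: 'fade' } } resolves to a
  // 0.5s 'fade' with 'tri' audio curves, while defaults: { transition: null } disables
  // transitions altogether.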
  async function handleLayer(layer) {
    const { type, ...restLayer } = layer;

    // https://github.com/mifi/editly/issues/39
    if (['image', 'image-overlay'].includes(type)) {
      await assertFileValid(restLayer.path, allowRemoteRequests);
    } else if (type === 'gl') {
      await assertFileValid(restLayer.fragmentPath, allowRemoteRequests);
    }

    if (['fabric', 'canvas'].includes(type)) assert(typeof layer.func === 'function', '"func" must be a function');

    if (['image', 'image-overlay', 'fabric', 'canvas', 'gl', 'radial-gradient', 'linear-gradient', 'fill-color'].includes(type)) return layer;

    // TODO if random-background radial-gradient linear etc
    if (type === 'pause') return handleLayer({ ...restLayer, type: 'fill-color' });

    if (type === 'rainbow-colors') return handleLayer({ type: 'gl', fragmentPath: join(dirname, 'shaders/rainbow-colors.frag') });

    if (type === 'editly-banner') {
      const { fontPath } = layer;
      return [
        await handleLayer({ type: 'linear-gradient' }),
        await handleLayer({ fontPath, type: 'title', text: 'Made with\nEDITLY\nmifi.no' }),
      ];
    }

    // For convenience
    if (type === 'title-background') {
      const { text, textColor, background, fontFamily, fontPath } = layer;
      const outLayers = [];
      if (background) {
        if (background.type === 'radial-gradient') outLayers.push(await handleLayer({ type: 'radial-gradient', colors: background.colors }));
        else if (background.type === 'linear-gradient') outLayers.push(await handleLayer({ type: 'linear-gradient', colors: background.colors }));
        else if (background.color) outLayers.push(await handleLayer({ type: 'fill-color', color: background.color }));
      } else {
        const backgroundTypes = ['radial-gradient', 'linear-gradient', 'fill-color'];
        const randomType = backgroundTypes[Math.floor(Math.random() * backgroundTypes.length)];
        outLayers.push(await handleLayer({ type: randomType }));
      }
      outLayers.push(await handleLayer({ type: 'title', fontFamily, fontPath, text, textColor }));
      return outLayers;
    }

    if (['title', 'subtitle', 'news-title', 'slide-in-text'].includes(type)) {
      assert(layer.text, 'Please specify a text');

      let { fontFamily } = layer;
      const { fontPath, ...rest } = layer;
      if (fontPath) {
        // Use a base64 encoding of the font file name as the family name, so each font file gets a unique family
        fontFamily = Buffer.from(basename(fontPath)).toString('base64');
        if (!loadedFonts.includes(fontFamily)) {
          registerFont(fontPath, { family: fontFamily, weight: 'regular', style: 'normal' });
          loadedFonts.push(fontFamily);
        }
      }
      return { ...rest, fontFamily };
    }

    throw new Error(`Invalid layer type ${type}`);
  }
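
  // Note: handleLayer may return either a single layer or an array of layers
  // (e.g. 'editly-banner' and 'title-background' expand into multiple layers);
  // the flatMap below flattens these into the clip's layer list.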
  const detachedAudioByClip = {};

  let clipsOut = await pMap(clips, async (clip, clipIndex) => {
    assert(typeof clip === 'object', '"clips" must contain objects with one or more layers');
    const { transition: userTransition, duration: userClipDuration, layers: layersIn } = clip;

    // Validation
    let layers = layersIn;
    if (!Array.isArray(layers)) layers = [layers]; // Allow single layer for convenience
    assert(layers.every((layer) => layer != null && typeof layer === 'object'), '"clip.layers" must contain one or more objects');
    assert(layers.every((layer) => layer.type != null), 'All "layers" must have a type');

    checkTransition(userTransition);

    const videoLayers = layers.filter((layer) => layer.type === 'video');

    const userClipDurationOrDefault = userClipDuration || defaults.duration;
    if (videoLayers.length === 0) assert(userClipDurationOrDefault, `Duration parameter is required for videoless clip ${clipIndex}`);

    const transition = calcTransition(defaults, userTransition, clipIndex === clips.length - 1);

    let layersOut = flatMap(await pMap(layers, async (layerIn) => {
      const globalLayerDefaults = defaults.layer || {};
      const thisLayerDefaults = (defaults.layerType || {})[layerIn.type];

      const layer = { ...globalLayerDefaults, ...thisLayerDefaults, ...layerIn };

      const { type, path } = layer;

      if (type === 'video') {
        const { duration: fileDuration, width: widthIn, height: heightIn, framerateStr, rotation } = await readVideoFileInfo(ffprobePath, path);
        let { cutFrom, cutTo } = layer;
        if (!cutFrom) cutFrom = 0;
        cutFrom = Math.max(cutFrom, 0);
        cutFrom = Math.min(cutFrom, fileDuration);
        if (!cutTo) cutTo = fileDuration;
        cutTo = Math.max(cutTo, cutFrom);
        cutTo = Math.min(cutTo, fileDuration);
        assert(cutFrom < cutTo, 'cutFrom must be lower than cutTo');

        const inputDuration = cutTo - cutFrom;

        // Swap width/height for videos recorded with a 90 or 270 degree rotation
        const isRotated = [-90, 90, 270, -270].includes(rotation);
        const inputWidth = isRotated ? heightIn : widthIn;
        const inputHeight = isRotated ? widthIn : heightIn;

        return { ...layer, cutFrom, cutTo, inputDuration, framerateStr, inputWidth, inputHeight };
      }

      // Audio is handled later
      if (['audio', 'detached-audio'].includes(type)) return layer;

      return handleLayer(layer);
    }, { concurrency: 1 }));
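
    // Illustrative example of the normalization above (hypothetical values): a video layer with
    // { cutFrom: -1, cutTo: 99 } on a 10s file is clamped to cutFrom 0, cutTo 10, so inputDuration is 10.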
    let clipDuration = userClipDurationOrDefault;

    const firstVideoLayer = layersOut.find((layer) => layer.type === 'video');
    if (firstVideoLayer && !userClipDuration) clipDuration = firstVideoLayer.inputDuration;
    assert(clipDuration);

    // We need to map again, because for audio we need to know the correct clipDuration
    layersOut = await pMap(layersOut, async (layerIn) => {
      const { type, path, stop, start = 0 } = layerIn;

      // This feature allows the user to show another layer overlaid on (or replacing) parts of the lower layers (start - stop)
      const layerDuration = ((stop || clipDuration) - start);
      assert(layerDuration > 0 && layerDuration <= clipDuration, `Invalid start ${start} or stop ${stop} (${clipDuration})`);
      // TODO Also need to handle video layers (speedFactor etc)
      // TODO handle audio in case of start/stop

      const layer = { ...layerIn, start, layerDuration };

      if (type === 'audio') {
        const { duration: fileDuration } = await readAudioFileInfo(ffprobePath, path);
        let { cutFrom, cutTo } = layer;

        // console.log({ cutFrom, cutTo, fileDuration, clipDuration });

        if (!cutFrom) cutFrom = 0;
        cutFrom = Math.max(cutFrom, 0);
        cutFrom = Math.min(cutFrom, fileDuration);

        if (!cutTo) cutTo = cutFrom + clipDuration;
        cutTo = Math.max(cutTo, cutFrom);
        cutTo = Math.min(cutTo, fileDuration);
        assert(cutFrom < cutTo, 'cutFrom must be lower than cutTo');

        const inputDuration = cutTo - cutFrom;
        const speedFactor = clipDuration / inputDuration;

        return { ...layer, cutFrom, cutTo, speedFactor };
      }

      if (type === 'video') {
        const { inputDuration } = layer;

        let speedFactor;

        // If the user explicitly specified a duration for the clip, that should be the output duration of the video
        if (userClipDuration) {
          // Later we will speed up or slow down the video using this factor
          speedFactor = userClipDuration / inputDuration;
        } else {
          speedFactor = 1;
        }

        return { ...layer, speedFactor };
      }

      // These audio tracks are detached from the clips (they can run over multiple clips)
      // This is useful so audio can start relative to its parent clip's start time
      if (type === 'detached-audio') {
        const { cutFrom, cutTo, mixVolume } = layer;

        if (!detachedAudioByClip[clipIndex]) detachedAudioByClip[clipIndex] = [];
        detachedAudioByClip[clipIndex].push({ path, cutFrom, cutTo, mixVolume, start });

        return undefined; // Will be filtered out
      }

      return layer;
    });

    // Filter out deleted layers
    layersOut = layersOut.filter((l) => l);

    return {
      transition,
      duration: clipDuration,
      layers: layersOut,
    };
  }, { concurrency: 1 });
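
  // Worked example of speedFactor (hypothetical values): a clip with duration: 5 containing a
  // video layer whose inputDuration is 10 gets speedFactor 5 / 10 = 0.5; per the comment above,
  // later stages use this factor to speed the video up so it fits the clip.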
  let totalClipDuration = 0;

  const clipDetachedAudio = [];

  // Need to map again, because now that we know all clip durations, we can clamp transitions to safe values
  clipsOut = await pMap(clipsOut, async (clip, i) => {
    const nextClip = clipsOut[i + 1];

    // Clamp every transition to at most half the duration of the clips on either side of it;
    // otherwise transitions could swallow clips that are too short, causing audio/video de-sync issues
    // NOTE: similar logic is duplicated in index.js
    let safeTransitionDuration = 0;
    if (nextClip) {
      // Each clip can have two transitions, so make sure we leave enough room:
      safeTransitionDuration = Math.min(clip.duration / 2, nextClip.duration / 2, clip.transition.duration);
    }

    // We now know all clip durations, so we can calculate the offset for detached audio tracks
    // eslint-disable-next-line no-restricted-syntax
    for (const { start, ...rest } of (detachedAudioByClip[i] || [])) {
      clipDetachedAudio.push({ ...rest, start: totalClipDuration + (start || 0) });
    }

    totalClipDuration += clip.duration - safeTransitionDuration;

    return {
      ...clip,
      transition: {
        ...clip.transition,
        duration: safeTransitionDuration,
      },
    };
  });
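
  // Worked example of the clamping (hypothetical values): clips of 0.6s and 10s with a 0.5s
  // transition give safeTransitionDuration = min(0.3, 5, 0.5) = 0.3s, so at most half of the
  // short clip is spent transitioning.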
  // Audio can come from `backgroundAudioPath`, the arbitrary audio entries passed in, or "detached" audio layers from clips
  const arbitraryAudio = [
    // Background audio is treated just like arbitrary audio
    ...(backgroundAudioPath ? [{ path: backgroundAudioPath, mixVolume: backgroundAudioVolume != null ? backgroundAudioVolume : 1, loop: loopAudio ? -1 : 0 }] : []),
    ...arbitraryAudioIn,
    ...clipDetachedAudio,
  ];

  await validateArbitraryAudio(arbitraryAudio, allowRemoteRequests);

  return {
    clips: clipsOut,
    arbitraryAudio,
  };
}