diff --git a/cli.js b/cli.js
old mode 100644
new mode 100755
diff --git a/index.d.ts b/index.d.ts
index 6895c815..d24cd25a 100644
--- a/index.d.ts
+++ b/index.d.ts
@@ -321,6 +321,12 @@ declare namespace Editly {
      */
     cutFrom?: number;
 
+    /**
+     * Loop the video so that it fills the clip's duration.
+     * Defaults to `false`.
+     */
+    loop?: boolean;
+
     /**
      * Time value to cut to (in seconds).
      * Defaults to *end of video*.
diff --git a/parseConfig.js b/parseConfig.js
index cbdf5811..9ec316fa 100644
--- a/parseConfig.js
+++ b/parseConfig.js
@@ -47,6 +47,13 @@ export default async function parseConfig({ defaults: defaultsIn = {}, clips, ar
     },
   };
 
+  // Register a default font up front when the layer defaults provide both a path and a family,
+  // so text layers can reference it without per-layer registration.
+  const { layer } = defaultsIn;
+  if (layer !== undefined && layer.fontPath !== undefined && layer.fontFamily !== undefined) {
+    registerFont(layer.fontPath, { family: layer.fontFamily, weight: 'regular', style: 'normal' });
+    loadedFonts.push(layer.fontFamily);
+  }
+
   async function handleLayer(layer) {
     const { type, ...restLayer } = layer;
 
@@ -205,12 +212,15 @@ export default async function parseConfig({ defaults: defaultsIn = {}, clips, ar
     }
 
     if (type === 'video') {
-      const { inputDuration } = layer;
+      const { inputDuration, loop } = layer;
 
       let speedFactor;
 
       // If user explicitly specified duration for clip, it means that should be the output duration of the video
-      if (userClipDuration) {
+      if (loop) {
+        // A looped clip plays at natural speed; ffmpeg repeats the source to fill the clip duration
+        speedFactor = 1;
+      } else if (userClipDuration) {
         // Later we will speed up or slow down video using this factor
         speedFactor = userClipDuration / inputDuration;
       } else {
diff --git a/sources/videoFrameSource.js b/sources/videoFrameSource.js
index d3a55e41..4f45263a 100644
--- a/sources/videoFrameSource.js
+++ b/sources/videoFrameSource.js
@@ -9,8 +9,8 @@ import {
   blurImage,
 } from './fabric.js';
 
-export default async ({ width: canvasWidth, height: canvasHeight, channels, framerateStr, verbose, logTimes, ffmpegPath, ffprobePath, enableFfmpegLog, params }) => {
-  const { path, cutFrom, cutTo, resizeMode = 'contain-blur', speedFactor, inputWidth, inputHeight, width: requestedWidthRel, height: requestedHeightRel, left: leftRel = 0, top: topRel = 0, originX = 'left', originY = 'top', fabricImagePostProcessing = null } = params;
+export default async ({ width: canvasWidth, height: canvasHeight, channels, framerateStr, duration, verbose, logTimes, ffmpegPath, ffprobePath, enableFfmpegLog, params }) => {
+  const { path, cutFrom, cutTo, resizeMode = 'contain-blur', speedFactor, inputWidth, inputHeight, width: requestedWidthRel, height: requestedHeightRel, left: leftRel = 0, top: topRel = 0, originX = 'left', originY = 'top', fabricImagePostProcessing = null, loop = false } = params;
 
   const requestedWidth = requestedWidthRel ? Math.round(requestedWidthRel * canvasWidth) : canvasWidth;
   const requestedHeight = requestedHeightRel ? Math.round(requestedHeightRel * canvasHeight) : canvasHeight;
@@ -83,12 +83,15 @@ export default async ({ width: canvasWidth, height: canvasHeight, channels, fram
   // http://zulko.github.io/blog/2013/09/27/read-and-write-video-frames-in-python-using-ffmpeg/
   // Testing: ffmpeg -i 'vid.mov' -t 1 -vcodec rawvideo -pix_fmt rgba -f image2pipe - | ffmpeg -f rawvideo -vcodec rawvideo -pix_fmt rgba -s 2166x1650 -i - -vf format=yuv420p -vcodec libx264 -y out.mp4
   // https://trac.ffmpeg.org/wiki/ChangingFrameRate
+
+  // TODO(review): consider `-vf loop=` instead of -stream_loop if frame-accurate looping of only the cut range is needed
   const args = [
     ...getFfmpegCommonArgs({ enableFfmpegLog }),
     ...(inputCodec ? ['-vcodec', inputCodec] : []),
     ...(cutFrom ? ['-ss', cutFrom] : []),
+    ...(loop ? ['-stream_loop', '-1'] : []),
     '-i', path,
-    ...(cutTo ? ['-t', (cutTo - cutFrom) * speedFactor] : []),
+    ...(loop ? ['-t', duration] : cutTo ? ['-t', (cutTo - cutFrom) * speedFactor] : []),
     '-vf', `${ptsFilter}fps=${framerateStr},${scaleFilter}`,
     '-map', 'v:0',
     '-vcodec', 'rawvideo',
@@ -121,6 +124,7 @@ export default async ({ width: canvasWidth, height: canvasHeight, channels, fram
     length -= frameByteSize;
     return frameBuffer;
   }
+  // No complete frame buffered: null signals the caller to wait or treat the stream as ended
 
   return null;
 }