diff --git a/.gitignore b/.gitignore index cf0b25e..e3345d2 100644 --- a/.gitignore +++ b/.gitignore @@ -11,3 +11,6 @@ node_modules *.mp4 *.mkv video-sync +batch.sh +*.csv +.vscode/ diff --git a/README.md b/README.md index dd488a6..0aedb23 100644 --- a/README.md +++ b/README.md @@ -1,28 +1,68 @@ # video-sync ---- - - -- [video-sync](#video-sync) - - [Usage](#usage) - - [Arguments](#arguments) - +A tool for automating the process of muxing additional audio tracks into videos ## Usage - + +```sh-session +video-sync [DESTINATION] [SOURCE] +``` + +## Description + +This tool requires the two input videos, the one where you want to add the additional tracks *to* (the destination video) and the one where you take the additional tracks *from* (the source video). +It then tries to find the exact same frame in both videos, in order to synchronize them (in case one of them is longer or shorter than the other). +It allows you to pick the audio and subtitle tracks you want to add to the destination and specify the output file. + +There's an interactive mode (simply don't pass any arguments, flags work) and a CLI mode (pass the two arguments listed at the top). + +## Examples ```sh-session $ video-sync # interactive mode ... -$ video-sync video1 offset1 video2 offset2 -o output # CLI mode +$ video-sync video1 video2 -o output # CLI mode +... +$ video-sync -a 0,en -s 2,ger # sync the audio track with mkvmerge ID `0` and all additional english audio tracks, and also the subtitle track with ID `2` and all additional german subtitle tracks +... +$ video-sync -e 300 -f # don't sync the videos, instead use the offset estimate (source `300` ms ahead of destination) as the final/forced offset ... $ video-sync -h # help page ... ``` - - ## Arguments - - +- `DESTINATION` video where tracks should be added to +- `SOURCE` video where the tracks are copied from + +## Options + +- `-o, --output=` output file path + +- `-a, --audioTracks=` audio tracks to sync over to the destination video. 
comma-separated list of mkvmerge IDs or ISO 639-2 language tags (tracks matching that language will be synced). if omitted, all audio tracks will be synced. + +- `-s, --subsTracks=` subtitle tracks to sync over to the destination video. comma-separated list of mkvmerge IDs or ISO 639-2 language tags (tracks matching that language will be synced). if omitted, all subtitle tracks will be synced + +- `-e, --offsetEstimate=` estimated offset between the two videos (in ms) for video syncing. positive values mean that the source video is ahead of the destination video + +- `-f, --forceOffset` use the estimated offset as the final offset, no syncing + +- `-x, --exclusiveDirection=` only search the matching frame offset in one direction. 'ahead' means that the source video scene comes *before* the destination video scene. (requires algorithm=matching-scene) + +- `-g, --algorithm=` [default: matching-scene] search algorithm to use for video syncing + +- `-m, --maxOffset=` [default: 120] maximum considered offset between the videos (in seconds) for video syncing. + +- `-r, --searchResolution=` [default: 80] resolution of the search region (in frames) for video syncing. increases accuracy at the cost of longer runtime (requires algorithm=simple) +- `-i, --iterations=` [default: 2] number of iterations to perform for video syncing (requires algorithm=simple) +- `-t, --threshold=` [default: 0.6] minimum confidence threshold for video syncing. (requires algorithm=simple) +- `-w, --searchWidth=` [default: 20] width of the search region (in seconds) for video syncing. 
the program will find the closest matching frame in this region, 'sourceOffset' being the center (requires algorithm=simple) + +- `-y, --confirm` automatically confirm missing tracks, low confidence scores and overwrite prompts + +- `-v, --verbose` output additional logs + +- `-h, --help` show CLI help + +- `--version` show CLI version diff --git a/package.json b/package.json index 3fba8b8..10a515e 100644 --- a/package.json +++ b/package.json @@ -41,7 +41,12 @@ "./src/**/*.js", "./util/**/*.js" ], - "outputPath": "dist" + "outputPath": "dist", + "targets": [ + "linux-x64", + "win-x64", + "macos-x64" + ] }, "homepage": "https://github.com/Chaphasilor/video-sync", "keywords": [ diff --git a/src/index.js b/src/index.js index 8ca5a97..6844d64 100644 --- a/src/index.js +++ b/src/index.js @@ -13,8 +13,10 @@ const ora = require('ora'); const ms = require(`ms`) const { ALGORITHMS, calcOffset } = require(`../util/calc-offset`) +const { calculateOffset } = require(`../util/find-offset-new`) const merge = require(`../util/merge-tracks`) const tracks = require(`../util/tracks`) +const { validateOffset } = require('../util/warping') class VideoSyncCommand extends Command { async run() { @@ -26,20 +28,7 @@ class VideoSyncCommand extends Command { // console.warn(`args:`, args) // console.warn(`flags:`, flags) - let algorithm - switch (flags.algorithm) { - case `ssim`: - algorithm = ALGORITHMS.SSIM - break; - case `matching-pixels`: - algorithm = ALGORITHMS.MISMATCHED_PIXELS - break; - default: - algorithm = ALGORITHMS.SSIM - break; - } - - let prompt = Object.values(args).filter(x => x !== undefined).length !== 4 + let prompt = Object.values(args).filter(x => x !== undefined).length < 2 let answers if (prompt) { @@ -56,25 +45,6 @@ class VideoSyncCommand extends Command { return true; }, }, - { - type: `input`, - message: `Enter the offset (in ms or with units) for the destination file (where to start looking for matching frames while synching)`, - name: `destinationOffset`, - 
validate: (formattedInput) => { - if (formattedInput === undefined) { - return `Didn't recognize that time string! Valid units: ms, s, m, h.` - } else if (formattedInput < 0) { - return `Only positive offsets are supported. '0' is the beginning of the video.` - } else { - return true - } - }, - filter: (input) => { - let matches = input.match(/(\-\s*)\d/g)?.map(x => x.slice(0, -1)) || [] - input = matches.reduce((sum, cur) => sum.replace(cur, `-`), input) - return input.split(` `).reduce((sum, cur) => sum + ms(cur), 0) - }, - }, { type: `input`, message: `Enter the source file (that contains the new tracks to be synced over)`, @@ -86,25 +56,6 @@ class VideoSyncCommand extends Command { return true; }, }, - { - type: `input`, - message: `Enter the offset (in ms or with units) for the source file (where to start looking for matching frames while syncing)`, - name: `sourceOffset`, - validate: (formattedInput) => { - if (formattedInput === undefined || isNaN(formattedInput)) { - return `Didn't recognize that time string! Valid units: ms, s, m, h.` - } else if (formattedInput < 0) { - return `Only positive offsets are supported. '0' is the beginning of the video.` - } else { - return true - } - }, - filter: (input) => { - let matches = input.match(/(\-\s*)\d/g)?.map(x => x.slice(0, -1)) || [] - input = matches.reduce((sum, cur) => sum.replace(cur, `-`), input) - return input.split(` `).reduce((sum, cur) => sum + ms(cur), 0) - }, - }, { type: `input`, message: `Specify the output file (where the synced and muxed video gets written to)`, @@ -257,21 +208,67 @@ class VideoSyncCommand extends Command { subs: selectedTracks.subs ?? 
[], } - const { videoOffset, confidence } = await calcOffset(answers.destination, answers.source, answers.destinationOffset, answers.sourceOffset, { - algorithm, - iterations: flags.iterations, - searchWidth: flags.searchWidth, - searchResolution: flags.searchResolution, - }) - // let videoOffset = 0, confidence = 1 + let videoOffset + let confidence + if (flags.forceOffset) { + videoOffset = flags.offsetEstimate + confidence = 1 + } else { + let result + + if (flags.algorithm === `simple`) { + result = await calcOffset(answers.destination, answers.source, { + comparisonAlgorithm: ALGORITHMS.SSIM, + iterations: flags.iterations, + searchWidth: flags.searchWidth, + searchResolution: flags.searchResolution, + maxOffset: flags.maxOffset, + offsetEstimate: flags.offsetEstimate, + threshold: flags.threshold, + }) + } else { + result = await calculateOffset(answers.destination, answers.source, { + maxOffset: flags.maxOffset * 1000, + offsetEstimate: flags.offsetEstimate, + }) + } + + videoOffset = result.videoOffset + confidence = result.confidence + } + // check if one of the videos is warped + let videoWarped = false + const offsetValidationSpinner = ora(`Checking if found offset applies to the whole video...`).start(); + try { + videoWarped = ! 
await validateOffset(args.destination, args.source, videoOffset) + } catch (err) { + console.error(`Error while checking if found offset applies to the whole video:`, err) + } + + // log warning about warped video + if (videoWarped && flags.confirm) { + offsetValidationSpinner.warn(`Syncing the tracks might not work well because one of the videos appears to be warped.`) + } else if (!videoWarped) { + offsetValidationSpinner.succeed(`Offset is valid.`) + } else { + offsetValidationSpinner.stop() + } + let continueWithMerging = answers.output !== undefined && (selectedTracks.audio.length > 0 || selectedTracks.subs.length > 0) - if (!flags.confirm && flags.algorithm === `ssim` && confidence < 0.5) { + if (continueWithMerging && (!flags.confirm && flags.algorithm === `ssim` && confidence < 0.6)) { continueWithMerging = (await inquirer.prompt([{ type: `confirm`, name: `continue`, - message: `Syncing confidence is very low (${confidence}). Do you want to continue?`, + message: `Syncing confidence is very low (${confidence}). Do you want to continue anyway?`, + }])).continue + } else if (continueWithMerging && videoWarped && !flags.confirm) { + continueWithMerging = (await inquirer.prompt([{ + type: `confirm`, + name: `continue`, + message: `It seems like one of the videos might be warped (slightly sped up or slowed down). This might make synchronization impossible. Do you want to continue anyway?`, + default: false, }])).continue } @@ -289,9 +286,11 @@ class VideoSyncCommand extends Command { } } -VideoSyncCommand.description = `Describe the command here -... -Extra documentation goes here +VideoSyncCommand.description = `video-sync - a tool for automating the process of muxing additional audio tracks into videos +This tool requires the two input videos, the one where you want to add the additional tracks *to* (the destination video) and the one where you take the additional tracks *from* (the source video). 
+It then tries to find the exact same frame in both videos, in order to synchronize them (in case one of them is longer or shorter than the other). +It allows you to pick the audio and subtitle tracks you want to add to the destination and specify the output file. +There's an interactive mode (simply don't pass any arguments, flags work) and a CLI mode (pass the two arguments listed at the top). ` VideoSyncCommand.args = [ @@ -300,21 +299,11 @@ VideoSyncCommand.args = [ required: false, description: `video where tracks should be added to`, }, - { - name: `destinationOffset`, - required: false, - description: `frame offset for the destination video`, - }, { name: `source`, required: false, description: `video where the tracks are copied from`, }, - { - name: `sourceOffset`, - required: false, - description: `frame offset for the source video`, - }, ] VideoSyncCommand.flags = { @@ -348,24 +337,51 @@ VideoSyncCommand.flags = { }), algorithm: flags.enum({ char: `g`, - description: `matching algorithm to use for video syncing`, - options: [`ssim`, `matching-pixels`], - default: `ssim`, + description: `search algorithm to use for video syncing`, + options: [`simple`, `matching-scene`], + default: `matching-scene`, }), iterations: flags.integer({ char: `i`, - description: `number of iterations to perform for video syncing`, + description: `number of iterations to perform for video syncing (requires algorithm=simple)`, default: 2, }), searchWidth: flags.integer({ char: `w`, - description: `width of the search region (in seconds) for video syncing. the program will find the closest matching frame in this region, 'sourceOffset' being the center`, - default: 10, + description: `width of the search region (in seconds) for video syncing. 
the program will find the closest matching frame in this region, 'sourceOffset' being the center (requires algorithm=simple)`, + default: 20, + }), + maxOffset: flags.integer({ + char: `m`, + description: `maximum considered offset between the videos (in seconds) for video syncing.`, + default: 120, + }), + offsetEstimate: flags.integer({ + char: `e`, + description: `estimated offset between the two videos (in ms) for video syncing. positive values means that the source video is ahead of the destination video`, + default: 0, + }), + forceOffset: flags.boolean({ + char: `f`, + description: `use the estimated offset as the final offset, no synching`, + default: false, + }), + exclusiveDirection: flags.string({ + char: `x`, + description: `only search the matching frame offset in one direction. 'ahead' means that the source video scene comes *before* the destination video scene. (requires algorithm=matching-scene)`, + parse: (input) => input ? (input === `ahead` ? -1 : 1) : false, + default: undefined, + }), + threshold: flags.string({ + char: `t`, + description: `minimum confidence threshold for video syncing. (requires algorithm=simple)`, + parse: (input) => parseFloat(input), + default: 0.6, }), searchResolution: flags.integer({ char: `r`, - description: `resolution of the search region (in frames) for video syncing. increases accuracy at the cost of longer runtime`, - default: 40, + description: `resolution of the search region (in frames) for video syncing. 
increases accuracy at the cost of longer runtime (requires algorithm=simple)`, + default: 80, }), verbose: flags.boolean({ char: `v`, diff --git a/util/calc-offset.js b/util/calc-offset.js index 4418607..de58e9f 100644 --- a/util/calc-offset.js +++ b/util/calc-offset.js @@ -17,12 +17,44 @@ function* offsetGenerator(start = 0, step = 1) { } } +function generateRandomSearchCenter(totalLength, padding) { + + console.log(`totalLength:`, totalLength) + console.log(`padding:`, padding) + let availableLength = totalLength - 2*padding + let offset = Math.random() * availableLength + padding + console.log(`offset:`, offset) + return Number(offset.toFixed(2)) + +} + +/** + * Generates the static frame to be used (offset A) + * @param {*} totalLength + * @param {*} padding + * @returns + */ +async function generateRandomStaticFrame(offset, staticFrameInput, staticFrameDir) { + + console.log(`offset:`, offset) + let staticFrame = (await extractFrames({ + input: staticFrameInput, + outputDir: staticFrameDir, + offsets: [offset], + }))[0] + return `${staticFrameDir}/${staticFrame.filename}` + +} + module.exports.ALGORITHMS = ALGORITHMS -module.exports.calcOffset = async function(video1Path, video2Path, offset1, offset2, options = { - algorithm: ALGORITHMS.SSIM, +module.exports.calcOffset = async function(video1Path, video2Path, options = { + comparisonAlgorithm: ALGORITHMS.SSIM, }) { + //TODO expose this + const threshold = options.threshold + const spinner = ora(`Syncing the videos...`).start(); let staticFrameDir = await fs.mkdtemp(`${os.tmpdir()}/static`) @@ -30,23 +62,24 @@ module.exports.calcOffset = async function(video1Path, video2Path, offset1, offs console.log(`staticFrameDir:`, staticFrameDir) console.log(`rollingFramesDir:`, rollingFramesDir) - const videoDimensions = await getVideoDimensions(video1Path, video2Path) + const videoInfo = await getVideoInfo(video1Path, video2Path) + const videoDimensions = videoInfo.dimensions const video1IsLarger = 
videoDimensions[0].width >= videoDimensions[1].width const staticFrameInput = video1IsLarger ? video1Path : video2Path const rollingFrameInput = video1IsLarger ? video2Path : video1Path - const staticFrameOffset = parseInt(video1IsLarger ? offset1 : offset2) - const rollingFrameOffset = parseInt(video1IsLarger ? offset2 : offset1) + // const staticFrameOffset = parseInt(video1IsLarger ? offset1 : offset2) + // const rollingFrameOffset = parseInt(video1IsLarger ? offset2 : offset1) - let staticFrame = (await extractFrames({ - input: staticFrameInput, - outputDir: staticFrameDir, - offsets: [staticFrameOffset], - }))[0] - const staticFramePath = `${staticFrameDir}/${staticFrame.filename}` + // generate the static frame using the video length and a padding of 1% + console.log(`videoInfo.lengths:`, videoInfo.lengths) + let staticLength = videoInfo.lengths[video1IsLarger ? 0 : 1] + let staticFrameOffset = Math.round(staticLength/2) + console.log(`staticFrameOffset:`, staticFrameOffset) + let staticFramePath = await generateRandomStaticFrame(staticFrameOffset, staticFrameInput, staticFrameDir) - //TODO blackbar detection and removal + //TODO !!! 
blackbar detection and removal await fs.writeFile(staticFramePath, await resizeImg(await fs.readFile(staticFramePath), { format: `bmp`, width: videoDimensions[1].width, @@ -60,11 +93,19 @@ module.exports.calcOffset = async function(video1Path, video2Path, offset1, offs // if the confidence is too low at the end => restart with different offset // otherwise sync should work just fine - let searchCenter = rollingFrameOffset // in milliseconds + // let searchCenter = rollingFrameOffset // in milliseconds + let offsetB = options.offsetEstimate + let searchCenter = staticFrameOffset + offsetB + console.log(`searchCenter:`, searchCenter) let searchResolution = parseInt((options.searchResolution)) let closestMatch + let restarts = 0 for (let iteration = 1; iteration <= options.iterations; iteration++) { + if (restarts > 5) { + throw new Error(`Couldn't determine the correct offset.`) + } + console.log(`iteration:`, iteration) let searchWidth = options.searchWidth / iteration @@ -85,7 +126,25 @@ module.exports.calcOffset = async function(video1Path, video2Path, offset1, offs console.debug(`exportedFrames:`, exportedFrames) - closestMatch = await findClosestFrame(staticFramePath, rollingFramesDir, options.algorithm) + try { + closestMatch = await findClosestFrame(staticFramePath, rollingFramesDir, options.comparisonAlgorithm, iteration === 0) + } catch (err) { + // finding an extact match not possible, use different offsets + console.log(`Error while trying to find the closest matching frame: ${err.message}`) + + // generate the static frame using the video length and a padding of 1% + let staticLength = videoInfo.lengths[video1IsLarger ? 
0 : 1] + let staticPadding = parseInt((staticLength* 0.01).toFixed(0)) + staticFrameOffset = generateRandomSearchCenter(staticLength, staticPadding) + staticFramePath = await generateRandomStaticFrame(staticFrameOffset, staticFrameInput, staticFrameDir) + searchCenter = staticFrameOffset + offsetB + + // restart iterating + restarts++ + iteration = 0 + continue + + } console.debug(`closestMatch:`, closestMatch) @@ -95,6 +154,18 @@ module.exports.calcOffset = async function(video1Path, video2Path, offset1, offs if (closestMatch.value === 1) { break + } else if (closestMatch.value < threshold) { + + console.log(`Didn't find a closely matching frame. Retrying with different offsets...`) + //TODO expose interval + // offset B + offsetB = (Math.random() > 0.5 ? 1 : -1) * generateRandomSearchCenter(options.maxOffset * 1000, 0) + searchCenter = staticFrameOffset + offsetB + + restarts++ + iteration = 0 + continue + } } @@ -115,30 +186,36 @@ module.exports.calcOffset = async function(video1Path, video2Path, offset1, offs } -async function getVideoDimensions(vid1, vid2) { +async function getVideoInfo(vid1, vid2) { - console.log(`vid1:`, vid1) - console.log(`vid2:`, vid2) let vid1Data = await probe(vid1) let vid2Data = await probe(vid2) - console.log(`vid2:`, vid2) + console.log(`vid1Data:`, vid1Data) + console.log(`vid2Data:`, vid2Data) - console.log(`Video 1: width: ${vid1Data.streams[0].width}, height: ${vid1Data.streams[0].height}`) - console.log(`Video 2: width: ${vid2Data.streams[0].width}, height: ${vid2Data.streams[0].height}`) + console.debug(`Video 1: width: ${vid1Data.streams[0].width}, height: ${vid1Data.streams[0].height}`) + console.debug(`Video 2: width: ${vid2Data.streams[0].width}, height: ${vid2Data.streams[0].height}`) if (vid1Data.streams[0].width > vid2Data.streams[0].width && vid1Data.streams[0].height < vid2Data.streams[0].height) { console.warn(`Videos have different aspect ratios. 
You might get worse results.`) } - return [ - { - width: vid1Data.streams[0].width, - height: vid1Data.streams[0].height, - }, - { - width: vid2Data.streams[0].width, - height: vid2Data.streams[0].height, - }, - ] + return { + lengths: [ + parseInt((Number(vid1Data.format.duration) * 1000).toFixed(0)), + parseInt((Number(vid2Data.format.duration) * 1000).toFixed(0)), + ], + dimensions: [ + { + width: vid1Data.streams[0].width, + height: vid1Data.streams[0].height, + }, + { + width: vid2Data.streams[0].width, + height: vid2Data.streams[0].height, + }, + ], + } -} \ No newline at end of file +} +module.exports.getVideoInfo = getVideoInfo \ No newline at end of file diff --git a/util/find-closest-frame.js b/util/find-closest-frame.js index 62de53f..65f9249 100644 --- a/util/find-closest-frame.js +++ b/util/find-closest-frame.js @@ -5,13 +5,15 @@ const pixelmatch = require('pixelmatch') const resizeImg = require('resize-img') const ssim = require(`ssim.js`).default +const { checkStaticScene } = require(`./static-scenes`) + const ALGORITHMS = { MISMATCHED_PIXELS: `matching-pixels`, SSIM: `ssim`, } module.exports.ALGORITHMS = ALGORITHMS -module.exports.findClosestFrame = async function findClosestFrame(inputFile, frameInputDir, selectedAlg = ALGORITHMS.SSIM) { +module.exports.findClosestFrame = async function findClosestFrame(inputFile, frameInputDir, selectedAlg = ALGORITHMS.SSIM, checkForStaticScene) { const inputImage = bmp.decode(await fs.readFile(inputFile)) const { width, height } = inputImage @@ -28,6 +30,8 @@ module.exports.findClosestFrame = async function findClosestFrame(inputFile, fra value: selectedAlg === ALGORITHMS.SSIM ? 
-1 : Infinity, } + let results = [] + for (const file of files) { let imageToCompare = bmp.decode(await fs.readFile(`${frameInputDir}/${file.name}`)); @@ -43,36 +47,43 @@ module.exports.findClosestFrame = async function findClosestFrame(inputFile, fra let result if (selectedAlg === ALGORITHMS.SSIM) { - result = ssim(inputImage, imageToCompare); + result = ssim(inputImage, imageToCompare).mssim; } else { result = pixelmatch(inputImage.data, imageToCompare.data, null, width, height, {threshold: 0.1}); } + + results.push(result) - //TODO if closestMatch.value doesn't change at all, somethings fishy (e.g. static scene) - // either try again with different offsets or prompt the user, but the former option would be more robust + //TODO also trigger on very slight fluctuations for high confidence scores (actual static scenes) + // if closestMatch.value doesn't change at all, somethings fishy (e.g. static scene) + // => try again with different offsets + //TODO check if twice in a row works (because frames aren't exactly timed) or the limit has to be increased + if (result === closestMatch.value) { + throw new Error(`Got the same result twice, possible static scene!`) + } + + console.log(`result:`, result) + + // update the new best result/closest match if ( - (selectedAlg === ALGORITHMS.SSIM && result.mssim > closestMatch.value) || + (selectedAlg === ALGORITHMS.SSIM && result > closestMatch.value) || (selectedAlg === ALGORITHMS.MISMATCHED_PIXELS && result < closestMatch.value) ) { switch (selectedAlg) { case ALGORITHMS.SSIM: - result = ssim(inputImage, imageToCompare); - console.log(`result:`, result.mssim) - if (result.mssim > closestMatch.value) { + if (result > closestMatch.value) { closestMatch = { filename: file.name, - value: result.mssim, + value: result, } } break; case ALGORITHMS.MISMATCHED_PIXELS: - result = pixelmatch(inputImage.data, imageToCompare.data, null, width, height, {threshold: 0.1}); - console.log(`result:`, result) if (result < closestMatch.value) { 
closestMatch = { filename: file.name, - value: result.mssim, + value: result, } } break; @@ -89,6 +100,10 @@ module.exports.findClosestFrame = async function findClosestFrame(inputFile, fra } } + + if (checkForStaticScene && checkStaticScene({data: results})) { + throw new Error(`Static scene detected!`) + } return closestMatch diff --git a/util/find-offset-new.js b/util/find-offset-new.js new file mode 100644 index 0000000..7c86d02 --- /dev/null +++ b/util/find-offset-new.js @@ -0,0 +1,356 @@ +const fs = require('fs/promises') +const { exec } = require(`node-exec-promise`) +const { performance } = require(`perf_hooks`) +const ffprobe = require(`node-ffprobe`) +const bmp = require(`bmp-js`) +const resizeImg = require('resize-img') +const ssim = require(`ssim.js`).default +const ms = require(`ms`) +const ora = require('ora'); + +const stepSizeSmall = 25 +const stepSizeMedium = 150 +const stepSizeLarge = 1000 + +let = checkedOffsets = [] + +async function findNextSceneChange(video, startOffset, endOffset) { + + const direction = startOffset <= endOffset ? 
1 : -1 + let currentStepSize = direction * stepSizeLarge + + let framesDir = await fs.mkdtemp(`tmp/frames`) + + console.debug(`seekPosition:`, ms(startOffset)) + let seekPosition = startOffset / 1000.0 + let fullOutputPath = `${framesDir}/screenshot_${performance.now()*10000000000000}.bmp` + + // console.log(`ffmpeg -accurate_seek -ss ${seekPosition} -i "${video}" -vframes 200 -vf cropdetect=24:2:0 -f null - -y`); + let blackBarResult = await exec(`ffmpeg -accurate_seek -ss ${seekPosition} -i "${video}" -vframes 200 -vf cropdetect=24:2:0 -f null - -y`) + let cropValue = blackBarResult.stderr.split(`\n`).splice(-4, 1)[0].match(/crop=(\d+\:\d+:\d+:\d+)/)[1] + console.debug(`cropValue:`, cropValue) + + // extract the first frame + await exec(`ffmpeg -accurate_seek -ss ${seekPosition} -i "${video}" -vf crop=${cropValue} -frames:v 1 "${fullOutputPath}" -y -loglevel error`) + console.debug(`fullOutputPath:`, fullOutputPath) + + let previousFrameData = bmp.decode(await fs.readFile(fullOutputPath), { + format: `bmp`, + }) + + // extract first frame + let previousFrame = { + offset: startOffset, + path: fullOutputPath, + data: previousFrameData, + } + let currentFrame + let currentFrameOffset = startOffset + currentStepSize + let currentFrameData + let previousMaxOffset = currentFrameOffset + let delta + + while ( + (direction === 1 && currentFrameOffset < endOffset) || + (direction === -1 && currentFrameOffset > endOffset) + ) { + + // console.log(`currentFrameOffset:`, currentFrameOffset) + checkedOffsets.push(currentFrameOffset) + seekPosition = currentFrameOffset / 1000.0 + fullOutputPath = `${framesDir}/screenshot_${performance.now()*10000000000000}.bmp` + + // extract the frame + await exec(`ffmpeg -accurate_seek -ss ${seekPosition} -i "${video}" -vf crop=${cropValue} -frames:v 1 "${fullOutputPath}" -y -loglevel error`) + // console.log(`fullOutputPath:`, fullOutputPath) + + currentFrameData = bmp.decode(await fs.readFile(fullOutputPath), { + format: `bmp`, + }) + + 
currentFrame = { + offset: currentFrameOffset, + path: fullOutputPath, + data: currentFrameData, + } + + delta = 1 - ssim(previousFrame.data, currentFrame.data).mssim; + console.debug(`delta:`, delta) + + if (delta > 0.5) { + // scene change detected + + if (currentStepSize === direction * stepSizeSmall) { + // already in high-accuracy mode + + return { + preSceneChangeFrame: direction === 1 ? previousFrame : currentFrame, + postSceneChangeFrame: direction === 1 ? currentFrame : previousFrame, + delta, + } + + } else if (currentStepSize === direction * stepSizeMedium) { + console.debug(`Switching to high-accuracy mode...`); + + // backtrack to preSceneChange frame offset + // previousFrame is the preSceneChange frame, so in the next iteration previous and current frame will be the same, that's fine + previousMaxOffset = currentFrameOffset + currentFrameOffset = previousFrame.offset + currentStepSize = direction * stepSizeSmall // switch to small step size for increased accuracy + // fs.unlink(currentFrame.path) // discard old, unneeded frame + continue // don't increase currentFrameOffset, jump right back to the top + + } else { + console.debug(`Switching to medium-accuracy mode...`); + + // backtrack to preSceneChange frame offset + // previousFrame is the preSceneChange frame, so in the next iteration previous and current frame will be the same, that's fine + previousMaxOffset = currentFrameOffset + currentFrameOffset = previousFrame.offset + currentStepSize = direction * stepSizeMedium // switch to small step size for increased accuracy + fs.unlink(currentFrame.path) // discard old, unneeded frame + continue // don't increase currentFrameOffset, jump right back to the top + + } + + } else { + + await fs.unlink(previousFrame.path) // discard old, unneeded frame + previousFrame = currentFrame + + if ( + currentStepSize !== direction * stepSizeLarge && + (direction === 1 && (previousMaxOffset <= currentFrameOffset)) || + (direction === -1 && (previousMaxOffset >= 
currentFrameOffset)) + ) { + // transition detected, switch back to low-accuracy mode + console.debug(`Transition detected, switching back to low-accuracy mode...`); + currentStepSize = direction * stepSizeLarge + } + + } + + currentFrameOffset += currentStepSize // go to next frame based on current step size + + } + + throw new Error(`No scene change found`) + +} + +async function searchForMatchingScene(video2, video1SceneChange, startOffset, endOffset) { + + let video2SceneChange = await findNextSceneChange(video2, startOffset, endOffset) + + if (video1SceneChange.preSceneChangeFrame.data.width !== video2SceneChange.preSceneChangeFrame.data.width || video1SceneChange.preSceneChangeFrame.data.height !== video2SceneChange.preSceneChangeFrame.data.height) { + console.debug(`resizing...`) + video2SceneChange.preSceneChangeFrame.data = bmp.decode(await resizeImg(await fs.readFile(video2SceneChange.preSceneChangeFrame.path), { + format: `bmp`, + width: video1SceneChange.preSceneChangeFrame.data.width, + height: video1SceneChange.preSceneChangeFrame.data.height, + })); + video2SceneChange.postSceneChangeFrame.data = bmp.decode(await resizeImg(await fs.readFile(video2SceneChange.postSceneChangeFrame.path), { + format: `bmp`, + width: video1SceneChange.preSceneChangeFrame.data.width, + height: video1SceneChange.preSceneChangeFrame.data.height, + })); + } + + let preSceneChangeFrameSimilarity = ssim(video1SceneChange.preSceneChangeFrame.data, video2SceneChange.preSceneChangeFrame.data).mssim + let postSceneChangeFrameSimilarity = ssim(video1SceneChange.postSceneChangeFrame.data, video2SceneChange.postSceneChangeFrame.data).mssim + + let deltaOfDeltas = Math.abs(video1SceneChange.delta - video2SceneChange.delta) + + return { + video2SceneChange, + preSceneChangeFrameSimilarity, + postSceneChangeFrameSimilarity, + deltaOfDeltas, + } + +} + +async function calculateOffset(video1, video2, options) { + + //TODO add cli progress output + + //TODO add support for 
options.offsetEstimate + //TODO add flag to specify search direction (e.g. if known whether the source is ahead or behind the destination) + + const video1SearchLength = 300 * 1000 + const searchIncrementSize = 10000 // maximum search area to find the next scene before switching sides + const startTime = Date.now(); + const spinner = ora(`Syncing the videos...`).start(); + + // create the tmp folder if it doesn't exist yet + try { + await fs.access(`tmp`) + } catch (err) { + await fs.mkdir(`tmp`) + } + + // search starts upwards + let direction = 1 + + if (options.searchDirection) { + direction = options.searchDirection + } + + let video1Data = await ffprobe(video1) + let video2Data = await ffprobe(video2) + let video1Duration = Number(video1Data.format.duration) * 1000 // offset in ms + let video2Duration = Number(video2Data.format.duration) * 1000 // offset in ms + let video1SearchStart = Math.round(video1Duration/4) + let video1SearchEnd = Math.min(video1SearchStart + (direction * video1SearchLength), video1Duration) // make sure to not search beyond the last frame + + let video1SceneChange + try { + video1SceneChange = await findNextSceneChange(video1, video1SearchStart, video1SearchEnd) + } catch (err) { + throw new Error(`Didn't find a scene change in the destination video, can't synchronize videos!`) + } + + console.debug(`Video 1 pre-scene change frame offset:`, video1SceneChange.preSceneChangeFrame.offset) + let currentSearchStart = video1SceneChange.preSceneChangeFrame.offset - (direction * 3*stepSizeSmall) // move the offset back a bit to make sure the 0 ms offset is included in the first iteration + + currentSearchStart += options.offsetEstimate // add the offsetEstimate to the search center for the second video + + // initialize offsets with the same value + let currentSearchOffsets = { + lower: currentSearchStart, + upper: currentSearchStart, + } + + // make sure to stay within offset bounds + // continue while at least one side still within the 
bounds + while ( + currentSearchOffsets.upper < video2Duration && + currentSearchOffsets.lower > 0 && + ((currentSearchOffsets.upper - video1SceneChange.preSceneChangeFrame.offset - options.offsetEstimate) < options.maxOffset || + (video1SceneChange.preSceneChangeFrame.offset - currentSearchOffsets.lower - options.offsetEstimate) < options.maxOffset) + ) { + + console.log(`Finding scene change in other video...`) + + currentSearchStart = direction === 1 ? currentSearchOffsets.upper : currentSearchOffsets.lower + console.log(`currentSearchOffset:`, currentSearchStart) + // make sure not to search past the start or end of the file + let currentSearchEnd = direction === 1 ? + Math.max(currentSearchStart + (direction*searchIncrementSize), 0) : + Math.min(currentSearchStart + (direction*searchIncrementSize), video2Duration) + + let sceneComparison + try { + sceneComparison = await searchForMatchingScene(video2, video1SceneChange, currentSearchStart, currentSearchEnd) + } catch (err) { + // no scene change found until currentSearchEnd + + console.log(`No scene change found until currentSearchEnd (${currentSearchEnd})`); + + if (direction === 1) { + currentSearchOffsets.upper = currentSearchEnd + console.log(`Current offset (upper):`, ms(currentSearchOffsets.upper - video1SceneChange.preSceneChangeFrame.offset)); + } else { + currentSearchOffsets.lower = currentSearchEnd + console.log(`Current offset (lower):`, ms(video1SceneChange.preSceneChangeFrame.offset - currentSearchOffsets.lower)); + } + + // only change direction if the other direction hasn't surpassed the offset yet + if ( + !options.exclusiveDirection && // only switch direction if no exclusive direction is set + (direction === 1 && (video1SceneChange.preSceneChangeFrame.offset - currentSearchOffsets.lower - options.offsetEstimate) < options.maxOffset) || + (direction === -1 && (currentSearchOffsets.upper - video1SceneChange.preSceneChangeFrame.offset - options.offsetEstimate) < options.maxOffset) + ) { + 
direction = direction * -1 + console.debug(`changing direction to ${direction}`) + } else { + console.debug(`NOT changing direction!`) + } + + continue + } + + console.log(`sceneComparison:`, sceneComparison) + + if ( + (sceneComparison.preSceneChangeFrameSimilarity > 0.6 && sceneComparison.postSceneChangeFrameSimilarity > 0.6 && (await sceneComparison).deltaOfDeltas < 0.03) || + (sceneComparison.preSceneChangeFrameSimilarity > 0.9 && sceneComparison.postSceneChangeFrameSimilarity > 0.9 && (await sceneComparison).deltaOfDeltas < 0.1) + ) { + // matching scene found + + // remove tmp folder + await fs.rm(`tmp`, { + recursive: true, + force: true, + }) + + const result = { + videoOffset: video1SceneChange.preSceneChangeFrame.offset - sceneComparison.video2SceneChange.preSceneChangeFrame.offset, + confidence: 1, + } + spinner.succeed(`Source video is approx. ${Math.abs(result.videoOffset)} ms ${result.videoOffset > 0 ? `ahead` : `behind`} destination video. Took ${ms(Date.now() - startTime)}`) + return result + + } else { + // retry the same with different offsets + + if (direction === 1) { + currentSearchOffsets.upper = sceneComparison.video2SceneChange.postSceneChangeFrame.offset + console.debug(`Current offset (upper):`, ms(currentSearchOffsets.upper - video1SceneChange.postSceneChangeFrame.offset)); + } else { + currentSearchOffsets.lower = sceneComparison.video2SceneChange.preSceneChangeFrame.offset + console.debug(`Current offset (lower):`, ms(video1SceneChange.preSceneChangeFrame.offset - currentSearchOffsets.lower)); + } + + // only change direction if the other direction hasn't surpassed the offset yet + if ( + !options.exclusiveDirection && // only switch direction if no exclusive direction is set + (direction === 1 && (video1SceneChange.preSceneChangeFrame.offset - currentSearchOffsets.lower - options.offsetEstimate) < options.maxOffset) || + (direction === -1 && (currentSearchOffsets.upper - video1SceneChange.preSceneChangeFrame.offset - 
options.offsetEstimate) < options.maxOffset) + ) { + direction = direction * -1 + console.debug(`changing direction to ${direction}`) + } else { + console.debug(`NOT changing direction!`) + } + + } + + } + + // remove tmp folder + await fs.rm(`tmp`, { + recursive: true, + force: true, + }) + + throw new Error(`Couldn't sync videos! (tried for ${ms(Date.now() - startTime)}`) + +} +module.exports.calculateOffset = calculateOffset + +// calculateOffset(`/mnt/c/Users/Chaphasilor/Videos/hobbit_1_ee.mp4`, `/mnt/c/Users/Chaphasilor/Videos/The Hobbit - An Unexpected Journey (Extended Edition).mp4`, 90*1000) +// calculateOffset(`/mnt/c/Users/Chaphasilor/Videos/Star Wars - The Bad Batch - 1x03.mkv`, `/mnt/c/Users/Chaphasilor/Videos/BadBatchCopy.mkv`, 90*1000) +// calculateOffset(`/mnt/v/Media/TV Shows/Game of Thrones (2011)/Season 6/Game of Thrones - 6x01.mkv`, `/mnt/v/Media/TV Shows/Game of Thrones (2011) (de)/S6/Game of Thrones - S6E1.mp4`, 90*1000) +// calculateOffset(`/mnt/v/Media/TV Shows/Game of Thrones (2011)/Season 6/Game of Thrones - 6x03.mkv`, `/mnt/v/Media/TV Shows/Game of Thrones (2011) (de)/S6/Game of Thrones - S6E3.mp4`, 90*1000) +//!!! +// calculateOffset(`/mnt/c/Users/Chaphasilor/Videos/Game of Thrones - 7x02.mkv`, `/mnt/c/Users/Chaphasilor/Videos/Game of Thrones - S7E02.mp4`, { +// calculateOffset(`/mnt/c/Users/Chaphasilor/Videos/Game of Thrones - 7x02.mkv`, `/mnt/v/Media/TV Shows/Game of Thrones (2011) (de)/HQ/Staffel 7/Game of Thrones 0702.mp4`, { +// maxOffset: 240*1000, +// }) +// .then(async result => { + +// console.log(result) + +// }) +// .finally(async () => { +// await fs.writeFile(`checkedOffsets.csv`, checkedOffsets.map(x => `${x}, 1`).join(`\n`)) +// await fs.rm(`tmp`, { +// recursive: true, +// force: true, +// }) +// }) + +//[ ] when automating, use the previously found offset as an estimate for following videos (if videos from the same source) + +//[ ] what happens when there are multiple similar scene changes? 
\ No newline at end of file diff --git a/util/merge-tracks.js b/util/merge-tracks.js index 6f352d5..7386922 100644 --- a/util/merge-tracks.js +++ b/util/merge-tracks.js @@ -3,12 +3,14 @@ const probe = require(`node-ffprobe`) const cli = require(`cli-ux`).default const ora = require('ora'); const chalk = require('chalk'); +const ms = require(`ms`) + const tracks = require(`./tracks`) module.exports = function(video1, video2, output, offset, tracksToSync) { return new Promise(async (resolve, reject) => { - let spinner = ora(`Figuring out offsets for audio tracks...`).start(); + let spinner = ora(`Figuring out offsets for selected tracks...`).start(); // cli.action.start(`Figuring out offsets for audio tracks`) let matchedTracks @@ -75,9 +77,11 @@ module.exports = function(video1, video2, output, offset, tracksToSync) { merger.stdout.setEncoding(`utf8`) merger.stderr.setEncoding(`utf8`) + const startTime = Date.now() const simpleBar = cli.progress({ - format: `Muxing output video [${chalk.green('{bar}')}] {percentage} % | ETA: {eta}s`, + format: `Muxing output video [${chalk.green('{bar}')}] {percentage} % | ETA: {eta_formatted}`, etaBuffer: 7, + clearOnComplete: true, }) simpleBar.start(100, 0); @@ -109,7 +113,7 @@ module.exports = function(video1, video2, output, offset, tracksToSync) { } const tempSpinner = ora(``).start(); - tempSpinner.succeed(`Done.`) + tempSpinner.succeed(`Successfully muxed output video in ${ms(Date.now() - startTime)}.`) return resolve() diff --git a/util/static-scenes.js b/util/static-scenes.js new file mode 100644 index 0000000..010ea45 --- /dev/null +++ b/util/static-scenes.js @@ -0,0 +1,64 @@ +const input1 = { + type: `static`, + data: [ 0.5994552698893447, 0.599064102235953, 0.35056786746718865, 0.3500060860690234, 0.35006407702387293, 0.348183724532739, 0.34884531979873606, 0.3486988612628478, 0.34931567039775985, 0.34805662901760775, 0.3481243268170159, 0.3471440943038979, 0.6020748067125484, 0.6769818442172816, 0.7008137276381164, 
0.6955130145952071, 0.7124507056473198, 0.7551564794988055, 0.7765847728531536, 0.7846291132571114, 0.7884075745985573, 0.7852740709839631, 0.7543214236572517, 0.6024584250828178, 0.7590348350325902, 0.7572549057733584, 0.7596238711119784, 0.7721559770151246, 0.77332122847765, 0.7573920874950738, 0.7227731072697046, 0.691824052059341, 0.6822957972272966, 0.6912616443915275, 0.3516963993610157, 0.351695987552907, 0.3518557352996475, 0.3520544302094652, 0.35224975625138116, 0.3509199853750202, ], +} + +const input2 = { + type: `normal`, + data: [ 0.517961401614517, 0.51795945498385, 0.6929285967393342, 0.9373905920795014, 0.9375701063261677, 0.9420131246311026, 0.9487265573379107, 0.9532314497279086, 0.9597637392480829, 0.9649454381997624, 0.9704170111389493, 0.9718801723262454, 0.6917187162345894, 0.9818833162338804, 0.9741279207714674, 0.9670750470117099, 0.9599887901764863, 0.9561961601786073, 0.9536156078278396, 0.9452474558161617, 0.688486550137538, 0.6862855395278077, 0.6889790445720432, 0.694185102717786, 0.6867004000221388, 0.6868382370682793, 0.6831432022067203, 0.6891155012641412, 0.6869661722140721, 0.5132680138805535, 0.5142630591661521, 0.516669869117694, 0.5173126309844173, 0.5174420168168854, 0.6924266476834146, 0.6907055301574426, 0.6898531403697211, 0.6903550007422555, 0.6920621237445368, 0.692583999037324, ], +} + +const input3 = { + type: `normal`, + data: [ 0.7970336811789516, 0.7966955394497373, 0.6257892724474822, 0.6247660391979712, 0.6234494434206667, 0.4682748522565705, 0.47091479359573996, 0.47044409730804676, 0.4704123052809353, 0.46752838354686765, 0.46869666807729965, 0.4901419089485066, 0.7980105439577683, 0.4893614240849455, 0.48789638545527675, 0.4871492807479713, 0.4932491342948191, 0.4953005335043164, 0.49582755292586184, 0.4958078096957378, 0.8540240974479486, 0.87166426135269, 0.8678114318663785, 0.8003519330625414, 0.867899027195929, 0.878471901293726, 0.9193898053305759, 0.8943135493378668, 0.8794363885738545, 0.8612880500579801, 
0.4747123659891098, 0.4818097376176827, 0.48014801519439265, 0.4848022520366683, 0.8021564106184855, 0.6212976613190518, 0.6221856125132545, 0.6234567691600998, 0.6246916908044958, 0.624051454171958, ], +} + +const input4 = { + type: `static`, + data: [ 0.6356052633609023, 0.6370200307873028, 0.6452912006468637, 0.6428166483565648, 0.6457104936505018, 0.930252374909148, 0.9355593709182481, 0.940417546358631, 0.938981387624808, 0.9361740873541464, 0.9353159703961992, 0.9405327709652588, 0.63826517444716, 0.9497610650942159, 0.9572042326113551, 0.9639666725017165, 0.968710774545617, 0.9716869304628997, 0.9761220611617342, 0.9739107681934567, 0.9705402982981287, 0.9682353946996541, 0.9665428828389981, 0.6409564786152606, 0.9690854891716492, 0.9778149048007924, 0.979645866968401, 0.9768414527768374, 0.977739504802136, 0.9776863156008326, 0.9795600471100725, 0.9806730668760013, 0.9804818997244141, 0.9775748512202764, 0.6438105874293297, 0.6444931466972921, 0.6434197738422119, 0.6390064865979158, 0.6443945465544884, 0.64497963317503,], +} + +function checkStaticScene(input) { + + console.log(`input.type:`, input.type) + + let highestScore = input.data.reduce((max, cur) => cur > max ? cur : max, 0) + let lowestScore = input.data.reduce((min, cur) => cur < min ? cur : min, 0) + + let deltas = [] + for (const i in input.data) { + if (i > 0) { + deltas.push(Math.abs(input.data[i] - input.data[i-1])) + } + } + + let validScores = [] + for (const score of input.data) { + if ((score - lowestScore) > (highestScore - lowestScore) * 0.8) { + validScores.push(score) + } + } + + + deltas = deltas.sort((a, b) => a - b) + let mean = deltas.reduce((sum, cur) => sum + cur, 0)/deltas.length + let median = deltas.length%2 === 0 ? 
(deltas[deltas.length/2] + deltas[deltas.length/2+1])/2 : deltas[Math.floor(deltas.length/2)+1] + + console.log(`mean:`, mean) + console.log(`median:`, median) + console.log(`validScores.length:`, validScores.length) + + if (validScores.length >= 0.5 * input.data.length) { + console.warn(`Static scene detected!`) + return true + } + + return false + +} +module.exports.checkStaticScene = checkStaticScene + +// checkStaticScene(input1) +// checkStaticScene(input2) +// checkStaticScene(input3) +// checkStaticScene(input4) \ No newline at end of file diff --git a/util/warping.js b/util/warping.js new file mode 100644 index 0000000..62ca88f --- /dev/null +++ b/util/warping.js @@ -0,0 +1,145 @@ +const fs = require('fs/promises') +const { exec } = require(`node-exec-promise`) +const { performance } = require(`perf_hooks`) +const bmp = require(`bmp-js`) +const ssim = require(`ssim.js`).default +const ora = require('ora'); +const resizeImg = require('resize-img') +const { getVideoInfo } = require("./calc-offset") + +async function findClosestFrame(destinationVideo, sourceVideo, destinationTimestamp, offset, radius, stepSize) { + + // create the tmp folder if it doesn't exist yet + try { + await fs.access(`tmp`) + } catch (err) { + await fs.mkdir(`tmp`) + } + const framesDir = await fs.mkdtemp(`tmp/frames`) + + let seekPosition = destinationTimestamp / 1000.0 + let destinationFrame + let similarity + let blackBarResult + let cropValueDestination + let cropValueSource + + // determine crop value for destination video + blackBarResult = await exec(`ffmpeg -accurate_seek -ss ${seekPosition} -i "${destinationVideo}" -vframes 200 -vf cropdetect=24:2:0 -f null - -y`) + cropValueDestination = blackBarResult.stderr.split(`\n`).splice(-4, 1)[0].match(/crop=(\d+\:\d+:\d+:\d+)/)[1] + + // determine crop value for source video + blackBarResult = await exec(`ffmpeg -accurate_seek -ss ${seekPosition} -i "${sourceVideo}" -vframes 200 -vf cropdetect=24:2:0 -f null - -y`) + 
cropValueSource = blackBarResult.stderr.split(`\n`).splice(-4, 1)[0].match(/crop=(\d+\:\d+:\d+:\d+)/)[1] + + + // create reference frame + destinationFrame = { + offset: destinationTimestamp / 1000.0, + path: `${framesDir}/screenshot_${performance.now()*10000000000000}.bmp`, + data: null, + } + + destinationFrame.data = await extractFrame(destinationVideo, destinationFrame.offset, cropValueDestination, destinationFrame.path) + + mostSimilarFrame = { + offset: 0, + similarity: -1, + } + + // extract frame, check similarity, delete it + for (let currentOffset = offset-radius; currentOffset <= offset+radius; currentOffset += stepSize) { + + seekPosition = (destinationTimestamp + currentOffset) / 1000.0 + fullOutputPath = `${framesDir}/screenshot_${performance.now()*10000000000000}.bmp` + + let frameData = await extractFrame(sourceVideo, seekPosition, cropValueSource, fullOutputPath) + + //TODO instead of only checking image dimensions at the end, do it at the start and then generate the frames from the smaller video + //!!! make sure to use the negated offset when swapping source and destination! 
+ if (destinationFrame.width !== frameData.width || destinationFrame.height !== frameData.height) { + // make sure to always downsize + if (destinationFrame.width*destinationFrame.height >= frameData.width*frameData.height) { + // resize destination frame once + destinationFrame.data = bmp.decode(await resizeImg(await fs.readFile(destinationFrame.path), { + format: `bmp`, + width: frameData.width, + height: frameData.height, + })); + } else { + // resize every source frame + frameData = bmp.decode(await resizeImg(await fs.readFile(fullOutputPath), { + format: `bmp`, + width: destinationFrame.data.width, + height: destinationFrame.data.height, + })); + } + } + + similarity = ssim(frameData, destinationFrame.data).mssim; + + if (similarity > mostSimilarFrame.similarity) { + mostSimilarFrame = { + offset: currentOffset, + similarity, + } + } + + } + + // remove tmp folder + await fs.rm(`tmp`, { + recursive: true, + force: true, + }) + + return mostSimilarFrame + +} + +async function extractFrame(video, offset, cropValue, path) { + + // extract the frame + await exec(`ffmpeg -accurate_seek -ss ${offset} -i "${video}" -vf crop=${cropValue} -frames:v 1 "${path}" -y -loglevel error`) + + // load the bitmap into memory + const frameData = bmp.decode(await fs.readFile(path), { + format: `bmp`, + }) + + return frameData + +} + +async function validateOffset(destinationVideo, sourceVideo, offsetToTest) { + + const testPositions = [ + 0.1, + 0.8, + ] + const testRadius = 500 + const testStepSize = 50 + + const videoInfo = await getVideoInfo(destinationVideo, sourceVideo) + + const mostSimilarFrameOffsets = [] + + for (const position of testPositions) { + + mostSimilarFrameOffsets.push( + (await findClosestFrame(destinationVideo, sourceVideo, Math.round(videoInfo.lengths[0] * position), offsetToTest, testRadius, testStepSize)).offset + ) + + } + + console.debug(`mostSimilarFrameOffsets:`, mostSimilarFrameOffsets) + + const offsetDelta = 
Math.abs(Math.max(...mostSimilarFrameOffsets) - Math.min(...mostSimilarFrameOffsets)) + if (offsetDelta > 250) { + return false + } + + return true + +} +module.exports.validateOffset = validateOffset