Plan
This commit is contained in:
parent
50613b8c57
commit
a21cc51eb0
2 changed files with 82 additions and 0 deletions
|
@ -112,3 +112,56 @@ const ffmpegBinaryPath = () => {
|
|||
// https://github.com/eugeneware/ffmpeg-static/issues/16
|
||||
return ensure(pathToFfmpeg).replace("app.asar", "app.asar.unpacked");
|
||||
};
|
||||
|
||||
/**
|
||||
* A variant of {@link ffmpegExec} adapted to work with streams so that it can
|
||||
* handle the MP4 conversion of large video files.
|
||||
*
|
||||
* See: [Note: Convert to MP4]
|
||||
*
|
||||
* @param command
|
||||
* @param dataOrPathOrZipItem
|
||||
* @param outputFileExtension
|
||||
* @param timeoutMS
|
||||
* @returns
|
||||
*/
|
||||
export const ffmpegConvertToMP4 = async (
|
||||
command: string[],
|
||||
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
|
||||
outputFileExtension: string,
|
||||
timeoutMS: number,
|
||||
): Promise<Uint8Array> => {
|
||||
// TODO (MR): This currently copies files for both input (when
|
||||
// dataOrPathOrZipItem is data) and output. This needs to be tested
|
||||
// extremely large video files when invoked downstream of `convertToMP4` in
|
||||
// the web code.
|
||||
|
||||
const {
|
||||
path: inputFilePath,
|
||||
isFileTemporary: isInputFileTemporary,
|
||||
writeToTemporaryFile: writeToTemporaryInputFile,
|
||||
} = await makeFileForDataOrPathOrZipItem(dataOrPathOrZipItem);
|
||||
|
||||
const outputFilePath = await makeTempFilePath(outputFileExtension);
|
||||
try {
|
||||
await writeToTemporaryInputFile();
|
||||
|
||||
const cmd = substitutePlaceholders(
|
||||
command,
|
||||
inputFilePath,
|
||||
outputFilePath,
|
||||
);
|
||||
|
||||
if (timeoutMS) await withTimeout(execAsync(cmd), timeoutMS);
|
||||
else await execAsync(cmd);
|
||||
|
||||
return fs.readFile(outputFilePath);
|
||||
} finally {
|
||||
try {
|
||||
if (isInputFileTemporary) await deleteTempFile(inputFilePath);
|
||||
await deleteTempFile(outputFilePath);
|
||||
} catch (e) {
|
||||
log.error("Could not clean up temp files", e);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
|
|
@ -181,3 +181,32 @@ const writeNodeStream = async (filePath: string, fileStream: Readable) => {
|
|||
});
|
||||
});
|
||||
};
|
||||
|
||||
/**
 * [Note: Convert to MP4]
 *
 * When we want to convert a video to MP4, if we were to send the entire
 * contents of the video from the renderer to the main process over IPC, it just
 * causes the renderer to run out of memory and restart when the videos are very
 * large. So we need to stream the original video renderer → main and then
 * stream back the converted video renderer ← main.
 *
 * Currently Chromium does not support bi-directional streaming ("full" duplex
 * mode for the Web fetch API). So we need to simulate that using two different
 * streaming requests.
 *
 *     renderer → main            stream://convert-to-mp4
 *                                → request.body is the original video
 *                                ← response is a token
 *
 *     renderer → main            stream://convert-to-mp4?token=<token>
 *                                ← response.body is the converted video
 *
 * Note that the conversion itself is not streaming. The conversion still
 * happens in a single shot, we are just streaming the data across the IPC
 * boundary to allow us to pass large amounts of data without running out of
 * memory.
 *
 * See also: [Note: IPC streams]
 */
// NOTE(review): stub — the handler body is empty in this chunk. Presumably
// `token` is undefined for the first (upload) leg and set for the second
// (download) leg, per the protocol described above — confirm once implemented.
const convertToMP4 = (token: string | undefined) => {};
|
||||
|
|
Loading…
Reference in a new issue