I am writing a program where I need to process a video multiple times using ffmpeg. The ffmpeg calls (below) are inside a 'then' statement of a promise.
ffmpeg(path)
  .size('640x?')
  .aspect('1:1')
  .autopad('#682BAB')
  .saveToFile(`${userDirPath}/11-${userFileName}`)
  .on('end', () => {
    ffmpeg()
      .input('test-11-start.mp4')
      .mergeAdd(`${userDirPath}/11-${userFileName}`)
      .mergeAdd('test-11-end.mp4')
      .mergeToFile(`${userDirPath}/11-final-${userFileName}`, 'temp/')
      .on('end', () => console.log('FFmpeg done!'));
  });
There is another ffmpeg call after this one (the same, but with a different aspect ratio), followed by a 'then' statement with some other functions.
The problem is that ffmpeg runs asynchronously, so the next statements (which use the file ffmpeg produces) execute before it finishes, and I want them to wait for it. I've tried async/await (below), but it still runs asynchronously. What is wrong with the code?
async function ffmpegMerge() {
  try {
    await ffmpeg(path)
      .size('640x?')
      .aspect('1:1')
      .autopad('#682BAB')
      .saveToFile(`${userDirPath}/11-${userFileName}`)
      .on('end', () => {
        ffmpeg()
          .input(`test-11-start.mp4`)
          .mergeAdd(`${userDirPath}/11-${userFileName}`)
          .mergeAdd(`test-11-end.mp4`)
          .mergeToFile(`${userDirPath}/11-final-${userFileName}.mp4`, 'temp/')
          .on('end', () => console.log('FFmpeg done!'));
      })
  }
  catch (err) {
    return Promise.reject(new Error(err));
  }
}
await only waits for a promise (or another thenable), and the command object fluent-ffmpeg returns is neither, so execution continues immediately. Create a function that wraps the work in a promise and use await to wait until that promise is resolved.
This is an example of making the ffmpeg calls awaitable:
function ffmpegSync() {
  return new Promise((resolve, reject) => {
    ffmpeg(path)
      .size('640x?')
      .aspect('1:1')
      .autopad('#682BAB')
      .saveToFile(`${userDirPath}/11-${userFileName}`)
      .on('end', () => {
        ffmpeg()
          .input(`test-11-start.mp4`)
          .mergeAdd(`${userDirPath}/11-${userFileName}`)
          .mergeAdd(`test-11-end.mp4`)
          .mergeToFile(`${userDirPath}/11-final-${userFileName}.mp4`, 'temp/')
          .on('end', () => {
            console.log('FFmpeg done!');
            // resolve only once the merge has finished, not before
            resolve();
          })
          .on('error', (err) => reject(new Error(err)));
      })
      .on('error', (err) => {
        return reject(new Error(err));
      });
  });
}
Now just use the function ffmpegSync and await.
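For example, the call site could look like this (a minimal sketch; ffmpegSyncOther is a hypothetical stand-in for your second, different-aspect-ratio variant):

async function processAll() {
  // each step only starts after the previous ffmpeg run has finished
  await ffmpegSync();
  await ffmpegSyncOther(); // hypothetical second variant
  // anything here can safely use the files ffmpeg wrote above
}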
createThumbnailForVideo().then(() => {
  console.log('ok');
})

function createThumbnailForVideo() {
  return new Promise((resolve, reject) => {
    const ffmpegInstaller = require('@ffmpeg-installer/ffmpeg');
    const ffmpeg = require('fluent-ffmpeg');
    ffmpeg.setFfmpegPath(ffmpegInstaller.path);

    var path = require('path'), // Default node module
      pathToFile = path.join(__dirname, 'tempfiles', fileName),
      pathToSnapshot = path.join(__dirname, 'tempfiles');

    var proc = ffmpeg(pathToFile)
      .on('filenames', (filenames) => {})
      .on('end', () => {
        console.log('screenshots were saved');
        console.log('FFmpeg done!');
        resolve();
      })
      .on('error', (err) => {
        console.log('an error happened: ' + err.message);
        return reject(new Error(err));
      })
      .takeScreenshots(
        { count: 1, filename: `thumb_${onlyName}.png`, timemarks: ['00:00:01.000'], size: '250x?' },
        pathToSnapshot
      );
  });
}
QUESTION:
Unfortunately, my function finishes within seconds instead of running to completion. This is apparently because listeners are declared to stream the data: to my knowledge they are not promises I can await.
How can I make my Firebase function execute in full?
CODE:
exports.fifteenMinutesData = functions
  .runWith(runtimeOpts)
  .pubsub
  .schedule('*/15 * * * *')
  .timeZone('Etc/UTC')
  .onRun((context) => {
    return (async () => {
      try {
        const Client = require('ftp');
        const c = new Client();
        c.connect({
          host: "...",
          user: "..."
        });
        c.on('ready', async function () {
          c.get('text.txt', async function (err, stream) {
            if (err)
              throw err;
            var content = '';
            stream.on('data', function (chunk) {
              content += chunk.toString();
            });
            stream.on('end', function () {
              (async () => {
                try {
                  let data = content;
                  //etc....
                }
                catch (err) {
                  console.log("ERR: " + err);
                }
              })()
            })
          })
        })
      }
      catch (err) {
        console.log("ERR: " + err)
      }
    })()
  });
You will need to promisify the result so the module is aware the value is asynchronous. Currently, your callback is not informing the module of anything, so execution exits immediately. You will want a format like:
exports.fifteenMinutesData = functions
  .runWith(runtimeOpts)
  .pubsub
  .schedule('*/15 * * * *')
  .timeZone('Etc/UTC')
  .onRun((context) => new Promise((resolve, reject) => {
    // ...
  }));
Where you call resolve(data); for the success path and reject(err); for all error execution paths.
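Applied to your code, a sketch could look like this (untested; it assumes the same ftp client and keeps the placeholder host/user values from the question):

exports.fifteenMinutesData = functions
  .runWith(runtimeOpts)
  .pubsub
  .schedule('*/15 * * * *')
  .timeZone('Etc/UTC')
  .onRun((context) => new Promise((resolve, reject) => {
    const Client = require('ftp');
    const c = new Client();
    c.on('error', reject); // connection-level failures reject the promise
    c.on('ready', function () {
      c.get('text.txt', function (err, stream) {
        if (err) return reject(err);
        var content = '';
        stream.on('data', function (chunk) {
          content += chunk.toString();
        });
        stream.on('error', reject);
        stream.on('end', function () {
          c.end();
          resolve(content); // the scheduled function now waits for this
        });
      });
    });
    c.connect({ host: "...", user: "..." });
  }));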
I am new and am trying to concat a folder of audio files and then stream the created file with ffmpeg in Node.js.
I thought I could call the function that creates the file with await, and when it was done the code would continue, allowing me to use the created file. However, that's not what's happening: I am getting a "file undefined" error.
Main function
// CONCATS THE FILES
await concatAudio(supportedFileTypes.supportedAudioTypes, `${path}${config[typeKey].audio_directory}`);

// CALLS THE FILE CREATED FROM concatAudio
const randomSong = await getRandomFileWithExtensionFromPath(
  supportedFileTypes.supportedAudioTypes,
  `${path}${config[typeKey].audio_final}`
);
concatAudio function
var audioconcat = require('audioconcat');
const getRandomFileWithExtensionFromPath = require('./randomFile');
const find = require('find');

// Async Function to get a random file from a path
module.exports = async (extensions, path) => {
  // Find all of our files with the extensions
  let allFiles = [];
  extensions.forEach(extension => {
    allFiles = [...allFiles, ...find.fileSync(extension, path)];
  });

  await audioconcat(allFiles)
    .concat('./live-stream-radio/final/all.mp3')
    .on('start', function(command) {
      console.log('ffmpeg process started:', command);
    })
    .on('error', function(err, stdout, stderr) {
      console.error('Error:', err);
      console.error('ffmpeg stderr:', stderr);
    })
    .on('end', function(output) {
      console.error('Audio created in:', output);
    });

  // Return a random file
  // return '/Users/Semmes/Downloads/live-stream-radio-ffmpeg-builds/live-stream-radio/final/all.mp3';
};
I solved it by wrapping the call in a Promise. I've adapted my solution to your script (I haven't tested this exact code, but it is almost the same as mine). I hope this helps someone:
var audioconcat = require('audioconcat');
const getRandomFileWithExtensionFromPath = require('./randomFile');
const find = require('find');

// Async Function to get a random file from a path
module.exports = async (extensions, path) => {
  // Find all of our files with the extensions
  let allFiles = [];
  extensions.forEach(extension => {
    allFiles = [...allFiles, ...find.fileSync(extension, path)];
  });

  const concatPromise = new Promise((resolve, reject) => {
    audioconcat(allFiles)
      .concat('./live-stream-radio/final/all.mp3')
      .on('start', function(command) {
        console.log('ffmpeg process started:', command);
      })
      .on('error', function(err, stdout, stderr) {
        console.error('Error:', err);
        console.error('ffmpeg stderr:', stderr);
        reject(err);
      })
      .on('end', function(output) {
        console.error('Audio created in:', output);
        resolve(output);
      });
  });

  // Wait for ffmpeg to finish and return the output path
  return await concatPromise;
  // return '/Users/Semmes/Downloads/live-stream-radio-ffmpeg-builds/live-stream-radio/final/all.mp3';
};
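With this change, the await concatAudio(...) call in the main function above resolves only after ffmpeg's end event has fired, so the file already exists by the time getRandomFileWithExtensionFromPath runs.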
I've got this code to loop through some .mp4 files and create a screenshot of each:
files.forEach(file => {
  console.log(file);
  if (!fs.existsSync('img/' + file.replace('.mp4', '.png'))) {
    ffmpeg({ source: 'movies/' + file })
      .takeScreenshots({ timemarks: ['50'], size: '150x100' }, 'img/', function(err, filenames) {
      })
      .on('end', function() {
        fs.rename('img/tn.png', 'img/' + file.replace('.mp4', '.png'), function(err) { if (err) console.log('Error: ' + err) });
        sleep(1000);
      })
  }
});
Now I've got the problem that .on('end') sometimes doesn't seem to work. The error:
ENOENT: no such file or directory, rename
I think it's because the process of saving tn.png is slower than the renaming...
I wouldn't mix callbacks, sync calls, sleep and a loop together. You can use the promise version of fs to convert all your callback-style code to promise style, and then process the files either sequentially or in parallel.
I would also wrap the screenshot code in a promise.
Here is the code:
const fs = require("fs").promises;
function takeScreenshot(file) {
return new Promise((resolve, reject) => {
ffmpeg({"source": `movies/${file}`})
.takeScreenshots({"timemarks": ["50"], "size": "150x100"}, "img/", function (err, filenames) {
})
.on("end", function () {
resolve();
})
.on("error", err => {
reject(err);
});
});
}
// execute one by one
async function sequential(files) {
  for (const file of files) {
    const pngPath = `img/${file.replace(".mp4", ".png")}`;
    // fs.stat rejects when the file is missing, so map it to a boolean
    const fileExists = await fs.stat(pngPath).then(() => true, () => false);
    if (!fileExists) {
      await takeScreenshot(file);
      await fs.rename("img/tn.png", pngPath);
    }
  }
}
// execute in parallel
// (note: every run renames the shared img/tn.png, so parallel execution
// can race against itself; prefer sequential unless filenames are unique)
async function parallel(files) {
  return Promise.all(files.map(async file => {
    const pngPath = `img/${file.replace(".mp4", ".png")}`;
    const fileExists = await fs.stat(pngPath).then(() => true, () => false);
    if (!fileExists) {
      await takeScreenshot(file);
      await fs.rename("img/tn.png", pngPath);
    }
  }));
}
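A hypothetical driver for either variant (assuming the movie files live in movies/ as in the question):

// list the movie folder, then process the files one by one
(async () => {
  const files = await fs.readdir("movies");
  await sequential(files.filter(file => file.endsWith(".mp4")));
})();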
Hope this helps.
I am refactoring some code and removing unnecessary callbacks.
The original code was:
create(_job_id).then(function (create_id) {
  latest().then(function (latest_config) {
    update(create_id).then(function () {
      // More code
    }).catch(function (err) {
      console.log(err);
    });
  }).catch(function (err) {
    console.log(err);
  });
}).catch(function (err) {
  console.log(err);
});
I have refactored it to:
const setup = async () => {
  const create_id = await create(_job_id);
  const latest_config = await latest();
  await update(create_id);
  return { create_id, latest_config };
}

setup().then((setup) => {
  console.log(setup);
})
But now the rejections are not handled. How would I go about elegantly handling the "catches"? Promise.all looks good, but I have tried to use it unsuccessfully.
Any help is appreciated.
The non-async version could also look like this:
const p_latest_config = latest();

create(_job_id)
  .then(create_id => {
    update(create_id).catch(err => console.log(err));
    return Promise.all([create_id, p_latest_config]);
  })
  .then(([create_id, latest_config]) => ({create_id, latest_config}))
  .then(setup => console.log(setup))
  .catch(err => console.log(err));
The above assumes that create(_job_id), latest() and update(create_id) do not depend on each other via internal mutation.
Add .catch(err => { /* do sth with err */ }) on the setup() call, just as you do with then(). It catches any exception rejected by setup().
setup()
  .then((setup) => {
    console.log(setup);
  })
  .catch(err => { console.log("Error", err.message) })
I am trying to write a program that gets a zip file from S3, unzips it, then uploads it back to S3.
But I have found two exceptions that I cannot catch:
1. StreamContentLengthMismatch: Stream content length mismatch. Received 980323883 of 5770104761 bytes. This occurs irregularly.
2. NoSuchKey: The specified key does not exist. This happens when I input the wrong key.
When either of these exceptions occurs, the program crashes. I'd like to catch and handle both of them correctly and prevent the crash.
const unzipUpload = () => {
  return new Promise((resolve, reject) => {
    let rStream = s3.getObject({Bucket: 'bucket', Key: 'hoge/hoge.zip'})
      .createReadStream()
      .pipe(unzip.Parse())
      .on('entry', function (entry) {
        if (entry.path.match(/__MACOSX/) == null) {
          // pause
          if (currentFileCount - uploadedFileCount > 10) rStream.pause()

          currentFileCount += 1
          var fileName = entry.path;
          let up = entry.pipe(uploadFromStream(s3, fileName))

          up.on('uploaded', e => {
            uploadedFileCount += 1
            console.log(currentFileCount, uploadedFileCount)

            // resume
            if (currentFileCount - uploadedFileCount <= 10) rStream.resume()

            if (uploadedFileCount === allFileCount) resolve()
            entry.autodrain()
          }).on('error', e => {
            reject()
          })
        }
      }).on('error', e => {
        console.log("unzip error")
        reject()
      }).on('finish', e => {
        allFileCount = currentFileCount
      })

    rStream.on('error', e => {
      console.log(e)
      reject(e)
    })
  })
}
function uploadFromStream(s3, fileName) {
  var pass = new stream.PassThrough();
  var params = {Bucket: "bucket", Key: "hoge/unzip/" + fileName, Body: pass};
  let request = s3.upload(params, function(err, data) {
    if (err) pass.emit('error')
    if (!err) pass.emit('uploaded')
  })
  request.on('httpUploadProgress', progress => {
    console.log(progress)
  })
  return pass
}
This is the library I use when unzipping.
https://github.com/mhr3/unzip-stream
Help me!!
If you'd like to catch the NoSuchKey error thrown by createReadStream, you have 2 options:
Check if the key exists before reading it.
Catch the error from the stream.
First:
s3.headObject(params)
  .promise()
  .then(() => {
    // This will not throw the error anymore
    s3.getObject().createReadStream();
  })
  .catch(error => {
    if (error.statusCode === 404) {
      // Catching NoSuchKey
    }
  });
The only case you won't catch is if the file is deleted in the split second between parsing the response from headObject and running createReadStream.
Second:
s3.getObject().createReadStream().on('error', error => {
// Catching NoSuchKey & StreamContentLengthMismatch
});
This is a more generic approach and will catch all other errors, like network problems.
You need to listen for the emitted error earlier. Your error handler is only looking for errors during the unzip part.
A simplified version of your script.
s3.getObject(params)
  .createReadStream()
  .on('error', (e) => {
    // handle aws s3 error from createReadStream
  })
  .pipe(unzip)
  .on('data', (data) => {
    // retrieve data
  })
  .on('end', () => {
    // stream has ended
  })
  .on('error', (e) => {
    // handle error from unzip
  });
This way, you do not need to make an additional call to AWS to find out if the object exists.
You can listen for events (like error, data, finish) on the stream you receive back.
function getObjectStream (filePath) {
  return s3.getObject({
    Bucket: bucket,
    Key: filePath
  }).createReadStream()
}

let readStream = getObjectStream('/path/to/file.zip')
readStream.on('error', function (error) {
  // Handle your error here.
})
Tested for "No Key" error.
it('should not be able to get stream of unavailable object', function (done) {
  let filePath = 'file_not_available.zip'
  let readStream = s3.getObjectStream(filePath)
  readStream.on('error', function (error) {
    expect(error instanceof Error).to.equal(true)
    expect(error.message).to.equal('The specified key does not exist.')
    done()
  })
})
Tested for success.
it('should be able to get stream of available object', function (done) {
  let filePath = 'test.zip'
  let receivedBytes = 0
  let readStream = s3.getObjectStream(filePath)
  readStream.on('error', function (error) {
    expect(error).to.equal(undefined)
  })
  readStream.on('data', function (data) {
    receivedBytes += data.length
  })
  readStream.on('finish', function () {
    expect(receivedBytes).to.equal(3774)
    done()
  })
})
To prevent a crash, request the object's head metadata asynchronously first: headObject does not return the whole object, so it takes much less time. Try this one!
const AWS = require('aws-sdk');

// credentials shown inline as in the original; normally they come from config
const s3bucket = new AWS.S3({
  accessKeyId: '',
  secretAccessKey: ''
});

const params = {
  Bucket: 'your bucket name',
  Key: 'path to object'
};

const isObjectErrorExists = async () => {
  try {
    // headObject fetches only the metadata, and .promise() lets us
    // await the request until it completes
    await s3bucket.headObject(params).promise();
    return true;
  } catch (err) {
    return false; // headObject threw, e.g. the key does not exist
  }
};

const yourFunction = async () => {
  if (await isObjectErrorExists()) {
    s3bucket.getObject(params).createReadStream(); // works smoothly
  }
};
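Note that the earlier caveat still applies: the object could be deleted between the headObject call and getObject, so it is still worth attaching an 'error' listener to the read stream.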