got this code to loop through some .mp4 files and create a screenshot of them:
// NOTE(review): this loop starts one ffmpeg job per file without waiting for
// any of them; every job writes the same img/tn.png before the 'end' rename,
// so concurrent jobs race on that shared name — presumably the cause of the
// ENOENT reported below. sleep() here cannot pause the asynchronous work.
files.forEach(file => {
console.log(file);
// Only create a thumbnail when the matching .png does not exist yet.
if(!fs.existsSync('img/' + file.replace('.mp4', '.png'))) {
ffmpeg({ source: 'movies/' + file})
// Screenshot at the 50s timemark into img/ (fluent-ffmpeg's default file name).
.takeScreenshots({ timemarks: [ '50' ], size: '150x100' }, 'img/', function(err, filenames) {
})
.on('end', function() {
// Rename the generic tn.png to <movie>.png once ffmpeg reports completion.
fs.rename('img/tn.png', 'img/' + file.replace('.mp4', '.png'), function(err) {if (err) console.log('Error: ' + err) });
sleep(1000);
})
}
});
Now I have the problem that the .on('end') handler sometimes does not seem to work. Error:
ENOENT: no such file or directory, rename
I think it's because saving tn.png is slower than the renaming...
I wouldn't mix callbacks, synchronous calls, sleep and loops together. You can use the promise-based version of fs to convert all your callback-style code to promise style. You can then run the work sequentially or in parallel.
Also, I would say convert the screenshot code to wrap in promise.
here is the code
const fs = require("fs").promises;
function takeScreenshot(file) {
    // Adapts fluent-ffmpeg's event-based screenshot API to a promise:
    // fulfilled on the 'end' event, rejected on 'error'.
    return new Promise((resolve, reject) => {
        const source = `movies/${file}`;
        ffmpeg({source})
            .takeScreenshots({"timemarks": ["50"], "size": "150x100"}, "img/", function (err, filenames) {
            })
            .on("end", () => resolve())
            .on("error", reject);
    });
}
// execute one by one
async function sequential(files) {
    for (const file of files) {
        const target = `img/${file.replace(".mp4", ".png")}`;
        // fs.stat rejects (throws under await) when the file is missing. The
        // original awaited it unguarded — crashing for missing files — and
        // then took the screenshot only when the .png already existed, which
        // is the inverse of the intent. Treat a stat failure as "missing".
        let exists = true;
        try {
            await fs.stat(target);
        } catch {
            exists = false;
        }
        if (!exists) {
            await takeScreenshot(file);
            // ffmpeg always writes img/tn.png; rename it to the real target.
            await fs.rename("img/tn.png", target);
        }
    }
}
// execute in parallel
async function parallel(files) {
    // NOTE(review): every job renames the same img/tn.png, so truly parallel
    // execution still races on that shared name — confirm the screenshot can
    // be given a unique per-file name before relying on this variant.
    return Promise.all(files.map(async file => {
        const target = `img/${file.replace(".mp4", ".png")}`;
        // Same fix as sequential(): fs.stat rejects for a missing file, and
        // the work should happen only when the .png does NOT exist yet.
        let exists = true;
        try {
            await fs.stat(target);
        } catch {
            exists = false;
        }
        if (!exists) {
            await takeScreenshot(file);
            await fs.rename("img/tn.png", target);
        }
    }));
}
Hope this helps.
Related
I am creating a simple Node.js function that converts a PDF to an image, crops the image, and merges the pieces back together with ImageMagick.
and this is the complete code i am using :
var os = require('os');
var fs = require('fs');
var path = require('path');
var gs = require('node-gs');
var sharp = require('sharp');
var areaMap = require('./areaMap');
const { performance } = require('perf_hooks');
var spawn = require('child_process').spawnSync;
// Marker string used to detect a special-case card type (redacted value).
var pExcep = 'someException';
// Relative path to the bundled Ghostscript executable.
var gsPath = 'Ghostscript/gs26';
// Working directory: the OS temp dir (in-memory on Firebase Functions).
var src = path.join(os.tmpdir(), '/');
// Intermediate image paths shared by every processing step below.
var Files = {
file1: path.join(src, 'out1.jpeg'),
file2: path.join(src, 'out2.jpeg'),
OutImg: path.join(src, 'out.jpeg')
}
// Crops the rendered page image (Files.OutImg) to the region named by `s`
// in areaMap and writes it to `sFile`. Returns a promise.
var crop = function (s, sFile) {
    // sharp's .toFile() already returns a promise; wrapping it in
    // new Promise (as the original did) was the promise-constructor
    // anti-pattern and added nothing.
    return sharp(Files.OutImg).extract(areaMap[s]).toFile(sFile);
};
var getBaseCard = function (s) {
    // Exactly eight digits selects the 'SOMETHINGHERE' card base; anything
    // else falls back to 'inception'.
    //This can be done on client side.
    const isEightDigits = /^[0-9]{8}$/.test(s);
    return isEightDigits ? 'SOMETHINGHERE' : 'inception';
}
// Classifies the card from the base id (see getBaseCard) and the text
// extracted from the PDF. Always resolves; never rejects.
var GetCardType = function (base, sInfo) {
    // NOTE: the original compared against the misspelled 'SOEMTHINGHERE',
    // which can never equal getBaseCard()'s 'SOMETHINGHERE' result, so every
    // card silently fell through to 'SA_'. Fixed here.
    return new Promise((res, rej) => {
        if (base !== 'SOMETHINGHERE') {
            return res('SA_');
        }
        if (!sInfo.includes('SOMETHINGHERE2')) {
            return res('PA_ST');
        }
        // pExcep is the module-level special-case marker defined above.
        res(sInfo.includes(pExcep) ? 'PA_S_' : 'PA_S2');
    })
}
// Extracts the text layer of the (password-protected) PDF via Ghostscript's
// txtwrite device. Resolves with the raw stdout text.
var PdfToText = function (file, pass) {
    return new Promise((res, rej) => {
        gs()
            .batch().safer().nopause().res(2).option('-dDEVICEWIDTHPOINTS=20').option('-dDEVICEHEIGHTPOINTS=20').option('-dFIXEDMEDIA').option('-sPDFPassword=' + pass).device('txtwrite').output('-').input(file).executablePath(gsPath)
            .exec((err, stdout, stderr) => {
                if (!err) {
                    res(stdout);
                } else {
                    // The original only logged here, leaving the promise
                    // pending forever on failure; reject so callers see it.
                    console.log(stdout);
                    console.log(err);
                    console.log(stderr);
                    rej(err);
                }
            })
    });
}
// Renders the (password-protected) PDF to Files.OutImg as a JPEG at
// 300dpi * quality using Ghostscript.
var getBaseImage = function (file, pass, quality) {
    return new Promise((res, rej) => {
        gs()
            .batch().nopause().safer().res(300 * quality).option('-dTextAlphaBits=4').option('-dGraphicsAlphaBits=4').option('-sPDFPassword=' + pass)
            .executablePath(gsPath).device('jpeg').output(Files.OutImg).input(file)
            .exec((err, stdout, stderr) => {
                if (!err) {
                    res();
                } else {
                    // Reject with the actual error; the original rejected
                    // with stdout, discarding the failure reason.
                    rej(err);
                }
            })
    })
}
// Full pipeline: PDF -> page image -> text -> card type -> two crops ->
// ImageMagick merge. NOTE(review): each .then starts a nested chain instead
// of returning it, producing the pyramid shape; errors are still propagated
// because every level has its own .catch(reject).
exports.processCard = function (file, password, quality) {
return new Promise((resolve, reject) => {
getBaseImage(file, password, quality) // Convert PDF to Image
.then(() => {
PdfToText(file, password) // Extract Text from pdf
.then((res) => {
GetCardType(getBaseCard(password), res) // finally get PDF Type
.then((ct) => {
// crop image here using Sharp
Promise.all([
crop(ct + 'A_' + quality, Files.file1),
crop(ct + 'B_' + quality, Files.file2)])
.then(() => {
// Merge Above two image into one using ImageMagick convert
// NOTE(review): `spawn` is child_process.spawnSync (see requires), so this
// call does block until convert exits; but if convert fails, the unlinks
// below still run and can throw ENOENT on files that were never written.
spawn('convert', [Files.file1, Files.file2, '+append', 'files/out1.jpg']);
fs.unlinkSync(Files.OutImg); // Unlink tmp folders
fs.unlinkSync(Files.file1);
fs.unlinkSync(Files.file2);
resolve(); // finally resolve
}).catch((err) => reject(err));
}).catch((err) => reject(err))
}).catch((err) => reject(err))
}).catch((err) => reject(err))
})
}
and now these are the problem i am facing:
1. ImageMagick isn't creating the output file.
2. fs.unlinkSync throws ENOENT: no such file or directory, unlink '/tmp/out1.jpeg'
on average every second execution.
3. Using above code increases execution time.
For Example: getBaseImage should complete in 600ms but it takes 1400 using above code.
About speed in General it (The Complete Function not just getBaseImage) should finish in 1100-1500ms(*) on average but the time taken is ~2500ms.
*The 1100-1500ms time is achievable by using function chaining, but that is hard to read and maintain for me.
I am going to use this function in Firebase Functions.
How to properly chain these functions ?
EDIT
// Flattened rewrite of processCard from the EDIT.
// NOTE(review): the final .catch only logs — it never calls reject() — so on
// any failure the returned promise stays pending forever. The snippet is
// also missing its closing `})` / `}` as pasted here.
exports.processCard = function (file, password, quality) {
return new Promise((resolve, reject) => {
console.log(performance.now());
getBaseImage(file, password, quality) //Convert PDF TO IMAGE
.then(() => { return PdfToText(file, password) })
.then((res) => {return GetCardType(getBaseCard(password), res) })
.then((ct) => {
return Promise.all([
crop(ct + 'A_' + quality, Files.file1),
crop(ct + 'B_' + quality, Files.file2)])
})
.then(() => {
// NOTE(review): spawnSync blocks, but its exit status is never checked
// before the unlinks below — a failed convert leaves ENOENT errors here.
spawn('convert', [Files.file1, Files.file2, '+append', 'files/out1.jpg']);
fs.unlinkSync(Files.OutImg); // Unlink tmp folders
fs.unlinkSync(Files.file1);
fs.unlinkSync(Files.file2);
resolve();
})
.catch((err) => { console.log(err) });
Using above pattern didn't solved my issues here.
There's a good chance this weirdness is caused by using the file system. If I understand it correctly, the fs in cloud functions is in memory, so when you write to it, read from it, and remove from it, you're using more and less os memory. That can get weird if a function is called repeatedly and re uses the loaded module.
One thing to try to keep the state clean for each invocation is to put everything (including the requires) inside the scope of the handler. That way you instantiate everything freshly on each invocation.
Finally, you don't seem to be waiting for the spawned convert command to run, you'll need to wait for it to complete:
// NOTE(review): this requires the asynchronous child_process.spawn — with
// spawnSync (as required at the top of the file) the return value is a plain
// result object, not an event emitter, and .on would throw.
const convertProc = spawn('convert', [Files.file1, Files.file2, '+append', 'files/out1.jpg']);
convertProc.on('close', function() {
    fs.unlinkSync(Files.OutImg); // Unlink tmp folders
    fs.unlinkSync(Files.file1);
    fs.unlinkSync(Files.file2);
    resolve();
})
// The original registered 'close' twice; this second handler was clearly
// meant to catch spawn failures via the 'error' event.
convertProc.on('error', function(error) {
    reject(error);
});
Then you wait for it to complete before you resolve.
Cannot return the dat[0] value from the inner callback function
// NOTE(review): the `return dat[0]` below returns from the *callback*, not
// to the caller — fs.readdir returns immediately, so `ite` never receives
// dat[0]. This is the misconception the answers below address.
let ite = fs.readdir(directoryPath, function (err, files) {
if (err) {
return console.log('Unable to scan directory: ' + err);
}
// Keeps filenames whose second character equals the entry count —
// presumably an index-matching scheme; verify against the caller.
dat = files.filter(item => item[1] == files.length);
return dat[0];
});
You are returning from the callback function, which is called when the operation finishes. Returning from that callback doesn't change the value of your outer variable.
You can use readdirSync
const res = fs.readdirSync(directoryPath);
If you don't want it to be sync you can use fs.promises with async await
let res;
(async function() {
    res = await fs.promises.readdir("");
})().catch((err) => {
    // A floating async IIFE turns any failure into an unhandled rejection
    // (fatal in modern Node); handle — or at least log — it explicitly.
    console.error(err);
});
I don't recommend synchronous reads because they can block the main thread. Instead, you can pass a callback function as a parameter to another function.
// Callback-style wrapper: reports the matching filename via onSuccess, or an
// error message via onError.
function getData(onSuccess, onError) {
    fs.readdir(directoryPath, function (err, files) {
        if (err) {
            onError('Unable to scan directory: ' + err);
            // The original fell through here and crashed on `files.filter`
            // (files is undefined when err is set).
            return;
        }
        // `dat` was an implicit global in the original.
        const dat = files.filter(item => item[1] == files.length);
        onSuccess(dat[0]);
    });
}
getData(function(data){
console.log(data);
}, function(error){
//error here
})
We can make it as a promise method to run the code async. Here we don't block any thread. It will be running asynchronously.
// Promise wrapper: resolves with the first filename whose second character
// equals the directory's entry count (original matching scheme kept).
function getData(directoryPath) {
    return new Promise((resolve, reject) => {
        fs.readdir(directoryPath, function (err, files) {
            if (err) {
                reject('Unable to scan directory: ' + err.message);
                // The original fell through here and threw inside the
                // callback on `files.filter` (uncaught, not a rejection).
                return;
            }
            // `dat` was an implicit global in the original.
            const dat = files.filter(item => item[1] == files.length);
            resolve(dat[0]);
        });
    });
}
getData(directoryPath).then((data) => {
console.log(data);
}).catch((err) => {
console.error(err);
});
// or
try {
let data = await getData(directoryPath);
} catch (error) {
console.error(error);
}
I am writing a program where I need to process a video multiple times using ffmpeg. The ffmpeg codes (below) are inside a 'then' statement of a promise.
// Resize/pad the video, then concatenate intro + result + outro.
// NOTE(review): both steps are event-driven and asynchronous — any code
// written after this expression runs before 'end' ever fires, which is the
// problem described below.
ffmpeg(path)
.size('640x?')
.aspect('1:1')
.autopad('#682BAB')
.saveToFile(`${userDirPath}/11-${userFileName}`)
.on('end', () => {
// Second pass only starts once the padded file exists.
ffmpeg()
.input('test-11-start.mp4')
.mergeAdd(`${userDirPath}/11-${userFileName}`)
.mergeAdd('test-11-end.mp4')
.mergeToFile(`${userDirPath}/11-final-${userFileName}`, 'temp/')
.on('end', () => console.log('FFmpeg done!'));
});
There is another ffmpeg function after this (same, but with a different aspect ratio) and then, a 'then' statement with some other functions.
The problem is that this ffmpeg function runs asynchronously, and the next statements (which use the resulting file of ffmpeg func) are executed before it finishes executing and so I want it to run synchronously. I've tried async await (below) but it still runs asynchronously. What is wrong with code?
// NOTE(review): `await` has no effect here — the fluent-ffmpeg chain is not
// a thenable, so the function returns as soon as the job is *started*, not
// when it finishes. That is why this still "runs asynchronously"; see the
// promise wrapper in the answer below.
async function ffmpegMerge() {
try {
await ffmpeg(path)
.size('640x?')
.aspect('1:1')
.autopad('#682BAB')
.saveToFile(`${userDirPath}/11-${userFileName}`)
.on('end', () => {
ffmpeg()
.input(`test-11-start.mp4`)
.mergeAdd(`${userDirPath}/11-${userFileName}`)
.mergeAdd(`test-11-end.mp4`)
.mergeToFile(`${userDirPath}/11-final-${userFileName}.mp4`, 'temp/')
.on('end', () => console.log('FFmpeg done!'));
})
}
catch (err) {
// Unreachable in practice: the chain above does not reject.
return Promise.reject(new Error(err));
}
}
Create a function with promise and use await to wait until the function is resolved.
This is an example of using ffmpeg synchronously:
// Wraps the two-step ffmpeg pipeline (pad, then merge) in a promise so
// callers can `await ffmpegSync()`.
function ffmpegSync(){
    return new Promise((resolve,reject)=>{
        ffmpeg(path)
            .size('640x?')
            .aspect('1:1')
            .autopad('#682BAB')
            .saveToFile(`${userDirPath}/11-${userFileName}`)
            .on('end', () => {
                ffmpeg()
                    .input(`test-11-start.mp4`)
                    .mergeAdd(`${userDirPath}/11-${userFileName}`)
                    .mergeAdd(`test-11-end.mp4`)
                    .mergeToFile(`${userDirPath}/11-final-${userFileName}.mp4`, 'temp/')
                    // Resolve only after the merge finishes; the original
                    // called resolve() right after *starting* the merge.
                    .on('end', () => {
                        console.log('FFmpeg done!');
                        resolve();
                    })
                    .on('error', (err) => reject(new Error(err)));
            })
            // The original wrote `on('error', ...)` without the leading dot,
            // which is a ReferenceError at runtime.
            .on('error', (err) => reject(new Error(err)));
    })
}
Now just use the function ffmpegSync and await.
// Usage: wait for the thumbnail before continuing (works via hoisting).
createThumbnailForVideo().then(()=>{
console.log('ok');
})
// Wraps fluent-ffmpeg's screenshot call in a promise.
// NOTE(review): 'end' and 'error' handlers are each registered twice (both
// copies fire), and the snippet is missing the function's closing `}` as
// pasted. fileName / onlyName are free variables from the caller's scope.
function createThumbnailForVideo(){
return new Promise((resolve,reject)=>{
const ffmpegInstaller = require('#ffmpeg-installer/ffmpeg');
const ffmpeg = require('fluent-ffmpeg');
ffmpeg.setFfmpegPath(ffmpegInstaller.path);
var path = require('path'), // Default node module
pathToFile = path.join(__dirname, 'tempfiles', fileName),
pathToSnapshot = path.join(__dirname, 'tempfiles');
var proc = ffmpeg(pathToFile)
.on('filenames', (filenames)=> {})
.on('end', (data)=> {
console.log('screenshots were saved');
})
.on('error', (err)=> {
console.log('an error happened: ' + err.message);
return reject(new Error(err))
})
// One screenshot at the 1s mark, named thumb_<name>.png.
.takeScreenshots({ count: 1,filename:`thumb_${onlyName}.png`, timemarks: [ '00:00:01.000' ], size: '250x?' },pathToSnapshot)
.on('end', () => {
console.log('FFmpeg done!')
resolve()
})
.on('error', (err)=> {
console.log('an error happened: ' + err.message);
return reject(new Error(err))
})
})
I have this code that serves every markdown file in the './markdown' folder. At '/api/markdown/filename'.
// Serves ./markdown/<name>.md as JSON at /api/markdown/<name>.
// NOTE(review): reads *every* file in the folder on each request and calls
// res.json from inside nested callbacks — the "callback hell" the question
// is about. `markdownFolder` and `fileNoExtension` are implicit globals.
var apiRouter = express.Router();
markdownFolder = './markdown/';
apiRouter.get('/:markdown_file_noext', function(req, res) {
fs.readdir(markdownFolder, function(err, markdown) {
if (err) throw err;
markdown.forEach(function(file) {
fs.readFile(markdownFolder + file, 'utf8', function(err, file_content) {
if (err) throw err;
fileNoExtension = file.slice(0, file.indexOf('.'));
// Respond only for the file matching the URL parameter.
if (req.params.markdown_file_noext == fileNoExtension) {
res.json({
'title': fileNoExtension,
'markdown': marked(file_content)
});
};
});
});
});
});
But I end up with a ton of nested callbacks due to the nature of the 'fs' methods. How do I avoid this?
Using Q as promise library:
const Q = require('q');
const fs = require('fs');
const markdownFolder = './markdown/';
const readdir = Q.nfbind(fs.readdir);
const readFile = Q.nfbind(fs.readFile);
readdir(markdownFolder).then(markdown => {
    // Start one read per markdown file and wait for all of them at once.
    const reads = markdown.map(file => readFile(markdownFolder + file, 'utf8'));
    return Q.all(reads);
}).then(files => {
    // Do your magic.
}).catch(error => {
    // Do something with error.
});
You have different option.
Use named functions instead of anonymous functions. It would make the code a little more readable, but you will still be using callbacks.
Use Promises, but you will need to use bluebird to wrap the fs module.
For a more advance option, you can use generators and Promises to make your code look more like a sync way. Take a look at co or bluebird.coroutine.
With Promises you could do like this:
const path = require('path');
var apiRouter = express.Router();
markdownFolder = './markdown/';
// Promise-based version of the route: read the folder, read all files, then
// respond once with the matching entries.
apiRouter.get('/:markdown_file_noext', function(req, res) {
    readdir(markdownFolder)
        .then((files) => {
            // Pair each filename with its content. The original's second
            // .then referenced `file`, which was out of scope there — this
            // keeps the name alongside the content instead.
            const tasks = files.map((file) =>
                readFile(path.resolve(markdownFolder, file))
                    .then((content) => ({ file, content })));
            return Promise.all(tasks); // Read all files
        })
        .then((entries) => {
            // Aggregate all matching results in one array and return it,
            // instead of calling res.json for each result.
            const results = entries
                .filter(({ file }) => file.slice(0, file.indexOf('.')) === req.params.markdown_file_noext)
                .map(({ file, content }) => ({
                    'title': file.slice(0, file.indexOf('.')),
                    'markdown': marked(content)
                }));
            res.json(results);
        })
        .catch((err) => {
            // All errors are caught here
            console.log(err);
        })
});
// Promise wrapper around fs.readdir; resolves with the filename array.
function readdir(folderPath) {
    return new Promise((resolve, reject) => {
        // The original wrote `(err, files) {` without `=>`, a syntax error.
        fs.readdir(folderPath, (err, files) => {
            if (err) {
                return reject(err);
            }
            resolve(files);
        });
    });
}
// Promise wrapper around fs.readFile (utf8); resolves with the file's text.
function readFile(filePath) {
    return new Promise((resolve, reject) => {
        fs.readFile(filePath, 'utf8', (error, content) => {
            if (error) {
                reject(error);
                return;
            }
            resolve(content);
        });
    });
}
I am using Node.js file system to build an array of file paths. I would like to know when all files have been read, so I could work further with my array.
Sequence of events:
Go into a folder
Get a path of each file
Put each path into an array
Let me know once you're done
Code:
'use strict';
const fs = require('fs');
// Resolves with the list of pathnames once every entry has been processed.
function readDirectory(path) {
    return new Promise((resolve, reject) => {
        fs.readdir(path, (err, contents) => {
            if (err) {
                reject(err);
                // Don't fall through: `contents` is undefined on error.
                return;
            }
            const files = [];
            // The original called resolve() inside the forEach, settling the
            // promise on the first iteration — before any push had happened.
            // Collect one promise per entry and resolve after all complete.
            const pending = contents.map((file) => {
                const pathname = `${ path }/${ file }`;
                return getFilesFromPath(pathname).then(() => {
                    files.push(pathname);
                });
            });
            Promise.all(pending)
                .then(() => resolve(files))
                .catch(reject);
        });
    });
}
// Resolves with a file's contents, or recurses into a directory.
function getFilesFromPath(path) {
    return new Promise((resolve, reject) => {
        // A synchronous statSync throw (e.g. missing path) rejects the
        // promise: exceptions inside the executor become rejections.
        const stat = fs.statSync(path);
        if (stat.isFile()) {
            fs.readFile(path, 'utf8', (err, data) => {
                if (err) {
                    reject(err);
                } else {
                    resolve(data);
                }
            });
        } else if (stat.isDirectory()) {
            // Propagate the recursive result; the original ignored it, so
            // the promise never settled for directories.
            readDirectory(path).then(resolve, reject);
        } else {
            // Neither file nor directory (socket, fifo, ...): settle instead
            // of hanging forever as the original did.
            resolve(null);
        }
    });
}
getFilesFromPath('./dist');
Would be great to glue with:
Promise.all(files).then(() => {
// do stuff
})
Your suggestion pretty much works - did you try it? Here's a typical way of doing it:
getFilesFromPath( path ).then( files => {
    const filePromises = files.map( readFile );
    // `Promises.all` in the original was a typo (ReferenceError at runtime);
    // the global is `Promise`.
    return Promise.all( filePromises );
}).then( fileContentsArray => {
    //do stuff - the array will contain the contents of each file
});
You'll have to write the "readFile()" function yourself, but looks like you got that covered.