Concat audio files, then call the created file - javascript

I am new to Node.js and am trying to concatenate a folder of audio files with ffmpeg and then stream the created file.
I thought I could call the function that creates the file with await, and once it finished the code would continue, allowing me to use the created file. However, that's not what's happening: I am getting a "file undefined" error.
Main function
// CONCATS THE FILES
await concatAudio(supportedFileTypes.supportedAudioTypes, `${path}${config[typeKey].audio_directory}`);
// CALLS THE FILE CREATED FROM concatAudio
const randomSong = await getRandomFileWithExtensionFromPath(
  supportedFileTypes.supportedAudioTypes,
  `${path}${config[typeKey].audio_final}`
);
concatAudio function
var audioconcat = require('audioconcat');
const getRandomFileWithExtensionFromPath = require('./randomFile');
const find = require('find');
// Async function to get a random file from a path
module.exports = async (extensions, path) => {
  // Find all of our files with the extensions
  let allFiles = [];
  extensions.forEach(extension => {
    allFiles = [...allFiles, ...find.fileSync(extension, path)];
  });
  await audioconcat(allFiles)
    .concat('./live-stream-radio/final/all.mp3')
    .on('start', function(command) {
      console.log('ffmpeg process started:', command);
    })
    .on('error', function(err, stdout, stderr) {
      console.error('Error:', err);
      console.error('ffmpeg stderr:', stderr);
    })
    .on('end', function(output) {
      console.error('Audio created in:', output);
    });
  // Return a random file
  // return '/Users/Semmes/Downloads/live-stream-radio-ffmpeg-builds/live-stream-radio/final/all.mp3';
};

I solved it by wrapping the call in a Promise. I adapted my solution to your script (I haven't tested this exact code, but it's almost the same). I hope this helps someone:
var audioconcat = require('audioconcat');
const getRandomFileWithExtensionFromPath = require('./randomFile');
const find = require('find');
// Async function to get a random file from a path
module.exports = async (extensions, path) => {
  // Find all of our files with the extensions
  let allFiles = [];
  extensions.forEach(extension => {
    allFiles = [...allFiles, ...find.fileSync(extension, path)];
  });
  const concatPromise = new Promise((resolve, reject) => {
    audioconcat(allFiles)
      .concat('./live-stream-radio/final/all.mp3')
      .on('start', function(command) {
        console.log('ffmpeg process started:', command);
      })
      .on('error', function(err, stdout, stderr) {
        console.error('Error:', err);
        console.error('ffmpeg stderr:', stderr);
        reject(err);
      })
      .on('end', function(output) {
        console.log('Audio created in:', output);
        resolve(output);
      });
  });
  // Resolves with the output path once ffmpeg's 'end' event fires
  return await concatPromise;
  // return '/Users/Semmes/Downloads/live-stream-radio-ffmpeg-builds/live-stream-radio/final/all.mp3';
};
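The same wrapping pattern generalizes to any event-emitter API: resolve on the success event, reject on the failure event. Here is a minimal helper sketch (the emitterToPromise name is mine, not from the code above; Node 11.13+ also ships a built-in events.once, which resolves on a named event and rejects on 'error'):
// Sketch: settle a promise from a pair of one-shot emitter events
const emitterToPromise = (emitter, successEvent, failureEvent) =>
  new Promise((resolve, reject) => {
    emitter.once(successEvent, resolve); // e.g. 'end'
    emitter.once(failureEvent, reject); // e.g. 'error'
  });
// Hypothetical usage with audioconcat:
// const output = await emitterToPromise(
//   audioconcat(allFiles).concat('./live-stream-radio/final/all.mp3'),
//   'end', 'error');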

Related

Using fs.createReadStream and fs.createWriteStream for multiple files

I'm trying to loop over multiple files with fs.createReadStream, and I cannot figure out how to read the second file.
const fs = require('fs')
const csv = require('csv-parse')

const parser = csv({
  ...
})

const files = process.argv.slice(2)

async function analyzeFiles () {
  for (const file of files) {
    const string = file
    console.log(`Analyzing ${file}.`)
    await runFile(file, string)
    console.log(`Analyzed ${file}.`)
  }
}

async function runFile (filepath, string) {
  return new Promise(function (resolve, reject) {
    const shimmedData = {}
    let fileName = ''
    fs.createReadStream(filepath)
      .pipe(parser)
      .on('data', (row) => {
        // ...
        fileName = 'something dynamic from row'
        shimmedData[index] = row // Or something similar, not sure this matters
      })
      .on('error', (e) => {
        console.log('BONK', e)
      })
      .on('end', () => {
        fs.writeFile(`${fileName}.json`, JSON.stringify(shimmedData), (err) => {
          if (err) {
            console.log(err)
            reject(err)
          } else {
            console.log('File written successfully.')
            resolve()
          }
        })
      })
  })
}

analyzeFiles()
And then I run node script.js file1.txt file2.txt file3.txt
When I run this, only the first file will ever be saved. Looking into it with console logs, it looks like for the second file, fs.createReadStream is never called.
➜ shimeBirdData git:(main) ✗ node stackoverflowExample.js sampleData/sample.txt sampleData/sample2.txt
Analyzing sampleData/sample.txt.
File written successfully.
Analyzed sampleData/sample.txt.
Analyzing sampleData/sample2.txt.
And then it just hangs there; no second file is ever saved.
What am I missing?
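A likely cause (my reading; it isn't confirmed in the thread): parser is a single stream instance created once at the top of the script. The first file pipes through it and the parser emits 'end', after which that stream is finished; piping the second file into the already-ended parser never produces 'data' or 'end' events, so the second promise never settles. A sketch of the fix, creating a fresh parser for each file:
async function runFile (filepath, string) {
  return new Promise(function (resolve, reject) {
    const parser = csv({ /* same options as above */ }) // fresh parser per file
    const shimmedData = {}
    let fileName = ''
    fs.createReadStream(filepath)
      .pipe(parser)
      // ... same 'data', 'error' and 'end' handlers as before
  })
}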

Promise chaining causing increased execution time?

I am creating a simple Node.js function that converts a PDF to an image, crops the image, and merges the pieces back together with ImageMagick.
This is the complete code I am using:
var os = require('os');
var fs = require('fs');
var path = require('path');
var gs = require('node-gs');
var sharp = require('sharp');
var areaMap = require('./areaMap');
const { performance } = require('perf_hooks');
var spawn = require('child_process').spawnSync;

var pExcep = 'someException';
var gsPath = 'Ghostscript/gs26';
var src = path.join(os.tmpdir(), '/');
var Files = {
  file1: path.join(src, 'out1.jpeg'),
  file2: path.join(src, 'out2.jpeg'),
  OutImg: path.join(src, 'out.jpeg')
};

var crop = function (s, sFile) {
  return new Promise((res, rej) => {
    s = areaMap[s];
    sharp(Files.OutImg).extract(s)
      .toFile(sFile)
      .then(() => res())
      .catch((err) => rej(err));
  });
};

var getBaseCard = function (s) {
  if (RegExp('^([0-9]{8})$').test(s)) { return 'SOMETHINGHERE'; } else { return 'inception'; }
  // This can be done on the client side.
};

var GetCardType = function (base, sInfo) {
  return new Promise((res, rej) => {
    if (base === 'SOMETHINGHERE') {
      if (sInfo.includes('SOMETHINGHERE2')) {
        if (sInfo.includes(pExcep)) {
          res('PA_S_');
        } else {
          res('PA_S2');
        }
      } else {
        res('PA_ST');
      }
    } else {
      res('SA_');
    }
  });
};

var PdfToText = function (file, pass) {
  return new Promise((res, rej) => {
    gs()
      .batch().safer().nopause().res(2)
      .option('-dDEVICEWIDTHPOINTS=20').option('-dDEVICEHEIGHTPOINTS=20').option('-dFIXEDMEDIA')
      .option('-sPDFPassword=' + pass)
      .device('txtwrite').output('-').input(file).executablePath(gsPath)
      .exec((err, stdout, stderr) => {
        if (!err) {
          res(stdout);
        } else {
          console.log(stdout);
          console.log(err);
          console.log(stderr);
        }
      });
  });
};

var getBaseImage = function (file, pass, quality) {
  return new Promise((res, rej) => {
    gs()
      .batch().nopause().safer().res(300 * quality)
      .option('-dTextAlphaBits=4').option('-dGraphicsAlphaBits=4')
      .option('-sPDFPassword=' + pass)
      .executablePath(gsPath).device('jpeg').output(Files.OutImg).input(file)
      .exec((err, stdout, stderr) => {
        if (!err) { res(); } else { rej(stdout); }
      });
  });
};

exports.processCard = function (file, password, quality) {
  return new Promise((resolve, reject) => {
    getBaseImage(file, password, quality) // Convert PDF to image
      .then(() => {
        PdfToText(file, password) // Extract text from the PDF
          .then((res) => {
            GetCardType(getBaseCard(password), res) // Finally get the PDF type
              .then((ct) => {
                // Crop the image here using sharp
                Promise.all([
                  crop(ct + 'A_' + quality, Files.file1),
                  crop(ct + 'B_' + quality, Files.file2)])
                  .then(() => {
                    // Merge the two images above into one using ImageMagick convert
                    spawn('convert', [Files.file1, Files.file2, '+append', 'files/out1.jpg']);
                    fs.unlinkSync(Files.OutImg); // Unlink tmp files
                    fs.unlinkSync(Files.file1);
                    fs.unlinkSync(Files.file2);
                    resolve(); // Finally resolve
                  }).catch((err) => reject(err));
              }).catch((err) => reject(err));
          }).catch((err) => reject(err));
      }).catch((err) => reject(err));
  });
};
And now these are the problems I am facing:
1. ImageMagick isn't creating the output file.
2. fs.unlinkSync throws ENOENT: no such file or directory, unlink '/tmp/out1.jpeg' on roughly every second execution.
3. Using the above code increases the execution time. For example, getBaseImage should complete in 600 ms but takes 1400 ms with the code above. Overall, the complete function (not just getBaseImage) should finish in 1100-1500 ms(*) on average, but it takes ~2500 ms.
(*) The 1100-1500 ms time is achievable with function chaining, but that is hard for me to read and maintain.
I am going to use this function in Firebase Functions.
How do I properly chain these functions?
EDIT
exports.processCard = function (file, password, quality) {
  return new Promise((resolve, reject) => {
    console.log(performance.now());
    getBaseImage(file, password, quality) // Convert PDF to image
      .then(() => { return PdfToText(file, password); })
      .then((res) => { return GetCardType(getBaseCard(password), res); })
      .then((ct) => {
        return Promise.all([
          crop(ct + 'A_' + quality, Files.file1),
          crop(ct + 'B_' + quality, Files.file2)]);
      })
      .then(() => {
        spawn('convert', [Files.file1, Files.file2, '+append', 'files/out1.jpg']);
        fs.unlinkSync(Files.OutImg); // Unlink tmp files
        fs.unlinkSync(Files.file1);
        fs.unlinkSync(Files.file2);
        resolve();
      })
      .catch((err) => { console.log(err); });
  });
};
Using the above pattern didn't solve my issues.
There's a good chance this weirdness is caused by using the file system. If I understand it correctly, the fs in Cloud Functions is in memory, so when you write to it, read from it, and remove from it, you're using more and less OS memory. That can get weird if a function is called repeatedly and reuses the loaded module.
One thing to try, to keep the state clean for each invocation, is to put everything (including the requires) inside the scope of the handler. That way you instantiate everything freshly on each invocation.
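For example, a sketch of that scoping idea (not a full rewrite; note that require caches modules, so this mainly resets your own module-level variables such as the tmp paths):
exports.processCard = function (file, password, quality) {
  // rebuild per-invocation state inside the handler
  const os = require('os');
  const path = require('path');
  const src = path.join(os.tmpdir(), '/'); // tmp paths recomputed per call
  // ... define Files, crop, getBaseImage, etc. here, then run the pipeline
};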
Finally, you don't seem to be waiting for the spawned convert command to run (note that this assumes the async spawn from child_process rather than the spawnSync your code imports); you'll need to wait for it to complete:
const convertProc = spawn('convert', [Files.file1, Files.file2, '+append', 'files/out1.jpg']);
convertProc.on('close', function() {
  fs.unlinkSync(Files.OutImg); // Unlink tmp files
  fs.unlinkSync(Files.file1);
  fs.unlinkSync(Files.file2);
  resolve();
});
convertProc.on('error', function(error) {
  reject(error);
});
Then you wait for it to complete before you resolve.
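Since the question's code imports spawnSync, switching to the async spawn and wrapping it in a promise keeps it compatible with the .then chain. A sketch (the run helper is my naming, not part of the answer above):
const { spawn } = require('child_process');
// Resolves when the child exits cleanly; rejects on spawn failure or a non-zero exit code.
const run = (cmd, args) => new Promise((resolve, reject) => {
  const proc = spawn(cmd, args);
  proc.on('error', reject); // the process could not be started
  proc.on('close', code => code === 0 ? resolve() : reject(new Error(cmd + ' exited with code ' + code)));
});
// Inside the chain:
// run('convert', [Files.file1, Files.file2, '+append', 'files/out1.jpg'])
//   .then(() => { /* unlink tmp files, then resolve */ });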

ffmpeg - on end function probably not working correctly?

I've got this code that loops through some .mp4 files and creates a screenshot of each:
files.forEach(file => {
  console.log(file);
  if (!fs.existsSync('img/' + file.replace('.mp4', '.png'))) {
    ffmpeg({ source: 'movies/' + file })
      .takeScreenshots({ timemarks: [ '50' ], size: '150x100' }, 'img/', function(err, filenames) {
      })
      .on('end', function() {
        fs.rename('img/tn.png', 'img/' + file.replace('.mp4', '.png'), function(err) { if (err) console.log('Error: ' + err) });
        sleep(1000);
      })
  }
});
Now I have the problem that .on('end') sometimes doesn't seem to fire correctly. Error:
ENOENT: no such file or directory, rename
I think it's because saving the tn.png is slower than the renaming...
I wouldn't mix callbacks, sync calls, sleep and loops together. You can use the promise version of fs to convert all your callback-style code to promise style, and then run the work either sequentially or in parallel. I would also wrap the screenshot code in a promise.
Here is the code:
const fs = require("fs").promises;
function takeScreenshot(file) {
return new Promise((resolve, reject) => {
ffmpeg({"source": `movies/${file}`})
.takeScreenshots({"timemarks": ["50"], "size": "150x100"}, "img/", function (err, filenames) {
})
.on("end", function () {
resolve();
})
.on("error", err => {
reject(err);
});
});
}
// execute one by one
async function sequential(files) {
for (const file of files) {
const fileExists = await fs.stat(`img/${file.replace(".mp4", ".png")}`);
if (fileExists) {
await takeScreenshot(file);
await fs.rename("img/tn.png", `img/${ file.replace(".mp4", ".png")}`);
}
}
}
// execute in parallel
async function parallel(files) {
return Promise.all(files.map(async file => {
const fileExists = await fs.stat(`img/${file.replace(".mp4", ".png")}`);
if (fileExists) {
await takeScreenshot(file);
await fs.rename("img/tn.png", `img/${ file.replace(".mp4", ".png")}`);
}
}));
}
Hope this helps.

NODE.JS : How to make sure a reading stream has ended and the data written?

So, I am new to async/await in Node.js and I could use some help figuring out this code.
I'm trying to get a file from an FTP server via the 'ftp' package, write the data into a local 'data.txt', and open it later in the code. My problem is that I don't understand how to make sure the file is completely written to 'data.txt' before trying to open it with fs.readFileSync().
const fs = require('fs')
const ftpclient = require('ftp')
const ConfigFTP = require('./credentials.json')
const FtpClient = new ftpclient();

FtpClient.on('ready', async function() {
  await new Promise(resolve =>
    FtpClient.get('the ftp file directory', (err, stream) => {
      if (err) throw err;
      stream.once('close', () => { FtpClient.end(); });
      // Stream written in data.txt
      const Streampipe = stream.pipe(fs.createWriteStream('data.txt')).on('finish', resolve)
    })
  )
})

FtpClient.connect(ConfigFTP);
var Data = fs.readFileSync('data.txt', 'utf8');
I'm not sure exactly what you want to accomplish, but you can do something like one of these:
1)
const ConfigFTP = require('./credentials.json')
const FtpClient = new ftpclient()

let writeStream = fs.createWriteStream('data.txt')

FtpClient.on('ready', async function () {
  FtpClient.get('the ftp file directory', (err, stream) => {
    if (err) throw err
    stream.once('close', () => { FtpClient.end() })
    // Stream written in data.txt
    const Streampipe = stream.pipe(writeStream)
  })
})

FtpClient.connect(ConfigFTP)

writeStream.on('finish', () => {
  var Data = fs.readFileSync('data.txt', 'utf8')
})
2)
const ConfigFTP = require('./credentials.json')
const FtpClient = new ftpclient()

FtpClient.on('ready', async function() {
  await new Promise(resolve =>
    FtpClient.get('the ftp file directory', (err, stream) => {
      if (err) throw err
      stream.once('close', () => { FtpClient.end() })
      // Stream written in data.txt
      const Streampipe = stream.pipe(fs.createWriteStream('data.txt')).on('finish', resolve)
    })
  )
  var Data = fs.readFileSync('data.txt', 'utf8')
})

FtpClient.connect(ConfigFTP)

How to get xml2js results out of the parser in ES6?

I'm building a server in Node that will search a folder to see if an XML file exists (glob), and if it does, read the file in (fs) as a JSON object (xml2js) and eventually store it in a database somewhere. I want to get the results OUT of the parser and into another variable so I can do other things with the data. From what I can tell, something is running asynchronously, but I can't figure out how to wait until it's finished before moving on.
I'm separating my function out into a controller elsewhere from app.js:
app.controller.js
const fs = require('fs-extra');
const glob = require('glob');
const xml2js = require('xml2js');

exports.requests = {};

exports.checkFileDrop = async () => {
  console.log('Checking for xml in filedrop...');
  // this is the only place await works...
  await glob('./filedrop/ALLREQUESTS-*.xml', (err, files) => {
    var parser = new xml2js.Parser();
    // this is looking for a specific file now, which I'll address later once I can figure out this issue
    fs.readFile('./filedrop/ALLREQUESTS-20170707.xml', 'utf16le', function (err, data) {
      if (err) {
        console.log('ERROR: ', err);
      } else {
        parser.parseString(data, (err, result) => {
          if (err) {
            console.log('ERROR: ', err);
          } else {
            console.log('data found');
            exports.requests = JSON.stringify(result.Records.Record);
            // data is outputted here correctly
            console.log(exports.requests);
            // this doesn't even seem to want to save to exports.requests anyways...
          }
        });
      }
    });
  });
}
app.js
const appController = require('./controllers/app.controller');
// check if there is a file in filedrop
appController.checkFileDrop();
// prints out an empty object
console.log(appController.requests);
// can't do anything if it doesn't exist yet
appController.saveToDB(appController.requests);
await will wait for a Promise value to resolve, otherwise it'll just wrap the value it is given in a promise and resolve the promise right away. In your example,
await glob('./filedrop/ALLREQUESTS-*.xml', (err, files) => {
the call to glob does not return a Promise, so the await is essentially useless. So you need to create the promise yourself.
exports.checkFileDrop = async () => {
  console.log('Checking for xml in filedrop...');
  const files = await new Promise((resolve, reject) => glob('./filedrop/ALLREQUESTS-*.xml', (err, files) => {
    if (err) reject(err);
    else resolve(files);
  }));
  const parser = new xml2js.Parser();
  const data = await new Promise((resolve, reject) => fs.readFile('./filedrop/ALLREQUESTS-20170707.xml', 'utf16le', function (err, data) {
    if (err) reject(err);
    else resolve(data);
  }));
  const result = await new Promise((resolve, reject) => parser.parseString(data, (err, result) => {
    if (err) reject(err);
    else resolve(result);
  }));
  console.log('data found');
  const requests = JSON.stringify(result.Records.Record);
  console.log(requests);
}
Note that now this function will reject the promise it returns instead of force-logging the error.
You can also condense this down with a helper. Node 8 for instance includes util.promisify to make code like this easier to write, e.g.
const util = require('util');

exports.checkFileDrop = async () => {
  console.log('Checking for xml in filedrop...');
  const files = await util.promisify(glob)('./filedrop/ALLREQUESTS-*.xml');
  const parser = new xml2js.Parser();
  const data = await util.promisify(fs.readFile)('./filedrop/ALLREQUESTS-20170707.xml', 'utf16le');
  const result = await util.promisify(parser.parseString.bind(parser))(data);
  console.log('data found');
  const requests = JSON.stringify(result.Records.Record);
  console.log(requests);
}
You can use async/await
import fs from 'fs';
import { promisify } from 'util';
import xml2js from 'xml2js';

const xmlToJson = async filePath => {
  const parser = new xml2js.Parser();
  try {
    const data = await fs.promises.readFile(filePath, 'utf8');
    const result = await promisify(parser.parseString.bind(parser))(data);
    const requests = JSON.stringify(result.merchandiser.product);
    return requests;
  } catch (err) {
    console.log(err);
  }
};
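Usage would look something like this (the path is only an example; result.merchandiser.product above assumes this answerer's own XML shape rather than the Records.Record structure from the question):
xmlToJson('./filedrop/ALLREQUESTS-20170707.xml')
  .then(requests => console.log(requests));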
