nodejs get video duration - javascript

I've been trying this for ages now and I'm not making any progress.
I found this on Google: https://gist.github.com/Elements-/cf063254730cd754599e
It runs fine on its own, but when I put that code in a function and try to use it with my code, it's not working.
Code:
fs.readdir(`${__dirname}/data`, (err, files) => {
    if (err) return console.error(`[ERROR] ${err}`);
    files.forEach(file => {
        if (file.endsWith(".mp4")) {
            // getVideoDuration(`${__dirname}/data/${file}`)
            group = new Group(file.split(".")[0], file, null, getVideoDuration(`${__dirname}/data/${file}`), 0);
            groups.push(group);
        }
    });
    console.log(groups);
});
function getVideoDuration(video) {
    var buff = new Buffer.alloc(100);
    fs.open(video, 'r', function (err, fd) {
        fs.read(fd, buff, 0, 100, 0, function (err, bytesRead, buffer) {
            // the MP4 'mvhd' (movie header) atom holds the time scale and
            // duration as 32-bit big-endian integers
            var start = buffer.indexOf(new Buffer.from('mvhd')) + 17;
            var timeScale = buffer.readUInt32BE(start, 4);
            var duration = buffer.readUInt32BE(start + 4, 4);
            var movieLength = Math.floor(duration / timeScale);
            console.log('time scale: ' + timeScale);
            console.log('duration: ' + duration);
            console.log('movie length: ' + movieLength + ' seconds');
            return movieLength;
        });
    });
}
Output:
[
  Group {
    _name: 'vid',
    _video: 'vid.mp4',
    _master: null,
    _maxTime: undefined,
    _currentTime: 0
  },
  Group {
    _name: 'vid2',
    _video: 'vid2.mp4',
    _master: null,
    _maxTime: undefined,
    _currentTime: 0
  }
]
time scale: 153600
duration: 4636416
movie length: 30 seconds
time scale: 153600
duration: 4636416
movie length: 30 seconds
It's logging the information correctly, but the function is returning undefined.

This seems like a lot of extra work for little benefit, so I'm going to refer you to get-video-duration https://www.npmjs.com/package/get-video-duration, which does a great job of getting the duration of any video file in seconds, minutes, and hours.
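If I remember the package's API correctly, it exposes a getVideoDurationInSeconds helper; a minimal sketch (double-check against its README):
const { getVideoDurationInSeconds } = require('get-video-duration');
// resolves with the duration in seconds (it uses ffprobe under the hood)
getVideoDurationInSeconds(`${__dirname}/data/vid.mp4`).then(duration => {
    console.log(duration);
});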

Copying the last comment of the gist you sent, I came up with this:
const fs = require("fs").promises;
class Group {
constructor(name, video, master, maxTime, currentTime) {
this._name = name;
this._video = video;
this._master = master;
this._maxTime = maxTime;
this._currentTime = currentTime;
}
setMaster(master) {
if (this._master != null) {
this._master.emit('master');
}
this._master = master;
this._master.emit('master');
}
};
const asyncForEach = async (array, callback) => {
for (let index = 0; index < array.length; index++) {
await callback(array[index], index, array);
}
};
async function loadGroups() {
const files = await fs.readdir(`${__dirname}/data`);
const groups = []
await asyncForEach(files, async file => {
if (file.endsWith(".mp4")) {
const duration = await getVideoDuration(`${__dirname}/data/${file}`);
const group = new Group(file.split(".")[0], file, null, duration, 0);
groups.push(group);
}
});
console.log(groups);
}
async function getVideoDuration(video) {
const buff = Buffer.alloc(100);
const header = Buffer.from("mvhd");
const file = await fs.open(video, "r");
const {
buffer
} = await file.read(buff, 0, 100, 0);
await file.close();
const start = buffer.indexOf(header) + 17;
const timeScale = buffer.readUInt32BE(start);
const duration = buffer.readUInt32BE(start + 4);
const audioLength = Math.floor((duration / timeScale) * 1000) / 1000;
return audioLength;
}
loadGroups();
As to why your original code wasn't working: my guess is that returning inside the callbacks to fs.open or fs.read doesn't return from getVideoDuration itself. I couldn't find a way in the fs docs to get hold of a callback's return value, so I just switched over to promises and async/await, which lets the code read as if it ran sequentially. This way you can save the output of fs.open and fs.read and use them to return a value in the scope of getVideoDuration.
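A stripped-down sketch of that point (hypothetical file name; it just shows where the return value actually goes):
function getValueBroken() {
    fs.open('some-file', 'r', (err, fd) => {
        return 42; // this returns from the anonymous callback, not from getValueBroken
    });
    // execution reaches here before the callback has even run,
    // so getValueBroken itself implicitly returns undefined
}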

I've figured out a work-around for this problem.
const { exec } = require('child_process');
async function test() {
    const duration = await getDuration(`${__dirname}/data/vid.mp4`);
    console.log(duration);
}
test();
function getDuration(file) {
    return new Promise((resolve, reject) => {
        exec(`ffprobe -v error -show_entries format=duration -of default=noprint_wrappers=1:nokey=1 ${file}`, (err, stdout, stderr) => {
            if (err) return reject(err); // reject instead of just logging, so the awaiting caller sees the failure
            resolve(stdout ? stdout : stderr);
        });
    });
}
I only tested it on Linux, so I don't know if it'll work on Windows.
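For what it's worth, here is a sketch of a slightly more robust variant using execFile, which avoids shell-quoting problems with paths that contain spaces (it still assumes ffprobe is on the PATH):
const { execFile } = require('child_process');
function getDuration(file) {
    return new Promise((resolve, reject) => {
        execFile('ffprobe', [
            '-v', 'error',
            '-show_entries', 'format=duration',
            '-of', 'default=noprint_wrappers=1:nokey=1',
            file
        ], (err, stdout) => {
            if (err) return reject(err);
            resolve(parseFloat(stdout)); // duration in seconds, as a number
        });
    });
}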

Related

selenium Race Condition? (ECONNREFUSED connect ECONNREFUSED 127.0.0.1:xxxxx)

const { Builder, By, Key } = require('selenium-webdriver');
const fs = require('fs');
const categories = fs.readFileSync('./categories.txt').toString().split("\n");
const list = fs.readFileSync('./list.txt').toString().split("\n");
async function check(check, index) {
    let driver = await new Builder()
        .forBrowser('chrome')
        .build();
    await driver.manage().setTimeouts({
        implicit: 3000,
        pageLoad: 3000,
        script: 3000
    });
    try {
        console.log(`now list: ${list[index]}`)
        await driver.get(`http://localhost/${check}`);
        return new Promise(async (resolve, reject) => {
            resolve(true)
        })
    } catch (err) {
        console.log(err);
    } finally {
        await driver.quit();
    }
}
(async () => {
    var index = 0;
    var i = 0;
    while (true) {
        if (index > list.length) {
            index = 0;
        }
        var idx = i * 2;
        var first = categories[idx];
        var second = categories[idx + 1];
        if (first !== undefined && second !== undefined) {
            await Promise.all([check(first, index), check(second, index + 1)])
        } else {
            if (first !== undefined) {
                await check([first, index]);
            } else {
                console.log('end');
                break
            }
        }
        i += 2;
        index += 2;
    }
})();
I have 20,000 records, and I want to call a specific function on them two at a time, in order.
But there seems to be something very wrong.
When selenium runs (around line 20 of the code above), the two concurrent sessions seem to use the same port, creating a race condition.
I found this error: Error: ECONNREFUSED connect ECONNREFUSED 127.0.0.1:49809
I'm a total newbie here. I need your help!

using promises and async for protobufjs loadcall

I am trying to figure out the best way to re-write the following code:
var api = function(id, contract) {
    var callback = function (error, root) {
        if (error)
            throw error;
        var by = Buffer.from(contract, 'base64')
        var es = root.lookupType("Contract")
        var esMsg = es.decode(by)
        var esBytes = es.encode(esMsg).finish()
        signature = id.sign(esBytes).toString('base64')
    }
    return new Promise((resolve, reject) => {
        protobuf.load("contract.proto", callback)
    })
}
var signContract = async (privateKey, contract) => {
    let signature
    var id = await crypto.keys.unmarshalPrivateKey(Buffer.from(privateKey, 'base64'))
    result = await api(id, contract, signature)
}
function getSessionSignature(hash, time) {
    return config.id + ":" + hash + ":" + time
}
module.exports = configure(({ ky }) => {
    return async function * signbatch (input, options) {
        if (config.PrivKey) { // note: this opening "if" is missing from the pasted code; reconstructed to match the "else" branch below
            var contracts = input.Contracts
            for (var i = 0; i < contracts.length; i++) {
                contracts[i].contract = await signContract(config.PrivKey, contracts[i].contract)
            }
            //add signed contracts to the searchParams
            searchParams.append("arg", JSON.stringify(contracts))
            let res
            res = await ky.post('storage/upload/signbatch', {
                searchParams
            }).json()
            yield JSON.stringify({})
        } else {
            yield JSON.stringify({error:"Private key not found"})
        }
    }
})
My issue is: how do I write the signContract async code so that it passes the privateKey and contract variables to the api function and returns the signature back to the result variable, to be assigned to contracts[i].contract? Please note that the id.sign(..) function returns a Promise inside the callback function.
You need to resolve the promise in the api function. The docs suggest you could use the single-argument variant here, e.g.
var root = await protobuf.load("contract.proto");
// ... the code you currently have in 'callback' ...
return signature;
As the generator is async, yield will emit a Promise which you can (obviously) handle with either .then or await.
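Putting that together, a minimal sketch of api rewritten with async/await (assuming, as the question states, that id.sign() returns a Promise):
async function api(id, contract) {
    // the single-argument variant of protobuf.load returns a Promise<Root>
    const root = await protobuf.load("contract.proto");
    const by = Buffer.from(contract, 'base64');
    const es = root.lookupType("Contract");
    const esMsg = es.decode(by);
    const esBytes = es.encode(esMsg).finish();
    const signature = await id.sign(esBytes);
    return signature.toString('base64');
}
signContract can then simply return await api(id, contract), which gives you the value to assign to contracts[i].contract.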

Why is my code not waiting for the completion of the function?

I am trying to read some data from a file and store it in a database.
This is part of a larger transaction and I need the returned ids for further steps.
async parseHeaders(mysqlCon, ghID, csv) {
    var self = this;
    var hIDs = [];
    var skip = true;
    var idx = 0;
    console.log("Parsing headers");
    return new Promise(async function(resolve, reject) {
        try {
            var lineReader = require('readline').createInterface({
                input: require('fs').createReadStream(csv)
            });
            await lineReader.on('close', async function () {
                console.log("done: ", JSON.stringify(hIDs));
                resolve(hIDs);
            });
            await lineReader.on('line', async function (line) {
                line = line.replace(/\"/g, '');
                if (line.startsWith("Variable")) { //Variable,Statistics,Category,Control
                    console.log("found variables");
                    skip = false; //Ignore all data and skip to the parameter description.
                    return; //Skip also the header line.
                }
                if (!skip) {
                    var data = line.split(",");
                    if (data.length < 2) { //Variable section done, return results.
                        console.log("Found sub?", line);
                        return lineReader.close();
                    }
                    var v = data[0];
                    var bidx = data[0].indexOf(" [");
                    if (bidx > 0)
                        v = data[0].substring(0, bidx); //[] are disturbing mysql (e.g. Air temperature [°C])
                    var c = data[2];
                    hIDs[idx++] = await self.getParamID(mysqlCon, ghID, v, c, data); //add data in case the parameter is not in DB, yet.
                }
            });
        } catch (e) {
            console.log(JSON.stringify(e));
            reject("some error occured: " + e);
        }
    });
}
async getParamID(mysqlCon, ghID, variable, category, data) {
    return new Promise(function(resolve, reject) {
        var sql = "SELECT ID FROM Parameter WHERE GreenHouseID=" + ghID + " AND Variable = '" + variable + "' AND Category='" + category + "'";
        mysqlCon.query(sql, function (err, result, fields) {
            if (result.length === 0 || err) { //apparently not in DB, yet ... add it (Acronym and Machine need to be set manually).
                sql = "INSERT INTO Parameter (GreenHouseID,Variable,Category,Control) VALUES (" + ghID + ",'" + variable + "','" + category + "','" + data[3] + "')";
                mysqlCon.query(sql, function (err, result) {
                    if (err) {
                        console.log(result, err, this.sql);
                        reject(err);
                    } else {
                        console.log("Inserting ", variable, " into DB: ", JSON.stringify(result));
                        resolve(result.insertId); //added, return generated ID.
                    }
                });
            } else {
                resolve(result[0].ID); //found in DB .. return ID.
            }
        });
    });
}
The functions above are in the base class and called by the following code:
let headerIDs = await self.parseHeaders(mysqlCon, ghID, filePath);
console.log("headers:",JSON.stringify(headerIDs));
The sequence of events is that everything in parseHeaders completes except for the calls to self.getParamID, and control returns to the calling function, which prints an empty array for headerIDs.
The console.log statements in self.getParamID are then printed afterward.
What am I missing?
Thank you
As you want to execute an asynchronous action for every line, we could define a handler to do just that:
const once = (target, evt) => new Promise(res => target.on(evt, res));
function mapLines(reader, action) {
    const results = [];
    let index = 0;
    reader.on("line", line => results.push(action(line, index++)));
    return once(reader, "close").then(() => Promise.all(results));
}
So now you can solve that easily:
let skip = true; // start with skip = true, as in the original code, so everything before the "Variable" header line is ignored
const hIDs = [];
await mapLines(lineReader, async function (line, idx) {
    line = line.replace(/\"/g, '');
    if (line.startsWith("Variable")) { //Variable,Statistics,Category,Control
        console.log("found variables");
        skip = false; //Ignore all data and skip to the parameter description.
        return; //Skip also the header line.
    }
    if (!skip) {
        var data = line.split(",");
        if (data.length < 2) { //Variable section done, return results.
            console.log("Found sub?", line);
            return lineReader.close();
        }
        var v = data[0];
        var bidx = data[0].indexOf(" [");
        if (bidx > 0)
            v = data[0].substring(0, bidx); //[] are disturbing mysql (e.g. Air temperature [°C])
        var c = data[2];
        hIDs[idx] = await self.getParamID(mysqlCon, ghID, v, c, data);
    }
});
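As an aside, newer Node versions (11.13+) ship an equivalent helper in the events module, so the hand-rolled once above could be swapped for the built-in:
const { once } = require('events');
function mapLines(reader, action) {
    const results = [];
    let index = 0;
    reader.on("line", line => results.push(action(line, index++)));
    return once(reader, "close").then(() => Promise.all(results));
}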

NodeJS Loop issue due to async/synchronicity issues

I am porting an old Ruby script over to JavaScript, setting the function up as a cron instance so it will run on a schedule. The function queries our MySQL database, retrieves inventory information for our products, and then sends requests to a trading partner's API to update our inventory on their site.
Due to Node's asynchronicity I am running into issues. We need to chunk requests into 1000 items per request, and we are sending 10k products. The issue is that each request is just sending the last 1000 items each time: the for loop inside the while loop moves on before the JSON request body has finished being built. I tried creating anonymous setTimeout functions in the while loop to try to handle it, as well as creating an object holding the request function and the variables to be passed and stuffing it into an array to iterate over once the while loop completes, but I am getting the same result either way. I'm not sure what's the best way to handle it so that each request gets the correct batch of items. I also need to wait 3 minutes between each request of 1000 items to avoid hitting the request cap.
query.on('end', () => {
    connection.release();
    writeArray = itemArray.slice(0),
    alteredArray = [];
    var csv = json2csv({ data: writeArray, fields: fields }),
        timestamp = new Date(Date.now());
    timestamp = timestamp.getFullYear() + '-' + (timestamp.getMonth() + 1) + '-' + timestamp.getDate() + ' ' + timestamp.getHours() + ':' + timestamp.getMinutes() + ':' + timestamp.getSeconds();
    let fpath = './public/assets/archives/opalEdiInventory-' + timestamp + '.csv';
    while (itemArray.length > 0) {
        alteredArray = itemArray.splice(0, 999);
        for (let i = 0; i < alteredArray.length; i++) {
            jsonObjectArray.push({
                sku: alteredArray[i]['sku'],
                quantity: alteredArray[i]["quantity"],
                overstockquantity: alteredArray[i]["osInv"],
                warehouse: warehouse,
                isdiscontinued: alteredArray[i]["disc"],
                backorderdate: alteredArray[i]["etd"],
                backorderavailability: alteredArray[i]["boq"]
            });
        }
        var jsonObject = {
            login: user,
            password: password,
            items: jsonObjectArray
        };
        postOptions.url = endpoint;
        postOptions.body = JSON.stringify(jsonObject);
        funcArray.push({
            func: function(postOptions) {
                request(postOptions, (err, res, body) => {
                    if (err) { console.error(err); throw err; }
                    console.log(body);
                });
            },
            vars: postOptions
        });
        jsonObjectArray.length = 0;
    }
    var mili = 180000;
    for (let i = 0; i < funcArray.length; i++) {
        setTimeout(() => {
            var d = JSON.parse(funcArray[i]['vars'].body);
            console.log(d);
            console.log('request ' + i);
            //funcArray[i]['func'](funcArray[i]['vars']);
        }, mili * i);
    }
});
You need async/await or Promises to handle async actions in Node.js.
I am not sure if your Node version supports async/await, so I have tried a Promise-based solution.
query.on('end', () => {
    connection.release();
    writeArray = itemArray.slice(0),
    alteredArray = [];
    var csv = json2csv({ data: writeArray, fields: fields }),
        timestamp = new Date(Date.now());
    timestamp = timestamp.getFullYear() + '-' + (timestamp.getMonth() + 1) + '-' + timestamp.getDate() + ' ' + timestamp.getHours() + ':' + timestamp.getMinutes() + ':' + timestamp.getSeconds();
    let fpath = './public/assets/archives/opalEdiInventory-' + timestamp + '.csv';
    var calls = chunk(itemArray, 1000)
        .map(function(chunk) {
            var renameditemsArray = chunk.map((item) => new renamedItem(item, warehouse));
            var postOptions = {};
            postOptions.url = endpoint;
            postOptions.body = JSON.stringify({
                login: user,
                password: password,
                items: renameditemsArray
            });
            return postOptions;
        });
    sequenceBatch(calls, makeRequest)
        .then(function() {
            console.log('done');
        })
        .catch(function(err) {
            console.log('failed', err)
        });
    function sequenceBatch(calls, cb) {
        var sequence = Promise.resolve();
        var count = 1;
        calls.forEach(function (callOptions) {
            count++;
            sequence = sequence.then(() => {
                return new Promise(function (resolve, reject) {
                    setTimeout(function () {
                        try {
                            cb(callOptions);
                            resolve(`callsequence${count} done`);
                        } catch (err) {
                            reject(`callsequence ${count} failed`);
                        }
                    }, 180000);
                });
            })
        });
        return sequence;
    }
    function makeRequest(postOptions) {
        request(postOptions, (err, res, body) => {
            if (err) {
                console.error(err);
                throw err;
            }
            console.log(body)
        });
    }
    function chunk(arr, len) {
        var chunks = [],
            i = 0,
            n = arr.length;
        while (i < n) {
            chunks.push(arr.slice(i, i += len));
        }
        return chunks;
    }
    function renamedItem(item, warehouse) {
        this.sku = item['sku']
        this.quantity = item["quantity"]
        this.overstockquantity = item["osInv"]
        this.warehouse = warehouse
        this.isdiscontinued = item["disc"]
        this.backorderdate = item["etd"]
        this.backorderavailability = item["boq"]
    }
});
Could you please try this snippet and let me know if it works? I couldn't test it since I made it up on the fly. The core logic is in the sequenceBatch function. The answer is based on another question which explains how timeouts and promises work together.
Turns out this wasn't a closure or async issue at all: the request object I was building was holding references to objects instead of shallow copies, so all the data in the final array ended up linked to the same object reference.
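A tiny sketch of that trap (illustrative names, not the original code):
const postOptions = { url: 'https://example.com' }; // hypothetical endpoint
const funcArray = [];
for (let i = 0; i < 3; i++) {
    postOptions.body = JSON.stringify({ batch: i }); // mutates the one shared object
    funcArray.push({ vars: postOptions });           // every entry references that SAME object
    // funcArray.push({ vars: { ...postOptions } }); // a shallow copy per entry fixes it
}
console.log(funcArray.map(f => f.vars.body)); // all three entries show {"batch":2}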

Reading a file line by line, parsing the lines and inserting them into Mongo in Node.js

I have a file which is tab-separated. It has thousands of rows of data. How can I use Node.js to read the file line by line, parse the lines, create objects from them, and insert them into a MongoDB?
I am just learning Node and Mongo, and I come from a different background, so how can this be done?
In the end, the MongoDB has to be populated with the proper data.
I searched the net but could not find a complete solution.
Thanks.
I had an issue with the answer by Juvenik. My problem was that the database would not be populated by the time readline had completed. The lines were being read synchronously, but the DB insertion was asynchronous.
Instead, I found a simpler solution with the line-reader package. It reads the lines and waits for a callback before continuing.
var MongoClient = require('mongodb').MongoClient // note: this uses the legacy callback-based driver API (connect yields a db; newer drivers yield a client)
var dbName = 'yourDbName'
var url = 'mongodb://localhost:27017/' + dbName
var collectionName = 'yourCollectionName'
var filename = 'yourFileName.txt'
var printLine = 1000
MongoClient.connect(url, function(err, db) {
    if (err) {
        console.error('Problem connecting to database')
    } else {
        console.log('Connected correctly to server.')
        var lineReader = require('line-reader')
        var collection = db.collection(collectionName)
        var lineNum = -1
        var headers = []
        lineReader.eachLine(filename, function(line, last, cb) {
            lineNum++
            try {
                var split = line.split('\t')
                var object = {}
                if (lineNum > 0) {
                    for (var i = 0; i < split.length; i += 1) {
                        object[headers[i]] = split[i]
                    }
                    collection.insert(object, function (insertErr, insertObj) {
                        if (insertErr) console.error(insertErr)
                        if (lineNum % printLine === 0) console.log('Line ' + lineNum)
                        if (last) {
                            console.log('Done with ' + filename + ' (' + lineNum + ' records)')
                            process.exit(0)
                        } else {
                            cb() // signal line-reader to continue with the next line
                        }
                    })
                } else {
                    headers = line.split('\t')
                    cb()
                }
            } catch (lineError) {
                console.error(lineError)
            }
        })
    }
})
I came across a similar problem, and this approach worked for me.
Have a look, it might be helpful.
var mongoDb = require('mongodb');
var mongoClient = mongoDb.MongoClient;
var dbname = 'YOUR_DB_NAME';
var collectionName = 'YOUR_COLLECTION_NAME';
var url = 'mongodb://localhost:27017/' + dbname;
var filename = 'File_Name.txt';
console.log('***************Process started');
mongoClient.connect(url, function(err, db) {
    if (err) {
        console.log('error on connection ' + err);
    } else {
        console.log('***************Successfully connected to mongodb');
        var collection = db.collection(collectionName);
        var fs = require('fs');
        var readline = require('readline');
        var stream = require('stream');
        var instream = fs.createReadStream(filename);
        var outstream = new stream;
        var rl = readline.createInterface(instream, outstream);
        console.log('***************Parsing, please wait ...');
        rl.on('line', function(line) {
            try {
                var arr = line.split('\t');
                var object = {};
                //Parse them here
                //Example
                object['name'] = arr[0]; //Just an example
                var res = collection.insert(object);
            } catch (err) {
                console.log(err);
            }
        });
        rl.on('close', function() {
            db.close();
            console.log('***************completed');
        });
    }
});
I am a learner too. If someone can make it better, that would be good.
Here is a more performant (inserting batches of objects) and updated version (using async and the latest Mongo driver) of frank-0's answer:
const lineReader = require('line-reader');
// `collection` is assumed to be an already-connected mongodb collection, passed in by the caller
async function readFileAndInsertInMongo(file, collection) {
    let total = 0;
    return new Promise((resolve, reject) => {
        let buffer = [];
        lineReader.eachLine(file, (line, last, cb) => {
            // prepare your object based on the line content
            let insertObject = { 'some_content': 'some_value' };
            buffer.push(insertObject); // queue the line's object; it is flushed in batches below
            total++;
            if (total % 10000 === 0 || last) {
                collection.insertMany(buffer, function(err, res) {
                    if (last) {
                        if (err) {
                            reject(err);
                        } else {
                            resolve(res);
                        }
                    } else if (err) {
                        reject(err);
                    } else {
                        buffer = [];
                        return cb();
                    }
                });
            } else {
                return cb();
            }
        });
    });
}
This really is the best solution I have found to parse huge files and insert them in the database without exploding Node's memory. Hope this can help ;)
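For completeness, a hypothetical usage sketch with a modern driver (the connection string and names are illustrative):
const { MongoClient } = require('mongodb');
async function main() {
    const client = await MongoClient.connect('mongodb://localhost:27017');
    const collection = client.db('yourDbName').collection('yourCollectionName');
    const result = await readFileAndInsertInMongo('yourFileName.txt', collection);
    console.log('last batch inserted:', result.insertedCount, 'documents');
    await client.close();
}
main().catch(console.error);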
