nodejs : synchronise pieces of code - javascript

I am deleting some files using fs.unlink and then I want to run some code. Due to the async nature of JS, the code after the unlink call runs before the unlink callback. How can I synchronise this? Are promises the only way?
fs.unlink("FileName",function(err){
console.log("RUN");
})
for(let i = 0; i<10;i++) {
console.log(i);
}
RESULT:
0
1
2
3
4
5
6
7
8
9
RUN
The problem with using promises is that if I have many files to delete, I will have to maintain a count of the promises and then check how many have been resolved. This is what I want to avoid.

In this situation, you can use fs.unlinkSync, the synchronous version of fs.unlink:
const fs = require('fs');

try {
  fs.unlinkSync("FileName");
  console.log('Removing file successful!');
} catch (e) {
  // TODO: handle errors here
}
console.log("RUN");
for (let i = 0; i < 10; i++) {
  console.log(i);
}
As @Keith rightly mentions in the comments: synchronous operations like this should be used sparingly. If you have a large number of files to delete, it may be better to use the asynchronous fs.unlink(), because you can "start" more of those concurrently (trade-off: start too many and performance may suffer because of I/O saturation).
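On a Node version that ships the promise-based fs API (fs.promises, roughly Node 10 and later), you can get that concurrency without any extra library. A minimal sketch, using placeholder file names:

const fs = require('fs');

// Placeholder file names - replace with the files you actually want to delete.
const files = ['01.txt', '02.txt', '03.txt'];

// Start all deletions concurrently and continue once every one has finished.
Promise.all(files.map((name) => fs.promises.unlink(name)))
  .then(() => {
    console.log('RUN');
    for (let i = 0; i < 10; i++) {
      console.log(i);
    }
  })
  .catch((err) => {
    console.error('unlink failed:', err);
  });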

Using Promises is a good solution: you don't have to track the promises yourself; Bluebird will do it for you:
const Promise = require('bluebird');
const fs = require('fs');

function unlinkFile(fileName) {
  return new Promise((resolve, reject) => {
    fs.unlink(fileName, function (err) {
      if (err) {
        return reject(err);
      }
      resolve();
    });
  });
}

Promise.all([unlinkFile('01.txt'), unlinkFile('02.txt'), unlinkFile('03.txt')])
  .then(() => {
    console.log('ALL FILES UNLINKED');
  });

// OR YOU CAN USE Promise.map
const filesToUnlink = ['01.txt', '02.txt', '03.txt'];
Promise.map(filesToUnlink, unlinkFile)
  .then(() => {
    console.log('ALL FILES UNLINKED');
  });
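A follow-up note: if I/O saturation is a concern, Bluebird's Promise.map also accepts a concurrency option, so you can cap how many unlinks run at once. For example, with the same filesToUnlink and unlinkFile as above:

// At most 10 unlink operations will be in flight at any one time.
Promise.map(filesToUnlink, unlinkFile, { concurrency: 10 })
  .then(() => {
    console.log('ALL FILES UNLINKED');
  });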

Promises are not the only way. You can wrap your for loop in a function and call that function from the unlink callback.
fs.unlink("FileName",function(err){
console.log("RUN");
loop();
})
function loop() {
for(let i = 0; i<10;i++) {
console.log(i);
}
}
On the other hand, with a Promise you would do it this way:
new Promise((resolve, reject) => {
  fs.unlink("FileName", function (err) {
    console.log("RUN");
    return resolve();
  });
})
  .then(() => {
    for (let i = 0; i < 10; i++) {
      console.log(i);
    }
  });
But, as @robertklep says, if your code does not need async, just call the synchronous function.

You should use a Promise to get the proper result.
let fs = require('fs');

new Promise((resolve, reject) => {
  fs.unlink("demo1.txt", function (err) {
    console.log("RUN");
    if (err) return reject(err);
    resolve();
  });
})
  .then(() => {
    for (let i = 0; i < 10; i++) {
      console.log(i);
    }
  }, (error) => {
    console.log(error);
  });
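For what it's worth, on Node 8+ the same flow can also be written with util.promisify and async/await instead of a hand-rolled Promise; a sketch (demo1.txt as in the example above):

const fs = require('fs');
const { promisify } = require('util');

const unlink = promisify(fs.unlink);

async function main() {
  try {
    await unlink('demo1.txt');
    console.log('RUN');
  } catch (error) {
    console.log(error);
    return;
  }
  for (let i = 0; i < 10; i++) {
    console.log(i);
  }
}

main();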

Related

Functions are not waiting until they are resolved

I'm trying to execute functions one at a time, sequentially. Using promises, I believe it should work, but for me it does not. I've researched somewhat and found this question, and one of the answers explains to use Promises, which is what I've been trying to do.
Here are the functions:
async function loadCommands () {
  return new Promise((resolve, reject) => {
    let commands = 0;
    readdir('./commands/', (error, files) => {
      if (error) reject(error);
      for (const file of files) {
        if (!file.endsWith('.js')) return;
        commands++;
      }
    });
    resolve(commands); // this is in my code, I forgot to put it - sorry commenters
  });
};
async function loadEvents () {
  return new Promise(async (resolve, reject) => {
    let events = 0;
    readdir('./events/', (error, files) => {
      if (error) reject(error);
      for (const file of files) {
        if (!file.endsWith('.js')) return;
        events++
      }
    });
    resolve(events);
  });
};
I am then using await in an async function to try to make sure each function resolves before going on to the next one:
console.log('started');
const events = await loadEvents();
console.log(events);
console.log('load commands');
const commands = await loadCommands();
console.log(commands);
console.log('end')
In the console, this is logged (keep in mind, I have no files in ./events/ and one file in ./commands/):
start
0 // expected
load commands
0 // not expected, it's supposed to be 1
end
What am I doing wrong? I want these functions to run sequentially. I've also tried putting the bare code into the one async function instead of separate functions, but I still ran into the same issue.
You never resolve() the promise that you create in loadCommands, and you resolve() the promise that you create in loadEvents before the readdir callback happened.
Also, don't do any logic in non-promise callbacks. Use the new Promise constructor only to promisify, and call only resolve/reject in the async callback:
function readdirPromise(path) {
  return new Promise((resolve, reject) => {
    readdir(path, (err, files) => {
      if (err) reject(err);
      else resolve(files);
    });
  });
}
or simply
import { promisify } from 'util';
import { readdir } from 'fs';

const readdirPromise = promisify(readdir);
Then you can use that promise in your actual logic function:
async function countJsFiles(path) {
  const files = await readdirPromise(path);
  let count = 0;
  for (const file of files) {
    if (file.endsWith('.js'))
      count++;
    // (I don't think you really wanted to `return` here, as in your original code)
  }
  return count;
}
function loadCommands() {
  return countJsFiles('./commands/');
}
function loadEvents() {
  return countJsFiles('./events/');
}
You're trying to use await outside an async function. You can only await a promise inside an async function. The functions returning promises (here loadCommands and loadEvents) don't need to be async. Make an async wrapper function like run and put the await statements inside it, like this.
PS: You also need to resolve loadCommands with commands in the callback itself. Same for loadEvents. Also, remove the return and simply increment the variable when the condition is true.
function loadCommands() {
  return new Promise((resolve, reject) => {
    let commands = 0;
    readdir('./commands/', (error, files) => {
      if (error) return reject(error);
      for (const file of files) {
        if (file.endsWith('.js')) commands++;
      }
      resolve(commands);
    });
  });
}
function loadEvents() {
  return new Promise((resolve, reject) => {
    let events = 0;
    readdir('./events/', (error, files) => {
      if (error) return reject(error);
      for (const file of files) {
        if (file.endsWith('.js')) events++;
      }
      resolve(events);
    });
  });
}
async function run() {
  console.log('started');
  const events = await loadEvents();
  console.log(events);
  console.log('load commands');
  const commands = await loadCommands();
  console.log(commands);
  console.log('end');
}
run();
Hope this helps !

jQuery / Javascript function execution

I have 3 functions: func1(), func2() and func3(). They are independent of each other. I would like to execute these 3 methods in parallel, with a single callback method. Is something like this possible?
function parentMethod(){
call ([func1(),func2(), func3()],<callback function>);
}
Callback functionality is optional, but can these 3 functions be executed in parallel?
Use a promise chain with Promise.all
Promise.all([func1(), func2(), func3()]).then(values => {
  // values from all the functions are in the callback param
});
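Note that this only works if each function, when called, returns a promise. A minimal sketch of what that could look like; the bodies of func1–func3 here are invented for illustration:

// Hypothetical promise-returning versions of the three independent functions.
function func1() {
  return new Promise((resolve) => setTimeout(() => resolve('one'), 100));
}
function func2() {
  return new Promise((resolve) => setTimeout(() => resolve('two'), 50));
}
function func3() {
  return new Promise((resolve) => setTimeout(() => resolve('three'), 10));
}

function parentMethod() {
  // All three run in parallel; the single callback fires once with all results.
  Promise.all([func1(), func2(), func3()]).then(([a, b, c]) => {
    console.log(a, b, c);
  });
}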
Adding to the promise answer given, you'll need to "promisify" your functions if they don't already return promises (this demo assumes an error-first callback):
let promises = [func1, func2, func3].map(func => {
  return new Promise((resolve, reject) => {
    func((err, data) => {
      if (err) {
        reject(err);
      } else {
        resolve(data);
      }
    });
  });
});

Promise.all(promises)
  .then((results) => {
    let [func1Data, func2Data, func3Data] = results;
  })
  .catch(err => {
    console.log('error!', err);
  });
You can also use web workers, but that will be a little more verbose. You can check this post: HTML5/JS - Start several webworkers

Promise.all is never triggered due to Promise rejection

I have two promises: one that reads a sample.txt file and another that reads all the files from a /books/ folder. The second promise uses a function called readFiles, which takes the dirnames and uses them to look through each file. When all the promises are ready, the code inside then should run:
const p1 = new Promise((resolve, reject) => {
fs.readdir(__dirname + '/books/', (err, archives) => {
// archives = [ 'archive1.txt', 'archive2.txt']
readFiles(archives, result => {
if (archives.length === result.length) resolve(result)
else reject(result)
})
})
})
const p2 = new Promise((resolve, reject) => {
fs.readFile('sample.txt', 'utf-8', (err, sample) => {
resolve(sample)
})
})
Promise.all([p1, p2]).then(values => {
console.log('v:', values)
}).catch(reason => {
console.log('reason:', reason)
})
function readFiles (archives, callback) {
const result = []
archives.forEach(archive => {
fs.readFile(__dirname + '/books/' + archive, 'utf-8', (err, data) => {
result.push(data)
callback(result)
})
})
}
However, Promise.all always gets rejected:
reason: [ 'archive 1\n' ]
What am I doing wrong?
Promises are one-shot devices. Once they've been rejected or resolved, their state can never change. With that in mind, readFiles() calls its callback for every file that it reads and you reject or resolve every time that callback is called, but the way you are using it, you check:
if (archives.length === result.length)
which will never be true on the first one and then you reject. Once that promise is rejected, its state cannot change. Subsequent calls to the callback will also call reject() and then the last one will call resolve(), but the state is long since set so only the first call to reject() or resolve() actually does anything. The others are simply ignored. So, p1 will always reject, thus Promise.all() that uses p1 will always reject.
You need to change readFiles() so that it either calls its callback only once, when it is done with all the files (a sketch of that option is below), or returns a single promise that resolves when all the files are read, or change how you're using the callback so you don't reject the first time it is called.
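For reference, a rough sketch of the first option: keep the callback style, but count completed reads and invoke the callback only once, after the last file has been read. (Like the original, this sketch still ignores read errors.)

function readFiles (archives, callback) {
  const result = [];
  let pending = archives.length;
  if (pending === 0) return callback(result);
  archives.forEach(archive => {
    fs.readFile(__dirname + '/books/' + archive, 'utf-8', (err, data) => {
      result.push(data);
      // Fire the callback only after the last read has finished.
      if (--pending === 0) callback(result);
    });
  });
}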
In general, if you're going to use promises, then you want to promisify at the lowest level and use the advantages of promises (particular for error propagation) everywhere rather than mix callbacks and promises. To that end, I'd suggest:
fs.readFileP = function (fname, encoding) {
  return new Promise(function (resolve, reject) {
    fs.readFile(fname, encoding, function (err, data) {
      if (err) return reject(err);
      resolve(data);
    });
  });
};

function readFiles(archives, encoding) {
  return Promise.all(archives.map(function (file) {
    return fs.readFileP(file, encoding);
  }));
}
Or, going a level deeper and promisifying fs.readdir() also, you'd get this:
// helper functions
fs.readdirP = function (dir) {
  return new Promise(function (resolve, reject) {
    fs.readdir(dir, function (err, files) {
      if (err) return reject(err);
      resolve(files);
    });
  });
};

fs.readFileP = function (fname, encoding) {
  return new Promise(function (resolve, reject) {
    fs.readFile(fname, encoding, function (err, data) {
      if (err) return reject(err);
      resolve(data);
    });
  });
};

function readFiles(archives, encoding) {
  encoding = encoding || 'utf8';
  return Promise.all(archives.map(function (file) {
    return fs.readFileP(file, encoding);
  }));
}

// actual logic for your operation
// note: readdir returns bare file names, so prefix the directory before reading
const p1 = fs.readdirP(__dirname + '/books/').then(files => {
  return readFiles(files.map(name => __dirname + '/books/' + name));
});
const p2 = fs.readFileP('sample.txt', 'utf-8');

Promise.all([p1, p2]).then(values => {
  console.log('v:', values);
}).catch(reason => {
  console.log('reason:', reason);
});
If you use the Bluebird promise library which makes it easy to promisify whole modules at once and has some extra functions for managing Promise flow control, then the above code simplifies to this:
const Promise = require('bluebird');
const fs = Promise.promisifyAll(require('fs'));

const p1 = fs.readdirAsync(__dirname + '/books/').then(files => {
  return Promise.map(files, file => {
    return fs.readFileAsync(__dirname + '/books/' + file, 'utf8');
  });
});
const p2 = fs.readFileAsync('sample.txt', 'utf-8');

Promise.all([p1, p2]).then(values => {
  console.log('v:', values);
}).catch(reason => {
  console.log('reason:', reason);
});
In this block of code, the Promise.promisifyAll() line creates promisified versions of every method on the fs module, with an Async suffix on the name. Here, we use fs.readFileAsync() and fs.readdirAsync() so we can use promises for everything.
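If you would rather not add a dependency, Node's built-in util.promisify (available since Node 8) gives you essentially the same thing; a sketch of the equivalent setup:

const { promisify } = require('util');
const fs = require('fs');
const path = require('path');

const readdirP = promisify(fs.readdir);
const readFileP = promisify(fs.readFile);

const p1 = readdirP(path.join(__dirname, 'books')).then(files =>
  Promise.all(files.map(file =>
    readFileP(path.join(__dirname, 'books', file), 'utf8')
  ))
);
const p2 = readFileP('sample.txt', 'utf-8');

Promise.all([p1, p2]).then(values => {
  console.log('v:', values);
}).catch(reason => {
  console.log('reason:', reason);
});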

Promise will not resolve

The second part of the Promise below (inside the then) is never run. When I run the database query without the Promise (in a node script run with node myscript.js), it returns the data, but the console never returns the prompt; it just hangs and I have to send an interrupt manually. Therefore, when I put it inside a Promise, I think the Promise doesn't know that the database query is complete, even though it seems to have returned all the data, so the second part of the Promise isn't running (I think). If that's the problem, how do I write the database query so that it doesn't hang and the Promise can run to completion?
const sqlite = require('/usr/local/lib/node_modules/sqlite3');
const express = require('/usr/local/lib/node_modules/express')
const promise = require('/usr/local/lib/node_modules/promise')
app.get('/', (request, res) => {
var res = [];
function getData() {
return new Promise(function(resolve, reject) {
db.each('SELECT column_a, column_b FROM trips group by column_a', (e, rows) => {
var d = {
a: rows['column_a'],
b: rows['column_b']
}
res.push(d)
});
});
}
getData().then(function(data) {
console.log("never run....", res, data) //never run
});
})
You need to resolve a promise by calling one of the functions it provides to the callback you pass to its constructor.
const promise = new Promise((resolve, reject) => {
// you must call resolve() or reject() here
// otherwise the promise never resolves
});
Otherwise it will always stay in the pending state and never call the callback(s) you pass into then.
promise.then(() => {
// this never gets called if we don't resolve() or reject()
});
Additionally, promises allow you to resolve with values, so there's usually no need to maintain global variables; you can just pass results through.
Finally, the callback in db.each will be called once for each row, so you need to handle that by resolving the promise only after all rows have been obtained.
Here's how you could write your code:
function getData() {
  const data = [];
  return new Promise((resolve, reject) => {
    db.each('SELECT column_a, column_b FROM trips group by column_a', (e, row) => {
      if (e) {
        // error reading a row, reject the Promise immediately
        // optionally you could accumulate errors here in a similar manner to rows
        reject(e);
        return;
      }
      // success reading a row, store the row result
      data.push({
        a: row['column_a'],
        b: row['column_b']
      });
    }, (e, rowCount) => {
      // the complete handler, called when the operation is done, see docs:
      // https://github.com/mapbox/node-sqlite3/wiki/API#databaseeachsql-param--callback-complete
      if (e) {
        // operation finished, there was an error
        reject(e);
        return;
      }
      // operation succeeded, resolve with rows
      resolve(data);
    });
  });
}

app.get('/', (request, res) => {
  getData().then((data) => {
    // here `data` is an array of row objects
  }, (e) => {
    console.error(`Database error: ${e}`);
  });
});
Side Note
Not sure why you are redeclaring the parameter res as an array, but there's no need for var res = []. Since you already have res, you could just write res = [] to point res at a new array. Of course, that would overwrite the response object, so I assume you're doing it just for the purposes of this example. If not, you should probably create a new variable.
You've declared a Promise, which means you're responsible for calling one of resolve or reject, once and once only.
Here's a cleaned-up example:
app.get('/', (request, res) => {
  var res = [];
  new Promise((resolve, reject) => {
    db.each('SELECT column_a, column_b FROM trips group by column_a', (e, row) => {
      if (e) {
        reject(e);
        return;
      }
      res.push({
        a: row['column_a'],
        b: row['column_b']
      });
    }, (err) => {
      if (err) {
        return reject(err);
      }
      resolve(res);
    });
  }).then((data) => {
    console.log("Running ", res, data);
  });
});
If your database layer supports promises, that usually makes this sort of code a lot less messy, since you can simply chain that in there.
Edit: Since the Sqlite3 API is bizarrely non-standard and the each function has two callbacks, you need to handle each row with the first, then the completion handler with the second.
If you design an API like this, you're doing it wrong. Don't.
Several points:
resolve/reject must be called, otherwise a new Promise() will remain forever "pending".
always promisify at the lowest level possible, i.e. promisify db.each(), not getData(). This gives you a testable, reusable utility and more comprehensible application code.
db.each() is a challenge to promisify because it has two possible sources of error: one in its iteration callback and one in its complete callback.
the sqlite3 documentation does not state what happens if an iteration error occurs, but presumably the iteration continues, otherwise the error would simply appear as a completion error?
Here are a couple of ways to promisify:
1. First iteration error or completion error causes promise rejection - iteration errors are not exposed to your application code.
// Promisification
db.eachAsync = function(sql, iterationCallback) {
return new Promise(function(resolve, reject) {
db.each(sql, (iterationError, row) => {
if(iterationError) {
reject(iterationError);
} else {
iterationCallback(row);
}
}, (completionError, n) => {
if(completionError) {
reject(completionError);
} else {
resolve(n); // the number of retrieved rows.
}
});
});
};
// Application
app.get('/', (request, response) => {
function getData() {
var res = [];
return db.eachAsync('SELECT column_a, column_b FROM trips group by column_a', (row) => {
res.push({
a: row['column_a'],
b: row['column_b']
});
}).then(n => res);
}
getData().then(results => {
console.log(results);
}).catch(error => {
console.log(error);
});
});
2. Only a completion error causes promise rejection - iteration errors are exposed to your application code
// Promisification
db.eachAsync = function(sql, iterationCallback) {
return new Promise(function(resolve, reject) {
db.each(sql, iterationCallback, (completionError, n) => {
if(completionError) {
reject(completionError);
} else {
resolve(n); // the number of retrieved rows.
}
});
});
};
// Application
app.get('/', (request, response) => {
function getData() {
var res = [];
return db.eachAsync('SELECT column_a, column_b FROM trips group by column_a', (iterationError, row) => {
// You can choose what to do on iterationError.
// Here, nulls are injected in place of values from the db,
// but you might choose not to push anything.
res.push({
a: iterationError ? null : row['column_a'],
b: iterationError ? null : row['column_b']
});
}).then(n => res);
}
getData().then(results => {
console.log(results);
}).catch(error => {
console.log(error);
});
});
(2) is the better approach because exposing iteration errors affords you more flexibility. For example, you could choose to promisify with (2), and emulate (1) in your application:
// Application
app.get('/', (request, response) => {
function getData() {
var res = [];
var e = null;
return db.eachAsync('SELECT column_a, column_b FROM trips group by column_a', (iterationError, row) => {
if(iterationError && !e) {
// remember the first iteration error
e = iterationError;
} else if(!iterationError) {
// push only on success
res.push({
a: row['column_a'],
b: row['column_b']
});
}
}).then(n => {
if(e) {
throw e;
} else {
return res;
}
});
}
getData().then(results => {
console.log(results);
}).catch(error => {
console.log(error);
});
});
With (1), by rejecting on first iteration error rather than exposing iteration errors, the same flexibility is not available. (1) could not fully emulate (2).
Fortunately, the preferred approach (2) is the same as would be obtained with Bluebird's .promisify() method:
Promise.promisify(db.each);
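One caveat when promisifying a method rather than a free function: db.each needs db as its this value, which Bluebird supports via the context option. A hedged sketch of what that usage might look like (approach (2): the iteration callback stays yours, and the promise settles from the completion callback):

const Promise = require('bluebird');

// Bind the promisified function to the db instance so `this` is preserved.
const eachAsync = Promise.promisify(db.each, { context: db });

eachAsync('SELECT column_a, column_b FROM trips group by column_a', (err, row) => {
  // per-row iteration callback, exactly as in approach (2)
}).then(rowCount => {
  console.log('done, rows:', rowCount);
}).catch(error => {
  console.log(error);
});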

Callback after for-loop finishes in node.js

I need some help with the asynchronous nature of node.js. I have a for loop, which collects data from the database. "result" is an array, which should then be returned to the main function.
user_collection.findOne({
_id : uid
}, function(error, user) {
if(error)
callback(error)
else {
for(var j = 0; j < user.contacts.length; j++) {
if(user.contacts[j].accepted == 'true') {
user_collection.findOne({
_id : user.contacts[j].contactId
}, function(error, user) {
result.push(user);
})
}
}
callback(null, result); // This callback executes before the for-loop ends, ofc
}
});
How can I ensure that the callback executes after the loop finished?
You might want to consider using a helper library like async:
https://github.com/caolan/async
It helps keep the code more consistent.
In your case, you can look at the forEach() method
forEach(arr, iterator, callback)
The iterator is called with an item from the list and a callback for when it has finished.
Check out the unit tests for examples:
https://github.com/caolan/async/blob/master/mocha_test/each.js
Using ES6 Promises
(a promise library can be used for older browsers):
Process all requests, guaranteeing sequential execution (e.g. 1, then 2, then 3):
function asyncFunction (item, cb) {
  setTimeout(() => {
    console.log('done with', item);
    cb();
  }, 100);
}

let requests = [1, 2, 3].reduce((promiseChain, item) => {
  return promiseChain.then(() => new Promise((resolve) => {
    asyncFunction(item, resolve);
  }));
}, Promise.resolve());

requests.then(() => console.log('done'));
Process all async requests without sequential ordering (2 may finish faster than 1):
let requests = [1,2,3].map((item) => {
return new Promise((resolve) => {
asyncFunction(item, resolve);
});
})
Promise.all(requests).then(() => console.log('done'));
I did it this way:
Promise.all(body.schedules.map(function(scheduleId) {
return new Promise(function(resolve, reject) {
return scheduleSchema.findOneAndRemove({
_id: scheduleId
})
.then(function() {
logSchema.insert({
userId: req.user.id,
actId: constants.LOG_SCHEDULE_DELETE.id,
extData: scheduleId
});
resolve();
})
.catch(function(err) {
reject(err);
});
});
})).then(function() {
return res.json({
code: constants.SUCCESS_CODE
});
}).catch(function(err) {
return res.json(constants.DATABASE_ERROR);
});
Regarding the last example:
function callback (result) { console.log('all done'); }
[1, 2, 3].forEach((item, index, array) => {
asyncFunction(item, () => {
if (index === array.length - 1) {
callback();
}
});
});
This does not guarantee that callback will execute after all items are processed. It only guarantees that callback will execute after the very last item is processed.
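If you want to stay with plain callbacks, counting completions works regardless of which item finishes last; a small sketch using the same hypothetical asyncFunction:

function callback (result) { console.log('all done'); }

const items = [1, 2, 3];
let remaining = items.length;

items.forEach((item) => {
  asyncFunction(item, () => {
    // Fire the final callback only once every item has reported back.
    if (--remaining === 0) {
      callback();
    }
  });
});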
Michael.
As of v1.5.2 of Async.js, the method is each.
each(arr, iterator, [callback])
arr - An array to iterate over.
iterator(item, callback) - A function to apply to each item in arr.
callback(err) - Optional. A callback which is called when all iterator functions have finished, or an error occurs.
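Applied to the loop in the question, a sketch with async.each might look like the following (assuming the async module is installed and required as async; the variable names are taken from the question):

const async = require('async');

const accepted = user.contacts.filter(c => c.accepted == 'true');

async.each(accepted, (contact, done) => {
  user_collection.findOne({ _id: contact.contactId }, (error, contactUser) => {
    if (error) return done(error);
    result.push(contactUser);
    done();
  });
}, (err) => {
  if (err) return callback(err);
  callback(null, result); // runs only after every findOne has finished
});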
