Async PUT Requests - javascript

I'm building a script to handle some data modifications via the Shopify API, using JavaScript.
My strategy is to pull ALL the orders from one date, run a function against each order, then PUT the new data back up.
I've gotten pretty close: I have an array containing all the orders from a specific date, and I am returning this data through promises.
I need help getting the async parts of this function to work as expected. Currently, my PUT request is being ignored completely, and I'm not sure why. Here's my code:
(function main() {
var yesterday = moment().add('-1', 'days').format('YYYY-MM-DD');
var today = moment().format('YYYY-MM-DD');
var scopeStart = 'created_at_min=' + yesterday + "%2000:00";
var scopeEnd = '&created_at_max=' + today + '%2000:00';
var promise = new Promise(function(resolve, reject) {
var apiEndpoint = '/admin/orders/count.json?limit=250&status=any&' + scopeStart + scopeEnd;
Shopify.get(apiEndpoint, function(err, data) {
if (err) reject(err);
else resolve(data);
});
});
promise.then(function(data) {
console.log('promise data', data.count);
if (data.count <= 250) {
var secondPromise = new Promise(function(resolve, reject) {
var apiEndpoint = '/admin/orders.json?limit=250&status=any&' + scopeStart + scopeEnd;
Shopify.get(apiEndpoint, function(err, data) {
if (err) reject(err);
else resolve(data);
});
});
secondPromise.then(function(data) {
var array = data.orders;
async.each(array, function(order, callback) {
console.log(order.name, order.id);
var thirdPromise = new Promise(function(resolve, reject) {
var fields = generateFields(order.id);
console.log('fields', fields);
console.log('----------------');
Shopify.put('/admin/orders/' + order.id, fields, function(err, data) {
if (err) reject(err);
else resolve(data);
})
});
thirdPromise.then(function(data) {
console.log('Updated', data);
});
});
});
}
});
}());
My third promise never logs anything, and the PUT request does not seem to go through. Any suggestions?
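One thing worth checking (a guess, not a confirmed fix): the third promise has no rejection handler, so a failed PUT would be swallowed silently, and the per-item callback that async.each hands you is never invoked. A minimal sketch of the inner loop, also assuming the PUT endpoint wants the same .json suffix the GET endpoints use:
// Sketch only: signals async.each when each PUT settles and surfaces errors;
// the '.json' suffix on the PUT path is an assumption, mirror what the GETs use.
async.each(array, function (order, callback) {
  var fields = generateFields(order.id);
  new Promise(function (resolve, reject) {
    Shopify.put('/admin/orders/' + order.id + '.json', fields, function (err, data) {
      if (err) reject(err);
      else resolve(data);
    });
  }).then(function (data) {
    console.log('Updated', data);
    callback();            // tell async.each this order is done
  }, function (err) {
    console.error('PUT failed for order', order.id, err);
    callback(err);         // propagate the failure
  });
}, function (err) {
  if (err) console.error('Stopped early:', err);
  else console.log('All orders processed');
});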

Related

Wait for the query API to return before looping (chaining async/await)

The problem is that I am looping over an array whose entries are queried against a Firebase database; instead of waiting for each query to return, the loop continues even though the returned promise has not resolved yet.
The code below works like this: when a user signs in, it checks whether the user exists; if so, it queries the Realtime Database to get specific data saved at sign-up.
The problem is that it does not wait for the Realtime Database query; it immediately continues the loop and only then executes the query, which is wrong. I know the Firebase query returns a function, but how do I chain it? Or is there an easier way to do this? Thanks.
loginIdentification = ['citizen','bfp','ndrmmc','pnp','rta'];
loginAuthentication(email:any,password:any){
this.loginFireAuth.signInWithEmailAndPassword(email, password).then(async authData =>{
if(authData.uid){
for (var counter = 0; counter < this.loginIdentification.length; counter++) {
console.log('counter');
await this.searchuser(counter,authData.uid);
}
}
console.log(JSON.stringify(authData.uid));
}).catch(function (error) {
var errorCode = error.code;
var errorMessage = error.message;
console.log("error code : " + errorCode + " errorMessage : " + errorMessage);
});
}
searchuser(counter,userid){
var that = this;
if(counter == 0){
that.loginFiredatabase.ref('users/' + that.loginIdentification[counter] + '/' + userid).on('value',async function (snapshot) {
await new Promise(function (resolve, reject) {
if (snapshot.exists()) {
console.log(JSON.stringify(snapshot));
return resolve();
}
else{
return reject();
}
});
});
}
else {
that.loginFiredatabase.ref('users/' + that.loginIdentification[counter] + '/' + userid).on('value',async function (snapshot) {
await new Promise(function (resolve, reject) {
if(snapshot.exists()) {
console.log(JSON.stringify(snapshot));
return resolve();
}
else {
return reject();
}
});
});
}
}
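One way to make the login loop actually wait (a sketch, not tested against your project): in the Realtime Database SDK, ref.once('value') returns a promise, unlike on('value'), which attaches a listener and only takes a callback, so searchuser can simply return that promise for the await to work.
// Sketch: return the promise from once('value') so that
// `await this.searchuser(counter, authData.uid)` really waits for the query.
searchuser(counter, userid) {
  return this.loginFiredatabase
    .ref('users/' + this.loginIdentification[counter] + '/' + userid)
    .once('value')
    .then(function (snapshot) {
      if (snapshot.exists()) {
        console.log(JSON.stringify(snapshot));
        return snapshot.val();   // resolve with the data saved at sign-up
      }
      return null;               // nothing under this branch; keep looping instead of rejecting
    });
}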

JS Asynchronous execution is causing pain

I am developing backend server code with NodeJS. What the code does is periodically connect to a REST API, request updates, and then write to my database.
I have no way of getting a delta of the data from the API, so I drop my collection from MongoDB and then just insert the newly pulled data.
I implemented promises to make sure that the dependent methods are executed only after the previous methods resolve. This, however, doesn't seem to work as I anticipated.
So, I drop the collection and insert; this works. But the following method seems to execute before the new data is populated. It sometimes works when I add new console.log statements, which seem to introduce just enough delay to make it all work.
The setTimeout function didn't seem to help. Any suggestions?
Here is a sanitized version of the code: https://jsfiddle.net/ppbfrozg/
var request = require("request");
var q = require('q');
function authenticate() {
var deferred = q.defer();
request(options, function(error, response, body) {
if (error) throw new Error(error);
deferred.resolve(JSON.parse(body).token);
});
return deferred.promise;
}
function getData(token) {
var deferred = q.defer();
request(options, function(error, response, body) {
if (error) throw new Error(error);
deferred.resolve(JSON.parse(body).token);
});
return deferred.promise;
}
function insertDataInMongo(a) {
var deferred = q.defer();
var MongoClient = require('mongodb').MongoClient;
var url = 'mongodb://localhost/myDB';
var token = a[1];
MongoClient.connect(url, function(err, db) {
if (err) return deferred.reject(new Error(err))
console.log("connected for insert");
var apiData = JSON.parse(a[0]).data;
if (JSON.parse(a[0]).data) {
db.collection('MediaData').insert(apiData);
console.log("Records Inserted");
} else {
db.collection('Details').drop();
db.collection('Details').insert(JSON.parse(a[0]));
console.log("Records Inserted");
}
deferred.resolve(token);
});
return deferred.promise;
}
function getMedia(dataContext) {
var deferred = q.defer();
var cursor = dataContext[0];
var token = dataContext[1];
if (cursor !== null) {
console.log("Inside cursor not null");
cursor.forEach(function(data) {
insertDataInMongo(data);
})
}
return deferred.promise;
}
function check(array, attr, value) {
for (var i = 0; i < array.length; i += 1) {
if (array[i][attr] === value) {
return false;
}
}
return true;
}
function get_value(array, attr) {
for (var i = 0; i < array.length; i += 1) {
if (array[i].hasOwnProperty(attr)) {
return array[i][attr];
}
}
}
function getNames(token) {
var deferred = q.defer();
var MongoClient2 = require('mongodb').MongoClient;
var url = 'mongodb://localhost/myDB';
console.log("going to get Data");
MongoClient2.connect(url, function(err, db) {
if (err) return deferred.reject(new Error(err));
console.log("connected for select");
var data = db.collection('Details').find();
var dataContext = [data, token, 0, 0, 0, 0, 0, 0, 0, null];
deferred.resolve(dataContext);
});
return deferred.promise;
}
function convertDate(date) {
var yyyy = date.getFullYear().toString();
var mm = (date.getMonth() + 1).toString();
var dd = (date.getDate() - 3).toString();
var mmChars = mm.split('');
var ddChars = dd.split('');
return yyyy + '-' + (mmChars[1] ? mm : "0" + mmChars[0]) + '-' + (ddChars[1] ? dd : "0" + ddChars[0]);
}
authenticate()
.then(getData)
.then(insertDataInMongo)
.then(getNames)
.then(getMedia);
This should work. Let me know if you run into any issues.
function insertDataInMongo(a) {
var deferred = q.defer();
var MongoClient = require('mongodb').MongoClient;
var url = 'mongodb://localhost/myDB';
var token = a[1];
MongoClient.connect(url, function(err, db) {
if (err) return deferred.reject(new Error(err))
console.log("connected for insert");
var apiData = JSON.parse(a[0]).data;
if (JSON.parse(a[0]).data) {
db.collection('MediaData').insert(apiData, function(){
console.log("Records Inserted");
return deferred.resolve(token);
});
} else {
db.collection('Details').drop(function(error, result){//Callback function that executes after drop operation has completed.
if(error){
return deferred.reject(error);//Reject the promise if there was an error
}
db.collection('Details').insert(JSON.parse(a[0]), function(err, res){//Callback function that executes after insert operation has completed.
if(err){
return deferred.reject(err);//Reject the promise if there was an error
}
console.log("Records Inserted");
return deferred.resolve(token);
});
});
}
});
return deferred.promise;
}
From what I see in the Node.js driver API for MongoDB:
https://mongodb.github.io/node-mongodb-native/api-generated/collection.html
Hint: db.collection.drop is asynchronous, so you have to use a callback with it:
db.collection('Details').drop(function(err, result){
// Do anything AFTER you dropped your collection
});
But since you are using promises, you should use something like this:
authenticate()
.then(getData)
.then(db.collection('Details').drop)
.then(insertDataInMongo)
.then(getNames)
.then(getMedia);
Or if you really want to keep the same code format:
function dropCollectionInMongo() {
return db.collection('Details').drop(); // return the promise (no callback) so the chain waits for the drop
}
authenticate()
.then(getData)
.then(dropCollectionInMongo)
.then(insertDataInMongo)
.then(getNames)
.then(getMedia);
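One more detail to watch: dropCollectionInMongo has to return its promise so the chain actually waits, and whatever sits between getData and insertDataInMongo also has to pass getData's result along, or insertDataInMongo will receive undefined. A sketch of such a wrapper (assuming your driver version returns a promise from drop() when no callback is passed, and that you keep an open db handle in scope):
// Sketch: drop the collection, then hand getData's result through unchanged
// so the next .then (insertDataInMongo) still receives it.
function dropCollectionInMongo(dataFromApi) {
  return db.collection('Details').drop().then(function () {
    return dataFromApi;
  });
}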

NodeJS: Use Promises in Loop with Q

I have a problem with promises in a loop. The whole promise thing is completely new to me, so I am trying to learn it with very simple examples.
In my example, I have 2 text files on a server and I want to save the content of the text files into an array.
It works with a setTimeout, but this is not the solution that I want. Here is the example with setTimeout:
var http = require('http'),
Q = require('q');
var urls = ["http://localhost:8000/1.txt", "http://localhost:8000/2.txt"]
var txts = [];
function getData(url) {
http.get(url, function(res) {
var data = "";
res.on('data',function(chunk){
data+=chunk;
});
res.on('end',function(){
txts.push(data);
});
}).on('error',function(e){
console.log("Error Request: "+e.message);
})
}
function getTxts() {
for(a in urls) {
var url = urls[a];
getData(url);
}
// is not working
console.log(txts);
// is working
setTimeout(function() {
console.log(txts);
}, 1000)
}
getTxts();
I have now tried doing it with Q, but I am stuck. At some point I am heading in the wrong direction, but I can't see where.
var http = require('http'),
Q = require('q');
var urls = ["http://localhost:8000/1.txt", "http://localhost:8000/2.txt"]
var txts = [];
function getData(url) {
return Q.promise(function(respond,reject){
http.get(url, function(res) {
var data = "";
res.on('data',function(chunk){
data+=chunk;
});
res.on('end',function(){
txts.push(data);
});
}).on('error',function(e){
console.log("Error Request: "+e.message);
})
});
}
function getTxts() {
var promises = [];
for(a in urls) {
var url = urls[a];
var promise = getData(url);
promises.push(promise);
}
return promises;
}
function start() {
Q.fcall(function() {
getTxts();
}).then(function() {
console.log(txts);
})
}
start();
Thanks for your help!
You could just use regular promises for this:
var http = require('http');
var urls = ["http://localhost:8000/1.txt", "http://localhost:8000/2.txt"]
function getData(url) {
return new Promise(function(resolve, reject) {
http.get(url, function(res) {
var data = "";
res.on('data',function(chunk){
data+=chunk;
});
res.on('end',function(){
resolve(data);
});
}).on('error',function(err){
reject(err);
});
});
}
function getTxts() {
return Promise.all(
urls.map(function(url) {
return getData(url);
})
);
}
getTxts().then(function(texts) {
// "texts" is an array of the returned data
}).catch(function(err) {
// epic fail
});
The issue is that you are not resolving or rejecting the promise you created in the getData function:
function getData(url) {
return Q.promise(function(resolve,reject){
http.get(url, function(res) {
var data = "";
res.on('data',function(chunk){
data+=chunk;
});
res.on('end',function(){
txts.push(data);
resolve(); // resolve the promise when done
});
}).on('error',function(e){
console.log("Error Request: "+e.message);
reject(); // reject the promise if there is an error
})
});
}
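With getData resolving properly, you still have to wait for the whole batch before reading txts; since getTxts() returns an array of promises, Q.all can gather them (a sketch based on the code above):
// Sketch: wait for every promise returned by getTxts() before logging.
Q.all(getTxts()).then(function () {
  console.log(txts);   // every response has been pushed by now
}).catch(function (err) {
  console.log('Error Request: ' + err);
});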

Nice way to do recursion with ES6 promises?

Here's what I've got:
function nextAvailableFilename(path) {
return new Promise(function (resolve, reject) {
FileSystem.exists(path, function (exists) {
if (!exists) return resolve(path);
var ext = Path.extname(path);
var pathWithoutExt = path.slice(0, -ext.length);
var match = /\d+$/.exec(pathWithoutExt);
var number = 1;
if (match) {
number = parseInt(match[0]);
pathWithoutExt = pathWithoutExt.slice(0, -match[0].length);
}
++number;
nextAvailableFilename(pathWithoutExt + number + ext).then(function () {
return resolve.apply(undefined, arguments);
}, function () {
return reject.apply(undefined, arguments);
});
});
});
}
But I don't like that block at the end -- isn't there a way to 'replace' the current promise with the next one in the stack rather than having one promise resolve the next like I've done here?
Here's a version that uses promise chaining and file creation to avoid the race condition. I used the Bluebird promise library so I can use promises with the fs library, just to simplify the code and error handling:
var Promise = require('bluebird');
var fs = Promise.promisifyAll(require('fs'));
var path = require('path');
// Creates next available xxx/yyy/foo4.txt numeric sequenced file that does
// not yet exist. Returns the new filename in the promise
// Calling this function will create a new empty file.
function nextAvailableFilename(filename) {
return fs.openAsync(filename, "wx+").then(function(fd) {
return fs.closeAsync(fd).then(function() {
return filename;
});
}, function(err) {
if (err.code !== 'EEXIST') {
// unexpected file system error
// to avoid possible looping forever, we must bail
// and cause rejected promise to be returned
throw err;
}
// Previous file exists, so create a new filename
// xxx/yyy/foo4.txt becomes xxx/yyy/foo5.txt
var ext = path.extname(filename);
var filenameWithoutExt = filename.slice(0, -ext.length);
var number = 0;
var match = filenameWithoutExt.match(/\d+$/);
if (match) {
number = parseInt(match[0], 10);
filenameWithoutExt = filenameWithoutExt.slice(0, -match[0].length);
}
++number;
// call this function again, returning the promise
// which will cause it to chain onto previous promise
return nextAvailableFilename(filenameWithoutExt + number + ext);
});
}
I came up with a solution too that doesn't depend on Bluebird's promisify. It should also handle the case where file creation fails for a reason other than the file already existing.
function createFile(path) {
return new Promise(function (resolve, reject) {
FileSystem.open(path, 'wx', function (err, fd) {
if (err) return reject(err);
FileSystem.close(fd, function (err) {
if (err) return reject(err);
return resolve();
});
});
});
}
// todo: make more efficient by multiplying numbers by 2 or something like http://stackoverflow.com/a/1078898/65387
function nextFile(path) {
return createFile(path).then(function () {
return path;
}, function (err) {
if (err.code !== 'EEXIST') throw err; // error other than "file exists"
var ext = Path.extname(path);
var pathWithoutExt = path.slice(0, -ext.length);
var match = /\d+$/.exec(pathWithoutExt);
var number = 2;
if (match) {
number = parseInt(match[0]) + 1;
pathWithoutExt = pathWithoutExt.slice(0, -match[0].length);
}
return nextFile(pathWithoutExt + number + ext);
});
}
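Usage is then just a chained call (the path below is made up for illustration):
// Resolves with the first path that does not exist yet,
// e.g. './reports/summary.txt', then './reports/summary2.txt', and so on.
nextFile('./reports/summary.txt').then(function (path) {
  console.log('Writing to', path);
}, function (err) {
  console.error('Could not reserve a filename:', err);
});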

node.js async function in loop?

I am having some problems with node.js. What I'm trying to do is get an array of the directories in "./"+req.user.email and loop through them, finding out their size and adding a table row to the output, as you can see in the code. At the end I want to send all the table rows using res.send().
However, the only output I am getting is:
<tr></tr>
for each file in the array. It seems that the forEach function is not waiting for readSizeRecursive at all. The readSizeRecursive function is asynchronous, and I believe that is what's causing the problem, but I don't know how I can fix this.
Any help would be greatly appreciated; I have included the readSizeRecursive function too. Thank you!
var output = "";
fs.readdir("./" + req.user.email, function (err, files) {
files.forEach(function(file){
output += "<tr>";
readSizeRecursive("./"+req.user.email+"/"+file, function (err, total){
output += '<td>' + file + '</td><td>' + total + '</td>';
});
output += "</tr>"
});
res.send(output)
});
readSizeRecursive() :
// Function to find the size of a directory
function readSizeRecursive(item, cb) {
fs.lstat(item, function(err, stats) {
var total = stats.size;
if (!err && stats.isDirectory()) {
fs.readdir(item, function(err, list) {
async.forEach(
list,
function(diritem, callback) {
readSizeRecursive(path.join(item, diritem), function(err, size) {
total += size;
callback(err);
});
},
function(err) {
cb(err, total);
}
);
});
}
else {
cb(err, total);
}
});
}
Please use the async module for this kind of pattern. Using async.each will allow you to compute the size for each folder asynchronously, and then return the sizes once you're done computing everything individually.
var output = [];
fs.readdir('./' + req.user.email, function (err, files) {
async.each(files, compute, report);
});
function compute (file, done) {
// calculate size, then callback to signal completion
// produce a result like below, then invoke done()
var obj = { files: [
{ name: file, size: size },
{ name: file, size: size },
{ name: file, size: size }
]};
output.push(obj);
done();
}
// doesn't need to be this awful
function format (list) {
var result = [];
list.forEach(function (item) {
var description = item.files.map(function (file) {
return util.format('<td>%s</td><td>%s</td>', file.name, file.size);
});
result.push(description);
});
result.unshift('<tr>');
result.push('</tr>');
return result.join('</tr><tr>');
}
function report (err) {
if (err) { return next(err); }
var result = format(output);
res.send(result);
}
This way you can easily swap out the different pieces of functionality, changing the formatting without altering the computing of the file size tree, for example.
Your main issue was control flow. You return with res.send while you are asynchronously looping and figuring out the sizes.
var fs = require ("fs");
var createTableContent = function (p, cb){
var read = function (p, cb){
//Prevent recursion if error
if (err) return cb ();
fs.stat (p, function (error, stats){
if (error){
err = error;
return cb ();
}
if (stats.isDirectory ()){
var dirSize = 0;
fs.readdir (p, function (error, entries){
if (error){
err = error;
return cb ();
}
var pending = entries.length;
//Empty dir
if (!pending) return cb (0);
entries.forEach (function (entry){
read (p + "/" + entry, function (entrySize){
dirSize += entrySize;
if (!--pending) return cb (dirSize);
});
});
});
}else{
cb (stats.size);
}
});
};
//A lot of errors can be produced, return only the first one
var err = null;
//Suppose p is a dir
fs.readdir (p, function (error, entries){
if (error) return cb (error);
var content = "";
var pending = entries.length;
if (!pending) return cb (null, content);
entries.forEach (function (entry){
read (p + "/" + entry, function (totalSize){
if (err) return cb (err);
content += "<tr><td>" + entry + "</td><td>" + totalSize + "</td></tr>";
if (!--pending){
//End
cb (null, content);
}
});
});
});
};
//Here goes the "email" path
createTableContent (".", function (error, content){
if (error) return console.error (error);
console.log (content);
});
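For completeness, the same control flow can also be expressed with plain promises instead of manual counters, reusing the readSizeRecursive from the question (a sketch, assuming an Express-style res):
// Sketch: wrap each readSizeRecursive call in a promise, compute all rows in
// parallel with Promise.all, and only call res.send once every row is ready.
fs.readdir('./' + req.user.email, function (err, files) {
  if (err) return res.status(500).send(err.message);
  var rows = files.map(function (file) {
    return new Promise(function (resolve, reject) {
      readSizeRecursive('./' + req.user.email + '/' + file, function (err, total) {
        if (err) return reject(err);
        resolve('<tr><td>' + file + '</td><td>' + total + '</td></tr>');
      });
    });
  });
  Promise.all(rows).then(function (allRows) {
    res.send(allRows.join(''));
  }, function (err) {
    res.status(500).send(err.message);
  });
});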
