Passing a variable to an asynchronous function (promise) in javascript

I have the following code which runs in a loop:
var index = fileNames[x].lastIndexOf("/") + 1;
var currentImageName = fileNames[x].substr(index);
if (currentImageName.indexOf(".jpg") != -1) {
    reader.getFileAsBlob(fileNames[x])
        .done(function(blob) {
            picturesFilePathArray.push({
                fileName: currentImageName,
                fileURL: blobURL(blob)
            });
            refreshKMZList();
        });
}
The problem I am having is that I am trying to save an object with two properties into an array. This object should have the identifier and the result (fileName and fileURL respectively). But since this function is asynchronous (executed through a promise), by the time getFileAsBlob finishes, currentImageName has already been updated, ultimately ending in many of my objects having the same identifier (the last one processed before it finished).
This might be a really easy problem, but I am very new to JavaScript and haven't found anything about it yet.
I thought that the solution might be in passing the variable to the "done" function, but I think this function is the one being returned by the method and is already set. (I don't know how it looks.)
Edit:
The code is just inside a normal loop
for (x = 0; x < fileNames.length; x++)

So create a function so the variable cannot be changed:
function getFile(fileName, imageName) {
    reader.getFileAsBlob(fileName)
        .done(function(blob) {
            picturesFilePathArray.push({
                fileName: imageName,
                fileURL: blobURL(blob)
            });
            refreshKMZList();
        });
}
and call it
if (currentImageName.indexOf(".jpg") != -1) {
    getFile(fileNames[x], currentImageName);
}
or you can do something like
if (currentImageName.indexOf(".jpg") != -1) {
    (function (fileName, imageName) {
        reader.getFileAsBlob(fileName)
            .done(function(blob) {
                picturesFilePathArray.push({
                    fileName: imageName,
                    fileURL: blobURL(blob)
                });
                refreshKMZList();
            });
    })(fileNames[x], currentImageName);
}
MDN Closure
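As a side note, in environments that support ES2015 you can get the same effect without an explicit wrapper function: declaring the loop variable with let (and the derived name with const) gives each iteration its own binding, so every callback closes over its own value. A minimal sketch along the lines of the question's loop:
for (let x = 0; x < fileNames.length; x++) {
    // `currentImageName` is block-scoped, so each .done callback sees its own value
    const currentImageName = fileNames[x].substr(fileNames[x].lastIndexOf("/") + 1);
    if (currentImageName.indexOf(".jpg") !== -1) {
        reader.getFileAsBlob(fileNames[x]).done(function (blob) {
            picturesFilePathArray.push({
                fileName: currentImageName, // correct per-iteration name
                fileURL: blobURL(blob)
            });
            refreshKMZList();
        });
    }
}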

The solution to this problem is always the same: Use a closure.
But since you are using a promise based library, you have a nicer option. Use promises. (Internally this is based on closures as well, of course. It's just a much nicer abstraction.)
function getFileInfo(path) {
    return reader.getFileAsBlob(path).then(function (blob) {
        return {
            fileName: path.split('/').pop(),
            fileURL: blobURL(blob)
        };
    });
}
function isJpg(filename) {
    return /\.jpg$/i.test(filename);
}
Now you can do this, where refreshKMZList() is called once per file:
fileNames.filter(isJpg).forEach(function (path) {
    getFileInfo(path).then(function (fileInfo) {
        picturesFilePathArray.push(fileInfo);
        refreshKMZList();
    })
    .catch(function (error) {
        // handle the error
    });
});
or even this, where refreshKMZList() is called only once overall:
var fileInfos = fileNames.filter(isJpg).map(getFileInfo);
Promise.all(fileInfos).then(function (arrayOfFileInfo) {
    picturesFilePathArray = picturesFilePathArray.concat(arrayOfFileInfo);
    refreshKMZList();
})
.catch(function (error) {
    // handle the error
});
Read up on promises; they are worth understanding.

Related

Chrome Extension | Is there any way to make chrome.storage.local.get() return something?

In my Chrome extension I need to use chrome storage. In my background script, I first create an object and add it to chrome storage, and then I want to get my object from there and have it returned. Something like this:
...
var obj = {};
chrome.storage.local.set(obj, function () { });
...
var data = getData(obj); // I want my object to be returned here
var returnedData = null;
function getData(obj) {
    chrome.storage.local.get(obj, function(result) {
        returnedData = result; // here it works, I can do something with my object
    });
    return returnedData; // here it doesn't work
}
As far as I understood from here, chrome.storage.local.get is asynchronous, with all its consequences. But is there any way to get something from chrome storage and have it returned? I mean, maybe I should wrap chrome.storage.local.get in another function or so?
Many thanks in advance!
If you want to stay away from global variables and you're okay with modern browser requirements, then you can implement a native JavaScript Promise object. For example, here's a function that returns the stored data for a single given key:
function getData(sKey) {
    return new Promise(function(resolve, reject) {
        chrome.storage.local.get(sKey, function(items) {
            if (chrome.runtime.lastError) {
                console.error(chrome.runtime.lastError.message);
                reject(chrome.runtime.lastError.message);
            } else {
                resolve(items[sKey]);
            }
        });
    });
}
// Sample usage given this data:
// { foo: 'bar' }
getData('foo').then(function(item) {
    // Logs "bar"
    console.log(item);
});
If you need support for IE11 and below, then you'll have to turn to a library like jQuery.
No, it's not possible, but there are several ways around this problem:
- Do everything you want to do with the data returned from .get() inside the callback (or start it from there using function calls). This is what #wernersbacher posted.
- Take a look at deferreds (jQuery or Q libraries). A deferred's promise can be returned from getData. Inside the .get() callback, you can resolve the deferred. Outside of getData you can use .then() to do something after the deferred has resolved.
Something like this
function getData(obj) {
    var deferred = $.Deferred();
    chrome.storage.local.get(obj, function(result) {
        deferred.resolve(result);
    });
    return deferred.promise();
}
$.when(getData(obj)).then(function(data) {
    // data has the value of result now
});
You have to do it like that:
var returnedData = null;
function setData(value) {
    returnedData = value;
}
function getData(obj) {
    chrome.storage.local.get(obj, function(result) {
        setData(result); // here it works, I can do something with my object
    });
    return; // here it doesn't work
}
...because you tried to return a value which had not been read from storage yet, so it was null.
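To make the ordering visible, here is a minimal sketch (with a hypothetical key name) that logs when each part runs; the line after the get() call executes before the callback does, which is exactly why the early return still sees null:
// Hypothetical key: the synchronous log runs first, the callback's log runs
// later, once the storage read has completed.
chrome.storage.local.get('someKey', function (result) {
    console.log('inside callback:', result); // runs second
});
console.log('after get()');                  // runs first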
Update with Manifest V3:
chrome.storage.local.get() now returns a promise that you can chain or await in an async function.
const storageCache = { count: 0 };
// Asynchronously retrieve data from storage.local, then cache it.
const initStorageCache = chrome.storage.local.get().then((items) => {
    // Copy the data retrieved from storage into storageCache.
    Object.assign(storageCache, items);
});
Note: You must omit the callback parameter to get the promise.
Reference: https://developer.chrome.com/docs/extensions/reference/storage/#:~:text=to%20callback.-,get,-function
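For the await form mentioned above, a minimal sketch (with a hypothetical key name) inside an async function could look like this:
// Manifest V3: with no callback passed, get() returns a promise we can await.
async function readCount() {
    const items = await chrome.storage.local.get('count');
    return items.count;
}
readCount().then((count) => console.log('count is', count));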
You need to handle it with callback functions. Here are two examples: a single function is used to set, but a separate callback function handles each "on complete". You could easily modify your callbacks to pass additional parameters through to perform your needed task.
function setLocalStorage(key, val) {
    var obj = {};
    obj[key] = val;
    chrome.storage.local.set(obj, function() {
        console.log('Set: ' + key + '=' + obj[key]);
    });
}
function getLocalStorage(key, callback) {
    chrome.storage.local.get(key, function(items) {
        callback(key, items[key]);
    });
}
setLocalStorage('myFirstKeyName', 'My Keys Value Is FIRST!');
setLocalStorage('mySecondKeyName', 'My Keys Value Is SECOND!');
getLocalStorage('myFirstKeyName', CallbackA);
getLocalStorage('mySecondKeyName', CallbackB);
// Here are a couple of example callback
// functions that get executed on the
// key/val being retrieved.
function CallbackA(key, val) {
    console.log('Fired In CallbackA: ' + key + '=' + val);
}
function CallbackB(key, val) {
    console.log('Fired In CallbackB: ' + key + '=' + val);
}

Callback without parameter in javascript

I have some Node.js code that has a callback and I couldn't understand how it works. Can someone explain it?
function readJSONIntoArray(directory, array, callback)
{
    var ending = 'json';
    fs.readdir(directory, function (err, files)
    {
        if (err)
            throw err;
        var fileCnt = files.length;
        files.forEach(function (file)
        {
            if (endsWith(file, '.' + ending))
            {
                file = file.substring(0, file.length - (ending.length + 1));
                var fileContent = require(path.join(directory, file));
                array.push(fileContent);
                log.info('Read file: ' + file);
            }
            fileCnt--;
            if (fileCnt === 0 && typeof callback === 'function')
            {
                callback();
            }
        });
    });
}
Here the callback is empty, so I guess no value is being returned. But in the actual output the array is returned. I can't understand how an empty callback can return an array.
Function call: readJSONIntoArray(profilefolder, profiles, setProfileDescriptions);
The definition of setProfileDescriptions is separate:
function setProfileDescriptions()
{
    profiles = bubblesort(profiles, 'order');
}
Inside setProfileDescriptions, the profiles array is populated with the JSON data from the files read in the read function.
Can someone explain how the third argument in the readJSONIntoArray function call is recognized as a function and how the profiles array gets returned?
You're right that readJSONIntoArray doesn't return anything in its callback. Instead it appends new data to the second argument, array, thus mutating it.
So, readJSONIntoArray was meant to be used in the following way:
var content = []; // empty array to accumulate data from readJSONIntoArray function
readJSONIntoArray('some directory', content, function () {
    // content is full of data now
    doSomething(content);
});
Though I must point out that this is not a common pattern in Node.js, and that it should be avoided because it's too confusing.
In fact, there are several things in the readJSONIntoArray implementation that were done wrong (see the sketch below):
- functions should never mutate their arguments;
- async functions should not throw errors; they should pass them to the callback instead;
- any data produced by the function should also be returned through the callback.
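A minimal sketch of the same function rewritten along those lines, using Node's error-first callback convention (the name readJSONIntoArray is reused from the question; synchronous file reads keep the sketch short):
var fs = require('fs');
var path = require('path');

// Errors and data both flow through the callback; no argument is mutated.
function readJSONIntoArray(directory, callback) {
    fs.readdir(directory, function (err, files) {
        if (err) return callback(err); // hand errors to the caller instead of throwing
        var results = files
            .filter(function (file) { return path.extname(file) === '.json'; })
            .map(function (file) {
                // a fully async version would track pending reads before calling back
                return JSON.parse(fs.readFileSync(path.join(directory, file), 'utf8'));
            });
        callback(null, results); // data is handed back through the callback
    });
}

// Usage: the accumulated array arrives as the second callback argument.
readJSONIntoArray('./profiles', function (err, profiles) {
    if (err) throw err;
    console.log(profiles.length + ' profiles loaded');
});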
var globalArray = [];
function readFunction(path, globalArray, callbackFunction) {
    globalArray.push(path);
    callbackFunction();
}
function callbackFunction() {
    // globalArray is declared globally, so I can access it here
    console.log(globalArray);
}
readFunction('filePath', globalArray, callbackFunction);
Consider the code above: because globalArray is declared as a global, I can access it inside the callback function.

Avoiding callback hell in nodeJs / Passing variables to inner functions

Here's an example of something I'd like to simplify:
exports.generateUrl = function (req, res) {
    var id = req.query.someParameter;
    var query = MyMongooseModel.findOne({'id': id});
    query.exec(function (err, mongooseModel) {
        if (err) {
            //deal with it
        }
        if (!mongooseModel) {
            generateUrl(Id,
                function (err, text, url) {
                    if (err) {
                        res.status(HttpStatus.INTERNAL_SERVER_ERROR).send(err);
                        return;
                    }
                    var newMongooseModel = new AnotherMongooseModel();
                    newMongooseModel.id = id;
                    newMongooseModel.save(function (err) {
                        if (err) {
                            res.status(HttpStatus.INTERNAL_SERVER_ERROR).send(err);
                        } else {
                            res.send({url: url, text: text});
                        }
                    });
                });
        } else {
            //deal with already exists
        }
    });
};
I've seen other SO answers where they tell you to use named functions, but they don't say how to deal with the variables you want to pass in, or they use jQuery's queue. I do not have the luxury of either.
I understand that I can replace my anonymous functions with named functions, but then I would need to pass around variables. How would my inner function access res, for instance, if the function is defined elsewhere?
The core to your question is:
I understand that I can replace my anonymous functions with named functions, but then I would need to pass around variables. How would my inner function access res, for instance, if the function is defined elsewhere?
The answer is to use a function factory.
In general, this:
function x (a) {
    do_something(function(){
        process(a);
    });
}
can be converted to this:
function x (a) {
    do_something(y_maker(a)); // notice we're calling y_maker,
                              // not passing it in as callback
}
function y_maker (b) {
    return function () {
        process(b);
    };
}
In the code above, y_maker is a function that generates a function (let's call that function's purpose "y"). In my own code, I use the naming convention .._maker or generate_.. to denote that I'm calling a function factory. But that's just me and the convention is in no way standard or widely adopted in the wild.
So for your code you can refactor it to:
exports.generateUrl = function (req, res) {
    var id = req.query.someParameter;
    var query = MyMongooseModel.findOne({'id': id});
    query.exec(make_queryHandler(req, res, id));
};
function make_queryHandler (req, res, id) {
    return function (err, mongooseModel) {
        if (err) {
            //deal with it
        } else if (!mongooseModel) {
            generateUrl(Id, make_urlGeneratorHandler(req, res, id));
        } else {
            //deal with already exists
        }
    };
}
function make_urlGeneratorHandler (req, res, id) {
    return function (err, text, url) {
        if (err) {
            res.status(HttpStatus.INTERNAL_SERVER_ERROR).send(err);
            return;
        }
        var newMongooseModel = new AnotherMongooseModel();
        newMongooseModel.id = id;
        newMongooseModel.save(make_modelSaveHandler(res, text, url));
    };
}
function make_modelSaveHandler (res, text, url) {
    return function (err) {
        if (err) res.status(HttpStatus.INTERNAL_SERVER_ERROR).send(err);
        else res.send({url: url, text: text});
    };
}
This flattens out the nested callbacks. As an additional benefit, you get to properly name what each function is supposed to do, which I consider good practice.
It also has the added advantage that it is significantly faster than using anonymous callbacks (whether with nested callbacks or with promises; if you pass named functions to promise.then() instead of anonymous functions, you get the same speed-up). A previous SO question (my google-fu is failing me today) found that named functions are noticeably faster than anonymous functions in node.js (more than twice as fast; if I remember correctly, more than five times faster).
Use promises. Using Q and mongoose-q it would give something like this:
exports.generateUrl = function (req, res) {
    var id = req.query.someParameter;
    var text = "";
    var query = MyMongooseModel.findOne({'id': id});
    query.execQ().then(function (mongooseModel) {
        if (!mongooseModel) {
            return generateUrl(Id);
        }
    }).then(function (generatedText) {
        var newMongooseModel = new AnotherMongooseModel();
        newMongooseModel.id = id;
        text = generatedText;
        return newMongooseModel.saveQ();
    }).then(function (url) {
        res.send({url: url, text: text});
    }).fail(function (err) {
        res.status(HttpStatus.INTERNAL_SERVER_ERROR).send(err);
    });
};
Named functions will be executed within the same scope that the anonymous functions are in and would have access to all of the variables you are currently using. This approach would make your code less nested and more readable (which is good) but would still technically be in "callback hell". The best way to avoid situations like this is to wrap your asynchronous libraries (assuming they don't already provide promises) with a promise library like Q. IMO, promises provide a much clearer picture of the execution path.
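As an illustration of that wrapping approach, here is a minimal sketch using Q's denodeify() on a node-style callback API (fs.readFile is just a stand-in example, not part of the question's code):
var Q = require('q');
var fs = require('fs');

// Q.denodeify turns an (err, result) callback API into a promise-returning one.
var readFile = Q.denodeify(fs.readFile);

readFile('config.json', 'utf8')
    .then(function (contents) {
        return JSON.parse(contents);
    })
    .then(function (config) {
        console.log('loaded config:', config);
    })
    .fail(function (err) {
        console.error(err);
    });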
You can avoid the predicament of not knowing where variables came from by binding parameters to your named function using bind, for instance:
function handleRequest(res, id, err, text, url) {
    if (err) {
        res.status(HttpStatus.INTERNAL_SERVER_ERROR).send(err);
        return;
    }
    var newMongooseModel = new AnotherMongooseModel();
    newMongooseModel.id = id;
    newMongooseModel.save(function (err) {
        if (err) {
            res.status(HttpStatus.INTERNAL_SERVER_ERROR).send(err);
        } else {
            res.send({url: url, text: text});
        }
    });
}
...
generateUrl(Id, handleRequest.bind(null, res, id));

Why is this named anonymous js function working before it is defined?

I have a simple Gulpfile with a task defined. There is a named anonymous function (a function expression assigned to a variable) that is defined after the Gulp task. The task uses this function and works, when I would expect to get undefined is not a function, but I don't. Here is the code:
gulp.task('bower', function() {
    var bowerDir = 'bower_components';
    fs.readdir(bowerDir, function(err, dirs) {
        _.each(dirs, function(dir) {
            var directory = dir;
            fs.stat(path.join(bowerDir, dir), function(err, stats) {
                if (stats.isDirectory()) {
                    listing('bower_components');
                }
            });
        });
    });
});
var listing = function(dir) {
    console.log(dir);
};
Can someone please explain why this works?
gulp.task(), fs.readdir() and fs.stat() are all asynchronous functions. They call their callback functions some time LATER, not immediately. That means the code below them that defines listing gets a chance to run BEFORE the callbacks are actually called. So listing is defined before it is actually used.
I wouldn't suggest this as a good coding practice myself, because you are relying on the timing of things.
If, instead, you defined your listing function like this:
function listing(dir) {
    console.log(dir);
}
then you would not have a dependency on timing, because function declarations like this are parsed first and hoisted to the top of the scope they are defined in, and are thus always available in that scope regardless of timing.
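A minimal, self-contained sketch of that difference (names made up for illustration):
// A function declaration is hoisted together with its body, so this call works:
hoistedDeclaration();                  // logs "declaration"
function hoistedDeclaration() {
    console.log('declaration');
}

// A var-assigned function expression hoists only the variable name,
// so calling it before the assignment throws a TypeError:
try {
    notAssignedYet();                  // TypeError: notAssignedYet is not a function
} catch (e) {
    console.log(e.message);
}
var notAssignedYet = function () {
    console.log('expression');
};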
FYI, if you really want to show this to yourself, you can add this logging to see the actual timing and sequence of things:
function logTime(msg) {
    console.log((new Date()).getTime() + ": " + msg);
}
logTime("start");
gulp.task('bower', function() {
    var bowerDir = 'bower_components';
    fs.readdir(bowerDir, function(err, dirs) {
        _.each(dirs, function(dir) {
            var directory = dir;
            fs.stat(path.join(bowerDir, dir), function(err, stats) {
                if (stats.isDirectory()) {
                    logTime("about to call listing()");
                    listing('bower_components');
                }
            });
        });
    });
});
logTime("about to define listing");
var listing = function(dir) {
    logTime("listing() called");
    console.log(dir);
};
Because that anonymous function is a callback function and is most likely called after the initialization of the listing function.

Windows 8 Javascript asynchronous programming

I want to iterate through a list and rename a few files in Windows 8 / Javascript. Therefore, I wrote a function called "renameFile" and I call this function within a loop, like this:
list.forEach(function (value, index, array) {
    var filename = index;
    var newfilename = index + 1;
    renameFile(filename, newfilename);
});
function renameFile(filename, newfilename) {
    Windows.Storage.ApplicationData.current.localFolder.getFileAsync(filename).then(function (sampleFile) {
        sampleFile.renameAsync(newfilename).done(
            function complete(result) {
            },
            function error(error) {
                console.log("error" + error);
            }
        );
    });
}
The problem is: the rename function is asynchronous, and it seems that sometimes the renaming works and sometimes it doesn't: in most cases, only the first element of the list is renamed. How can I tell the loop to wait until the rename process of the first item in the list has finished and then go on with the second, third, ... item?
Cheers
Well, I did a quick lookup and you're out of luck. It seems like the StorageFolder class only has async functions for what you're looking for. But it's not the end of the world.
The easiest solution I have is to use a recursive function and call it again with the next index from within the completion callback.
function renameFile(filename, newfilename, list, index) {
    Windows.Storage.ApplicationData.current.localFolder.getFileAsync(filename).then(function (sampleFile) {
        sampleFile.renameAsync(newfilename).done(
            function complete(result) {
                renameList(list, index);
            },
            function error(error) {
                console.log("error" + error);
            }
        );
    });
}

function renameList(list, index) {
    if (index >= list.length) return;
    renameFile(index, index + 1, list, index + 1);
}

renameList(list, 0);
It's not very clean, but it should work: it forces the renames to run one after another, since each call is started from within the previous rename's completion callback.
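Another option, sketched here on the assumption that WinJS promises are available, is to chain the rename promises so each one starts only after the previous one has completed (same index-based file names as in the question):
// Return the rename promise instead of consuming it with .done(),
// then chain the renames with reduce().
function renameFileAsync(filename, newfilename) {
    var localFolder = Windows.Storage.ApplicationData.current.localFolder;
    return localFolder.getFileAsync(filename).then(function (sampleFile) {
        return sampleFile.renameAsync(newfilename);
    });
}

list.reduce(function (previous, value, index) {
    // start the next rename only after the previous one has finished
    return previous.then(function () {
        return renameFileAsync(index, index + 1);
    });
}, WinJS.Promise.wrap())
.done(function () {
    // all renames finished, in order
}, function (error) {
    console.log("error" + error);
});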
