I have a file containing JSON data that I want to read. I found some code for reading a file at neontribe.co.uk, but I can't seem to get the console to wait for the load to complete.
function onDeviceReady(){
    window.resolveLocalFileSystemURL(cordova.file.externalDataDirectory, function(fs) {
        var directoryReader = fs.createReader();
        directoryReader.readEntries(function(entries) {
            var i;
            for (i = 0; i < entries.length; i++) {
                document.addEventListener('deviceready', onDeviceReady2, false);
                function onDeviceReady2() {
                    function readFromFile(fileName) {
                        var pathToFile = cordova.file.externalDataDirectory + fileName;
                        window.resolveLocalFileSystemURL(pathToFile, function (fileEntry) {
                            fileEntry.file(function (file) {
                                var reader = new FileReader();
                                reader.onloadend = function (e) {
                                    console.log("Inside Load" + JSON.parse(this.result));
                                    return JSON.parse(this.result);
                                };
                                reader.readAsText(file);
                            }, errorHandler.bind(null, fileName));
                        }, errorHandler.bind(null, fileName));
                    }
                    function some_function(callback) {
                        console.log("inside function1");
                        var data = readFromFile(entries[i].name);
                        callback(data);
                    }
                    some_function(function(data) {
                        console.log("data!" + data);
                        console.log("Data String!" + JSON.stringify(data));
                    });
                    // var obj = JSON.parse(data);
                    // console.log("arresteeFirst!" + data.arresteeFirst);
                    // console.log("data!" + data);
                }
                console.log(entries[i].name);
            }
        }, function (error) {
            alert(error.code);
        });
    }, function (error) {
        alert(error.code);
    });
}
When I run it I get this output in the console:
inside function1
Data!undefined
Data String!undefined
1452034357845.json
Inside Load[object Object]
So it looks like it goes into some_function and prints "inside function1". But then it does not wait for the load to pass back its result; it prints the two lines from the callback function straight away. It then prints the filename (at the end of the loop) and finally prints the console message from within the load handler. According to the console I am returning an object, so data should not be undefined.
There are a few bits wrong with the code, mainly because it's written without taking the asynchronous calls into account and assumes that everything happens synchronously.
The callback(data) within some_function(callback) is in fact called after you called var data = readFromFile(entries[i].name);. But two issues arise here.
function readFromFile(fileName) doesn't return any data (actually, it doesn't return anything at all);
reader.readAsText(file) is asynchronous. In a nutshell, that means your code keeps running (in your case, printing those messages) and the reader.onloadend callback is only called once the data is fully loaded, which is too late for you, as your messages have already been printed (see the short sketch below).
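To illustrate the ordering (a minimal sketch, not the exact code from the question):

var reader = new FileReader();
reader.onloadend = function () {
    console.log("2: onloadend fires later, once the file has been read");
};
reader.readAsText(file);            // kicks off the read and returns immediately
console.log("1: this line runs first");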
There are several ways to fix this code, a good way is to use Promises. I like Bluebird for promises personally.
With promises, the solution would look something like this (pseudo-code):
function readFromFile(fileName) {
    // Return the promise immediately; resolve it once the file has been read
    return new Promise(function (resolve, reject) {
        var pathToFile = cordova.file.externalDataDirectory + fileName;
        window.resolveLocalFileSystemURL(pathToFile, function (fileEntry) {
            fileEntry.file(function (file) {
                var reader = new FileReader();
                reader.onloadend = function (e) {
                    console.log("Share the results... " + JSON.parse(this.result));
                    resolve(JSON.parse(this.result));
                };
                reader.readAsText(file);
            }, errorHandler.bind(null, fileName));
        }, errorHandler.bind(null, fileName));
    });
}
function some_function(callback) {
    console.log("inside function1");
    var promise = readFromFile(entries[i].name);
    promise.then(callback); // Will be called when the promise is resolved
}
some_function(console.log);
Promises allow you to conceptually return a "contract" that the value will be fetched; all functions waiting for that value (.then) will be called once the data is received and the promise is resolved (you can also reject a promise if it fails). (Sorry about the brief explanation, but there is plenty of better documentation about this all over the internet; I recommend taking a look at it.)
I hope this helps. There are other ways of getting around this problem, but I believe promises might be the most elegant one without getting into generators.
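As a side note, if the WebView supports ES2017, the same promise-returning readFromFile can also be consumed with async/await (a sketch, assuming the version above; printFile is just an illustrative name):

async function printFile(fileName) {
    console.log("inside function1");
    var data = await printFile === undefined ? undefined : await readFromFile(fileName); // pauses until the promise resolves
    console.log("data!" + JSON.stringify(data));
}
printFile("1452034357845.json");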
Related
Let's say this is my code (just a sample I wrote up to show the idea):
var extract = require("./postextract.js");
var rescore = require("./standardaddress.js");

RunFunc();

function RunFunc() {
    extract.Start();
    console.log("Extraction complete");
    rescore.Start();
    console.log("Scoring complete");
}
And I want rescore.Start() not to run until the entire extract.Start() has finished. Both scripts contain a spiderweb of functions inside them, so putting a callback directly into the Start() function does not appear viable, as the final function won't return it, and I am having a lot of trouble understanding how to use Promises. What are ways I can make this work?
These are the scripts that extract.Start() begins and ends with. OpenWriter() is reached through multiple other functions and streams, with the actual fileWrite.write() being in another script that's attached to this (although that is not needed to detect the end of the run). Currently, fileWrite.on('finish') is where I want the script to be considered done.
module.exports = {
    Start: function CodeFileRead() {
        //this.country = countryIn;
        //Read stream of the address components
        fs.createReadStream("Reference\\" + postValid.country + " ADDRESS REF DATA.csv")
            //Change separator based on file
            .pipe(csv({escape: null, headers: false, separator: delim}))
            //Indicate start of reading
            .on('resume', (data) => console.log("Reading complete postal code file..."))
            //Processes lines of data into storage array for comparison
            .on('data', (data) => {
                postValid.addProper[data[1]] = JSON.stringify(Object.values(data)).replace(/"/g, '').split(',').join('*');
            })
            //End of reading file
            .on('end', () => {
                postValid.complete = true;
                console.log("Done reading");
                //Launch main script, delayed to here in order to not read ahead of this stream
                ThisFunc();
            });
    },
    extractDone
}
function OpenWriter() {
    //File stream for writing the processed chunks into a new file
    fileWrite = fs.createWriteStream("Processed\\" + fileName.split('.')[0] + "_processed." + fileName.split('.')[1]);
    fileWrite.on('open', () => console.log("File write is open"));
    fileWrite.on('finish', () => {
        console.log("File write is closed");
    });
}
EDIT: I do not want to simply add the next script onto the end of the previous one and forego the master file, as I don't know how long it will be and it's supposed to be designed to be capable of taking additional scripts past our development period. I cannot just use a package as it stands, because approval time in the company takes up to two weeks and I need this more immediately.
DOUBLE EDIT: This is all my code; every script and function is written by me, so I can make the scripts being called do what's needed.
You can just wrap your function in a Promise and return that.
module.exports = {
    Start: function CodeFileRead() {
        return new Promise((resolve, reject) => {
            fs.createReadStream(
                'Reference\\' + postValid.country + ' ADDRESS REF DATA.csv'
            )
                // .......some code...
                .on('end', () => {
                    postValid.complete = true;
                    console.log('Done reading');
                    resolve('success');
                });
        });
    }
};
And run RunFunc like this:
async function RunFunc() {
    await extract.Start();
    console.log("Extraction complete");
    await rescore.Start();
    console.log("Scoring complete");
}

//or IIFE
RunFunc().then(() => {
    console.log("All Complete");
});
Note: you can/should also handle errors by calling reject("some error") when an error occurs.
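For example, the error path could be wired up like this (a sketch, reusing the names from above):

// Inside the Promise executor, next to the 'end' handler:
//     .on('error', (err) => reject(err))
// The awaiting caller can then catch it:
async function RunFunc() {
    try {
        await extract.Start();
        console.log("Extraction complete");
    } catch (err) {
        console.error("Extraction failed:", err);
    }
}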
EDIT After knowing about ThisFunc():
Making a new event emitter will probably be the easiest solution:
eventEmitter.js
const EventEmitter = require('events').EventEmitter
module.exports = new EventEmitter()
const eventEmitter = require('./eventEmitter');

module.exports = {
    Start: function CodeFileRead() {
        return new Promise((resolve, reject) => {
            //after all of your code
            eventEmitter.once('WORK_DONE', () => {
                resolve("Done");
            });
        });
    }
};
function OpenWriter() {
    ...
    fileWrite.on('finish', () => {
        console.log("File write is closed");
        eventEmitter.emit("WORK_DONE");
    });
}
And run RunFunc as before.
There's no generic way to determine when everything a function call does has finished.
It might accept a callback. It might return a promise. It might not provide any kind of method to determine when it is done. It might have side effects that you could monitor by polling.
You need to read the documentation and/or source code for that particular function.
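To make that concrete, the usual possibilities look roughly like this (a generic sketch around a hypothetical doWork function):

// 1. It accepts a callback: it tells you when it is done
doWork(function (err, result) {
    if (err) return console.error(err);
    console.log("done", result);
});

// 2. It returns a promise: you wait on that
doWork().then(result => console.log("done", result));

// 3. It gives you no signal: poll for a side effect you can observe
var fs = require('fs');
var timer = setInterval(function () {
    if (fs.existsSync("output.csv")) {   // hypothetical side effect
        clearInterval(timer);
        console.log("done");
    }
}, 500);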
Use async/await (promises), example:
var extract = require("./postextract.js");
var rescore = require("./standardaddress.js");

RunFunc();

async function extract_start() {
    try {
        await extract.Start(); // Start() must return a Promise for this to actually wait
    }
    catch (e) {
        console.log(e);
    }
}

async function rescore_start() {
    try {
        await rescore.Start();
    }
    catch (e) {
        console.log(e);
    }
}

async function RunFunc() {
    await extract_start();
    console.log("Extraction complete");
    await rescore_start();
    console.log("Scoring complete");
}
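Note that await only actually waits if Start() returns a Promise; if it doesn't yet, it needs a wrapper along the lines of the earlier answer, for example:

Start: function CodeFileRead() {
    return new Promise((resolve, reject) => {
        // kick off the stream here,
        // call resolve() in the 'end' handler and reject(err) in an 'error' handler
    });
}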
I have a callback which waits for a response from an HTTP request, which responds with the word "done" if a file is successfully uploaded, and I make one request via a callback to upload a single file every time.
What I want is that when the response is "done", I want to upload multiple files with a do-while loop and I'm thinking of doing that with promises, but I don't really know how.
My code now:
var self = this;
let i = 0;
let fileInput = fileCmp.get("v.files");
do {
    // my callback
    self.uploadHelper(component, event, fileInput[i]);
    console.log("Uploading: " + fileInput[i].name);
    i++;
} while (i < fileInput.length);
The thing I want is to go to i=1 (second file) only when I get the response "done" or something else from the call.
My callback which is called from uploadHelper():
uploadChunk: function (component, file, fileContents, fromPos, toPos, attachId) {
    console.log('uploadChunk');
    var action = component.get("c.saveTheChunk");
    var chunk = fileContents.substring(fromPos, toPos);
    action.setParams({
        parentId: component.get("v.recordId"),
        fileName: file.name,
        base64Data: encodeURIComponent(chunk),
        contentType: file.type,
        fileId: attachId
    });
    action.setCallback(this, function (a) {
        console.log('uploadChunk: Callback');
        attachId = a.getReturnValue();
        fromPos = toPos;
        toPos = Math.min(fileContents.length, fromPos + this.CHUNK_SIZE);
        if (fromPos < toPos) {
            this.uploadChunk(component, file, fileContents, fromPos, toPos, attachId);
        } else {
            console.log('uploadChunk: done');
            component.set("v.showLoadingSpinner", false);
            // enabling the next button
            component.set("v.nextDisabled", false);
            component.set("v.uploadDisabled", true);
            component.set("v.clearDisabled", true);
            component.set("v.showToast", true);
            component.set("v.toastType", 'success');
            component.set("v.fileName", '');
            component.set("v.toastMessage", 'Upload Successful.');
        }
    });
    $A.getCallback(function () {
        $A.enqueueAction(action);
    })();
}
You have to make your uploadHelper call wait (rather than run asynchronously in the background) to achieve what you want.
Try to use async, await and the Promise object to create a promise in your function (instead of a callback) and force the uploads to run sequentially.
It may look something like this:
// uploadHelper definition, fit it to your code
const uploadHelper = (component, event, file) => {
    // Create the promise; resolve() once the upload of this file is done
    return new Promise(function (resolve, reject) {
        // Do what you want to do, then call resolve() (or reject(err) on failure)
    });
};

// Use it (note: await only works inside an async function, see below)
var self = this;
let i = 0;
let fileInput = fileCmp.get("v.files");
do {
    await self.uploadHelper(component, event, fileInput[i]).then(function () {
        console.log("Uploading: " + fileInput[i].name);
        i++;
    });
} while (i < fileInput.length);
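One caveat with the sketch above: await is only valid inside an async function, so the loop would need to be wrapped, for example (assuming the same component, event and fileCmp variables are in scope):

(async () => {
    let fileInput = fileCmp.get("v.files");
    for (let i = 0; i < fileInput.length; i++) {
        await this.uploadHelper(component, event, fileInput[i]); // waits for each upload
        console.log("Uploaded: " + fileInput[i].name);
    }
})();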
For further info try these links:
https://developer.mozilla.org/he/docs/Web/JavaScript/Reference/Global_Objects/Promise
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function
As your uploadChunk does an asynchronous task, it would make sense for the function to take a callback that gets called when the task is done (your code does not contain any callback yet). As callbacks are difficult to handle (especially with loops), it makes sense to wrap the callback into a Promise (that resolves when the callback gets called):
uploadChunk: function (component, file, fileContents, fromPos, toPos, attachId) {
    // The promise gets returned immediately; the "resolve" callback can be called to resolve it
    return new Promise(resolve => {
        // ....
        action.setCallback(this, function (a) {
            //...
            resolve();
        });
    });
},
Your uploadHelper can now pass the returned promise back to the loop:
uploadHelper(component, event, file) {
    //...
    return this.uploadChunk(component, file, /*...*/);
}
Now that we have that, we can simply await that promise inside of an async function:
(async () => { // arrow function to preserve "this"
    for (const file of files) { // Why do...while if you can use a for..of?
        await this.uploadHelper(component, event, file);
    }
})();
Get rid of the while loop, and simply pop an item off your fileInput array every time you use it. Cancel everything when the array becomes empty.
(You need to have a working callback function though...)
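That could look roughly like this (a sketch, assuming uploadHelper is given a completion callback as a hypothetical extra argument):

var self = this;
var queue = fileCmp.get("v.files").slice(); // copy the list so we can consume it

function uploadNext() {
    if (queue.length === 0) {
        console.log("All uploads done");
        return;
    }
    var file = queue.shift(); // take the next item off the array
    self.uploadHelper(component, event, file, function (response) {
        if (response === "done") {
            uploadNext(); // only move on once the server answered "done"
        }
    });
}
uploadNext();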
Hey, you can do it with an auto-run (immediately invoked) function:
var self = this;
let i = 0;
let fileInput = fileCmp.get("v.files");
do {
    // my callback
    (() => {
        self.uploadHelper(component, event, fileInput[i]);
        console.log("Uploading: " + fileInput[i].name);
        i++;
    })();
} while (i < fileInput.length);
My problem is that the code does not seem to be running in order, as seen below.
This code is for my discord.js bot that I am creating.
var Discord = require("discord.js");
var bot = new Discord.Client();
var yt = require("C:/Users/username/Documents/Coding/Discord/youtubetest.js");
var youtubetest = new yt();
var fs = require('fs');
var youtubedl = require('youtube-dl');
var prefix = "!";
var vidid;

var commands = {
    play: {
        name: "!play ",
        fnc: "Gets a Youtube video matching given tags.",
        process: function (msg, query) {
            youtubetest.respond(query, msg);
            var vidid = youtubetest.vidid;
            console.log(typeof (vidid) + " + " + vidid);
            console.log("3");
        }
    }
};

bot.on('ready', () => {
    console.log('I am ready!');
});

bot.on("message", msg => {
    if (!msg.content.startsWith(prefix) || msg.author.bot || (msg.author.id === bot.user.id)) return;
    var cmdraw = msg.content.split(" ")[0].substring(1).toLowerCase();
    var query = msg.content.split("!")[1];
    var cmd = commands[cmdraw];
    if (cmd) {
        var res = cmd.process(msg, query, bot);
        if (res) {
            msg.channel.sendMessage(res);
        }
    } else {
        let msgs = [];
        msgs.push(msg.content + " is not a valid command.");
        msgs.push(" ");
        msgs.push("Available commands:");
        msgs.push(" ");
        msg.channel.sendMessage(msgs);
        msg.channel.sendMessage(commands.help.process(msg));
    }
});

bot.on('error', e => { console.error(e); });

bot.login("mytoken");
The youtubetest.js file:
var youtube_node = require('youtube-node');
var ConfigFile = require("C:/Users/username/Documents/Coding/Discord/json_config.json");
var mybot = require("C:/Users/username/Documents/Coding/Discord/mybot.js");

function myyt() {
    this.youtube = new youtube_node();
    this.youtube.setKey(ConfigFile.youtube_api_key);
    this.vidid = "";
}

myyt.prototype.respond = function (query, msg) {
    this.youtube.search(query, 1, function (error, result) {
        if (error) {
            msg.channel.sendMessage("There was an error finding requested video.");
        } else {
            vidid = 'http://www.youtube.com/watch?v=' + result.items[0].id.videoId;
            myyt.vidid = vidid;
            console.log("1");
        }
    });
    console.log("2");
};

module.exports = myyt;
As the code shows, I have an object for the commands that the bot will be able to process, and I have a function to run said commands when a message is received.
Throughout the code you can see that I have put three console.logs with 1, 2 and 3 showing in which order I expect the parts of the code to run. When the code is run and a query is found the output is this:
I am ready!
string +
2
3
1
This shows that the code is not running in the order I expect it to.
All help is very highly appreciated :)
*Update! Thank you all very much for helping me understand why it isn't working. I found a workaround: in the main file, at vidid = youtubetest.respond(query, msg), the variable is not assigned until the function is done, so the rest of my code runs without the variable. To fix it I simply put an if statement checking whether the variable is undefined and waiting until it is defined (see the sketch below).*
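For reference, that polling workaround might look something like this (a sketch, not necessarily the asker's exact code, and generally less robust than the promise-based answers below):

youtubetest.respond(query, msg);
var poll = setInterval(function () {
    if (typeof youtubetest.vidid !== "undefined" && youtubetest.vidid !== "") {
        clearInterval(poll);
        console.log("Video id is ready: " + youtubetest.vidid);
    }
}, 100); // check every 100 ms until the search callback has run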
As mentioned before, a lot of stuff in JavaScript runs asynchronously, hence the callback handlers. The reason it runs async is to avoid the rest of your code being "blocked" by remote calls. To avoid ending up in callback hell, most of us JavaScript developers are moving more and more over to Promises. Your code could then look more like this:
myyt.prototype.respond = function (query, msg) {
    var self = this; // keep a reference to the instance for use inside the callbacks
    return new Promise(function (resolve, reject) {
        self.youtube.search(query, 1, function (error, result) {
            if (error) {
                reject("There was an error finding requested video."); // passed down to the ".catch" statement below
            } else {
                vidid = 'http://www.youtube.com/watch?v=' + result.items[0].id.videoId;
                myyt.vidid = vidid;
                console.log("1");
                resolve(vidid); // Resolve marks the promise as successfully completed, and passes the value along to the ".then" method
            }
        });
    }).then(function (vidid) {
        // vidid is now the same as myyt.vidid above
        console.log(vidid);
        return vidid; // pass it on to whoever called respond()
    }).catch(function (err) {
        // err contains the error message from above
        msg.channel.sendMessage(err);
    });
};
This would naturally require a change in anything that uses this process, but creating your own prototypes seems a bit odd.
This promise returns the vidid, so you'd then set vidid = youtubetest.respond(query, msg), and whenever that function gets called, you do:
vidid.then(function (id) {
    // id is now the vidid.
});
JavaScript runs async by design, and trying to hack your way around that leads you to dark places fast. As far as I can tell, you're also targeting Node.js, which means that once you start running something synchronously, you'll kill off performance for other users, as everyone has to wait for that sync call to finish.
Some suggested reading:
http://callbackhell.com/
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise
https://stackoverflow.com/a/11233849/3646975
I'd also suggest looking up ES6 syntax, as it shortens your code and makes life a hell of a lot easier (native promises were only introduced in ES6, which Node.js 4 and above supports, more or less).
In JavaScript, please remember that any callback function you pass to some other function is called asynchronously, i.e. the calls to the callback function may not happen "in order". "In order" in this case means the order in which they appear in the source file.
The callback function is simply called on a certain event:
when there is data to be processed,
on error,
in your case, for example, when the YouTube search results are ready, when a 'ready' event is received, or when a 'message' is received,
etc.
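A minimal demonstration of that ordering (a generic sketch, not the asker's code):

console.log("A");
setTimeout(function () {
    console.log("C: runs later, when the timer event fires");
}, 0);
console.log("B");
// Output order: A, B, C; the callback runs only after the surrounding code has finished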
I have the following code, all implemented in a single function, myFunc. I need the whole function (myFunc) to finish, i.e. when the file is extracted (successfully or not), to return some status (success/error).
var myFunc = () => {
    var DecompressZip = require('decompress-zip');
    var unzipper = new DecompressZip(filename);

    unzipper.on('error', function (err) {
        console.log('Caught an error');
    });

    unzipper.on('extract', function (log) {
        console.log('Finished extracting');
    });

    unzipper.on('progress', function (fileIndex, fileCount) {
        console.log('Extracted file ' + (fileIndex + 1) + ' of ' + fileCount);
    });

    unzipper.extract({
        path: 'some/path',
        filter: function (file) {
            return file.type !== "SymbolicLink";
        }
    });
};
Since this open-source library works with events, this is a problem (for getting a return status...). My intention is to change it to a promise, either by promisifying it or like the following:
var myFunc = () => {
    return new Promise(function (resolve, reject) {
        var DecompressZip = require('decompress-zip');
        var unzipper = new DecompressZip(filename);

        unzipper.on('error', function (err) {
            console.log('Caught an error');
            reject();
        });

        unzipper.on('extract', function (log) {
            console.log('Finished extracting');
            resolve();
        });

        unzipper.on('progress', function (fileIndex, fileCount) {
            console.log('Extracted file ' + (fileIndex + 1) + ' of ' + fileCount);
        });

        unzipper.extract({
            path: 'some/path',
            filter: function (file) {
                return file.type !== "SymbolicLink";
            }
        });
    });
};
My questions are:
1) Since I'm not too expert in JS, does it make sense to convert the events to a promise?
2) Is there another good solution I could use for this use case?
This is the open-source project:
https://github.com/bower/decompress-zip
1) Yes, it makes perfect sense to convert such events to a Promise. Currently, different libraries have different ways of expressing events (events, messages, callback functions, then/error callback functions, exceptions, error codes, etc...). Promises will soon unify all this now that they're a JavaScript standard. So it's a good practice to put a Promise layer around the libraries you're using and use only Promises in your code instead of the old mess.
2) Your solution looks good to me.
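With the promise-based myFunc above, the caller can then read the status like this (a small usage sketch):

myFunc()
    .then(function () {
        console.log('Extraction succeeded');
    })
    .catch(function () {
        console.log('Extraction failed');
    });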
Converting events to promises only makes sense when you are absolutely positive that the "end" event will only fire once.
So in this case, yes, what you are suggesting for the implementation should work (assuming I understand your code correctly).
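That is because a promise can only settle once; any later events are simply ignored (a generic sketch with a hypothetical emitter):

var done = new Promise(function (resolve) {
    emitter.on('extract', function (log) {
        resolve(log); // only the first 'extract' event resolves the promise;
                      // any subsequent ones have no effect
    });
});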
I'm trying to run a function when two compilation steps are complete, but the success callback keeps getting called even if one fails. Here's the code:
function compile(tplStr) {
    return new Promise(function (resolve, reject) {
        // compile template here
        var tpl = new function () {};
        resolve(tpl);
    });
}

function parse(json) {
    return new Promise(function (resolve, reject) {
        try {
            var obj = JSON.parse(json);
            resolve(obj);
        } catch (err) {
            console.log('JSON parse failed');
            reject(err);
        }
    });
}

var i = 0;

function bothReady() {
    $('#c').text(++i);
}

function oneFailed(err) {
    console.log('oneFailed hit');
    $('#c').text(err.message);
}

var compileProm = compile($('#a').val());
var parseProm = parse($('#b').val());

Promise.all([compileProm, parseProm]).then(bothReady).catch(oneFailed);

$('#a').on('input', function () {
    Promise.all([compile($('#a').val()), parseProm]).then(bothReady).catch(oneFailed);
});

$('#b').on('input', function () {
    Promise.all(compileProm, parse($('#b').val())).then(bothReady).catch(oneFailed);
});
code pen
When I create a syntax error in the JSON portion it logs "JSON parse failed" but does not log "oneFailed hit" like I'd expect. Why not? Shouldn't the .catch block be run if any of the promises are rejected?
Your code doesn't work correctly when something is typed inside #b because, instead of passing an iterable to Promise.all, two separate parameters are passed.
The result is that while both promises run, only the result of the first one is taken into account by the continuation of all.
The code reads
Promise.all(compileProm,parse($('#b').val())).then(bothReady).catch(oneFailed);
Instead of
Promise.all([compileProm,parse($('#b').val())]).then(bothReady).catch(oneFailed);
PS: The two other calls are correct, which explains why the problem seems to happen only when editing the JSON.