Promisify streams - JavaScript

I'm trying to promisify streams but it appears harder than I expected. Here is my attempt:
'use strict'
const Promise = require('bluebird')
const Twitter = require('twitter')
const TwitterStream = module.exports = function TwitterStream (config) {
// init Twitter Streaming API for OAuth
this.stream = new Twitter({
consumer_key: config.get('/twitter/consumerKey'),
consumer_secret: config.get('/twitter/consumerSecret'),
access_token_key: config.get('/twitter/accessTokenKey'),
access_token_secret: config.get('/twitter/accessTokenSecret')
})
.stream('statuses/filter', {
track: config.get('/twitter/track')
})
}
TwitterStream.prototype.receive = function () {
return new Promise((resolve, reject) => {
this.stream.on('data', resolve).on('error', reject)
})
}
TwitterStream.prototype.destroy = function () {
this.stream.destroy()
}
The main problem is that when I create the object
const stream = new TwitterStream(config)
stream.receive().then((data) => console.log(data))
and execute it, only one data object is read; no other data is streamed.
TwitterStream.prototype.receive = function () {
return new Promise((resolve, reject) => {
this.stream
.on('data', (data) => resolve(data))
.on('error', (error) => reject(error))
})
}

By using Rx extensions, it's pretty straightforward:
TwitterStream.prototype.receive = function () {
return Rx.Observable.create((observer) => {
this.stream
.on('data', (data) => observer.onNext(data))
.on('error', (err) => observer.onError(err));
});
}
And then
const stream = new TwitterStream(config)
stream.receive().subscribe((data) => console.log(data));
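For reference, with newer versions of RxJS (6+) the same idea can be written with fromEvent and pipeable operators. This is only a sketch and assumes this.stream behaves like a regular Node EventEmitter:
const { fromEvent, merge } = require('rxjs');
const { map } = require('rxjs/operators');

TwitterStream.prototype.receive = function () {
  const data$ = fromEvent(this.stream, 'data');
  const error$ = fromEvent(this.stream, 'error').pipe(
    map((err) => { throw err; }) // re-throw emitted errors so the observable errors out
  );
  return merge(data$, error$);
};
The subscribe call from above works unchanged.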

You would need a new promise for every chunk the stream delivers. Right now, receive returns a single promise, and a promise can only settle once: it resolves with the first 'data' event (or rejects on the first 'error'), and every subsequent event is ignored.
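To see why only the first chunk comes through, here is a tiny standalone sketch (using a plain EventEmitter) showing that a single promise settles only once:
const { EventEmitter } = require('events');

const emitter = new EventEmitter();
const once = new Promise((resolve) => emitter.on('data', resolve));
once.then((chunk) => console.log('received:', chunk)); // logs "received: first" only

emitter.emit('data', 'first');  // settles the promise
emitter.emit('data', 'second'); // ignored: the promise has already settled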

Here is untested (and most likely still buggy) code to illustrate how you could do it with promises:
function defer() {
  var resolve, reject;
  var promise = new Promise(function() {
    resolve = arguments[0];
    reject = arguments[1];
  });
  return {
    resolve: resolve,
    reject: reject,
    promise: promise
  };
}
TwitterStream.prototype.receive = function() {
  this.dataCache = this.dataCache || [];
  this.stream
    .on('data', data => {
      this.dataCache.push(data);
      this.tryToSendData()
    })
    .on('end', () => {
      this.finished = true;
      this.tryToSendData()
    })
    .on('error', err => {
      this.lastError = err;
      // error handling still missing
    })
  return this;
}
TwitterStream.prototype.tryToSendData = function() {
  if (this.defered) {
    let defered = this.defered;
    this.defered = null;
    // pass the first element of the buffer (or undefined if the stream has finished)
    defered.resolve(this.dataCache.shift())
  }
}
TwitterStream.prototype.getNextData = function() {
  if (this.dataCache.length > 0 || this.finished) {
    // if data is available or finished then pass the first element of buffer (or undefined)
    return Promise.resolve(this.dataCache.shift());
  } else {
    // otherwise we need a defered object and must return its promise
    this.defered = defer();
    return this.defered.promise;
  }
}
The usage could then look like this:
stream.receive().getNextData()
  .then(function processData(data) {
    if (data) {
      console.dir(data);
      // if data is available then continue requesting the data
      return stream.getNextData().then(processData);
    }
  })
It is a rare case where you could use Deferreds.
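As a side note, the same pull-based idea can be sketched with an async generator on newer Node versions. This is only an illustration and assumes this.stream is a regular EventEmitter that emits 'data', 'error' and 'end':
TwitterStream.prototype[Symbol.asyncIterator] = async function* () {
  const queue = [];
  let failure = null;
  let done = false;
  let wake = () => {};
  this.stream
    .on('data', (chunk) => { queue.push(chunk); wake(); })
    .on('error', (err) => { failure = err; wake(); })
    .on('end', () => { done = true; wake(); });
  while (true) {
    while (queue.length) yield queue.shift(); // drain buffered chunks
    if (failure) throw failure;               // propagate stream errors
    if (done) return;                         // stream finished
    await new Promise((resolve) => { wake = resolve; }); // wait for the next event
  }
};
Consuming it then becomes for await (const tweet of stream) console.log(tweet).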

I think you might want to take a look at my already-promisified streams in scramjet.
For your Twitter example this code should work well:
const scramjet = require('scramjet')
const Twitter = require('twitter')
const stream = new Twitter({
  consumer_key: config.get('/twitter/consumerKey'),
  consumer_secret: config.get('/twitter/consumerSecret'),
  access_token_key: config.get('/twitter/accessTokenKey'),
  access_token_secret: config.get('/twitter/accessTokenSecret')
})
  .stream('statuses/filter', {
    track: config.get('/twitter/track')
  })
  .pipe(new scramjet.DataStream)
Then perform any transformations you like; for example, map the stream somehow and accumulate it into an array when you're done.
stream.map(
function (a) { return modifyTheTweetSomehow(a); } // a Promise can be returned here
).accumulate(
function(a, i) { a.push(i); },
[]
) // this returns a Promise that will be resolved on stream end.
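For completeness, a rough sketch of consuming that accumulated result (modifyTheTweetSomehow is assumed to be defined elsewhere):
stream
  .map((a) => modifyTheTweetSomehow(a))
  .accumulate((acc, tweet) => { acc.push(tweet); }, [])
  .then((tweets) => console.log(`collected ${tweets.length} tweets`))
  .catch((err) => console.error(err));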
I hope you like it. :)

Related

NodeJS: Wait for all foreach with Promises to finish but never actually finishes

I am working with Node.js. I have a forEach loop that is async because I have to wait for a result inside it, so I need to wait for the loop to finish before carrying on with its result. I found several solutions for waiting for a forEach; one of them is using Promises. I tried that, and the promises are created, but the code that should run after the forEach (and therefore after the promises) have finished is never actually executed (the console.log is not printed), and the Node.js process just ends without any errors.
Here is my Code:
var Client = require('ssh2').Client;
// eslint-disable-next-line no-undef
var csv = require("csvtojson");
// eslint-disable-next-line no-undef
var fs = require("fs");
// eslint-disable-next-line no-undef
const config = require('./config.json');
// eslint-disable-next-line no-undef
const os = require('os');
let headerRow = [];
let sumTxAmount = 0;
const filenameShortened = 'testFile';
let csvLists = [];
let csvFile;
const options = {
flags: 'r',
encoding: 'utf8',
handle: null,
mode: 0o664,
autoClose: true
}
var conn = new Client();
async function start() {
const list = await getCSVList();
let content = fs.readFileSync('./temp.json', 'utf8');
content = JSON.parse(content);
var promises = list.map(function(entry) {
return new Promise(async function (resolve, reject) {
if (!content['usedFiles'].includes(entry.filename)) {
const filename = entry.filename;
csvFile = await getCsv(filename);
csvLists.push(csvFile);
console.log('here');
resolve();
} else {
resolve();
}
})
});
console.log(promises)
Promise.all(promises)
.then(function() {
console.log(csvLists.length, 'length');
})
.catch(console.error);
}
start();
The "here" is printed once (not 8 times as the arrays length is 8), but there are 8 promises created. The lower part where I am printing the length of the array is not executed.
Can anyone tell me what I am doing wrong? Am I using Promises and forEach incorrectly because I have to await inside the forEach?
Note: getCSVList() and getCsv() are functions that fetch CSVs from an SFTP server:
function getCSVList() {
return new Promise((resolve, reject) => {
conn.on('ready', function () {
conn.sftp(function (err, sftp) {
if (err) throw err;
sftp.readdir(config.development.pathToFile, function (err, list) {
if(err) {
console.log(err);
conn.end();
reject(err);
} else {
console.log('resolved');
conn.end();
resolve(list);
}
})
})
}).connect({
host: config.development.host,
port: config.development.port, // Normal is 22 port
username: config.development.username,
password: config.development.password
// You can use a key file too, read the ssh2 documentation
});
})
}
function getCsv(filename) {
return new Promise((resolve, reject) => {
conn.on('ready', function () {
conn.sftp(function (err, sftp) {
if (err) reject(err);
let csvFile = sftp.createReadStream(`${config.development.pathToFile}/${filename}`, options);
// console.log(csvFile);
conn.end();
resolve(csvFile);
})
}).connect({
host: config.development.host,
port: config.development.port, // Normal is 22 port
username: config.development.username,
password: config.development.password
// You can use a key file too, read the ssh2 documentation
});
});
}
The output in my console from all the console logs is:
➜ node server.js
resolved
[ Promise { <pending> },
Promise { <pending> },
Promise { <pending> },
Promise { <pending> },
Promise { <pending> },
Promise { <pending> },
Promise { <pending> },
Promise { <pending> } ]
here
Break up your problem into pieces, confirming they work along the way.
You are not using the stream correctly, among other things.
I made a working example with ssh2-sftp-client that you can use as a starting point.
Working example:
var fs = require('fs');
var _ = require('underscore');
var SFTPClient = require('ssh2-sftp-client');
const CONFIG = {
"SSH_CONN_OPTS":{"host":"XXXXXXXX","port":22,"username":"XXXXXXXX","password":"XXXXXXXX"},
"CSV_DIRECTORY":"/var/www/html"
}
//---------------
//.:The order-logic of the script is here
function StartScript(){
console.log("[i] SSH Connection")
LoadValidationFile(()=>{
InitializeSFTP(()=>{ console.log("[+] SSH Connection Established")
ListRemoteDirectory((list)=>{ console.log(`[i] Total Files # ${CONFIG.CSV_DIRECTORY} : ${list.length}`)
//console.log(list) //:now you have a 'list' of file_objects, you can iterate over to check the filename
var csvFileList = [] //store the names of the files you will request after
_.each(list,(list_entry)=>{ console.log(list_entry)
if(!CONFIG.USED_FILES.includes(list_entry.name)){ csvFileList.push(list_entry.name) }
})
//:now loop over the new final list of files you have just validated for future fetch
GenerateFinalOutput(csvFileList)
})
})
})
}
//.:Loads your validation file
function LoadValidationFile(cb){
fs.readFile(__dirname+'/temp.json','utf8',(err,data)=>{ if(err){throw err}else{
var content = JSON.parse(data)
CONFIG.USED_FILES = content.usedFiles
cb()
}})
}
//.:Connects to remote server using CONFIG.SSH_CONN_OPTS
function InitializeSFTP(cb){
global.SFTP = new SFTPClient();
SFTP.connect(CONFIG.SSH_CONN_OPTS)
.then(()=>{cb()})
.catch((err)=>{console.log("[!] InitializeSFTP :",err)})
}
//.:Get a list of files from a remote directory
function ListRemoteDirectory(cb){
SFTP.list(`${CONFIG.CSV_DIRECTORY}`)
.then((list)=>{cb(list)})
.catch((err)=>{console.log("[!] ListRemoteDirectory :",err)})
}
//.:Get target file from remote directory
function GetRemoteFile(filename,cb){
SFTP.get(`${CONFIG.CSV_DIRECTORY}/${filename}`)
.then((data)=>{cb(data.toString("utf8"))}) //convert it to a parsable string
.catch((err)=>{console.log("[!] ListRemoteDirectory :",err)})
}
//-------------------------------------------
var csvLists = []
function GenerateFinalOutput(csv_files,current_index){ if(!current_index){current_index=0}
if(current_index!=csv_files.length){ //:loop
var csv_file = csv_files[current_index]
console.log(`[i] Loop Step #${current_index+1}/${csv_files.length} : ${csv_file}`)
GetRemoteFile(csv_file,(csv_data)=>{
if(csv_data){csvLists.push(csv_data)}
current_index++
GenerateFinalOutput(csv_files,current_index)
})
}else{ //:completed
console.log("[i] Loop Completed")
console.log(csvLists)
}
}
//------------
StartScript()
Good luck!
Promise.all returns a promise, but you are not returning it from your start method or waiting for start itself to finish.
function getCSVList() {
return new Promise((resolve, reject) => {
setTimeout(() => {
resolve([1, 2, 3, 4]);
}, 1000);
});
}
function getCsv(params) {
return new Promise((resolve, reject) => {
setTimeout(() => {
resolve(params);
}, 1000);
});
}
async function start() {
const list = await getCSVList();
const promises = list.map(item => {
return new Promise(async function (resolve, reject) {
const csvFile = await getCsv(item);
console.log('here');
resolve(csvFile);
});
});
return Promise.all(promises);
}
start().then(res => {
console.log(res);
});
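Putting it together, a minimal sketch of the original start function without the new Promise(async ...) wrapper could look like this (assuming getCSVList and getCsv behave as in the question):
async function start() {
  const list = await getCSVList();
  const content = JSON.parse(fs.readFileSync('./temp.json', 'utf8'));
  // build the array of promises directly; no manual new Promise needed
  const csvLists = await Promise.all(
    list
      .filter((entry) => !content['usedFiles'].includes(entry.filename))
      .map((entry) => getCsv(entry.filename))
  );
  console.log(csvLists.length, 'length');
}

start().catch(console.error);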

await for IndexedDB event in async function

I'm trying to return a custom object from an async function that works as a wrapper for a put using IndexedDB.
Using Promises, this is easy.
However, using async/await turned out to be more challenging...
const set = async (storeName, key, value) => {
if (!db)
throw new Error("no db!");
try {
const result = {};
let tx = db.transaction(storeName, "readwrite");
let store = tx.objectStore(storeName);
let r = store.put({ data: key, value: value });
console.log(r);
r.onsuccess = async () => {
console.log('onsuccess');
result.something = true;
}
r.onerror = async () => {
console.log('onerror');
result.something = false;
}
await r.transaction.complete; // ok... this doesn't work
// how can I await until onsuccess or onerror runs?
return result;
} catch (error) {
console.log(error);
}
}
The idea is to return a composed object... however, all my attempts fail because onsuccess runs after the result has been returned.
I googled a lot and couldn't find a way to properly await the onsuccess/onerror events.
I know that returning a Promise would be easier, since resolve(result) would end up returning what I want... but I'm trying to learn how to write the same code using async/await.
Thank you so much.
Try this:
function set(db, storeName, key, value) {
return new Promise((resolve, reject) => {
let result;
const tx = db.transaction(storeName, 'readwrite');
tx.oncomplete = _ => resolve(result);
tx.onerror = event => reject(event.target.error);
const store = tx.objectStore(storeName);
const request = store.put({data: key, value: value});
request.onsuccess = _ => result = request.result;
});
}
async function callIt() {
const db = ...;
const result = await set(db, storeName, key, value);
console.log(result);
}
Edit: since you insist on using the async qualifier for the set function, you can do this instead. Please note I find this pretty silly:
async function set(db, storeName, key, value) {
// Wrap the code that uses indexedDB in a promise because that is
// the only way to use indexedDB together with promises and
// async/await syntax. Note this syntax is much less preferred than
// using the promise-returning function pattern I used in the previous
// section of this answer.
const promise = new Promise((resolve, reject) => {
let result;
const tx = db.transaction(storeName, 'readwrite');
tx.oncomplete = _ => resolve(result);
tx.onerror = event => reject(event.target.error);
const store = tx.objectStore(storeName);
const request = store.put({data: key, value: value});
request.onsuccess = _ => result = request.result;
});
// We have executed the promise, but have not awaited it yet. So now we
// await it. We can use try/catch here too, if we want, because the
// await will translate the promise rejection into an exception. Of course,
// this is also rather silly because we are doing the same thing as just
// allowing an uncaught exception to exit the function early.
let result;
try {
result = await promise;
} catch(error) {
console.log(error);
return;
}
// Now do something with the result
console.debug('The result is', result);
}
Ultimately you'll end up wrapping IDB in a promise-friendly library, but for your specific need, you could use something like this:
function promiseForTransaction(tx) {
return new Promise((resolve, reject) => {
tx.oncomplete = e => resolve();
tx.onabort = e => reject(tx.error);
});
}
And then in your code you can write things such as:
await promiseForTransaction(r.transaction);
... which will wait until the transaction completes, and throw an exception if it aborts. (Note that this requires calling the helper before the transaction could possibly have completed or aborted, since it won't ever resolve if the events have already fired.)
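For instance, a hedged sketch of how that helper could slot into the set function from the question:
const set = async (storeName, key, value) => {
  if (!db) throw new Error("no db!");
  const tx = db.transaction(storeName, "readwrite");
  const store = tx.objectStore(storeName);
  const request = store.put({ data: key, value: value });
  // create the helper's promise before the transaction has a chance to complete
  await promiseForTransaction(tx);
  return { something: true, result: request.result };
};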
I can't confirm it right now but I think it should be await tx.complete instead of await r.transaction.complete;.
But a general solution, which works even if the API does not support Promises directly, is to wrap a new Promise around the onsuccess and onerror handlers and await that Promise; inside onsuccess and onerror you then call the resolve function:
const set = async (storeName, key, value) => {
if (!db)
throw new Error("no db!");
try {
const result = {};
let tx = db.transaction(storeName, "readwrite");
let store = tx.objectStore(storeName);
let r = store.put({
data: key,
value: value
});
console.log(r);
await new Promise((resolve, reject) => {
r.onsuccess = () => {
console.log('onsuccess');
result.something = true;
resolve()
}
r.onerror = () => {
console.log('onerror');
result.something = false;
// I assume you want to resolve the promise even if you get an error
resolve()
}
})
return result;
} catch (error) {
console.log(error);
}
}
I would further change it to:
try {
await new Promise((resolve, reject) => {
r.onsuccess = resolve
r.onerror = reject
})
console.log('success');
result.something = true;
} catch(err) {
console.log('error');
result.something = false;
}

waiting for many async functions execution

I have a promise-returning function that executes an async function in a loop several times for different data. I want to wait until all the async functions have finished and then call resolve() (or, in a non-promise version, call the callback function):
var readFiles = ()=>{
return new Promise((resolve,reject)=>{
var iterator = 0;
var contents = {};
for(let i in this.files){
iterator++;
let p = path.resolve(this.componentPath,this.files[i]);
fs.readFile(p,{encoding:'utf8'},(err,data)=>{
if(err){
reject(`Could not read ${this.files[i]} file.`);
} else {
contents[this.files[i]] = data;
iterator--;
if(!iterator) resolve(contents);
}
});
}
if(!iterator) resolve(contents); //in case of !this.files.length
});
};
I increase iterator on every loop iteration; then, in the async function's callback, I decrease iterator and check whether all the async functions are done (iterator === 0) and, if so, call resolve().
It works, but it doesn't seem elegant or readable. Do you know a better way to handle this?
Following up the comment with some code and more detail!
Promise.all() takes an iterable of promises and waits for all of them to resolve (it rejects as soon as any of them rejects). It then returns the results of all the promises. So instead of keeping track of when every promise resolves, we can create small promises, add them to an array, and use Promise.all() to wait for all of them.
const readFiles = () => {
const promises = [];
for(let i in files) {
const p = path.resolve(componentPath, files[i]);
promises.push(new Promise((resolve, reject) => {
fs.readFile(p, {encoding:'utf8'}, (err, data) => {
if(err) {
reject(`Could not read ${files[i]} file.`);
} else {
resolve(data);
}
});
}));
}
return Promise.all(promises);
};
const fileContents = readFiles().then(contents => {
console.log(contents)
})
.catch(err => console.error(err));
You only need to push all the Promises into an array and then pass it as an argument to Promise.all(arrayOfPromises).
Try something like this:
var readFiles = () => {
  var promises = [];
  let contents = {};
  var keys_files = Object.keys(this.files);
  if (keys_files.length <= 0) {
    var promise = new Promise((resolve, reject) => {
      resolve(contents);
    });
    promises.push(promise);
  }
  keys_files.forEach((key) => {
    var file = this.files[key];
    var promise = new Promise((resolve, reject) => {
      const currentPath = path.resolve(this.componentPath, file);
      fs.readFile(currentPath, { encoding: 'utf8' }, (err, data) => {
        if (err) {
          return reject(`Could not read ${file} file.`);
        }
        contents[file] = data;
        resolve(contents)
      });
    });
    promises.push(promise);
  });
  return Promise.all(promises);
}
Then you should use the function like so:
// this returns a promise that resolves with an array of results
var readAllFiles = readFiles();
// the then block will only execute if all promises were resolved; if one of them
// was rejected, the whole thing is rejected automatically
readAllFiles.then((results) => {
  results.forEach((result) => {
    console.log(result);
  });
}).catch((error) => error);
If you don't care whether one of the promises was rejected, you could do the following instead:
var readFiles = () => {
  var promises = [];
  let contents = {};
  var keys_files = Object.keys(this.files);
  if (keys_files.length <= 0) {
    var promise = new Promise((resolve, reject) => {
      resolve(contents);
    });
    promises.push(promise);
  }
  keys_files.forEach((key) => {
    var file = this.files[key];
    var promise = new Promise((resolve, reject) => {
      const currentPath = path.resolve(this.componentPath, file);
      fs.readFile(currentPath, { encoding: 'utf8' }, (err, data) => {
        // create an object with the information
        let info = { completed: true };
        if (err) {
          info.completed = false;
          info.error = err;
          return resolve(info);
        }
        info.data = data;
        contents[file] = info;
        resolve(contents)
      });
    });
    promises.push(promise);
  });
  return Promise.all(promises);
}
Copied from comments:
Also - you might want to use fs-extra, a drop-in replacement for fs, but with promise support added.
Here's how that goes:
const fs = require('fs-extra');
var readFiles = () => {
  let promises = files
    .map(file => path.resolve(componentPath, file))
    .map(fullPath => fs.readFile(fullPath, 'utf8'));
  return Promise.all(promises);
};
Nice and clean. You can then get contents like this:
readFiles()
.then(contents => { ... })
.catch(error => { ... });
This will fail on first error though (because that's what Promise.all does). If you want individual error handling, you can add another map line:
.map(promise => promise.catch(err => err));
Then you can filter the results:
let errors = contents.filter(content => content instanceof Error)
let successes = contents.filter(content => !(content instanceof Error))
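Putting those pieces together, a rough sketch of the error-tolerant variant could look like this (files and componentPath are assumed to be in scope, as above):
const readFilesTolerant = () => {
  const promises = files
    .map(file => path.resolve(componentPath, file))
    .map(fullPath => fs.readFile(fullPath, 'utf8'))
    .map(promise => promise.catch(err => err)); // keep failures as values
  return Promise.all(promises);
};

readFilesTolerant().then(contents => {
  const errors = contents.filter(content => content instanceof Error);
  const successes = contents.filter(content => !(content instanceof Error));
  console.log(`${successes.length} files read, ${errors.length} failed`);
});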

Looking for a Promise (bluebird) code review for node.js

When and where do I need to use new Promise(Function<Function resolve, Function reject> resolver) -> Promise?
My Sample code:
userInfo.js
var Promise = require('bluebird');
var winston = require('winston');
var _ = require('lodash');
var request = Promise.promisify(require("request"));
exports.getWeather = function (data) {
var cityName = data.userProfile.city;
return request("http://0.0.0.0:3003/api/Weather/byCity?city=" + cityName).spread(function (res, body) {
var result = JSON.parse(body).data;
return _.merge(data, result);
});
};
exports.getUserProfile = function (userId) {
return new Promise(function (resolve, reject) {
request("http://0.0.0.0:3003/api/UserProfile/getUserProfile?id=" + userId).spread(function (res, body) {
var result = JSON.parse(body).data;
resolve(result);
});
})
};
exports.getEvents = function (data) {
var cityName = data.userProfile.city;
return request("http://0.0.0.0:3003/api/Events/byCity?city=" + cityName).spread(function (res, body) {
var result = JSON.parse(body).data;
return _.merge(data, result);
});
};
exports.getFashion = function (data) {
var gender = data.userProfile.gender;
return request("http://0.0.0.0:3003/api/Fashion/byGender?gender=" + gender).spread(function (res, body) {
var result = JSON.parse(body).data;
return _.merge(data, result);
});
};
exports.displayDetail = function (data) {
console.log(data);
};
I try to call the above code in two ways with promises:
getUserProfile.js
var userInfo = require('./userInfo');
module.exports = function(){
return userInfo.getUserProfile(3)
.then(userInfo.getFashion)
.then(userInfo.getEvents)
.then(userInfo.getWeather)
.then(userInfo.displayDetail)
.catch(function (e) {
console.log('Error:');
console.error(e.stack)
})
.finally(function () {
console.log('done');
});
}
2nd way:
getUserInformation.js
var userInfo = require('./userInfo');
module.exports = function () {
return new Promise(function (resolve, reject) {
resolve(3);
})
.then(userInfo.getUserProfile)
.then(userInfo.getFashion)
.then(userInfo.getEvents)
.then(userInfo.getWeather)
.then(userInfo.displayDetail)
.catch(function (e) {
console.log('Error:');
console.error(e.stack)
})
.finally(function () {
console.log('done');
});
};
getDetails.js
var userInfo = require('./getUserInformation');
userInfo()
.then(function(){
console.log('getDetails done')
})
.catch(function (e) {
console.log('Error:');
console.error(e.stack)
})
.finally(function () {
console.log('done');
});
Please let me know what the difference is, and whether there are any issues with using either of these approaches.
exports.getUserProfile = function (userId) {
return new Promise(function (resolve, reject) {
request("http://0.0.0.0:3003/api/UserProfile/getUserProfile?id=" + userId).spread(function (res, body) {
var result = JSON.parse(body).data;
resolve(result);
});
})
};
Please don't do this. Just return from the callback, and return the promise created by then, like you have done in your other three methods.
return userInfo.getUserProfile(3)
.then(…)
vs.
return new Promise(function (resolve, reject) {
resolve(3);
})
.then(userInfo.getUserProfile)
.then(…)
Well, the first one is much more readable and concise. They're pretty much equivalent, except for the case where getUserProfile throws synchronously, which it shouldn't do anyway. Also, in the first case getUserProfile is invoked as a method on userInfo, while in the second case it's just a callback function, so the this in the calls will be different.
The second pattern can be tremendously simplified though by using Promise.resolve instead of the new Promise constructor:
return Promise.resolve(3)
.then(userInfo.getUserProfile)
.then(…)
This is totally fine, and aligns better with the rest of the chain. Speaking of which, …
.then(userInfo.getFashion)
.then(userInfo.getEvents)
.then(userInfo.getWeather)
where each of the functions returns a promise that resolves with additional data merged into its argument
is not exactly the best way to solve this. Yes, it ensures that these three functions are called one after another, and it is an acceptable pattern for that case. However, in your case you're mixing the request calls to the API with the argument extraction and result merging in the same function, which, by separation of concerns, you shouldn't. Rather, make the functions pure:
exports.… = function (arg) {
return request("http://0.0.0.0:3003/api/…?…=" + arg).spread(function (res, body) {
return JSON.parse(body).data;
});
};
And now you can combine them separately - and not only in sequence, but also in parallel:
userInfo.getUserProfile(3)
.then(function(data) {
var p = data.userProfile;
return Promise.props({
userProfile: p, // keep the profile that was already fetched
fashion: userInfo.getFashion(p.gender), // `\
events: userInfo.getEvents(p.city), // }=> execute requests in parallel
weather: userInfo.getWeather(p.city) // ./
});
})
.then(userInfo.displayDetail)
.catch(function (e) {
console.error('Error:', e.stack)
});
The first way is much more readable, and there's no benefit to starting the chain with a promise that resolves a constant, as in your second way.
They both do effectively the same thing, with one caveat: in your second example (starting the chain with a promise), the getUserProfile call will run on the next tick (similar to wrapping it in a setTimeout of 0) rather than synchronously.
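A tiny, self-contained sketch of that timing difference:
Promise.resolve(3).then((id) => console.log('then callback, id =', id));
console.log('synchronous code runs first');
// output:
//   synchronous code runs first
//   then callback, id = 3
In the second way, getUserProfile is invoked inside such a then callback, so it starts one microtask later than in the first way.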

ES2015 equivalent of $.Deferred()

I'm using Babel for a project, and I'm stuck with a very basic problem. I'm very used to jQuery's Deferred objects and I'm struggling to find its ES2015 equivalent. Here is what I basically want:
// file1.js
let dfd = new Promise()
function functionCalledAtSomePoint(thing) {
dfd.resolve(thing)
}
export default { dfd }
// file2.js
import { dfd } from './file1'
dfd.then((thing) => {
console.log('Yay thing:', thing)
})
What should be the correct way to write this simple deferred?
EDIT with royhowie's answer:
// file1.js
let thing
function getThing(el) {
return new Promise((resolve) => {
if (el) {
thing = new Thing(el)
}
resolve(thing)
})
}
function functionCalledAtSomePoint(el) {
getThing(el)
}
export default { getThing }
// file2.js
import { getThing } from './file1'
getThing.then((thing) => {
console.log('Yay thing:', thing)
})
You can export the promise directly (instead of a function), like you have, but then it will only ever deliver a single settled value, which is probably not what you want.
Instead, you should export a function which returns a Promise:
file 1.js
import User from '../models/user'
export function getUsersFromDatabase () {
return new Promise((resolve, reject) => {
User.find({}, (err, users) => {
return err ? reject(err) : resolve(users)
})
})
}
file2.js
import { getUsersFromDatabase } from './file1'
getUsersFromDatabase().then((users) => {
// success
}).catch((err) => {
// no users
})
You can use the default Promise implementation, but it is much slower than 3rd-party modules, e.g. bluebird (which I very much recommend using).
I'm very used to jQuery's Deferred objects and I'm struggling to find its ES2015 equivalent
If you must use a deferred, this should work:
function makeDeferred() {
var res, rej;
let dfd = new Promise(function(resolve, reject) {
res = resolve;
rej = reject;
});
dfd.resolve = res;
dfd.reject = rej;
return dfd;
}
let dfd = makeDeferred();
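Wired into the original two-file example, that might look roughly like this (a sketch, not a recommendation, as noted below):
// file1.js
const dfd = makeDeferred();
function functionCalledAtSomePoint(thing) {
  dfd.resolve(thing);
}
export { dfd, functionCalledAtSomePoint };

// file2.js
import { dfd } from './file1';
dfd.then((thing) => console.log('Yay thing:', thing));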
However, rewriting your code to avoid such a kludge would be preferable (though not always avoidable; I still have one piece of code I can't get rid of the deferred promise in, so I feel your pain).
This constructor function will allow you to use the regular Promise methods as well as additional resolve(value) and reject(error) methods. This should give you functionality similar to jQuery.Deferred().
function DeferredPromise() {
var _resolve = null;
var _reject = null;
this.promise = new Promise(function(resolve, reject) {
_resolve = resolve;
_reject = reject;
});
this.then = function() {
return this.promise.then(...arguments);
}
this.catch = function() {
return this.promise.catch(...arguments);
}
this.resolve = function() {
_resolve(...arguments);
}
this.reject = function() {
_reject(...arguments);
}
}
Then you can use it to create a new DeferredPromise:
var p = new DeferredPromise();
Wait for it:
p.then(val => {
console.log('val(1)', val);
})
Maybe wait for it a second time; you can also chain it as you would a regular Promise:
p.then(val => {
console.log('val(2)', val);
return 42;
}).then(val => {
console.log('.then(somethingElse)', val);
})
.catch(err => { console.error('err', err); })
And resolve it whenever you want:
p.resolve({ username: 'Luke.Skywalker', age: 42 });
