Using Javascript async & await with google.script.url.getLocation - javascript

I am trying to refactor some ugly code in a Google Apps Script web application so that it uses async / await.
It uses the google.script.url.getLocation client-side, to pull URL parameters and then send them off to other async functions.
There must be a way to do this elegantly.
var doSomeAsyncShit = () => {
  google.script.url.getLocation(function (location) {
    var rid = (location.parameter.rid) ? location.parameter.rid : defaultReportID;
    var uid = (location.parameter.uid) ? location.parameter.uid : defaultUserID;
    console.log(((location.parameter.rid) ? "Report #" : "Default Report ID #") + rid);
    console.log(((location.parameter.uid) ? "User #" : "Default User ID #") + uid);
    google.script.run.withSuccessHandler(paintReport).returnJSON(rid);
    google.script.run.withSuccessHandler(getMyReportsList).listMyReports(uid);
  });
};
$(function () {
  doSomeAsyncShit();
});

It is possible to intercept calls to the client-side google API and return a Promise directly by wrapping it in a Proxy.
Script:
/**
 * Revives the old client-facing google api in Apps Script web applications.
 * Directly returns promises for `google.script.run` and `google.script.url.getLocation`.
 * @see https://stackoverflow.com/a/63537867/
 */
(function projectAdrenaline_google() {
  const lifeline = {
    funcList: [],
    excludeList: [
      'withSuccessHandler',
      'withFailureHandler',
      'withUserObject',
      'withLogger',
    ],
    get: function (target, prop, rec) {
      if (this.excludeList.includes(prop))
        //return (...rest) => new Proxy(Reflect.apply(target[prop], target, rest), lifeline);
        throw new TypeError(
          `${prop}: This method is deprecated in this custom api`
        );
      if (this.funcList.includes(prop))
        return (...rest) =>
          new Promise((res, rej) =>
            target
              .withSuccessHandler(res)
              .withFailureHandler(rej)
              [prop](...rest)
          );
      switch (prop) {
        case 'run':
          this.funcList = Object.keys(target.run);
          break;
        case 'getLocation':
          return () => new Promise(res => target[prop](res));
      }
      return new Proxy(Reflect.get(target, prop, rec), lifeline);
    },
  };
  //const superGoogle = new Proxy(google, lifeline);
  //OR overwrite the currently loaded google object:
  google = new Proxy(google, lifeline);
})();
Example:
const doSomeAsyncStuff = async () => {
  const location = await google.script.url.getLocation();
  const rid = location.parameter.rid ? location.parameter.rid : defaultReportID;
  const uid = location.parameter.uid ? location.parameter.uid : defaultUserID;
  //promise
  google.script.run.returnJSON(rid).then(paintReport);
  //async-await
  const reportsList = await google.script.run.listMyReports(uid);
  getMyReportsList(reportsList);
};
Alternatively, it is possible to use a function as syntactic sugar around the old callback API, although this requires learning a new calling convention:
/**
 * Syntactic sugar around the old callback api, returning a promise
 *
 * @returns {Promise} Promise of call from server
 * @param {string[]|string} propertyAccesors Array of properties to access
 * @param {object[][]} methodAccesors Array of [method_to_access, arguments[]]
 * @param {number[]} resRejIdxs 2 indexes of methodAccesors corresponding to resolve/success and rejection/failure. If omitted, the promise is resolved immediately.
 */
const GS = (propertyAccesors, methodAccesors, resRejIdxs) =>
  new Promise((res, rej) => {
    //Boilerplate for type correction
    const nestArray = e => (Array.isArray(e) ? e : [e]);
    propertyAccesors = nestArray(propertyAccesors);
    methodAccesors = nestArray(methodAccesors);
    methodAccesors[0] = nestArray(methodAccesors[0]);
    if (typeof resRejIdxs !== 'undefined') {
      resRejIdxs = Array.isArray(resRejIdxs) ? resRejIdxs : [resRejIdxs];
      resRejIdxs[0] && (methodAccesors[resRejIdxs[0]][1] = res);
      resRejIdxs[1] && (methodAccesors[resRejIdxs[1]][1] = rej);
    } else {
      res('Done');
    }
    //Access properties and call methods
    methodAccesors.reduce(
      (acc, [method, methodArg]) =>
        Array.isArray(methodArg)
          ? acc[method](...methodArg)
          : acc[method](methodArg),
      propertyAccesors.reduce(
        (acc, currentProp) => acc[currentProp],
        google.script
      )
    );
  });
//EXAMPLES:
GS(
  'run',
  [
    ['withSuccessHandler', null],
    ['callServer', [5, 4]], //call the server function `callServer` with 2 arguments, 5 and 4
    ['withFailureHandler', null],
  ],
  [0, 2] //0 is withSuccessHandler and 2 is withFailureHandler
).then(alert);
GS('history', [['setChangeHandler', e => console.log(e.location.hash)]]);
GS('url', 'getLocation', 0).then(location => console.log(location.hash));
GS(['host', 'editor'], 'focus');
GS('host', ['setHeight', 50]);

Since a Promise is constructed with a custom executor function, you can wrap the google.script.url call in one and resolve or reject whenever you like. If you then make it a utility function, you can await it wherever you need the result.
Below is a small flexible utility for making google.script.url async-friendly:
/**
 * @typedef {{
 *   hash : string,
 *   parameter : Object.<string, string>,
 *   parameters : Object.<string, string[]>
 * }} UrlLocationObject
 *
 * @typedef {{
 *   callback : function (UrlLocationObject, ...any) : any,
 *   params : any[]
 * }} AsyncUrlOptions
 *
 * @summary Promise-friendly google.script.url
 * @param {AsyncUrlOptions}
 * @returns {Promise}
 */
const asyncLocation = ({
  callback,
  params = [],
}) => {
  return new Promise((res, rej) => {
    google.script.url.getLocation((loc) => {
      try {
        const result = callback(loc, ...params);
        res(result);
      }
      catch (error) {
        rej(error);
      }
    });
  });
};
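A usage sketch, reusing defaultReportID from the question (the function name showReport is only illustrative):
const showReport = async () => {
  // whatever the callback returns becomes the resolved value of the promise
  const rid = await asyncLocation({
    callback: (loc) => loc.parameter.rid || defaultReportID,
  });
  console.log(`Report #${rid}`);
};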
Same goes for google.script.run:
/**
 * @typedef {{
 *   funcName : string,
 *   onFailure : function,
 *   onSuccess : function,
 *   params : array
 * }} AsyncOptions
 *
 * @summary v2 of async-friendly google.script.run
 * @param {AsyncOptions}
 * @returns {Promise}
 */
const asyncGAPIv2 = ({
  funcName,
  onFailure = console.error,
  onSuccess,
  params = []
}) => {
  return new Promise((res, rej) => {
    google.script.run
      .withSuccessHandler(data => {
        typeof onSuccess === "function" && onSuccess(data);
        res(data);
      })
      .withFailureHandler(error => {
        typeof onFailure === "function" && onFailure(error);
        rej(error);
      })
      [funcName].apply(null, params);
  });
};
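A usage sketch with the question's server functions returnJSON and listMyReports (loadReport is an illustrative name):
const loadReport = async (rid, uid) => {
  // call the server-side returnJSON(rid) and wait for its result
  const report = await asyncGAPIv2({ funcName: "returnJSON", params: [rid] });
  paintReport(report);
  // then fetch the user's report list the same way
  const reports = await asyncGAPIv2({ funcName: "listMyReports", params: [uid] });
  getMyReportsList(reports);
};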

Something like this isn't too bad:
var doSomeAsyncShit = async () => {
  let location = await new Promise(resolve => google.script.url.getLocation(resolve));
  // do stuff with location
};
(async () => {
  await doSomeAsyncShit();
  // do something after
})();
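The same one-line wrapping also covers the google.script.run calls from the question; a minimal sketch (loadReport is an illustrative name):
var loadReport = async () => {
  let location = await new Promise(resolve => google.script.url.getLocation(resolve));
  let rid = location.parameter.rid || defaultReportID;
  // wire the success/failure handlers straight to resolve/reject
  let report = await new Promise((resolve, reject) =>
    google.script.run.withSuccessHandler(resolve).withFailureHandler(reject).returnJSON(rid)
  );
  paintReport(report);
};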
Even so, you've added complexity and reduced readability for no good reason, IMHO.

Related

Why my bcrypt.compare() function returns false?

I'm trying to implement a password validation system to accept a user's "update password" request.
I'm using Strapi as my backend and Next.js as my frontend.
Strapi says it uses "bcrypt with autogenerated salt and hash" (mentioned in its documentation),
so the stored password is hashed.
I use MongoDB for my database.
The user is registered; I use it for all my tests, so I know the password is correct.
Question: what am I doing wrong?
I am very new to this.
Thank you.
Comparison code in my controller file:
I did this, but it always returns false (backend):
const comparePass = await bcrypt.compare(oldPassword, user.password);
if (comparePass) {
  ctx.throw(200, 'valid password !')
} else {
  ctx.throw(400, 'wrong password !')
}
When I debug, I can see in my console that:
oldPassword = 'maddie'
user.password ='$2a$10$tEXnOI.OASIeL0BsG2Go/ecHjXn38xNnM9HVHJlPnBFpqSRe6Yyf6'
which is correct
Hashing code - in my backend, the code below is used to hash the password when the user registers:
hashPassword(user = {}) {
  return new Promise(resolve => {
    if (!user.password || this.isHashed(user.password)) {
      resolve(null);
    } else {
      bcrypt.hash(`${user.password}`, 10, (err, hash) => {
        resolve(hash);
      });
    }
  });
},
Complete code in my User.js service including the hashing code:
'use strict';
/**
 * User.js service
 *
 * @description: A set of functions similar to controller's actions to avoid code duplication.
 */
const bcrypt = require('bcryptjs');
const crypto = require('crypto');
const { sanitizeEntity, getAbsoluteServerUrl } = require('strapi-utils');
module.exports = {
/**
* Promise to count users
*
* @return {Promise}
*/
count(params) {
return strapi.query('user', 'users-permissions').count(params);
},
/**
* Promise to search count users
*
* @return {Promise}
*/
countSearch(params) {
return strapi.query('user', 'users-permissions').countSearch(params);
},
/**
* Promise to add a/an user.
* @return {Promise}
*/
async add(values) {
if (values.password) {
values.password = await strapi.plugins['users-permissions'].services.user.hashPassword(
values
);
}
return strapi.query('user', 'users-permissions').create(values);
},
/**
* Promise to edit a/an user.
* @return {Promise}
*/
async edit(params, values) {
if (values.password) {
values.password = await strapi.plugins['users-permissions'].services.user.hashPassword(
values
);
}
return strapi.query('user', 'users-permissions').update(params, values);
},
/**
* Promise to fetch a/an user.
* @return {Promise}
*/
fetch(params, populate) {
return strapi.query('user', 'users-permissions').findOne(params, populate);
},
/**
* Promise to fetch authenticated user.
* @return {Promise}
*/
fetchAuthenticatedUser(id) {
return strapi.query('user', 'users-permissions').findOne({ id }, ['role']);
},
/**
* Promise to fetch all users.
* @return {Promise}
*/
fetchAll(params, populate) {
return strapi.query('user', 'users-permissions').find(params, populate);
},
hashPassword(user = {}) {
return new Promise(resolve => {
if (!user.password || this.isHashed(user.password)) {
resolve(null);
} else {
bcrypt.hash(`${user.password}`, 10, (err, hash) => {
resolve(hash);
});
}
});
},
isHashed(password) {
if (typeof password !== 'string' || !password) {
return false;
}
return password.split('$').length === 4;
},
/**
* Promise to remove a/an user.
* @return {Promise}
*/
async remove(params) {
return strapi.query('user', 'users-permissions').delete(params);
},
async removeAll(params) {
return strapi.query('user', 'users-permissions').delete(params);
},
validatePassword(password, hash) {
return bcrypt.compare(password, hash);
},
};
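As a sanity check, it can help to verify the hash/compare round trip with bcryptjs in isolation, outside Strapi and the database. A minimal sketch (the plain-text value is only an example):
const bcrypt = require('bcryptjs');

(async () => {
  const plain = 'maddie';
  // hash with an auto-generated salt and cost factor 10, as in the service above
  const hash = await bcrypt.hash(plain, 10);
  console.log(await bcrypt.compare(plain, hash)); // true
  console.log(await bcrypt.compare('wrong-password', hash)); // false
})();
If this prints true and false as expected, the problem is more likely in what is actually stored or passed to compare (for example, a password hashed twice before being saved) than in bcrypt.compare itself.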

How do you write a chain of SQL queries and updates based on the query data in SQLite without callback hell?

In Java land I would do something like
@Transactional
FormData update(FormData updatedFormData) {
    var result = dsl.query(
        "select id, formData from formStuff where formId = ?",
        updatedFormData.formId
    );
    var result2 = dsl.query(
        "select reference from referenceStuff where formStuffId = ?",
        result.get("id")
    );
    var mergedFormData = merge(
        result.get("formData"),
        result2.get("reference"),
        updatedFormData
    );
    var updateResult = dsl.executeUpdate(
        "update formStuff set formData = ? where id = ?",
        mergedFormData,
        result.get("id")
    );
    return mergedFormData;
}
I am trying to do something similar with Expo SQLite, but it quickly turned into callback hell:
async function update(db, updatedFormData) {
  return new Promise((resolve, reject) => {
    db.transaction(
      (tx) => {
        tx.executeSql(
          "select id, formData from formStuff where formId = ?",
          [updatedFormData.formId],
          (tx1, resultSet1) => {
            tx1.executeSql(
              "select reference from referenceStuff where formStuffId = ?",
              [resultSet1.rows.item(0).id],
              (tx2, resultSet2) => {
                const mergedFormData = merge(
                  resultSet1.rows.item(0).formData,
                  resultSet2.rows.item(0).reference,
                  updatedFormData
                );
                tx2.executeSql(
                  "update formStuff set formData = ? where id = ?",
                  [mergedFormData, resultSet1.rows.item(0).id],
                  (tx3) => {
                    resolve(mergedFormData);
                  },
                  (tx3, error) => {
                    console.log(error);
                    reject(error);
                    return true;
                  }
                );
              },
              (tx2, error) => {
                console.log(error);
                reject(error);
                return true;
              }
            );
          },
          (tx1, error) => {
            console.log(error);
            reject(error);
            return true;
          }
        );
      },
      (tx, error) => {
        console.error(error);
        reject(error);
      },
      () => {
        resolve();
      }
    );
  });
}
Wrap each call to executeSql in its own promise.
Generally it is better to then wrap each promise in its own function (which you can give a sensible name and arguments).
Then await the return value of each function in turn (which lets you assign the resolved value to a variable and pass it to the next function).
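A minimal sketch of that idea, using the Expo SQLite callback signature from the question (the helper names are illustrative):
// wrap a single executeSql call in a promise
function runSql(tx, sql, args = []) {
  return new Promise((resolve, reject) => {
    tx.executeSql(
      sql,
      args,
      (_tx, resultSet) => resolve(resultSet),
      (_tx, error) => {
        reject(error);
        return true; // as in the question's code: abort the transaction on error
      }
    );
  });
}

// a sensibly named function per query
function selectFormStuff(tx, formId) {
  return runSql(tx, "select id, formData from formStuff where formId = ?", [formId]);
}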
Maybe you can chain queries using recursion on success/error, something like:
function executeBatch(queries, ready) {
  if (queries.length === 0) {
    console.log('all done!');
    ready();
    return;
  }
  const queryId = `q${queries.length}`;
  const query = queries.shift();
  console.log(`starting ${query}`);
  console.time(queryId);
  const continueExecution = () => {
    console.timeEnd(queryId);
    executeBatch(queries, ready); // pass ready along so the done callback still fires
  };
  db.transaction(tx =>
    tx.executeSql(
      query,
      [],
      () => {
        console.log('ok');
        continueExecution();
      },
      () => {
        console.error('fail');
        continueExecution();
      }
    )
  );
}
executeBatch(['query1', 'query2', ...], doneCallback);
UPDATE: this does not work due to https://github.com/nolanlawson/node-websql/issues/46
I just did a quick hack of a module to do this for me. There are likely better ways of doing this by extending classes and whatnot (plus I am limited to JavaScript, though I use VSCode's TS check with JSDoc).
// @ts-check
/**
 * This module provides an async/await interface to Expo SQLite. For now this provides a functional interface rather than a class based interface.
 * @module
 */
/**
 * @typedef {import("expo-sqlite").SQLTransaction} SQLTransaction
 * @typedef {import("expo-sqlite").SQLError} SQLError
 * @typedef {import("expo-sqlite").SQLResultSet} SQLResultSet
 * @typedef {(tx: SQLTransaction)=>Promise<any>} AsyncTransactionCallback
 */
import * as SQLite from "expo-sqlite";
/**
 *
 * @param {string} name
 * @param {string} [version]
 * @returns {Promise<SQLite.WebSQLDatabase>}
 */
export async function openDatabaseAsync(name, version) {
  return new Promise((resolve) => {
    SQLite.openDatabase(name, version, "", 0, (db) => {
      resolve(db);
    });
  });
}
/**
 *
 * @param {SQLTransaction} tx transaction
 * @param {string} sqlStatement
 * @param {any[]} [args]
 * @return {Promise<SQLResultSet>}
 */
export async function executeSqlAsync(tx, sqlStatement, args = []) {
  return new Promise((resolve, reject) => {
    tx.executeSql(
      sqlStatement,
      args,
      (txObj, resultSet) => {
        resolve(resultSet);
      },
      (error) => {
        console.log(error);
        reject(error);
        return true;
      }
    );
  });
}
/**
 *
 * @param {SQLite.WebSQLDatabase} db
 * @return {(fn: AsyncTransactionCallback)=>Promise<any>}
 */
export function txn(db) {
  return async (f) => {
    // returning the promise here is what lets callers await the transaction result
    return new Promise((resolve, reject) => {
      db.transaction(
        (tx) => {
          f(tx)
            .then((result) => resolve(result))
            .catch(reject);
        },
        /**
         *
         * @param {SQLError} error error
         */
        (error) => {
          reject(error);
        },
        () => {
          resolve();
        }
      );
    });
  };
}
For my scenario it is used like this:
async function update(db, updatedFormData) {
  return await txn(db)(async (tx) => {
    // there's probably a cleaner way of writing this using bind or something
    const resultSet1 = await executeSqlAsync(tx,
      "select id, formData from formStuff where formId = ?",
      [updatedFormData.formId]);
    const resultSet2 = await executeSqlAsync(tx,
      "select reference from referenceStuff where formStuffId = ?",
      [resultSet1.rows.item(0).id]);
    const mergedFormData = merge(
      resultSet1.rows.item(0).formData,
      resultSet2.rows.item(0).reference,
      updatedFormData
    );
    await executeSqlAsync(tx,
      "update formStuff set formData = ? where id = ?",
      [mergedFormData, resultSet1.rows.item(0).id],
    );
    return mergedFormData;
  });
}
Maybe I'll figure out how to tweak it so it looks like this in the future, but for now what I have does what I need.
async function update(db: AsyncSQLiteDatabase, updatedFormData: FormData) {
  return await db.asyncTransaction<FormData>(async (tx) => {
    // there's probably a cleaner way of writing this using bind or something
    const resultSet1 = await tx.executeSqlAsync(
      "select id, formData from formStuff where formId = ?",
      [updatedFormData.formId]);
    const resultSet2 = await tx.executeSqlAsync(
      "select reference from referenceStuff where formStuffId = ?",
      [resultSet1.rows.item(0).id]);
    const mergedFormData = merge(
      resultSet1.rows.item(0).formData,
      resultSet2.rows.item(0).reference,
      updatedFormData
    );
    await tx.executeSqlAsync(
      "update formStuff set formData = ? where id = ?",
      [mergedFormData, resultSet1.rows.item(0).id],
    );
    return mergedFormData;
  });
}
Use async/await to make things feel synchronous. Assuming your sqlite lib is promise ready.
async function update(db, updatedFormData) {
  const tx = await db.transaction();
  try {
    const resultSet1 = await tx.executeSql(
      "select id, formData from formStuff where formId = ?",
      [updatedFormData.formId]
    );
    const resultSet2 = await tx.executeSql(
      "select reference from referenceStuff where formStuffId = ?",
      [resultSet1.rows.item(0).id]
    );
    // ... etc
    tx.commit();
  }
  catch (e) {
    tx.rollback();
  }
}

ftp directory download triggers maximum call stack exceeded error

I'm currently working on a backup script with NodeJS. The script downloads a directory and its files and subdirectories recursively using FTP/FTPS. I'm using the basic-ftp package to do the FTP calls.
When I try to download a big directory with a lot of subdirectories, I get the Maximum call stack size exceeded error, but I can't find why or where it happens. I don't see any infinite loop or any missing return calls. After hours of debugging, I have no more ideas.
I don't use the downloadDirTo method from basic-ftp, because I don't want to stop downloading after an error happens. When an error occurs, it should keep going and add the error to the log file.
The repository is here: https://github.com/julianpoemp/webspace-backup.
As soon as the FTPManager is ready, I call the doBackup method (see method in BackupManager). This method calls the downloadFolder method defined in FTPManager.
export class BackupManager {
private ftpManager: FtpManager;
constructor() {
osLocale().then((locale) => {
ConsoleOutput.info(`locale is ${locale}`);
moment.locale(locale);
}).catch((error) => {
ConsoleOutput.error(error);
});
this.ftpManager = new FtpManager(AppSettings.settings.backup.root, {
host: AppSettings.settings.server.host,
port: AppSettings.settings.server.port,
user: AppSettings.settings.server.user,
password: AppSettings.settings.server.password,
pasvTimeout: AppSettings.settings.server.pasvTimeout
});
this.ftpManager.afterManagerIsReady().then(() => {
this.doBackup();
}).catch((error) => {
ConsoleOutput.error(error);
});
}
public doBackup() {
let errors = '';
if (fs.existsSync(path.join(AppSettings.appPath, 'errors.log'))) {
fs.unlinkSync(path.join(AppSettings.appPath, 'errors.log'));
}
if (fs.existsSync(path.join(AppSettings.appPath, 'statistics.txt'))) {
fs.unlinkSync(path.join(AppSettings.appPath, 'statistics.txt'));
}
const subscr = this.ftpManager.error.subscribe((message: string) => {
ConsoleOutput.error(`${moment().format('L LTS')}: ${message}`);
const line = `${moment().format('L LTS')}:\t${message}\n`;
errors += line;
fs.appendFile(path.join(AppSettings.appPath, 'errors.log'), line, {
encoding: 'Utf8'
}, () => {
});
});
let name = AppSettings.settings.backup.root.substring(0, AppSettings.settings.backup.root.lastIndexOf('/'));
name = name.substring(name.lastIndexOf('/') + 1);
const downloadPath = (AppSettings.settings.backup.downloadPath === '') ? AppSettings.appPath : AppSettings.settings.backup.downloadPath;
ConsoleOutput.info(`Remote path: ${AppSettings.settings.backup.root}\nDownload path: ${downloadPath}\n`);
this.ftpManager.statistics.started = Date.now();
this.ftpManager.downloadFolder(AppSettings.settings.backup.root, path.join(downloadPath, name)).then(() => {
this.ftpManager.statistics.ended = Date.now();
this.ftpManager.statistics.duration = (this.ftpManager.statistics.ended - this.ftpManager.statistics.started) / 1000 / 60;
ConsoleOutput.success('Backup finished!');
const statistics = `\n-- Statistics: --
Started: ${moment(this.ftpManager.statistics.started).format('L LTS')}
Ended: ${moment(this.ftpManager.statistics.ended).format('L LTS')}
Duration: ${this.ftpManager.getTimeString(this.ftpManager.statistics.duration * 60 * 1000)} (H:m:s)
Folders: ${this.ftpManager.statistics.folders}
Files: ${this.ftpManager.statistics.files}
Errors: ${errors.split('\n').length - 1}`;
ConsoleOutput.log('\n' + statistics);
fs.writeFileSync(path.join(AppSettings.appPath, 'statistics.txt'), statistics, {
encoding: 'utf-8'
});
if (errors !== '') {
ConsoleOutput.error(`There are errors. Please read the errors.log file for further information.`);
}
subscr.unsubscribe();
this.ftpManager.close();
}).catch((error) => {
ConsoleOutput.error(error);
this.ftpManager.close();
});
}
}
import * as ftp from 'basic-ftp';
import {FileInfo} from 'basic-ftp';
import * as Path from 'path';
import * as fs from 'fs';
import {Subject} from 'rxjs';
import {FtpEntry, FTPFolder} from './ftp-entry';
import {ConsoleOutput} from './ConsoleOutput';
import moment = require('moment');
export class FtpManager {
private isReady = false;
private _client: ftp.Client;
private currentDirectory = '';
public readyChange: Subject<boolean>;
public error: Subject<string>;
private connectionOptions: FTPConnectionOptions;
public statistics = {
folders: 0,
files: 0,
started: 0,
ended: 0,
duration: 0
};
private recursives = 0;
constructor(path: string, options: FTPConnectionOptions) {
this._client = new ftp.Client();
this._client.ftp.verbose = false;
this.readyChange = new Subject<boolean>();
this.error = new Subject<string>();
this.currentDirectory = path;
this.connectionOptions = options;
this.connect().then(() => {
this.isReady = true;
this.gotTo(path).then(() => {
this.onReady();
}).catch((error) => {
ConsoleOutput.error('ERROR: ' + error);
this.onConnectionFailed();
});
});
}
private connect(): Promise<void> {
return new Promise<void>((resolve, reject) => {
this._client.access({
host: this.connectionOptions.host,
user: this.connectionOptions.user,
password: this.connectionOptions.password,
secure: true
}).then(() => {
resolve();
}).catch((error) => {
reject(error);
});
});
}
private onReady = () => {
this.isReady = true;
this.readyChange.next(true);
};
private onConnectionFailed() {
this.isReady = false;
this.readyChange.next(false);
}
public close() {
this._client.close();
}
public async gotTo(path: string) {
return new Promise<void>((resolve, reject) => {
if (this.isReady) {
ConsoleOutput.info(`open ${path}`);
this._client.cd(path).then(() => {
this._client.pwd().then((dir) => {
this.currentDirectory = dir;
resolve();
}).catch((error) => {
reject(error);
});
}).catch((error) => {
reject(error);
});
} else {
reject(`FTPManager is not ready. gotTo ${path}`);
}
});
}
public async listEntries(path: string): Promise<FileInfo[]> {
if (this.isReady) {
return this._client.list(path);
} else {
throw new Error('FtpManager is not ready. list entries');
}
}
public afterManagerIsReady(): Promise<void> {
return new Promise<void>((resolve, reject) => {
if (this.isReady) {
resolve();
} else {
this.readyChange.subscribe(() => {
resolve();
},
(error) => {
reject(error);
},
() => {
});
}
});
}
public async downloadFolder(remotePath: string, downloadPath: string) {
this.recursives++;
if (this.recursives % 100 === 99) {
ConsoleOutput.info('WAIT');
await this.wait(0);
}
if (!fs.existsSync(downloadPath)) {
fs.mkdirSync(downloadPath);
}
try {
const list = await this.listEntries(remotePath);
for (const fileInfo of list) {
if (fileInfo.isDirectory) {
const folderPath = remotePath + fileInfo.name + '/';
try {
await this.downloadFolder(folderPath, Path.join(downloadPath, fileInfo.name));
this.statistics.folders++;
ConsoleOutput.success(`${this.getCurrentTimeString()}===> Directory downloaded: ${remotePath}\n`);
} catch (e) {
this.error.next(e);
}
} else if (fileInfo.isFile) {
try {
const filePath = remotePath + fileInfo.name;
if (this.recursives % 100 === 99) {
ConsoleOutput.info('WAIT');
await this.wait(0);
}
await this.downloadFile(filePath, downloadPath, fileInfo);
} catch (e) {
this.error.next(e);
}
}
}
return true;
} catch (e) {
this.error.next(e);
return true;
}
}
public async downloadFile(path: string, downloadPath: string, fileInfo: FileInfo) {
this.recursives++;
if (fs.existsSync(downloadPath)) {
const handler = (info) => {
let procent = Math.round((info.bytes / fileInfo.size) * 10000) / 100;
if (isNaN(procent)) {
procent = 0;
}
let procentStr = '';
if (procent < 10) {
procentStr = '__';
} else if (procent < 100) {
procentStr = '_';
}
procentStr += procent.toFixed(2);
ConsoleOutput.log(`${this.getCurrentTimeString()}---> ${info.type} (${procentStr}%): ${info.name}`);
};
if (this._client.closed) {
try {
await this.connect();
} catch (e) {
throw new Error(e);
}
}
this._client.trackProgress(handler);
try {
await this._client.downloadTo(Path.join(downloadPath, fileInfo.name), path);
this._client.trackProgress(undefined);
this.statistics.files++;
return true;
} catch (e) {
throw new Error(e);
}
} else {
throw new Error('downloadPath does not exist');
}
}
public chmod(path: string, permission: string): Promise<void> {
return new Promise<void>((resolve, reject) => {
this._client.send(`SITE CHMOD ${permission} ${path}`).then(() => {
console.log(`changed chmod of ${path} to ${permission}`);
resolve();
}).catch((error) => {
reject(error);
});
});
}
public getCurrentTimeString(): string {
const duration = Date.now() - this.statistics.started;
return moment().format('L LTS') + ' | Duration: ' + this.getTimeString(duration) + ' ';
}
public getTimeString(timespan: number) {
if (timespan < 0) {
timespan = 0;
}
let result = '';
const minutes: string = this.formatNumber(this.getMinutes(timespan), 2);
const seconds: string = this.formatNumber(this.getSeconds(timespan), 2);
const hours: string = this.formatNumber(this.getHours(timespan), 2);
result += hours + ':' + minutes + ':' + seconds;
return result;
}
private formatNumber = (num, length): string => {
let result = '' + num.toFixed(0);
while (result.length < length) {
result = '0' + result;
}
return result;
};
private getSeconds(timespan: number): number {
return Math.floor(timespan / 1000) % 60;
}
private getMinutes(timespan: number): number {
return Math.floor(timespan / 1000 / 60) % 60;
}
private getHours(timespan: number): number {
return Math.floor(timespan / 1000 / 60 / 60);
}
public async wait(time: number): Promise<void> {
return new Promise<void>((resolve) => {
setTimeout(() => {
resolve();
}, time);
});
}
}
export interface FTPConnectionOptions {
host: string;
port: number;
user: string;
password: string;
pasvTimeout: number;
}
Problem
Inside the FtpManager.downloadFolder function, I see recursive calls to the same downloadFolder method with an await. Your Maximum call stack exceeded error could come from there, since your initial call will need to keep everything in memory while traversing all subdirectories.
Proposed solution
Instead of awaiting everything recursively, you could set up a queue system, with an algorithm like this:
Add the current folder to a queue
While that queue is not empty:
Get the first folder in the queue (and remove it from it)
List all entries in it
Download all files
Add all subfolders to the queue
This allows you to download a lot of folders in a loop, instead of using recursion. Each loop iteration will run independently, meaning that the result of the root directory download won't depend on the deeeeeep file tree inside it.
Using a queue manager
There are plenty of queue manager modules for NodeJS, which allow you to have concurrency, timeouts, etc. One I've used in the past is simply named queue. It has a lot of useful features, but will require a little more work to implement in your project. Hence, for this answer, I used no external queue module, so that you can see the logic behind it. Feel free to search for queue, job, concurrency...
Example
I wanted to implement that logic directly into your own code, but I don't use Typescript, so I thought I'd make a simple folder copy function, which uses the same logic.
Note: for simplicity, I've not added any error handling; this is just a proof of concept! You can find a demo project which uses this here on my GitHub.
Here is how I've done it:
const fs = require('fs-extra');
const Path = require('path');
class CopyManager {
constructor() {
// Create a queue accessible by all methods
this.folderQueue = [];
}
/**
* Copies a directory
* @param {String} remotePath
* @param {String} downloadPath
*/
async copyFolder(remotePath, downloadPath) {
// Add the folder to the queue
this.folderQueue.push({ remotePath, downloadPath });
// While the queue contains folders to download
while (this.folderQueue.length > 0) {
// Download them
const { remotePath, downloadPath } = this.folderQueue.shift();
console.log(`Copy directory: ${remotePath} to ${downloadPath}`);
await this._copyFolderAux(remotePath, downloadPath);
}
}
/**
* Private internal method which copies the files from a folder,
* but if it finds subfolders, simply adds them to the folderQueue
* @param {String} remotePath
* @param {String} downloadPath
*/
async _copyFolderAux(remotePath, downloadPath) {
await fs.mkdir(downloadPath);
const list = await this.listEntries(remotePath);
for (const fileInfo of list) {
if (fileInfo.isDirectory) {
const folderPath = Path.join(remotePath, fileInfo.name);
const targetPath = Path.join(downloadPath, fileInfo.name);
// Push the folder to the queue
this.folderQueue.push({ remotePath: folderPath, downloadPath: targetPath });
} else if (fileInfo.isFile) {
const filePath = Path.join(remotePath, fileInfo.name);
await this.copyFile(filePath, downloadPath, fileInfo);
}
}
}
/**
* Copies a file
* @param {String} filePath
* @param {String} downloadPath
* @param {Object} fileInfo
*/
async copyFile(filePath, downloadPath, fileInfo) {
const targetPath = Path.join(downloadPath, fileInfo.name);
console.log(`Copy file: ${filePath} to ${targetPath}`);
return await fs.copy(filePath, targetPath);
}
/**
* Lists entries from a folder
* @param {String} remotePath
*/
async listEntries(remotePath) {
const fileNames = await fs.readdir(remotePath);
return Promise.all(
fileNames.map(async name => {
const stats = await fs.lstat(Path.join(remotePath, name));
return {
name,
isDirectory: stats.isDirectory(),
isFile: stats.isFile()
};
})
);
}
}
module.exports = CopyManager;
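Usage could then look like this (a sketch; the require path and folder names are placeholders):
const CopyManager = require('./CopyManager'); // wherever the module above lives

(async () => {
  const manager = new CopyManager();
  // copies ./source into ./backup iteratively via the folder queue, no recursion
  await manager.copyFolder('./source', './backup');
})();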
I found the source of the problem. It's the pkg package that emits the maximum callstack exceeded error: www.github.com/zeit/pkg/issues/681.
When I test it directly using node on Windows, it works. I will either downgrade to Node 10 or look for another solution.
Thanks @blex for the help!

How to bypass jest setTimeout error of 5000ms by managing promises (Async and Await)

I wrote an async/await function to return promises for a drivers report and analysis.
I have three different promise-based API files I extract details from to do my analysis. However, running the test with jest I get the error:
Timeout - Async callback was not invoked within the 5000ms timeout specified by jest.setTimeout.Error:
I have refactored my code more than three times in two days, but the error persists.
I would like to know how to manage my promises; perhaps there is something I am not doing well, and I am keen to optimize this.
Is there a way to manage the promises in the code below to bypass the jest error?
Any other suggestion will be highly appreciated.
NB: sorry, I have posted all the code for better insight.
code
const { getTrips } = require('api');
const { getDriver } = require('api')
const { getVehicle } = require('api')
/**
* This function should return the data for drivers in the specified format
*
* Question 4
*
* @returns {any} Driver report data
*/
async function driverReport() {
// Your code goes here
let trip = await getTrips()
trip = trip.map(item => {
item.billedAmount = parseFloat(item.billedAmount.toString().replace(',', '')).toFixed(2);
return item;
})
let getId = trip.reduce((user, cur) => {
user[cur.driverID] ? user[cur.driverID] = user[cur.driverID] + 1 : user[cur.driverID] = 1
return user
}, {})
// console.log(getId)
let mapId = Object.keys(getId)
// console.log(mapId)
let eachTripSummary = mapId.reduce((acc, cur) => {
let singleTrip = trip.filter(item => item.driverID == cur)
acc.push(singleTrip)
return acc
}, [])
// eachTripSummary = eachTripSummary[0]
// console.log(eachTripSummary)
// console.log(trip)
let reducedReport = eachTripSummary.reduce(async(acc, cur) =>{
acc = await acc
// console.log(acc)
let user = {}
let cash = cur.filter(item => item.isCash == true)
// console.log(cash.length)
let nonCash = cur.filter(item => item.isCash == false)
let driverSummary = await getDriverSummary(cur[0]['driverID'])
let trips = []
let customer = {}
cur[0].user ? (customer['user'] = cur[0]['user']['name'], customer['created'] = cur[0]['created'], customer['pickup'] = cur[0]['pickup']['address'],
customer['destination'] = cur[0]['destination']['address'], customer['billed'] = cur[0]['billedAmount'], customer['isCash'] = cur[0]['isCash']) : false
trips.push(customer)
let vehicles = []
if(driverSummary == undefined){
// console.log(cur)
user = {
id: cur[0]['driverID'],
vehicles: vehicles,
noOfCashTrips: cash.length,
noOfNonCashTrips: nonCash.length,
noOfTrips: cur.length,
trips: trips
}
acc.push(user)
// console.log(user)
return acc
}
let driverInfo = driverSummary[0]
let vehicleInfo = driverSummary[1]
let { name, phone } = driverInfo
let { plate, manufacturer } = vehicleInfo[0]
// console.log(plate)
let vpm = {
plate,
manufacturer
}
vehicles.push(vpm)
// console.log(cash.length)
user ={
fulName: name,
phone,
id: cur[0]['driverID'],
vehicles: vehicles,
noOfCashTrips: cash.length,
noOfNonCashTrips: nonCash.length,
noOfTrips: cur.length,
trips: trips
}
acc.push(user)
// console.log(acc)
return acc
}, [])
// reducedReport.then(data =>{console.log(data)})
return reducedReport
}
async function getDriverSummary(param) {
let driverDetails = await getDriver(param)
.then(data => {return data}).catch(err => {return err})
// console.log(driverDetails)
let vehicleDetails;
let { vehicleID } = driverDetails
if(driverDetails != "Error" & vehicleID != undefined){
// console.log(vehicleID)
vehicleDetails = vehicleID.map(async item => {
let vehicleSummary = getVehicle(item)
return vehicleSummary
})
// console.log(await vehicleDetails)
return await Promise.all([driverDetails, vehicleDetails])
}
}
driverReport().then(data => {
console.log(data)
})
module.exports = driverReport;
Use jest.setTimeout(30000); to increase the timeout. It will increase the timeout globally.
// jest.config.js
module.exports = {
setupTestFrameworkScriptFile: './jest.setup.js'
}
// jest.setup.js
jest.setTimeout(30000)
Or you can set the timeout for an individual test, like this:
describe("...", () => {
test(`...`, async () => {
...
}, 30000);
});
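For example, a test for the question's driverReport could pass its own timeout as the third argument (a sketch; the require path and assertion are illustrative):
const driverReport = require('./driverReport'); // the module exported above

describe('driverReport', () => {
  test('resolves with a report for each driver', async () => {
    const report = await driverReport();
    expect(Array.isArray(report)).toBe(true);
  }, 30000); // per-test timeout of 30 seconds
});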

Web scraping and promises

I am using cheerio and Node to do web scraping, but I have a problem with promises. I can scrape an article list from a page, but each item in that list links to its own page, and I need to scrape those single pages as well.
I will show you my code so you can suggest a better solution.
import rp from 'request-promise'
import cheerio from 'cheerio'
import conn from './connection'
const flexJob = `https://www.flexjobs.com`
const flexJobCategory = ['account-management', 'bilingual']
class WebScraping {
//list of article e.g for page 2
results = [] // [[title], [link for page],...]
contentPage = [] //content for each page
scrapeWeb(link) {
let fullLink = `${link}/jobs/${flexJobCategory[1]}?page=2`
const options = {
uri: fullLink,
transform(body) {
return cheerio.load(body)
}
}
rp(options)
.then(($) => {
console.log(fullLink)
$('.featured-job').each((index, value) => {
//html nodes
let shortDescription = value.children[1].children[1].children[3].children[1].children[1].children[0].data
let link = value.children[1].children[1].children[1].children[1].children[1].children[0].attribs.href
let pageLink = flexJob + '' + link
let title = value.children[1].children[1].children[1].children[1].children[1].children[0].children[0].data
let place = value.children[1].children[1].children[1].children[1].children[3].children[1].data
let jobType = value.children[1].children[1].children[1].children[1].children[3].children[0].children[0].data
this.results.push([title, '', pageLink.replace(/\s/g, ''), '', shortDescription.replace(/\n/g, ''), place, jobType, 'PageContent::: '])
})
})
.then(() => {
this.results.forEach(element => {
console.log('link: ', element[2])
this.scrapePage(element[2])
});
})
.then(() => {
console.log('print content page', this.contentPage)
})
.then(() => {
//this.insertIntoDB()
console.log('insert into db')
})
.catch((err) => {
console.log(err)
})
}
/**
* It's going to scrape all pages from list of jobs
* @param {Any} pageLink
* @param {Number} count
*/
scrapePage(pageLink) {
let $this = this
//console.log('We are in ScrapePage' + pageLink + ': number' + count)
//this.results[count].push('Hello' + count)
let content = ''
const options = {
uri: pageLink,
transform(body) {
return cheerio.load(body)
}
}
rp(options)
.then(($) => {
//this.contentPage.push('Hello' + ' : ');
console.log('Heloo')
})
.catch((err) => {
console.log(err)
})
}
/**
* This method is going to insert data into Database
*/
insertIntoDB() {
conn.connect((err) => {
var sql = "INSERT INTO contact (title, department, link, salary, short_description, location, job_type, page_detail) VALUES ?"
var values = this.results
conn.query(sql, [values], function (err) {
if (err) throw err
conn.end()
})
})
}
}
let webScraping = new WebScraping()
let scrapeList = webScraping.scrapeWeb(flexJob)
So, in the 'scrapeWeb' method, in the second '.then', I am calling the 'scrapePage' method; however, the third '.then' executes before the promise inside 'scrapePage' resolves.
You need a little more control flow at that stage. You do not want that .then()'s promise to resolve until all the calls are resolved.
You could use a Promise library like bluebird to do a Promise.each or a Promise.map for all the results you want to run.
Or use async/await by making the handler async, .then(async () => {}), and do not use .forEach:
for (let element of this.results) {
  console.log('link: ', element[2])
  await this.scrapePage(element[2])
}
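If you go the bluebird route instead, a sketch could look like this (it assumes scrapePage is changed to return its rp promise; the concurrency value is arbitrary):
const Promise = require('bluebird');

// map each result to a scrapePage call, fetching at most 5 pages at a time;
// the returned promise resolves only after every page has been scraped
function scrapeAllPages(webScraping) {
  return Promise.map(
    webScraping.results,
    element => webScraping.scrapePage(element[2]),
    { concurrency: 5 }
  );
}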
You have a race condition problem.
The first tweak you'll need is to have scrapePage return a Promise.
scrapePage(pageLink) {
  let $this = this
  let content = ''
  const options = {
    uri: pageLink,
    transform(body) {
      return cheerio.load(body)
    }
  }
  return rp(options);
}
In the second .then, you need to invoke the scraping of all child pages, e.g.:
.then(() => {
  return Promise.all(this.results.map(childPage => this.scrapePage(childPage)));
})
This wraps all child-page scrapes into promises, and the code will only continue once all of them have resolved.
