This function takes 2 asynchronous callbacks. I am not sure why, but when these callbacks are called, the promises they await aren't being awaited correctly. I'm thinking it may have something to do with the way I'm calling them. I am not very familiar with the promise API so I kind of just hacked this together. If someone could tell me if I am doing something wrong, I would really appreciate it.
async queryTasks(handleCommand, handleSubmission) {
const D = new Date().getTime();
const promises = [];
while (!this.tasks.isEmpty()) {
const task = this.tasks.dequeue();
// If not a submission
if (task.item.body) {
const command = new Command().test(task.item.body);
if (command) { // If the item received was a command, return the command, the item, and priority
const T = {
command: command,
item: task.item,
priority: task.priority,
time: D
}
console.log("Calling back with handleCommand(task)".bgMagenta.white);
promises.push(handleCommand(T));
}
} else if (task.item.title) { // Task was a submission
console.log("Calling back with handleSubmission".bgMagenta.black);
const T = {
item: task.item,
priority: task.priority,
time: D
}
promises.push(handleSubmission(T));
}
}
return Promise.all(promises);
}
Or maybe it's the way I'm calling it queryTasks()?
/* [Snoolicious Run Cycle] */
// Poll interval in milliseconds; the INTERVAL env var is given in seconds.
const INTERVAL = (process.env.INTERVAL * 1000);
// One polling cycle: fetch mentions, dispatch queued tasks, then schedule the
// next cycle. Note the recursion goes through setTimeout, so run() resolves
// before the next cycle begins.
async function run() {
    console.log("Running Test!!!".green);
    await snoolicious.getMentions(2);
    console.log("Size of the queue: ", snoolicious.tasks.size());
    await snoolicious.queryTasks(handleCommand, handleSubmission);
    console.log(`Finished querying tasks. Sleeping for ${INTERVAL/1000} seconds...`.rainbow);
    // BUG FIX: the scheduled run()'s promise was discarded, so a failure in
    // any later cycle became an unhandled rejection. Attach a catch handler.
    setTimeout(() => run().catch(err => console.error(err)), (INTERVAL));
}
(async () => {
    try {
        await run();
    } catch (err) {
        // BUG FIX: without this, a failed first cycle surfaced as an
        // unhandled promise rejection (a hard crash on recent Node versions).
        console.error(err);
    }
})();
The output:
Preparing new database...
SELECT count(*) FROM sqlite_master WHERE type='table' AND name='saved';
Preparing statements...
Running Test!!!
MentionBot --Assigning hte FIRST utc...
Size of the queue: 2
Snoolicious Querying Tasks!
Calling back with handleCommand(task)
Bot -- handling a command! { directive: 'positive', args: [] }
Test passed
getting the parent submission...
Calling back with handleCommand(task)
Bot -- handling a command! { directive: 'positive', args: [] }
Test passed
getting the parent submission...
Finished querying tasks. Sleeping for 30 seconds...
Got this parent: Comment {...
Handling commands and getting parent submission: (snoolicious.requester = snoowrap.requester)
// Handles one queued command task: builds a de-duplication id, looks it up,
// and (for the master sub only) validates the command, fetches the parent,
// and runs the rating/prefix checks.
async function handleCommand(task) {
// Composite id used for de-duplication in the bot's database.
let id = `${task.item.parent_id}${task.item.created_utc}${task.item.id}`;
const checkedId = await db.checkID(id);
// NOTE(review): this condition tests the subreddit, not `checkedId`, yet the
// else-branch logs "id HAS been seen" - the guard and the message disagree.
// Presumably the intent was to also skip work when checkedId is truthy;
// confirm against db.checkID's contract.
if (task.item.subreddit.display_name === process.env.MASTER_SUB) {
try {
validateCommand(task.command);
const parent = await getParentSubmission(task.item);
console.log("Got this parent: ", parent);
console.log("Checking against this item: ", task.item);
await checkUserRatingSelf(task.item);
await checkTypePrefix(task.item);
} catch (err) {
// Any validation/fetch failure is reported back as a reply.
await replyWithError(err.message);
}
} else {
console.log("id HAS been seen: id ", checkedId);
}
}
// Get a parent submission:
// Resolves the parent of an item: a submission for 't3_' ids, a comment for
// 't1_' ids. Any other prefix falls through and resolves to undefined,
// matching the original behavior.
async function getParentSubmission(item) {
    console.log("getting the parent submission...".magenta);
    const parentId = item.parent_id;
    if (parentId.startsWith('t3_')) {
        return await snoolicious.requester.getSubmission(parentId.replace('t3_', ''));
    }
    if (parentId.startsWith('t1_')) {
        return await snoolicious.requester.getComment(parentId.replace('t1_', ''));
    }
}
Related
So I'm trying to test a button of mine that runs a function asynchronously. This is what my button logic looks like.
// Function below will run when user click the button
this._pageModule.favoriteButtonCallback = async () => {
    try {
        // Persist first, then read the store back for the log.
        await this._favoriteRestaurants.PutRestaurant(this._restaurant);
        const allFavorites = await this._favoriteRestaurants.GetAllRestaurant();
        console.log('console log in button', allFavorites);
        this._renderButton();
        // Resolving an async function with a value is equivalent to
        // returning Promise.resolve(value).
        return `Success add ${this._restaurant.name} to favorite!`;
    } catch (err) {
        this._renderButton();
        // Reject with the same plain message string the original produced via
        // `new Error(...).message`.
        return Promise.reject(
            `Failed add ${this._restaurant.name} to favorite! Error: ${err}`,
        );
    }
};
and this is my test
// Spec: clicking the favorite button should persist the restaurant.
fit('should be able to add the restaurant to favorite', async () => {
// Precondition: the store starts empty.
expect((await RestaurantIdb.GetAllRestaurant()).length).toEqual(0);
// spyOn(RestaurantIdb, 'PutRestaurant');
document.body.innerHTML = `<detail-module></detail-module>
<modal-element></modal-element>`;
const pageModule = document.querySelector('detail-module');
await FavoriteButtonInitiator.init({
pageModule,
restaurant,
favoriteRestaurants: RestaurantIdb,
});
pageModule.restaurantDetail = restaurant;
await pageModule.updateComplete;
// Reach through both shadow roots to the real <button>.
const favoriteButton = pageModule.shadowRoot
.querySelector('[aria-label="favorite this restaurant"]')
.shadowRoot.querySelector('button');
// 1. Simulate user click the button
favoriteButton.dispatchEvent(new Event('click'));
// expect(RestaurantIdb.PutRestaurant).toHaveBeenCalled();
// NOTE(review): dispatchEvent returns before the async click handler's
// promise settles, so this read races the PutRestaurant call - which is why
// the test observes an empty array.
const restaurants = await RestaurantIdb.GetAllRestaurant();
console.log('console log from test', restaurants);
expect(restaurants).toEqual([restaurant]);
});
I'm using lit-element, which is broadly similar to React. I have a custom element <define-module> with a button inside; once I give it the required properties, it renders.
This is my test log Test log
As you can see, the console log from the test ran before the console log that I put in the button, and it printed an empty array.
What I want is for the next line in the test, after the click event is dispatched, to wait until the asynchronous function in the button is done. How do I make that possible?
What have i done:
i have tried to console log them.
I have tried using done in Jasmine, but it doesn't work since I'm using async/await in the test.
I have tried use spyOn, but i don't really understand how to spy indexedDb
UPDATE
So i have found what caused problem, here i have simplified my code.
/* eslint-disable */
import { openDB } from 'idb';
import { CONFIG } from '../src/scripts/globals';
const { DATABASE_NAME, DATABASE_VERSION, OBJECT_STORE_NAME } = CONFIG;
// Opened once at module load; every accessor below awaits this shared
// promise, so the connection is reused.
const dbPromise = openDB(DATABASE_NAME, DATABASE_VERSION, {
upgrade(database) {
// Runs only when the version bumps; restaurants are keyed by `id`.
database.createObjectStore(OBJECT_STORE_NAME, { keyPath: 'id' });
},
});
// Thin CRUD facade over the shared IndexedDB connection.
const RestaurantIdb = {
  // Fetch a single restaurant by primary key (undefined when absent).
  async GetRestaurant(id) {
    const db = await dbPromise;
    return db.get(OBJECT_STORE_NAME, id);
  },
  // Fetch every stored restaurant.
  async GetAllRestaurant() {
    const db = await dbPromise;
    return db.getAll(OBJECT_STORE_NAME);
  },
  // Insert a restaurant; rejects with a plain message string on duplicates.
  async PutRestaurant(restaurant) {
    const existing = await this.GetRestaurant(restaurant.id);
    if (existing) {
      return Promise.reject(
        new Error('This restauant is already in your favorite!').message,
      );
    }
    const db = await dbPromise;
    return db.put(OBJECT_STORE_NAME, restaurant);
  },
  // Remove a restaurant; rejects with a plain message string when absent.
  async DeleteRestaurant(id) {
    const existing = await this.GetRestaurant(id);
    if (existing) {
      const db = await dbPromise;
      return db.delete(OBJECT_STORE_NAME, id);
    }
    return Promise.reject(
      new Error('This restauant is not in favorite!').message,
    );
  },
};
describe('Testing RestaurantIdb', () => {
  // Empties the store so every spec starts from a known state.
  const removeAllRestaurant = async () => {
    const restaurants = await RestaurantIdb.GetAllRestaurant();
    for (const { id } of restaurants) {
      await RestaurantIdb.DeleteRestaurant(id);
    }
  };
  beforeEach(async () => {
    await removeAllRestaurant();
  });
  afterEach(async () => {
    await removeAllRestaurant();
  });
  it('should add restaurant', async () => {
    document.body.innerHTML = `<button></button>`;
    const button = document.querySelector('button');
    // Keep a handle on the handler's promise so the spec can await it.
    let clickHandled;
    button.addEventListener('click', () => {
      clickHandled = RestaurantIdb.PutRestaurant({ id: 1 });
    });
    button.dispatchEvent(new Event('click'));
    // BUG FIX: the original ran its expectations inside setTimeout(..., 0),
    // which fires after this async spec has already resolved - so a failing
    // expectation was never reported. Awaiting the handler's promise makes
    // the assertion part of the spec again.
    await clickHandled;
    const restaurants = await RestaurantIdb.GetAllRestaurant();
    console.log('console log in test', restaurants);
    expect(restaurants).toEqual([{ id: 1 }]);
  });
});
And this is the result Test Result
I assume that IndexedDB takes time to put my restaurant data, and I still can't figure out how to fix it.
If you were using Angular, you would have access to fixture.whenStable(), and fakeAsync and tick() which wait until promises are resolved before carrying forward with the test.
In this scenario, I would try wrapping what you have in the test in a setTimeout
fit('should be able to add the restaurant to favorite', async () => {
  expect((await RestaurantIdb.GetAllRestaurant()).length).toEqual(0);
  // spyOn(RestaurantIdb, 'PutRestaurant');
  document.body.innerHTML = `<detail-module></detail-module>
<modal-element></modal-element>`;
  const pageModule = document.querySelector('detail-module');
  await FavoriteButtonInitiator.init({
    pageModule,
    restaurant,
    favoriteRestaurants: RestaurantIdb,
  });
  pageModule.restaurantDetail = restaurant;
  await pageModule.updateComplete;
  const favoriteButton = pageModule.shadowRoot
    .querySelector('[aria-label="favorite this restaurant"]')
    .shadowRoot.querySelector('button');
  // 1. Simulate user click the button
  favoriteButton.dispatchEvent(new Event('click'));
  // expect(RestaurantIdb.PutRestaurant).toHaveBeenCalled();
  // BUG FIX: `await` is only valid inside an `async` function, so the plain
  // arrow passed to setTimeout was a SyntaxError. NOTE(review): even fixed,
  // the spec does not wait for this timer, so a failing expectation fires
  // after the spec resolves - awaiting the handler's promise is more robust.
  setTimeout(async () => {
    const restaurants = await RestaurantIdb.GetAllRestaurant();
    console.log('console log from test', restaurants);
    expect(restaurants).toEqual([restaurant]);
  }, 0);
});
The things in the setTimeout should hopefully happen after the asynchronous task of the button click since promises are microtasks and setTimeout is a macrotask and microtasks have higher priority than macrotasks.
When I access(for lack of a better term) the '/postUserInput' POST route, it either does not run getGeoNamesData() until the end, or it doesn't wait for the API fetch to finish. The data I get from this function is used for simultaneous API calls.
What should I do to make sure these functions run in order?
This is my server file code. It runs in this order according to the console:
first
third
fourth
fifth
sixth
seventh
eigth
second
This keeps all my remaining API calls from having the correct data. Please help.
Thanks.
// Handles the user-input POST: geocode first, then weather, then an image.
app.post('/postUserInput', async function (req, res) {
    currentWeatherUI = req.body.buildCurrentBoolean;
    userInput = {
        userCityInput: req.body.userCityInput,
        userStateInput: req.body.userStateInput,
        userDateInput: req.body.userDateInput,
    };
    console.log("first");
    // BUG FIX: every helper is async; without `await` they all started
    // concurrently, so the geo lookup finished last ("second" printed at the
    // end) and longitude/latitude were undefined for the weather calls.
    await getGeoNamesData();
    console.log("third");
    if (currentWeatherUI) {
        await getCurrentWeatherbitData(longitude, latitude);
        console.log("fifth");
    } else {
        await getFutureWeatherbitData(longitude, latitude);
    };
    console.log("sixth");
    await getPixabayData();
    console.log("eigth");
    // NOTE(review): no response is ever sent to the client in this handler;
    // presumably a res.send(...) belongs here - confirm against client code.
});
// Geocodes the user's city/state via GeoNames and stores the coordinates in
// the outer `longitude`/`latitude` bindings. Errors are logged, not thrown.
const getGeoNamesData = async () => {
    try {
        const geoNamesURL = 'http://api.geonames.org/searchJSON?q='+userInput.userCityInput+'+'+userInput.userStateInput+'&maxRows=1&username='+geonamesUser;
        const { data } = await axios.get(geoNamesURL);
        // maxRows=1, so the first hit is the only hit.
        longitude = data.geonames[0].lng;
        latitude = data.geonames[0].lat;
        console.log("second");
    } catch (error) {
        console.log("error2", error);
    }
};
// Fetches current conditions from Weatherbit for the given coordinates and
// appends a flattened day record to projectDataArray. Errors are logged only.
const getCurrentWeatherbitData = async (longitude, latitude) => {
    try {
        const weatherbitURL = "http://api.weatherbit.io/v2.0/current?&lat="+latitude+"&lon="+longitude+"&key="+weatherbitUser+"&units=I&lang=en";
        const { data } = await axios.get(weatherbitURL);
        const [current] = data.data;
        projectDataArray.push({
            city: current.city_name,
            state: current.state_code,
            date: userInput.userDateInput,
            temp: current.temp,
            wind: current.wind_spd,
            windDirection: current.wind_cdir,
            rain: current.precip,
            snow: current.snow
        });
        console.log("fourth");
    } catch (error) {
        console.log("error", error);
    }
};
// Looks up a city photo on Pixabay and appends its URL to projectDataArray,
// falling back to a stock winter image when the search has no hits.
const getPixabayData = async () => {
    try {
        const pixabayURL = "https://pixabay.com/api/?key="+pixabayUser+"&q="+userInput.userCityInput+"+"+userInput.userStateInput+"&image_type=photo&pretty=true&per_page=3";
        const response = await axios.get(pixabayURL);
        const hits = response.data.hits;
        const cityImageURL = hits.length > 0
            ? hits[0].webformatURL
            : "https://www.pennlive.com/resizer/vNu0aYjk3xlFTUb16FSrSji_DIA=/1280x0/smart/advancelocal-adapter-image-uploads.s3.amazonaws.com/image.pennlive.com/home/penn-media/width2048/img/life/photo/wintermeme11.jpg";
        projectDataArray.push({ cityImageURL: cityImageURL });
        console.log("seventh");
    } catch (error) {
        console.log("error", error);
    }
};
Please check the MDN Docs for async functions in Javascript.
According to the docs:
Async functions can contain zero or more await expressions. Await expressions make promise-returning functions behave as though they're synchronous by suspending execution until the returned promise is fulfilled or rejected.
This should run the functions - getGeoNamesData, getCurrentWeatherbitData getPixabayData in order that you expect.
// Corrected handler: the route callback is async and awaits each helper, so
// they run strictly in order and the coordinates written by getGeoNamesData()
// exist before the weather lookups read them.
app.post("/postUserInput", async function (req, res) {
currentWeatherUI = req.body.buildCurrentBoolean;
userInput = {
userCityInput: req.body.userCityInput,
userStateInput: req.body.userStateInput,
userDateInput: req.body.userDateInput,
};
console.log("first");
await getGeoNamesData();
console.log("third");
if (currentWeatherUI) {
await getCurrentWeatherbitData(longitude, latitude);
console.log("fifth");
} else {
await getFutureWeatherbitData(longitude, latitude);
}
console.log("sixth");
await getPixabayData();
console.log("eigth");
});
I wrote JavaScript code for a web crawler that scrapes data from a list of websites (in a CSV file) using a single browser instance (code below). Now I want to modify the code so that every website in the list runs in parallel across two browser instances. For example, www.a.com from the list should run at the same time in two browser instances, and the same goes for the rest of the websites. If anyone can help me, please do — I would be very thankful.
// Crawl every site listed in the CSV (second column), one at a time, through
// a local MITM proxy, appending one JSON result line per site to data.txt.
(async () => {
require("dotenv").config();
if (!process.env.PROXY_SPKI_FINGERPRINT) {
throw new Error("PROXY_SPKI_FINGERPRINT is not defined in environment.");
}
const fs = require("fs");
const fsPromises = fs.promises;
const pptr = require("puppeteer");
// Single shared browser instance; all traffic is forced through the proxy.
const browser = await pptr.launch({
args: [
"--proxy-server=https://127.0.0.1:8000",
"--ignore-certificate-errors-spki-list=" + process.env.PROXY_SPKI_FINGERPRINT,
"--disable-web-security",
],
// headless: false,
});
const sites = (await fsPromises.readFile(process.argv[2])) // sites list in csv file
.toString()
.split("\n")
.map(line => line.split(",")[1])
.filter(s => s);
// Sequential crawl: each site finishes (or fails) before the next starts.
for (let i in sites) {
const site = sites[i];
console.log(`[${i}] ${site}`);
try {
await fsPromises.appendFile("data.txt", JSON.stringify(await crawl(browser, site)) + "\n");
} catch (e) {
console.error(e);
}
}
await browser.close();
// Visits one site and collects (a) context snippets of fetched JS around
// get/setItem usages and (b) the page's J$.FLOWS value plus final URL.
async function crawl(browser, site) {
const page = await browser.newPage();
try {
const grepResult = [];
page.on("request", async request => {
request.continue();
})
// Grep every JavaScript response body for storage-API usage.
page.on("response", async response => {
try {
if (response.request().resourceType() === "script" &&
response.headers()["content-type"] &&
response.headers()["content-type"].includes("javascript")) {
const js = await response.text();
const grepPartResult = grepMagicWords(js);
grepResult.push([response.request().url(), grepPartResult]);
}
} catch (e) {}
});
await page.setRequestInterception(true);
try {
await page.goto("http://" + site, {waitUntil: "load", timeout: 60000});
// Give dynamically loaded scripts time to arrive.
await new Promise(resolve => { setTimeout(resolve, 10000); });
} catch (e) { console.error(e); }
// NOTE(review): J$ is presumably injected by the proxy/instrumentation -
// confirm. The race rejects (with undefined) after 5s if evaluate hangs.
const [flows, url] = await Promise.race([
page.evaluate(() => [J$.FLOWS, document.URL]),
new Promise((_, reject) => { setTimeout(() => { reject(); }, 5000); })
]);
return {url: url, grepResult: grepResult, flows: flows};
} finally {
await page.close();
}
// Returns ~100-char context windows around each 'getItem'/'setItem' match.
function grepMagicWords(js) {
var re = /(?:\'|\")(?:g|s)etItem(?:\'|\")/g, match, result = [];
while (match = re.exec(js)) {
result.push(js.substring(match.index - 100, match.index + 100));
}
return result;
}
}
})();
You can launch multiple browsers and run them in parallel. You would have to restructure your app slightly for that. Create a wrapper for crawl which launches it with a new browser instance. I created crawlNewInstance which does that for you. You would also need to run crawlNewInstance() in parallel.
Checkout this code:
const sites = (await fsPromises.readFile(process.argv[2])) // sites list in csv file
  .toString()
  .split("\n")
  .map(line => line.split(",")[1])
  .filter(s => s);
// Kick off one crawler per site; each array entry is the promise for that
// site's crawl-and-append.
const crawlerProms = sites.map(async (site, index) => {
  try {
    console.log(`[${index}] ${site}`);
    await fsPromises.appendFile("data.txt", JSON.stringify(await crawlNewInstance(site)) + "\n");
  } catch (e) {
    console.error(e);
  }
}); // BUG FIX: the original never closed the map() call - the `)` was missing.
// await all the crawlers!.
await Promise.all(crawlerProms)
// Runs one crawl in a dedicated, throw-away browser instance.
async function crawlNewInstance(site) {
    const browser = await pptr.launch({
        args: [
            "--proxy-server=https://127.0.0.1:8000",
            "--ignore-certificate-errors-spki-list=" + process.env.PROXY_SPKI_FINGERPRINT,
            "--disable-web-security",
        ],
        // headless: false,
    });
    // BUG FIX: if crawl() rejected, the original never reached browser.close()
    // and leaked a browser process per failure. try/finally guarantees cleanup
    // while still propagating the error.
    try {
        return await crawl(browser, site);
    } finally {
        await browser.close();
    }
}
optional
The above basically answers the question. But if you want to go further — I was on a roll and had nothing to do :)
If you have plenty of pages, which you wanted to crawl in parallel and for example limit the amount of parallel requests you could use a Queue:
var { EventEmitter} = require('events')
// A simple concurrency limiter: enqueue async functions and at most `limit`
// of them run at once. Emits 'empty' when all work has finished and 'error'
// when a job rejects.
class AsyncQueue extends EventEmitter {
    limit = 2
    enqueued = []
    running = 0
    constructor(limit) {
        super()
        // BUG FIX: unconditionally assigning overwrote the default (2) with
        // `undefined` when no limit was passed, which disabled the
        // `running >= limit` check entirely.
        if (limit !== undefined) {
            this.limit = limit
        }
    }
    isEmpty() {
        return this.enqueued.length === 0
    }
    // make sure to only pass `async` function to this queue!
    enqueue(fn) {
        // add to queue
        this.enqueued.push(fn)
        // start a job. If max instances are already running it does nothing.
        // otherwise it runs a new job!.
        this.next()
    }
    // if a job is done try starting a new one!.
    done() {
        this.running--
        console.log('job done! remaining:', this.limit - this.running)
        this.next()
    }
    async next() {
        if (this.isEmpty()) {
            // BUG FIX: only announce completion once the last in-flight job
            // has finished; the original emitted 'empty' as soon as the
            // backlog drained, while up to `limit` jobs could still be
            // running, so waiters resumed too early.
            if (this.running === 0) {
                this.emit('empty')
            }
            return
        }
        // if no slots are available do nothing; done() will retry later.
        if (this.running >= this.limit) {
            console.log('queueu full.. waiting!')
            return
        }
        this.running++
        console.log('running job! remaining slots:', this.limit - this.running)
        // first in, first out! so take first element in array.
        const job = this.enqueued.shift()
        try {
            await job()
        } catch (err) {
            console.log('Job failed!. ', err)
            this.emit('error', err)
        }
        // job is done!
        // Done() will call the next job if there are any available!.
        this.done()
    }
}
The queue could be utilised with this code:
// create queue
const limit = 3
const queue = new AsyncQueue(limit)
// listen for any errors..
queue.on('error', err => {
    console.error('error occured in queue.', err)
})
for (let site of sites) {
    // enqueue all crawler jobs.
    // pass an async function which does whatever you want. In this case it
    // crawls a web page!.
    queue.enqueue(async () => {
        await fsPromises.appendFile("data.txt", JSON.stringify(await crawlNewInstance(site)) + "\n");
    })
}
// helper for waiting for the queue!
const waitForQueue = async () => {
    // BUG FIX: the original tested `queue.isEmpty` - the method object itself,
    // which is always truthy - instead of calling it, so this helper resolved
    // immediately. Also require running === 0 so in-flight jobs are counted.
    if (queue.isEmpty() && queue.running === 0) return Promise.resolve()
    return new Promise((res, rej) => {
        queue.once('empty', res)
    })
}
await waitForQueue()
console.log('crawlers done!.')
Even further with BrowserPool
It would also be possible to reuse your browser instances, so it would not be necessary to start a new browser instance for every crawling process. This can be done using this Browserpool helper class
var pptr = require('puppeteer')
// Launches a browser configured to route through the local MITM proxy.
async function launchPuppeteer() {
    const launchArgs = [
        "--proxy-server=https://127.0.0.1:8000",
        "--ignore-certificate-errors-spki-list=" + process.env.PROXY_SPKI_FINGERPRINT,
        "--disable-web-security",
    ];
    // Add `headless: false` to the options below for debugging.
    return pptr.launch({ args: launchArgs });
}
// manages browser connections.
// creates a pool on startup and allows getting references to
// the browsers! .
class BrowserPool {
browsers = []
async get() {
// return browser if there is one!
if(this.browsers.length > 0) {
return this.browsers.splice(0, 1)[0]
}
// no browser available anymore..
// launch a new one!
return await launchPuppeteer()
}
// used for putting a browser back in pool!.
handback(browser) {
this.browsers.push(browser)
}
// shuts down all browsers!.
async shutDown() {
for(let browser of this.browsers) {
await browser.close()
}
}
}
You can then remove crawlNewInstance() and adjust the code to look like this finally:
const sites = (await fsPromises.readFile(process.argv[2])) // sites list in csv file
  .toString()
  .split("\n")
  .map(line => line.split(",")[1])
  .filter(s => s);
// create browserpool
const pool = new BrowserPool()
// create queue
const limit = 3
const queue = new AsyncQueue(limit) // use the named constant, not a second literal
// listen to errors:
queue.on('error', err => {
  console.error('error in the queue detected!', err)
})
// enqueue your jobs
for (let site of sites) {
  // enqueue an async function which takes a browser from pool
  queue.enqueue(async () => {
    try {
      // get the browser and crawl a page!.
      const browser = await pool.get()
      const result = await crawl(browser, site)
      await fsPromises.appendFile("data.txt", JSON.stringify(result) + "\n");
      // return the browser back to pool so other crawlers can use it! .
      pool.handback(browser)
    } catch (err) {
      console.error(err)
    }
  })
}
// helper for waiting for the queue!
const waitForQueue = async () => {
  // BUG FIX: `queue.isEmpty` without parentheses is the method object itself
  // (always truthy), so the original resolved immediately instead of waiting.
  // Also require running === 0 so in-flight jobs are counted.
  if (queue.isEmpty() && queue.running === 0) return Promise.resolve()
  return new Promise((res, rej) => {
    queue.once('empty', res)
  })
}
// wait for the queue to finish :)
await waitForQueue()
// in the very end, shut down all browser:
await pool.shutDown()
console.log('done!.')
Have fun and feel free to leave a comment.
Here is a function to build db queries:
// NOTE(review): every insertSomethingToDb()/insertAnotherthingToDb() call
// below *starts* its query the moment it is invoked; the returned arrays
// merely collect already-running promises. Awaiting them later cannot delay
// or order the execution - which is the bug described in the question.
const buildDbQueries = async elements => elements.reduce(
async (acc, element) => {
// wait for the previous reducer iteration
const { firstDbQueries, secondDbQueries } = await acc
const asyncStuff = await someApi(element)
// leave if the API does not return anything
if (!asyncStuff) return { firstDbQueries, secondDbQueries }
// async db query, returns a Promise
const firstDbQuery = insertSomethingToDb({
id: asyncStuff.id,
name: asyncStuff.name
})
// another async db query, returns a Promise
// have to run after the first one
const secondDbQuery = insertAnotherthingToDb({
id: element.id,
name: element.name,
somethingId: asyncStuff.id
})
return {
firstDbQueries: [...firstDbQueries, firstDbQuery],
secondDbQueries: [...secondDbQueries, secondDbQuery]
}
},
// initial value of the accumulator is a resolved promise
Promise.resolve({
firstDbQueries: [],
secondDbQueries: []
})
)
This function returns promises which should not be executed until they are resolved.
Now we use that function
// NOTE(review): by the time buildDbQueries() resolves, every insert promise
// in both arrays has already been dispatched to the database. The awaits
// below only observe completion - they cannot enforce the intended ordering.
const myFunc = async elements => {
const { firstDbQueries, secondDbQueries } = await buildDbQueries(elements)
// we don't want any query to run before this point
await Promise.all(firstDbQueries)
console.log('Done with the first queries')
await Promise.all(secondDbQueries)
console.log('Done with the second queries')
}
The problems are:
the queries are executed before we call Promise.all.
the firstDbQueries queries are not executed before the secondDbQueries causing errors.
EDIT
As suggested in a comment, I tried not to use reduce, but a for … of loop.
// NOTE(review): this variant has the same flaw as the reduce version -
// insertSomethingToDb()/insertAnotherthingToDb() begin executing as soon as
// they are called inside the loop; the arrays only hold in-flight promises.
const buildDbQueries = async elements => {
const firstDbQueries = []
const secondDbQueries = []
for (const element of elements) {
const asyncStuff = await someApi(element)
// leave if the API does not return anything
if (!asyncStuff) continue
// async db query, returns a Promise
const firstDbQuery = insertSomethingToDb({
id: asyncStuff.id,
name: asyncStuff.name
})
// another async db query, returns a Promise
// have to run after the first one
const secondDbQuery = insertAnotherthingToDb({
id: element.id,
name: element.name,
somethingId: asyncStuff.id
})
firstDbQueries.push(firstDbQuery)
secondDbQueries.push(secondDbQuery)
}
return { firstDbQueries, secondDbQueries }
}
This still produces the exact same problems as the previous version with reduce.
Don't use an async reducer. Especially not to build an array of promises. Or an array of things to run later. This is wrong on so many levels.
I guess you are looking for something like
// Builds an array of *thunks* (functions returning promises) - nothing runs
// until a thunk is called. Each thunk performs the first insert and resolves
// to a second thunk for the follow-up insert, preserving the ordering.
// NOTE(review): when someApi() returns nothing the thunk resolves to
// `undefined`, so callers must guard before invoking the second-stage thunks.
function buildDbQueries(elements) {
return elements.map(element =>
async () => {
const asyncStuff = await someApi(element)
// leave if the api doesn't return anything
if (!asyncStuff) return;
await insertSomethingToDb({
id: asyncStuff.id,
name: asyncStuff.name
});
return () =>
insertAnotherthingToDb({
id: element.id,
name: element.name,
somethingId: asyncStuff.id
})
;
}
);
}
// Runs the first-stage queries in parallel, then the second-stage queries -
// nothing touches the database until the thunks are invoked here.
async function myFunc(elements) {
    const firstQueries = buildDbQueries(elements)
    // we don't want any query to run before this point
    const secondQueries = await Promise.all(firstQueries.map(query => query()));
    // this call actually runs the query ^^^^^^^
    console.log('Done with the first queries');
    // BUG FIX: elements skipped by the API leave `undefined` entries in
    // secondQueries, and calling undefined() threw a TypeError. Filter the
    // gaps out before invoking the second-stage thunks.
    await Promise.all(secondQueries.filter(Boolean).map(query => query()));
    // this call actually runs the query ^^^^^^^
    console.log('Done with the second queries')
}
Well, I am lost in await and async hell. The code below is supposed to loop through a list of files, check if they exist and return back the ones that do exist. But I am getting a zero length list.
Node V8 code: caller:
// Build the list of existing source files; abort the job if none resolved.
// NOTE(review): this relies on sourceList() mutating this.paths as a side
// effect rather than on its return value.
await this.sourceList()
if (this.paths.length == 0) {
this.abort = true
return
}
Called Functions: (I took out stuff not relevant)
// fs.access wrapped as a promise: resolves when the path is accessible with
// the given mode, rejects otherwise.
const testPath = util.promisify(fs.access)
// Represents one FME job; collects resolvable source paths in `this.paths`.
// (The class body continues beyond this excerpt.)
class FMEjob {
constructor(root, inFiles, layerType, ticket) {
// Paths that passed the readability check (filled by sourceList()).
this.paths = []
this.config = global.app.settings.config
this.sourcePath = this.config.SourcePath
// NOTE(review): root/inFiles/layerType/ticket are accepted but not stored
// here - presumably handled in the code elided from this snippet.
}
async sourceList() {
return await Promise.all(this.files.map(async (f) => {
let source = path.join(this.sourcePath, f.path)
return async () => {
if (await checkFile(source)) {
this.paths.push(source)
}
}
}))
}
async checkFile(path) {
let result = true
try {
await testPath(path, fs.constants.R_OK)
}
catch (err) {
this.errors++
result = false
logger.addLog('info', 'FMEjob.checkFile(): File Missing Error: %s', err.path)
}
return result
}
Your sourceList function is really weird. It returns a promise for an array of asynchronous functions, but it never calls those. Drop the arrow function wrapper.
Also I recommend to never mutate instance properties inside async methods, that'll cause insane bugs when multiple methods are executed concurrently.
// Collect existing source paths from the return value (no side-effect
// mutation), then abort when none were found.
this.paths = await this.sourceList()
// Deliberate assignment-in-condition: records the abort flag and branches on it.
if (this.abort = (this.paths.length == 0)) {
return
}
async sourceList() {
let paths = []
await Promise.all(this.files.map(async (f) => {
const source = path.join(this.sourcePath, f.path)
// no function here, no return here!
if (await this.checkFile(source)) {
paths.push(source)
}
}))
return paths
}
// Probes a path for readability; returns true when accessible, false after
// logging and counting the failure. (Closing brace lies beyond this excerpt.)
async checkFile(path) {
try {
await testPath(path, fs.constants.R_OK)
return true
} catch (err) {
logger.addLog('info', 'FMEjob.checkFile(): File Missing Error: %s', err.path)
this.errors++ // questionable as well - better let `sourceList` count these
}
return false