I'm trying to download multiple files using the request library. I need to download them one by one and also show a progress bar. The file links are stored in an array, which is passed to a function that starts the downloads.
const request = require('request')
const fs = require('fs')
const ProgressBar = require('progress')
async function downloadFiles(links) {
for (let link of links) {
let file = request(link)
file.on('response', (res) => {
var len = parseInt(res.headers['content-length'], 10);
console.log();
bar = new ProgressBar(' Downloading [:bar] :rate/bps :percent :etas', {
complete: '=',
incomplete: ' ',
width: 20,
total: len
});
file.on('data', (chunk) => {
bar.tick(chunk.length);
})
file.on('end', () => {
console.log('\n');
})
})
file.pipe(fs.createWriteStream('./downloads/' + Math.random().toString(36).substring(2, 15) + Math.random().toString(36).substring(2, 15)))
}
}
let links = ['https://speed.hetzner.de/100MB.bin', 'https://speed.hetzner.de/100MB.bin', 'https://speed.hetzner.de/100MB.bin', 'https://speed.hetzner.de/100MB.bin']
downloadFiles(links)
This is what I've got so far. The problem is that the request is asynchronous; I tried to use async/await, but that way I couldn't get the progress bar to work.
How can I make it so that the files are downloaded one at a time and also have a progress bar?
Based on my comment about async.queue, this is how I would write that up.
You can call dl.downloadFiles([]) as often as you want and it will just fetch everything that you have added to the queue one after another.
const request = require('request')
const async = require('async')
const fs = require('fs')
const ProgressBar = require('progress')
class Downloader {
constructor() {
this.q = async.queue(this.singleFile, 1);
// assign a callback
this.q.drain(function() {
console.log('all items have been processed');
});
// assign an error callback
this.q.error(function(err, task) {
console.error('task experienced an error', task);
});
}
downloadFiles(links) {
for (let link of links) {
this.q.push(link);
}
}
singleFile(link, cb) {
let file = request(link);
let bar;
file.on('response', (res) => {
const len = parseInt(res.headers['content-length'], 10);
console.log();
bar = new ProgressBar(' Downloading [:bar] :rate/bps :percent :etas', {
complete: '=',
incomplete: ' ',
width: 20,
total: len
});
file.on('data', (chunk) => {
bar.tick(chunk.length);
})
file.on('end', () => {
console.log('\n');
cb();
})
})
file.pipe(fs.createWriteStream('./downloads/' + Math.random().toString(36).substring(2, 15) + Math.random().toString(36).substring(2, 15)))
}
}
const dl = new Downloader();
dl.downloadFiles([
'https://speed.hetzner.de/100MB.bin',
'https://speed.hetzner.de/100MB.bin'
]);
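If you would rather keep the plain async/await shape from the question instead of async.queue, a minimal sketch (same request/progress setup as above; the destination file name is just a placeholder) is to wrap each download in a Promise that resolves when the write stream finishes:
function downloadOne(link, dest) {
  return new Promise((resolve, reject) => {
    const file = request(link);
    file.on('response', (res) => {
      const len = parseInt(res.headers['content-length'], 10);
      const bar = new ProgressBar(' Downloading [:bar] :rate/bps :percent :etas', {
        complete: '=',
        incomplete: ' ',
        width: 20,
        total: len
      });
      file.on('data', (chunk) => bar.tick(chunk.length));
    });
    file.on('error', reject);
    // resolve only once the file has been fully written to disk
    file.pipe(fs.createWriteStream(dest)).on('finish', resolve);
  });
}

async function downloadFiles(links) {
  for (const link of links) {
    await downloadOne(link, './downloads/' + Date.now()); // placeholder file name
  }
}
The await on the finish event is the piece that was missing in the original loop, which started every request before any of them had completed.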
I am trying to build a web scraper that downloads all the PDFs on a website. I've written all the logic necessary to do this, but for some reason it downloads empty PDF files, which is not supposed to happen. The problem seems to be coming from the downloadFiles function when I try to pipe the data, which for some reason does not seem to work, because I get an empty PDF file after the function has run. I would appreciate it if someone could help me out with this problem, thanks.
Here's a sample of my code:
app.js
const fs = require("fs");
const path = require("path");
const cheerio = require("cheerio");
const axiosInstance = require("./getAxios");
const axios = axiosInstance();
const Surl = "https://www.health.gov.ng/";
// linkList sample: "https://www.health.gov.ng/index.php?option=com_content&view=article&id=143&Itemid=512";
let connectionFailCount = 0;
let linkList = [];
let dlinkList = [];
const getWebsiteLinks = async (Surl) => {
try {
console.log(`Crawling all links from: ${Surl}`);
const response = await axios.get(Surl);
const $ = cheerio.load(response.data);
const ranges = $("a").each(function (idx, el) {
if ($(el).attr("href")) {
return $(el).attr("href");
}
});
for (let index = 0; index < ranges.length; index++) {
let raw_links = $("a")[index].attribs.href;
if (raw_links.startsWith("/")) {
linkList.push(Surl + raw_links);
}
}
if (linkList.length > 0) {
console.log(`Finished crawling links: Found ${linkList.length} links`);
console.log(
"--------------------------------------------------------\n\n"
);
}
return;
} catch (error) {
if (connectionFailCount === 0) {
connectionFailCount += 1;
getWebsiteLinks(Surl);
console.log(`Connection error. \n
Reconnecting to server....`);
} else if (connectionFailCount === 5) {
console.error(`Can not connect to server. Try again later.`);
}
}
};
const downloadLinks = async (linkList) => {
try {
console.log("Crawling links to find pdf links. this may take a while...");
for (const link of linkList) {
const response = await axios.get(link);
// Skip where there's delayed server response
if (response.code === "ECONNRESET") continue;
const $ = cheerio.load(response.data);
$("a").each(function (idx, el) {
if ($(el)?.attr("href")?.endsWith(".pdf")) {
let addr = $(el).attr("href");
let dlink = Surl + addr;
dlinkList.push({
pathName: addr,
url: dlink,
});
}
});
}
console.log(dlinkList);
if (dlinkList.length > 0) {
console.log(`Crawling Finish: Found ${dlinkList.length} pdf links`);
console.log(
"--------------------------------------------------------\n\n"
);
}
} catch (error) {
if (connectionFailCount === 0) {
connectionFailCount += 1;
console.log(`Connection error. \n
Reconnecting to server: ${connectionFailCount} count`);
downloadLinks(linkList);
}
if (connectionFailCount === 3) {
console.error(`Can not connect to server. Try again later.`);
return;
}
// console.error("downloadLinksError: ", error);
}
};
const downloadFiles = async (dlinkList) => {
console.log("Creating directory to save PDF files");
const appRoot = path.dirname(path.resolve(__dirname));
// Had to change and restructure code due to error
const folderName = `PDF/${Surl.split("/").pop()}`;
const subFolderName = Surl.split("/").pop();
try {
if (!fs.existsSync(path.join(appRoot, folderName))) {
fs.mkdirSync(path.join(appRoot, "PDF"));
fs.mkdirSync(path.join(`${appRoot}/PDF`, subFolderName));
}
dlinkList.forEach(async (link) => {
let name = link.pathName;
let url = link.url;
let file = fs
.createWriteStream(
`${appRoot}/${folderName}/${name.split("/").pop()}`,
"utf-8"
)
.on("error", (err) => {
console.error("createWriteStreamError: ", err);
});
try {
console.log("Downloading PDF file...");
const { data } = await axios({
url,
method: "GET",
responseType: "stream",
});
if (data) {
console.log("PDF file Downloaded");
data.pipe(file);
}
} catch (error) {
console.error(error);
}
});
return;
} catch (error) {
console.error("downloadFilesError: ", error);
}
};
(async () => {
await getWebsiteLinks(Surl);
await downloadLinks(linkList);
await downloadFiles(dlinkList);
})();
getAxios.js
const axios = require("axios");
const https = require("https");
module.exports = function () {
const domain = "https://www.health.gov.ng/";
let instance;
if (!instance) {
//create axios instance
instance = axios.create({
baseURL: domain,
timeout: 60000, // Increase time out incase of network delay or delayed server response
maxContentLength: 500 * 1000 * 1000, // Increase maximum response data length
httpsAgent: new https.Agent({ keepAlive: true }),
headers: { "Content-Type": "application/xml" },
});
}
return instance;
};
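A likely cause of the empty files in the downloadFiles function above: data.pipe(file) returns immediately, and dlinkList.forEach(async (link) => ...) does not wait for the inner awaits, so the script can move on (and exit) before any write stream has finished. A rough sketch of waiting for each PDF to be fully written, using the same axios stream response (stream.pipeline is available from Node 10 onwards), inside downloadFiles:
const { promisify } = require("util");
const pipeline = promisify(require("stream").pipeline);

for (const link of dlinkList) {
  const target = `${appRoot}/${folderName}/${link.pathName.split("/").pop()}`;
  const { data } = await axios({ url: link.url, method: "GET", responseType: "stream" });
  // resolves only after the whole PDF has been written (and rejects on stream errors)
  await pipeline(data, fs.createWriteStream(target));
}
Replacing the forEach with a for...of loop also keeps downloadFiles itself awaitable from the IIFE at the bottom of app.js.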
I'm trying to write a program that can unzip a zip file, read the images in it, and apply grayscale to them.
Right now I have these two functions:
const fs = require('fs')
const PNG = require('pngjs').PNG
const unzipper = require('unzipper')
const dir = __dirname + "/";
const myFile = (fileName) => {
let createdFile = dir + fileName
fs.createReadStream(createdFile)
.pipe(unzipper.Extract({ path: 'myfile' }));
console.log('file unzipped')
}
myFile("myfile.zip")
function applyFilter(Name) {
fs.readdir(Name, 'utf-8', (err, data) => {
if (err) {
console.log(err)
} else {
data.forEach(function (file) {
if (file.includes('png')) {
let greyPNG = (__dirname + '/' + 'myfile' + '/' + file)
console.log (greyPNG)
fs.createReadStream(greyPNG)
.pipe(new PNG({
colorType: 0,
}))
.on('parsed', function () {
this.pack().pipe(fs.createWriteStream(__dirname + "/" + "myfile" + "/" + file));
});
}
})
}
})
}
applyFilter ('myfile')
These two functions work fine individually; however, they do not run together. If I comment out applyFilter, the zip file is unzipped, and if the files are already in the directory, applyFilter applies grayscale to those pictures. I know this is because both functions run at the same time, which causes the problem. So how do I implement promises to solve this issue? I know that I can use the "Sync" versions of the functions; I just want to know how to do it with promises.
There are examples in the official documentation about promisify:
https://nodejs.org/dist/latest-v12.x/docs/api/util.html#util_util_promisify_original
It gives you a "promised" version of the same function (as long as the original function has a standard callback signature or a custom promisified definition).
const util = require('util');
const fs = require('fs');
const stat = util.promisify(fs.stat);
async function callStat() {
const stats = await stat('.');
console.log(`This directory is owned by ${stats.uid}`);
}
You can also implement your own; just return a Promise (this example reads an HTTP request):
function read_request(request) {
request.setEncoding("utf-8");
return new Promise((resolve, reject) => {
var cache = "";
request.on("data", (chunk) => {
cache += chunk;
}).on("end", () => {
resolve(cache);
}).on("error", reject);
});
}
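For example, a hypothetical Express route could then consume it with await:
app.post('/data', async (req, res) => {
  const body = await read_request(req); // resolves once the request stream ends
  res.send(`received ${body.length} characters`);
});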
You can use the stream finish event to determine when the file unzip is complete. We can then use promises and async / await to ensure we don't try to apply the filter before the files are ready.
const fs = require('fs');
const PNG = require('pngjs').PNG;
const unzipper = require('unzipper');
const dir = __dirname + "/";
function unzipFile(fileName, outputPath) {
return new Promise((resolve, reject) => {
let createdFile = dir + fileName
let stream = fs.createReadStream(createdFile)
.pipe(unzipper.Extract({ path: outputPath }));
stream.on('finish', () => {
console.log('file unzipped');
resolve(outputPath);
});
});
}
function applyFilter(Name) {
fs.readdir(dir + Name, 'utf-8', (err, data) => {
if (err) {
console.log(err)
} else {
data.filter(file => file.includes("png")).forEach(file => {
let greyPNG = (__dirname + '/' + Name + '/' + file)
console.log (greyPNG)
fs.createReadStream(greyPNG)
.pipe(new PNG({
colorType: 0,
}))
.on('parsed', function () {
this.pack().pipe(fs.createWriteStream(greyPNG));
});
})
}
})
}
async function unzipAndApplyFilter(zipFile, outputPath) {
await unzipFile(zipFile, outputPath); // Wait until unzip is complete.
applyFilter(outputPath);
}
unzipAndApplyFilter('myfile.zip', 'myfile');
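If you also want applyFilter itself to be awaitable, a rough sketch (assuming Node 10+ for fs.promises, and the same directory layout as above) is to wrap the PNG processing of each file in a Promise:
const fsp = require('fs').promises;

async function applyFilterAsync(name) {
  const files = await fsp.readdir(__dirname + '/' + name);
  for (const file of files.filter(f => f.includes('png'))) {
    const target = __dirname + '/' + name + '/' + file;
    await new Promise((resolve, reject) => {
      fs.createReadStream(target)
        .pipe(new PNG({ colorType: 0 }))
        .on('error', reject)
        .on('parsed', function () {
          this.pack()
            .pipe(fs.createWriteStream(target))
            .on('finish', resolve)
            .on('error', reject);
        });
    });
  }
}
unzipAndApplyFilter could then await both steps and report when everything is done.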
I have written a POST endpoint in Express with Node. When I make a call to the API, it runs a utility with setInterval(), and I want to send the API response after the utility executes clearInterval().
How can I wait and send the response after the utility execution has finished?
Please see the code below
REST API code:
const router= express.Router();
const multer= require('multer');
const {readCSVFile}= require('../util/index');
var storage = multer.diskStorage({
destination: (req, file, cb) => {
cb(null, 'uploads');
},
filename: (req, file, cb) => {
cb(null, file.fieldname + '-' + Date.now()+'.xlsx');
}
});
var upload = multer({storage: storage});
router.post('/fileUpload', upload.single('filename'), async (req, res) => {
readCSVFile();
res.status(201).json({id:1});
});
router.get('/',(req,res)=>{
res.sendFile(__dirname+'/index.html');
});
module.exports=router;
Utility Code
const config = require('config')
const excelToJson = require('convert-excel-to-json')
const HttpsProxyAgent = require('https-proxy-agent')
const AWS = require('aws-sdk')
const json2xls = require('json2xls')
const fs = require('fs')
const awsConfig = {
httpOptions: {
agent: new HttpsProxyAgent(
config.get('aws.proxy')
),
}
}
AWS.config.credentials = new AWS.SharedIniFileCredentials({
profile: config.get('aws.profile'),
})
AWS.config.update(awsConfig)
let uuidv4 = require('uuid/v4')
let csv = [];
const lexRunTime = new AWS.LexRuntime({
region: config.get('aws.region'),
})
let refreshId
const readCSVFile = () => {
const csvSheet = excelToJson({
sourceFile: './Test.xlsx',
})
csvSheet.Sheet1.forEach(element => {
csv.push((element.A.slice(0, element.A.length)))
})
runTask()
refreshId = setInterval(runTask, 1000)
}
let botParams = {
botAlias: config.get('bot.alias'),
botName: config.get('bot.name'),
sessionAttributes: {},
}
const missedUtterancesArray = []
const matchedUtterancesArray = []
let start = 0
let end = 50
let count = 50
const runTask = () => {
let itemsProcessed = 0
console.log('executing...')
const arrayChunks = csv.slice(start, end)
arrayChunks.forEach((element) => {
botParams.inputText = element
botParams.userId = `${uuidv4()}`
lexRunTime.postText(botParams, function (err, data) {
itemsProcessed++
if (err) console.log(err, err.stack)
else {
if (data.intentName === null) {
missedUtterancesArray.push({
Utterance: element,
})
}
else{
matchedUtterancesArray.push({
Utterance: element,
})
}
}
if (itemsProcessed === arrayChunks.length) {
start = csv.indexOf(csv[end])
end = start + count
}
if (start === -1) {
let xls = json2xls(missedUtterancesArray)
fs.writeFileSync('./MissedUtterances.xlsx', xls, 'binary')
let matchedXls = json2xls(matchedUtterancesArray)
fs.writeFileSync('./MatchedUtterances.xlsx', matchedXls, 'binary')
console.log('File saved successfully!! ')
console.log('Total Matched utterances count: ',csv.length-missedUtterancesArray.length)
console.log('Total Missed utterances count: ',missedUtterancesArray.length)
console.log('Total Utterances count: ',csv.length)
clearInterval(refreshId)
}
})
})
}
I would have needed a few more details to answer this, but pardon my try if this does not work:
The setInterval call in readCSVFile is the reason. Being asynchronous, it does not stop the code from progressing.
lexRunTime.postText also looks asynchronous. I think you'd be better off using promises when responding to the client.
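To sketch that idea (an outline under the assumption that a shared resolver is acceptable, not a drop-in replacement), readCSVFile could return a Promise that is resolved at the point where clearInterval is called today, and the route could await it before responding:
// util/index.js (sketch)
let done; // resolver shared with runTask

const readCSVFile = () => {
  return new Promise((resolve) => {
    done = resolve;
    // ...existing Excel parsing as above...
    runTask();
    refreshId = setInterval(runTask, 1000);
  });
};

// inside runTask, right after clearInterval(refreshId):
//   done(); // signal the caller that all utterances have been processed

// REST API
router.post('/fileUpload', upload.single('filename'), async (req, res) => {
  await readCSVFile(); // respond only once the utility has finished
  res.status(201).json({ id: 1 });
});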
Problem
I am trying to create an app with react native and firebase. One of the features I would like for this app is the ability to upload images. I am having some trouble uploading the images to firebase storage though. I am using expo's image picker to find the path of the image that the user wants to upload, but once I have the path I don't know how to convert that to something I can upload to firebase.
Can somebody help me convert the path of an image to something I can upload to firebase storage with react native?
What I've tried
I tried using:
_pickImage = async () => {
let result = await ImagePicker.launchImageLibraryAsync({
MediaTypeOptions: 'Images',
quality: 0.4,
_uploadAsByteArray = async (pickerResultAsByteArray, progressCallback) => {
try {
var metadata = {
contentType: 'image/jpeg',
};
var storageRef = firebase.storage().ref();
var ref = storageRef.child('images/'+expoID+'/'+this.state.time)
let uploadTask = ref.put(pickerResultAsByteArray, metadata)
uploadTask.on('state_changed', function (snapshot) {
progressCallback && progressCallback(snapshot.bytesTransferred / snapshot.totalBytes)
var progress = (snapshot.bytesTransferred / snapshot.totalBytes) * 100;
console.log('Upload is ' + progress + '% done');
}, function (error) {
console.log("in _uploadAsByteArray ", error)
}, function () {
var downloadURL = uploadTask.snapshot.downloadURL;
console.log("_uploadAsByteArray ", uploadTask.snapshot.downloadURL)
this.setState({imageUploaded:true})
});
} catch (ee) {
console.log("when trying to load _uploadAsByteArray ", ee)
}
}
convertToByteArray = (input) => {
var binary_string = this.atob(input);
var len = binary_string.length;
var bytes = new Uint8Array(len);
for (var i = 0; i < len; i++) {
bytes[i] = binary_string.charCodeAt(i);
}
return bytes
}
atob = (input) => {
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=';
let str = input.replace(/=+$/, '');
let output = '';
if (str.length % 4 == 1) {
throw new Error("'atob' failed: The string to be decoded is not correctly encoded.");
}
for (let bc = 0, bs = 0, buffer, i = 0;
buffer = str.charAt(i++);
~buffer && (bs = bc % 4 ? bs * 64 + buffer : buffer,
bc++ % 4) ? output += String.fromCharCode(255 & bs >> (-2 * bc & 6)) : 0
) {
buffer = chars.indexOf(buffer);
}
return output;
}
uploadImage(bsfdata){
this.setState({imageUploaded:false})
this._uploadAsByteArray(this.convertToByteArray(bsfdata), (progress) => {
this.setState({ progress:progress })
})
}
base64:true,
});
/* if (!result.cancelled) {
this.setState({ image: result.uri });
let formData = new FormData();
formData.append('photo', {
uri,
name: `photo.${fileType}`,
type: `image/${fileType}`,
});}*/
this.uploadImage(result.base64);
};
}
I've tried it with the commented code added, which doesn't upload anything, and I've tried it with how the code is now, which gives me the error Can currently only create a Blob from other Blobs, and the uploading progress never gets above 0%.
If you are using expo (>=26), then you can do it easily with the following lines of code.
uploadImage = async(imageUri) => {
const response = await fetch(imageUri);
const blob = await response.blob();
var ref = firebase.storage().ref().child("image.jpg");
return ref.put(blob);
}
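A possible way to call it from the picker result and read the URL back afterwards (assuming a reasonably recent web Firebase SDK, where put() resolves with an upload snapshot):
const snapshot = await this.uploadImage(result.uri);
const url = await snapshot.ref.getDownloadURL();
console.log('Image available at', url);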
Reference: https://youtu.be/KkZckepfm2Q
Refer to this link: https://github.com/dailydrip/react-native-firebase-storage/blob/master/src/App.js#L43-L69
The following block of code works fine.
uploadImage(uri, mime = 'application/octet-stream') {
return new Promise((resolve, reject) => {
const uploadUri = Platform.OS === 'ios' ? uri.replace('file://', '') : uri
let uploadBlob = null
const imageRef = FirebaseClient.storage().ref('images').child('image_001')
fs.readFile(uploadUri, 'base64')
.then((data) => {
return Blob.build(data, { type: `${mime};BASE64` })
})
.then((blob) => {
uploadBlob = blob
return imageRef.put(blob, { contentType: mime })
})
.then(() => {
uploadBlob.close()
return imageRef.getDownloadURL()
})
.then((url) => {
resolve(url)
})
.catch((error) => {
reject(error)
})
})
}
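Usage of the function above would be along these lines (the uri comes from whatever picker you use; the mime type is optional):
uploadImage(result.uri, 'image/jpeg')
  .then((url) => console.log('stored at', url))
  .catch((err) => console.error(err));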
You need to install rn-fetch-blob module:
npm install --save rn-fetch-blob
Then, do the following:
import RNFetchBlob from 'rn-fetch-blob';
const Blob = RNFetchBlob.polyfill.Blob;
const fs = RNFetchBlob.fs;
window.XMLHttpRequest = RNFetchBlob.polyfill.XMLHttpRequest;
window.Blob = Blob;
function uploadImage(path) {
const imageFile = RNFetchBlob.wrap(path);
// 'path/to/image' is where you wish to put your image in
// the database, if you would like to put it in the folder
// 'subfolder' inside 'mainFolder' and name it 'myImage', just
// replace it with 'mainFolder/subfolder/myImage'
const ref = firebase.storage().ref('path/to/image');
var uploadBlob = null;
Blob.build(imageFile, { type: 'image/jpg;' })
.then((imageBlob) => {
uploadBlob = imageBlob;
return ref.put(imageBlob, { contentType: 'image/jpg' });
})
.then(() => {
uploadBlob.close();
return ref.getDownloadURL();
})
.then((url) => {
// do something with the url if you wish to
})
.catch(() => {
dispatch({
type: UPDATE_PROFILE_INFO_FAIL,
payload: 'Unable to upload profile picture, please try again'
});
});
}
Please do ask if there's any part of the code that you don't understand. To upload multiple images, simply wrap this code in a for loop. Or, if you want to make sure that every image is uploaded without any error, use promises (for example, Promise.all).
Not sure whom this might help, but if you're using MediaLibrary to load images from the gallery, then the uri comes in the format of uri = file:///storage/emulated/0/DCIM/Camera/filename.jpg
In this case, using fetch(uri) didn't help me get the blob.
But if you use fetch(uri.replace("file:///","file:/")) and then follow #sriteja Sugoor's answer, you'll be able to upload the file blob.
const Blob = RNFetchBlob.polyfill.Blob;
const fs = RNFetchBlob.fs;
let uploadBlob;
await fs
.readFile(params?.file.path, 'base64')
.then((data) => {
return Blob.build(data, {type: `BASE64`});
})
.then((blob) => {
uploadBlob = blob;
console.log(uploadBlob, 'uploadBlob');
});
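From there the upload step is the same as in the earlier rn-fetch-blob answer, roughly (the storage path is just an example):
const ref = firebase.storage().ref('images').child('image_001');
await ref.put(uploadBlob, { contentType: 'image/jpg' });
uploadBlob.close();
const url = await ref.getDownloadURL();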
I have 9577 unique records in a CSV file.
This code inserts about 9800 records: not all of the records, plus duplicates of some of them. Any idea why it does not insert the 9577 unique records, and where the duplicates come from? Below I also include the remaining part of the code so you get the whole picture.
function bulkImportToMongo(arrayToImport, mongooseModel) {
const Model = require(`../../../models/${mongooseModel}`);
let batchCount = Math.ceil(arrayToImport.length / 100);
console.log(arrayToImport.length);
let ops = [];
for (let i = 0; i < batchCount; i++) {
// console.log(i);
let batch = arrayToImport.slice(i, i + 100);
console.log(batch.length);
ops.push(Model.insertMany(batch));
}
return ops;
return Promise.all(ops).then(results => {
// results is an array of results for each batch
console.log("results: ", results);
});
}
I parse the CSV file like this:
const Promise = require("bluebird");
const csv = require("fast-csv");
const path = require("path");
const fs = Promise.promisifyAll(require("fs"));
const promiseCSV = Promise.method((filePath, options) => {
return new Promise((resolve, reject) => {
var records = [];
csv
.fromPath(filePath, options)
.on("data", record => {
records.push(record);
})
.on("end", () => {
// console.log(records);
resolve(records);
});
});
});
And here is the script that connects it all together:
const path = require("path");
const promiseCSV = require("./helpers/ImportCSVFiles");
const {
connectToMongo,
bulkImportToMongo
} = require("./helpers/mongoOperations");
const filePath = path.join(__dirname, "../../data/parts.csv");
const options = {
delimiter: ";",
noheader: true,
headers: [
"facility",
"partNumber",
"partName",
"partDescription",
"netWeight",
"customsTariff"
]
};
connectToMongo("autoMDM");
promiseCSV(filePath, options).then(records => {
bulkImportToMongo(records, "parts.js");
});
//It looks like your issue is the slice bounds: with i++ the batches overlap, so the same records get inserted more than once. Advance i by the batch size (and loop over the array length rather than batchCount):
for (let i = 0; i < arrayToImport.length; i += 100 /* NOT i++ */) {
let batch = arrayToImport.slice(i, i + 100);
//...
}
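To see why the original version inserts around 9800 documents with duplicates, here is the overlap with a tiny array and a batch size of 2:
const arr = ['a', 'b', 'c', 'd', 'e'];
// original pattern: slice(i, i + 2) with i++
//   i = 0 -> ['a', 'b']
//   i = 1 -> ['b', 'c']   <- 'b' is inserted a second time
//   i = 2 -> ['c', 'd']
// fixed pattern: advance i by the batch size
for (let i = 0; i < arr.length; i += 2) {
  console.log(arr.slice(i, i + 2)); // ['a', 'b'], ['c', 'd'], ['e']
}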
I solved it.
I hope this helps others. :-)
I had two errors: one in the function promiseCSV (renamed to parseCSV), and second, bad logic in bulkImportToMongo.
Complete solution:
I parsed and imported 602,198 objects, and here is how long it took, using node --max_old_space_size=8000 on a MacBook Pro with 8 GB of RAM.
console
➜ database git:(master) ✗ node --max_old_space_size=8000 partImport.js
Connected to db!
Time to parse file: : 5209.325ms
Disconnected from db!
Time to import parsed objects to db: : 153606.545ms
➜ database git:(master) ✗
parseCSV.js
const csv = require("fast-csv");
function promiseCSV(filePath, options) {
return new Promise((resolve, reject) => {
console.time("Time to parse file");
var records = [];
csv
.fromPath(filePath, options)
.on("data", record => {
records.push(record);
})
.on("end", () => {
console.timeEnd("Time to parse file");
resolve(records);
});
});
}
module.exports = promiseCSV;
mongodb.js
const mongoose = require("mongoose");
mongoose.Promise = global.Promise;
function connectToMongo(databaseName) {
mongoose.connect(`mongodb://localhost:27017/${databaseName}`, {
keepAlive: true,
reconnectTries: Number.MAX_VALUE,
useMongoClient: true
});
console.log("Connected to db!");
}
function disconnectFromMongo() {
mongoose.disconnect();
console.log("Disconnected from db!");
}
function bulkImportToMongo(arrayToImport, mongooseModel) {
const Model = require(`../../../models/${mongooseModel}`);
const batchSize = 100;
let batchCount = Math.ceil(arrayToImport.length / batchSize);
let recordsLeft = arrayToImport.length;
let ops = [];
let counter = 0;
for (let i = 0; i < batchCount; i++) {
let batch = arrayToImport.slice(counter, counter + batchSize);
counter += batchSize;
ops.push(Model.insertMany(batch));
}
return Promise.all(ops);
}
module.exports.bulkImportToMongo = bulkImportToMongo;
module.exports.connectToMongo = connectToMongo;
module.exports.disconnectFromMongo = disconnectFromMongo;
partImport.js
const path = require("path");
const parseCSV = require("./helpers/parseCSV");
const {
connectToMongo,
disconnectFromMongo,
bulkImportToMongo
} = require("./helpers/mongodb");
const filePath = path.join(__dirname, "../../data/parts.csv");
const options = {
delimiter: ";",
noheader: true,
headers: [
"facility",
"partNumber",
"partName",
"partDescription",
"netWeight",
"customsTariff"
]
};
connectToMongo("autoMDM");
parseCSV(filePath, options)
.then(records => {
console.time("Time to import parsed objects to db");
return bulkImportToMongo(records, "parts.js");
})
/* .then(result =>
console.log("Total batches inserted: ", result, result.length)
) */
.then(() => {
disconnectFromMongo();
console.timeEnd("Time to import parsed objects to db");
})
.catch(error => console.log(error));