Node FTP download files from one server and upload to another server - javascript

I've been looking around for a while now and haven't found anything on the internet that addresses my problem. I'm trying to download files from one FTP server and upload them to another FTP server, one by one, using promises and without having to save the files locally during the process.
First I'm calling client.list() recursively from the ftp module to get an array of file paths I'll need to download from the source FTP server. This works fine.
getRecursively(client, path) {
  var _this = this;
  let downloadList = [];
  let paths = [];
  let promise = new Promise((resolve, reject) => {
    client.list(path, function(err, list) {
      async function loop() {
        for (var i = 0; i < list.length; i++) {
          if (list[i].type == 'd') {
            let _list = await _this.getRecursively(client, path + '/' + list[i].name)
            downloadList = downloadList.concat(_list);
          } else {
            if ( list[i].name.match(/\.(jpg|jpeg)$/i) ) {
              downloadList.push({path: path, name: list[i].name});
            }
          }
        }
        console.log("One complete");
        resolve(downloadList);
      }
      loop();
    })
  })
  return promise;
}
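As a side note, not from the original post: the same recursion can be written without the manual Promise wrapper by promisifying client.list once. This is only a sketch under that assumption (listAsync is a hypothetical helper name, not part of the ftp module):

// Wrap the callback-style client.list in a promise once.
const listAsync = (client, dir) =>
  new Promise((resolve, reject) =>
    client.list(dir, (err, list) => (err ? reject(err) : resolve(list)))
  );

async function getRecursively(client, dir) {
  const list = await listAsync(client, dir);
  let downloadList = [];
  for (const entry of list) {
    if (entry.type === 'd') {
      downloadList = downloadList.concat(await getRecursively(client, dir + '/' + entry.name));
    } else if (entry.name.match(/\.(jpg|jpeg)$/i)) {
      downloadList.push({ path: dir, name: entry.name });
    }
  }
  return downloadList;
}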
Next, I'm looping through the list of file paths and sending off promises, which are throttled using the es6-promise-pool module; right now its concurrency limit is set to 10.
This is what each promise looks like:
getAndInsert(file) {
  let _this = this;
  let promise = new Promise((resolve, reject) => {
    let c = new Client();
    c.on('ready', () => {
      let d = new Client();
      d.on('ready', () => {
        c.get(file.path + '/' + file.name, function(err, stream) {
          if (err) {console.log(err); console.log("FILE NAME: " + file.name)}
          d.put(stream.pipe(passThrough()), '/images/' + file.name, function() {
            _this.uploadCount += 1;
            _this.uploadedImages.push(file.name)
            console.log(_this.uploadCount + '/' + _this._list.length + " uploaded.")
            c.end();
            d.end();
            resolve(true);
          });
        });
      })
      d.on('error', (err) => {
        if (err) console.log(err);
        _this.onCompleteCallback();
      })
      d.connect(destinationFTP);
    })
    c.on('error', (err) => {
      if (err) console.log(err);
      _this.onCompleteCallback();
    })
    c.connect(sourceFTP);
  })
  return promise;
}
Each promise makes its own connection to the source and destination FTP servers. I'm also using the stream module's Transform object when I call d.put(stream.pipe(passThrough())). Here is that function.
const passThrough = () => {
  var passthrough = new Transform();
  passthrough._transform = function(data, encoding, done) {
    this.push(data);
    done();
  };
  return passthrough;
}
Finally, here is the main code that fires off the promises.
*buildPromises(list) {
  for (let i = 0; i < list.length; i++) {
    yield this.getAndInsert(list[i]);
  }
}

let iterator = _this.buildPromises(list);
var pool = new PromisePool(iterator, 10);
pool.start()
  .then(function(){
    console.log("Finished")
  }).catch((err) => {
    console.log(err);
    console.log("error processing pool promise");
  })
This will go through and build the list just fine; however, when I send the promises off I'm getting the following error:
Error: write after end
at writeAfterEnd (_stream_writable.js:236:12)
at Transform.Writable.write (_stream_writable.js:287:5)
at Socket.ondata (_stream_readable.js:639:20)
at emitOne (events.js:116:13)
at Socket.emit (events.js:211:7)
at Socket.Readable.read (_stream_readable.js:475:10)
at flow (_stream_readable.js:846:34)
at Transform.<anonymous> (_stream_readable.js:707:7)
at emitNone (events.js:106:13)
It might make it through about 5 files before erroring out, sometimes more, but it seems to be pretty consistent. I've also noticed I sometimes get similar errors saying 'The file is already in use', even though each file I'm uploading has a unique name. Any help is appreciated, and if you need any more info I'll do my best to provide it. Thank you.

So I found a solution. In my getAndInsert() function I was doing:
c.get(file.path + '/' + file.name, function(err, stream) {
  if (err) {console.log(err); console.log("FILE NAME: " + file.name)}
  d.put(stream.pipe(passThrough()), '/images/' + file.name, function() {
    _this.uploadCount += 1;
    _this.uploadedImages.push(file.name)
    console.log(_this.uploadCount + '/' + _this._list.length + " uploaded.")
    c.end();
    d.end();
    resolve(true);
  });
});
The problem resided in stream.pipe(passThrough()). It seemed as if I was trying to write after the stream had already ended. This is what solved my problem:
let chunks = [];
stream.on('data', (chunk) => {
  chunks.push(chunk);
})
stream.on('end', () => {
  d.put(Buffer.concat(chunks), '/images/' + file.name, function() {
    _this.uploadCount += 1;
    _this.uploadedImages.push(file.name)
    console.log(_this.uploadCount + '/' + _this._list.length + " uploaded.")
    c.end();
    d.end();
    resolve(true);
  });
})
When new data is available from the stream, push it to an array called chunks. When the stream is finished, call .put and pass in Buffer.concat(chunks).
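If this pattern is needed in more than one place, it could be wrapped in a small helper. This is just a sketch (the streamToBuffer name is mine, not from the post), with an error handler added so a failed download rejects instead of hanging:

// Collect a readable stream into a single Buffer.
function streamToBuffer(stream) {
  return new Promise((resolve, reject) => {
    const chunks = [];
    stream.on('data', (chunk) => chunks.push(chunk));
    stream.on('end', () => resolve(Buffer.concat(chunks)));
    stream.on('error', reject);
  });
}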
Hope this helps anyone going through a similar problem.

Related

Promise chaining causing increased execution time?

I am creating a simple Node.js function that converts a PDF to an image > crops the image > merges them back with ImageMagick.
This is the complete code I am using:
var os = require('os');
var fs = require('fs');
var path = require('path');
var gs = require('node-gs');
var sharp = require('sharp');
var areaMap = require('./areaMap');
const { performance } = require('perf_hooks');
var spawn = require('child_process').spawnSync;

var pExcep = 'someException';
var gsPath = 'Ghostscript/gs26';
var src = path.join(os.tmpdir(), '/');
var Files = {
  file1: path.join(src, 'out1.jpeg'),
  file2: path.join(src, 'out2.jpeg'),
  OutImg: path.join(src, 'out.jpeg')
}

var crop = function (s, sFile) {
  return new Promise((res, rej) => {
    s = areaMap[s];
    sharp(Files.OutImg).extract(s)
      .toFile(sFile)
      .then(() => res())
      .catch((err) => rej(err));
  });
};

var getBaseCard = function (s) {
  if (RegExp('^([0-9]{8})$').test(s)) { return 'SOMETHINGHERE' } else { return 'inception'; }
  // This can be done on client side.
}

var GetCardType = function (base, sInfo) {
  return new Promise((res, rej) => {
    if (base === 'SOEMTHINGHERE') {
      if (sInfo.includes('SOMETHINGHERE2')) {
        if (sInfo.includes(pExcep)) {
          res('PA_S_')
        } else {
          res('PA_S2')
        }
      } else {
        res('PA_ST')
      }
    } else {
      res('SA_')
    }
  })
}

var PdfToText = function (file, pass) {
  return new Promise((res, rej) => {
    gs()
      .batch().safer().nopause().res(2).option('-dDEVICEWIDTHPOINTS=20').option('-dDEVICEHEIGHTPOINTS=20').option('-dFIXEDMEDIA').option('-sPDFPassword=' + pass).device('txtwrite').output('-').input(file).executablePath(gsPath)
      .exec((err, stdout, stderr) => {
        if (!err) {
          res(stdout);
        } else {
          console.log(stdout);
          console.log(err);
          console.log(stderr);
        }
      })
  });
}

var getBaseImage = function (file, pass, quality) {
  return new Promise((res, rej) => {
    gs()
      .batch().nopause().safer().res(300 * quality).option('-dTextAlphaBits=4').option('-dGraphicsAlphaBits=4').option('-sPDFPassword=' + pass)
      .executablePath(gsPath).device('jpeg').output(Files.OutImg).input(file)
      .exec((err, stdout, stderr) => {
        if (!err) { res(); } else { rej(stdout) };
      })
  })
}

exports.processCard = function (file, password, quality) {
  return new Promise((resolve, reject) => {
    getBaseImage(file, password, quality) // Convert PDF to Image
      .then(() => {
        PdfToText(file, password) // Extract Text from pdf
          .then((res) => {
            GetCardType(getBaseCard(password), res) // finally get PDF Type
              .then((ct) => {
                // crop image here using Sharp
                Promise.all([
                  crop(ct + 'A_' + quality, Files.file1),
                  crop(ct + 'B_' + quality, Files.file2)])
                  .then(() => {
                    // Merge Above two image into one using ImageMagick convert
                    spawn('convert', [Files.file1, Files.file2, '+append', 'files/out1.jpg']);
                    fs.unlinkSync(Files.OutImg); // Unlink tmp folders
                    fs.unlinkSync(Files.file1);
                    fs.unlinkSync(Files.file2);
                    resolve(); // finally resolve
                  }).catch((err) => reject(err));
              }).catch((err) => reject(err))
          }).catch((err) => reject(err))
      }).catch((err) => reject(err))
  })
}
And now these are the problems I am facing:
1. ImageMagick isn't creating the output file.
2. fs.unlinkSync throws ENOENT: no such file or directory, unlink '/tmp/out1.jpeg' on average every second execution.
3. Using the above code increases execution time.
For example: getBaseImage should complete in 600ms, but it takes 1400ms with the above code.
About speed in general: the complete function (not just getBaseImage) should finish in 1100-1500ms(*) on average, but the time taken is ~2500ms.
(*) The 1100-1500ms time is achievable by using function chaining, but that is hard to read and maintain for me.
I am going to use this function in Firebase Functions.
How do I properly chain these functions?
EDIT
exports.processCard = function (file, password, quality) {
  return new Promise((resolve, reject) => {
    console.log(performance.now());
    getBaseImage(file, password, quality) // Convert PDF TO IMAGE
      .then(() => { return PdfToText(file, password) })
      .then((res) => { return GetCardType(getBaseCard(password), res) })
      .then((ct) => {
        return Promise.all([
          crop(ct + 'A_' + quality, Files.file1),
          crop(ct + 'B_' + quality, Files.file2)])
      })
      .then(() => {
        spawn('convert', [Files.file1, Files.file2, '+append', 'files/out1.jpg']);
        fs.unlinkSync(Files.OutImg); // Unlink tmp folders
        fs.unlinkSync(Files.file1);
        fs.unlinkSync(Files.file2);
        resolve();
      })
      .catch((err) => { console.log(err) });
  });
}
Using the above pattern didn't solve my issues here.
There's a good chance this weirdness is caused by using the file system. If I understand it correctly, the fs in Cloud Functions is in memory, so when you write to it, read from it, and remove from it, you're using more and less OS memory. That can get weird if a function is called repeatedly and reuses the loaded module.
One thing to try, to keep the state clean for each invocation, is to put everything (including the requires) inside the scope of the handler. That way you instantiate everything freshly on each invocation, as sketched below.
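A rough sketch of that idea, assuming an HTTPS-triggered Firebase function; the handler shape and the per-invocation temp path are illustrative, not taken from the question:

const functions = require('firebase-functions');

exports.processCard = functions.https.onRequest((req, res) => {
  // Everything, including the requires and temp paths, lives inside the handler
  // so that each invocation starts with fresh state.
  const os = require('os');
  const path = require('path');
  const fs = require('fs');
  const src = path.join(os.tmpdir(), String(Date.now())); // per-invocation temp dir (assumption)
  fs.mkdirSync(src);

  // ... build the Files paths, run the PDF/image pipeline, clean up, then respond ...
  res.send('done');
});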
Finally, you don't seem to be waiting for the spawned convert command to run; you'll need to wait for it to complete:
// Note: this relies on event listeners, so it needs the asynchronous spawn
// (require('child_process').spawn), not the spawnSync imported in the question.
const convertProc = spawn('convert', [Files.file1, Files.file2, '+append', 'files/out1.jpg']);
convertProc.on('close', function() {
  fs.unlinkSync(Files.OutImg); // Unlink tmp folders
  fs.unlinkSync(Files.file1);
  fs.unlinkSync(Files.file2);
  resolve();
})
convertProc.on('error', function(error) {
  reject(error);
});
Then you wait for it to complete before you resolve.
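A side note not in the original answer: the question's code actually imports spawnSync, which already blocks until convert exits; in that case checking the synchronous result would show whether convert succeeded before the files are unlinked. A sketch under that assumption:

// Sketch assuming child_process.spawnSync, as imported in the question's code.
const result = spawn('convert', [Files.file1, Files.file2, '+append', 'files/out1.jpg']);
if (result.status !== 0) {
  return reject(result.error || new Error('convert failed: ' + result.stderr));
}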

How to wait until a for loop finishes using async await in javascript?

My use case:
I upload 5 images to the S3 server in the browser and get those images' uploaded URLs.
Then I pass those URLs to the back-end.
This is my async function
try {
  await uploadImagesToS3(imagesArray);
  await saveUrlsInBackend();
} catch (error) {
}
In my uploadImagesToS3 function I'm trying to do something like this.
uploadImagesToS3() {
  resolve(FORLOOP)
}
After the for loop runs 5 times, I want to resolve it back to my main async function.
This is my real uploadImagesToS3 function:
onUpload(array, albumName) {
  return new Promise((resolve, reject) => {
    resolve(
      for (let index = 0; index < array.length; index++) {
        var files = document.getElementById(array[index]).files;
        if (!files.length) {
          return alert("Please choose a file to upload first.");
        }
        var file = files[0];
        var fileName = file.name;
        var albumPhotosKey = encodeURIComponent(albumName) + "//";
        var photoKey = albumPhotosKey + fileName;
        self;
        s3.upload(
          {
            Key: photoKey,
            Body: file,
            ACL: "public-read"
          },
          (err, data) => {
            if (err) {
              return alert(
                "There was an error uploading your photo: ",
                err.message
              );
            }
            // alert("Successfully uploaded photo.");
            this.images[index].image_path = data.Location;
          }
        );
      }
    );
  });
}
But it doesn't let me use a for loop inside a resolve call.
How can I achieve this with async/await?
"resolve(FORLOOP)" - no, that's not how it would work.
You should promisify the s3.upload method alone, into a function that just calls it and returns a promise for the result and nothing else:
function upload(value) {
  return new Promise((resolve, reject) => {
    s3.upload(value, (err, res) => {
      if (err) reject(err);
      else resolve(res);
    });
  });
}
Now you can use that in your method, either by chaining the promises together or by simply using async/await:
async onUpload(array, albumName) { /*
^^^^^ */
  for (const [index, id] of array.entries()) { // entries() so that index is defined below
    const files = document.getElementById(id).files;
    if (!files.length) {
      alert("Please choose a file to upload first.");
      return;
    }
    const file = files[0];
    const albumPhotosKey = encodeURIComponent(albumName) + "//";
    const photoKey = albumPhotosKey + file.name;
    try {
      const data = await upload({
      //           ^^^^^
        Key: photoKey,
        Body: file,
        ACL: "public-read"
      });
      // alert("Successfully uploaded photo.");
      this.images[index].image_path = data.Location;
    } catch (err) {
      alert("There was an error uploading your photo: ", err.message);
      return;
    }
  }
}
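If the five uploads don't have to run one after another, a parallel variant is also possible. This is only a sketch under the same assumptions (the promisified upload() helper above and the component's this.images array), not code from the original answer:

async onUpload(array, albumName) {
  const albumPhotosKey = encodeURIComponent(albumName) + "//";
  // Start every upload immediately and wait for all of them to finish.
  const uploads = array.map((id, index) => {
    const file = document.getElementById(id).files[0];
    return upload({
      Key: albumPhotosKey + file.name,
      Body: file,
      ACL: "public-read"
    }).then((data) => {
      this.images[index].image_path = data.Location;
    });
  });
  await Promise.all(uploads); // resolves once all uploads have completed
}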

Node.js handling successive asynchronous operations

I'm trying to write an asynchronous function to create some user directories with Node.js.
I would like the callback to be executed with a response containing the status of the operations for later processing. But the object is not being logged from within the for loop and the second mkdir. Also, the subdirectory name is logged as the same thing each time, even though all the directories are created correctly.
I have been looking for a while at how to solve this. I think it's due to closures and needs an IIFE, but I am totally lost now on how to solve it. Can anyone help point me in the right direction please?
Here is my code:
const fs = require('fs');
const path = require('path');

var showSettings = {
  "userDirectories": ["shows", "uploads", "backups", "logs"],
  "showsFolder": "shows"
};

var Files = function() {};

Files.prototype.makeUserDirectories = (username, callback) => {
  let directory = (path.join(__dirname, "../users", username));
  let response = {};
  fs.mkdir(directory, err => {
    if (err) {
      response.status = "ERROR";
      response.error = err;
      console.log('failed to create directory', err);
    } else {
      console.log(`creating directory ${directory} succeeded`);
      let subdirectory = "";
      for (let i = 0; i < showSettings.userDirectories.length; i++) {
        subdirectory = (path.join(__dirname, "../users", username, showSettings.userDirectories[i]));
        fs.mkdir(subdirectory, err => {
          if (err) {
            response.status = "ERROR";
            response.error = err;
            console.log('error creating subdirectory', err);
          } else {
            response.status = "OK";
            console.log(`creating directory ${subdirectory} succeeded`);
          };
        });
      }
      console.log(response);
    }
    if (callback && typeof(callback) === "function") {
      console.log(response);
      callback(response);
    }
  });
};

testFiles.makeUserDirectories("mr.test#somedomain.com", function(data) {
  console.log("in callback function");
  console.log(data);
});
My problem is that the response object returned to the callback is empty.
I think it's something to do with the for loop and an IIFE, but I am not entirely sure how to do this or if there is a better way to achieve what I am trying to do.
Many thanks!
Your issue is that you're trying to execute your callback before your asynchronous operations have completed. Asynchronous operations can be very complicated, and there are many libraries to make things simpler, many based on the concept of Promises. These are objects that allow you to chain multiple operations together, with the cost of more overhead. I would highly recommend using Promises to produce a more intuitive function:
const fs = require('fs');
const path = require('path');

var showSettings = {
  "userDirectories": ["shows", "uploads", "backups", "logs"],
  "showsFolder": "shows"
};

var Files = function() {};

function mkdir(path) {
  return new Promise((resolve, reject) => {
    fs.mkdir(path, (err) => {
      if (err) {
        reject(err);
      } else {
        resolve("OK");
      }
    })
  });
}

Files.prototype.makeUserDirectories = (username, callback) => {
  let directory = (path.join(__dirname, "../users", username));
  return mkdir(directory).then(() => {
    console.log(`creating directory ${directory} succeeded`);
    let subdirectory = "";
    const operations = [];
    for (let i = 0; i < showSettings.userDirectories.length; i++) {
      subdirectory = (path.join(__dirname, "../users", username, showSettings.userDirectories[i]));
      operations.push(mkdir(subdirectory));
    }
    return Promise.all(operations);
  }).then((status) => { // this will not be called until all operations complete
    console.log({status});
    if (callback && typeof(callback) === "function") {
      callback({status});
    }
  }).catch((error) => { // this will be called if an error is encountered at any point
    callback({status: 'ERROR', error});
  })
};

var testFiles = new Files();
testFiles.makeUserDirectories("mr.test#somedomain.com", function(data) {
  console.log("in callback function");
  console.log(data);
});
EDIT: Updated with a cleaner implementation.
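As a side note not from the original answer: on newer Node.js versions (10+), fs.promises already exposes a promise-returning mkdir, so the hand-rolled wrapper above could be replaced with something like this sketch:

const fsp = require('fs').promises;

// Same contract as the mkdir() wrapper above: resolves with "OK" on success.
function mkdir(dirPath) {
  return fsp.mkdir(dirPath).then(() => "OK");
}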

s3.getObject().createReadStream() : How to catch the error?

I am trying to write a program to get a zip file from S3, unzip it, then upload it back to S3.
But I found two exceptions that I can not catch.
1. StreamContentLengthMismatch: Stream content length mismatch. Received 980323883 of 5770104761 bytes. This occurs irregularly.
2. NoSuchKey: The specified key does not exist. This happens when I input the wrong key.
When these two exceptions occur, this program crashes.
I'd like to catch and handle these two exceptions correctly.
I want to prevent a crash.
const unzipUpload = () => {
  return new Promise((resolve, reject) => {
    let rStream = s3.getObject({Bucket: 'bucket', Key: 'hoge/hoge.zip'})
      .createReadStream()
      .pipe(unzip.Parse())
      .on('entry', function (entry) {
        if (entry.path.match(/__MACOSX/) == null) {
          // pause
          if (currentFileCount - uploadedFileCount > 10) rStream.pause()

          currentFileCount += 1
          var fileName = entry.path;
          let up = entry.pipe(uploadFromStream(s3, fileName))

          up.on('uploaded', e => {
            uploadedFileCount += 1
            console.log(currentFileCount, uploadedFileCount)

            // resume
            if (currentFileCount - uploadedFileCount <= 10) rStream.resume()

            if (uploadedFileCount === allFileCount) resolve()
            entry.autodrain()
          }).on('error', e => {
            reject()
          })
        }
      }).on('error', e => {
        console.log("unzip error")
        reject()
      }).on('finish', e => {
        allFileCount = currentFileCount
      })

    rStream.on('error', e => {
      console.log(e)
      reject(e)
    })
  })
}

function uploadFromStream(s3, fileName) {
  var pass = new stream.PassThrough();
  var params = {Bucket: "bucket", Key: "hoge/unzip/" + fileName, Body: pass};

  let request = s3.upload(params, function(err, data) {
    if (err) pass.emit('error')
    if (!err) pass.emit('uploaded')
  })

  request.on('httpUploadProgress', progress => {
    console.log(progress)
  })

  return pass
}
This is the library I use when unzipping.
https://github.com/mhr3/unzip-stream
Help me!!
If you'd like to catch the NoSuchKey error thrown by createReadStream you have 2 options:
Check if key exists before reading it.
Catch error from stream
First:
s3.headObject(params) // e.g. params = { Bucket: 'bucket', Key: 'hoge/hoge.zip' }
  .promise()
  .then(() => {
    // This will not throw error anymore
    s3.getObject().createReadStream();
  })
  .catch(error => {
    if (error.statusCode === 404) {
      // Catching NoSuchKey
    }
  });
The only case where you won't catch the error is if the file was deleted in the split second between the headObject response and running createReadStream.
Second:
s3.getObject().createReadStream().on('error', error => {
  // Catching NoSuchKey & StreamContentLengthMismatch
});
This is a more generic approach and will catch all other errors, like network problems.
You need to listen for the emitted error earlier. Your error handler is only looking for errors during the unzip part.
A simplified version of your script.
s3.getObject(params)
  .createReadStream()
  .on('error', (e) => {
    // handle aws s3 error from createReadStream
  })
  .pipe(unzip)
  .on('data', (data) => {
    // retrieve data
  })
  .on('end', () => {
    // stream has ended
  })
  .on('error', (e) => {
    // handle error from unzip
  });
This way, you do not need to make an additional call to AWS to find out if it exists.
You can listen to events (like error, data, finish) in the stream you are receiving back. Read more on events
function getObjectStream (filePath) {
  return s3.getObject({
    Bucket: bucket,
    Key: filePath
  }).createReadStream()
}

let readStream = getObjectStream('/path/to/file.zip')
readStream.on('error', function (error) {
  // Handle your error here.
})
Tested for "No Key" error.
it('should not be able to get stream of unavailable object', function (done) {
  let filePath = 'file_not_available.zip'
  let readStream = s3.getObjectStream(filePath)
  readStream.on('error', function (error) {
    expect(error instanceof Error).to.equal(true)
    expect(error.message).to.equal('The specified key does not exist.')
    done()
  })
})
Tested for success.
it('should be able to get stream of available object', function (done) {
  let filePath = 'test.zip'
  let receivedBytes = 0
  let readStream = s3.getObjectStream(filePath)
  readStream.on('error', function (error) {
    expect(error).to.equal(undefined)
  })
  readStream.on('data', function (data) {
    receivedBytes += data.length
  })
  readStream.on('finish', function () {
    expect(receivedBytes).to.equal(3774)
    done()
  })
})
To prevent a crash, you can asynchronously check the object's head metadata first; it does not return the whole object, so it takes less time. Try this one!
const AWS = require('aws-sdk');

// Credentials left as placeholders, as in the original answer.
const s3bucket = new AWS.S3({
  accessKeyId: '',
  secretAccessKey: ''
});

const isObjectErrorExists = async () => {
  try {
    const params = {
      Bucket: 'your bucket name',
      Key: 'path to object'
    };
    await s3bucket.headObject(params).promise(); // .promise() lets you await until the request completes.
    return true;
  } catch (err) {
    return false; // headObject threw an error, e.g. the key does not exist.
  }
};

const yourFunction = async () => {
  if (await isObjectErrorExists()) {
    s3bucket.getObject().createReadStream(); // works smoothly
  }
};

Node JS for loop and array push

I have 1,211,434 IP addresses that need to be converted into geolocations. I found an API that answers this question using a GET request. But the thing is, when using a for loop, I cannot send the IP address and receive the description correctly.
Mainly, I have two questions:
I just cannot output the ip_and_info array and can't find the reason. Can anybody tell me what went wrong?
The code I wrote can retrieve all the information that I need; there are around 200 IP addresses in test_ip.txt. Would there be a potential problem if I try to send all those 1M IP addresses?
Can anyone give me some advice?
Much appreciated.
My code is as below:
fs = require('fs')
async = require("async")
http = require('http')

ip_and_info = []

// getIPInfo("1.171.58.24")

fs.readFile("../test_ips.txt", "utf-8", (err, content, printArr) => {
  content = content.split("\n")
  async.each(content, (ip) => {
    content = getIPInfo(ip)
    // console.log(ip)
  }, (err) => {
    if (err) {
      console.log(err)
    } else {
      console.log(ip_and_info)
    }
  })
  // for (i in content) {
  //   ((ip) => {
  //     getIPInfo(ip)
  //   })(content[i])
  // }
});

function getIPInfo(ipAddress) {
  options = {
    host: 'freegeoip.net',
    path: '/csv/' + ipAddress
  }
  request = http.get(options, function(response) {
    // console.log('STATUS: ' + response.statusCode)
    // console.log('HEADERS: ' + JSON.stringify(response.headers))
    // Buffer the body entirely for processing as a whole.
    bodyChunks = []
    response.on('data', function(chunk) {
      bodyChunks.push(chunk)
    }).on('end', function() {
      body = Buffer.concat(bodyChunks)
      content = body.toString('ascii')
      ip_and_info.push(content)
      console.log(content)
      return content
    })
  })
  request.on('error', function(e) {
    console.log('ERROR: ' + e.message)
  })
}
Much Appreciated!
The problem lies in this line
content = getIPInfo(ip)
getIPInfo should be an asynchronous function. One way of doing this would be to pass a callback to the function and return the output via that callback.
async.each(content, getIPInfo, (err) => {
  if (err) {
    console.log(err)
  } else {
    console.log(ip_and_info)
  }
})
And in the getIPInfo function
function getIPInfo(ipAddress, callback) {
  .....
  .....
  ip_and_info.push(content)
  callback();
}
Also, instead of using async.each, use async.eachSeries or async.eachLimit; otherwise it will try to send requests for all 1,211,434 IPs at once.
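For example, a minimal sketch of the eachLimit variant, assuming the callback-style getIPInfo(ip, callback) shown above (the limit of 10 is an arbitrary choice):

// At most 10 requests are in flight at any one time.
async.eachLimit(content, 10, getIPInfo, (err) => {
  if (err) {
    console.log(err)
  } else {
    console.log(ip_and_info)
  }
})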
Use Promises.
Use the let and const keywords. Seriously, implicit globals aren't fun.
Decide whether to use ' or " and stick with it; it is way more readable.
With Promises, there is no need for the async library or your ip_and_info variable.
'use strict';

const fs = require('fs'),
  http = require('http');

fs.readFile('../test_ips.txt', 'utf-8', (err, content) => {
  content = content.split('\n');
  Promise.resolve().then(() => {
    return getAllIPInfo(content);
  }).then((ipsInfos) => {
    console.log('Info:' + ipsInfos);
  }).catch((error) => {
    console.error('Error: ' + error);
  });
});

function getAllIPInfo(ipsAddress) {
  return new Promise((resolve, reject) => {
    let ipsInfo = [];
    ipsAddress.reduce((previous, current, index, ips) => {
      return previous.then(() => {
        return getIPInfo(ips[index]).then((content) => {
          ipsInfo.push(content);
          return Promise.resolve();
        });
      });
    }, Promise.resolve()).then(() => {
      resolve(ipsInfo);
    }).catch((error) => {
      reject(error);
    });
  });
}

function getIPInfo(ipAddress) {
  return new Promise((resolve, reject) => {
    let options = {
      host: 'freegeoip.net',
      path: '/csv/' + ipAddress
    };
    http.get(options, function(response) {
      // console.log('STATUS: ' + response.statusCode)
      // console.log('HEADERS: ' + JSON.stringify(response.headers))
      // Buffer the body entirely for processing as a whole.
      let bodyChunks = [];
      response.on('data', function(chunk) {
        bodyChunks.push(chunk);
      }).on('end', function() {
        let body = Buffer.concat(bodyChunks),
          content = body.toString('ascii');
        resolve(content);
      });
    }).on('error', function(e) {
      console.log('ERROR: ' + e.message);
      reject(e);
    });
  });
}
I think your problem might be that you are re-declaring the 'content' variable on each pass through the loop.
So perhaps change the loop to this, so you don't reset the variable each time the loop executes. I hope that fixes your issue:
IPList = content.split("\n")
async.each(IPList, (ip) => {
  IPGeoLocation = getIPInfo(ip)
  console.log(IPGeoLocation)
}, (err) => {
As for doing this with a million IPs, I can't see a major problem as long as you have a decent amount of memory on your computer. You might like to add a 'wait' call so you don't hammer the server so consistently. They might block you!
I would wait 1 second between each call by adding
sleep(1000);
after getting the IP.
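Note that Node.js has no built-in blocking sleep(); a promise-based delay is the usual substitute. A small sketch (not from the original answer), to be awaited between requests inside an async loop:

// Resolves after the given number of milliseconds.
const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

// e.g. inside an async loop, after each getIPInfo call:
// await delay(1000);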
