How to implement a for-loop for an SFTP-Server - javascript

I am setting up an SFTP-Server with Nodejs with several functions
implemented. (Read, delete, download…)
The problem is, that the server only sees the content of the first
file in the directory he reads. Meaning while I can see all files
lined up with the help of my WinSCP Client, it only opens the
content of the first file. When I click on the second file, it shows
the content of the first file again, although the file is named differently
and so on. When I try to download the files, same happens; you can
download each file, but as soon as you open them there is the
content of the very first file in my directory. I know that it has
to do with the for-Loop I wasn't able to implement for a while now.
I hope you guys can help with some experience/ expertise.
I used dummy user names and Paths for this website.
// "readdir" handler: streams one entry from the shared `results` array per
// "dir" event. `results` and `now` are defined elsewhere in the file.
// NOTE(review): the ** markers in this snippet are markdown-bold residue from
// the page it was scraped from — they are not valid JavaScript.
session.on("readdir", function(files, responder) {
// Cursor into `results`; it lives in this handler's closure, so it DOES
// advance correctly across successive "dir" events for one listing.
var i = 0;
responder.on("dir", function() {
**if (results[i])** {
console.warn(now + " Returning file: " + results[i]);
responder.file(results[i]);
**return i++;**
} else {
// No entries left: terminate the listing.
return responder.end();
}
});
return responder.on("end", function() {
return console.warn(now + " Directory has been read");
});
});
// "stat" handler: reports fixed, hard-coded attributes for every path.
session.on('stat', function(path, statkind, statresponder) {
statresponder.is_file(); // every path is reported as a regular file
statresponder.permissions = 0o777; // world read/write/execute
statresponder.uid = 1;
statresponder.gid = 1;
// NOTE(review): a constant size is reported regardless of the actual file.
statresponder.size = 1234;
});
// "readfile" handler: streams a file's contents back to the client.
// BUG (the asker's question): `i` is re-initialised to 0 on EVERY "readfile"
// event, so results[0] — the first file — is served no matter which file the
// client requested. The `path` parameter is never consulted at all.
session.on("readfile", function(path, writestream) {
var i = 0;
**if(results[i])** {
console.log(now + " Reading file: " **+ results[i]);**
return fs.createReadStream("KKK_Files/"+results[i]).pipe(writestream);
}
else {
console.warn(now + " no file to read");
}
})
// "delete" handler. BUG: `i` is always 0, so only results[0] would ever be
// unlinked, regardless of the `path` the client asked to remove.
// NOTE(review): the ** markers below are markdown-bold residue from the page
// this snippet was scraped from, not JavaScript.
session.on("delete", function(path, writestream) {
session.sftp(function(err, sftp) {
if (err) throw err;
var i = 0;
fs.unlink("KKK_Files/" **+ results[i],** function(err){
if ( err ) {
console.log( "Error, problem starting SFTP: %s", err );
}
else
{
console.log(now + " all Files have been deleted");
}
});
});
// NOTE(review): this "download" handler is registered INSIDE the "delete"
// handler — the braces in this paste are unbalanced; it was presumably meant
// to be a sibling of the other session.on(...) calls.
session.on("download", function(path, fastGet) {
var moveTo = "C:/Users/User/Downloads/file.DAT";
// NOTE(review): `err` is not defined here — this line would throw a
// ReferenceError if this handler ever ran.
if (err) throw err;
var i = 0;
// NOTE(review): fastGet is an SFTP-client method (sftp.fastGet), not part of
// the `fs` module — fs.fastGet does not exist. Confirm the intended API.
fs.fastGet("KKK_Files/" +
**results[i],**moveTo,function
(err) {if ( err ) {
console.log("Error downloading the file", err);
}
else
{
console.log(now + "Successfully downloaded");
}
});
});
// Copies the listing received by "readdir" into the shared `results` array.
files.forEach(files => {
results.push(files);
});
});
});
});
// NOTE(review): this loop sits outside any handler; `writestream` is not in
// scope here, and the unconditional `return` exits after the FIRST iteration —
// exactly the pattern the asker is struggling with.
for**(i=0; i<results.length; i++)** {
console.log(now + " Reading file: " + results[i]);
return fs.createReadStream("KKK_Files/" + results[i]).pipe(writestream);
}if (err) throw err;

You are using the local variable `i` as the index into `results`. That local variable is re-initialised to 0 every time the event fires, so you always read the first entry.
// problem with your variable name, change variable names and try to understand scope.
var i = 0;
You may want to declare the index variable outside of the event listener. Try with one event at a time.

Related

How can I add a setTimeOut() to run a function and insert data multiple times?

A few days ago I did a project and I had some problems, which were solved in this question; let me try to summarize it.
I need to insert multiple objects into a DB in SQLServer, for that, I did a function that loops another function, which opens a connection, inserts and closes the connection, then, repeats it over and over again.
It worked fine, until today, when it was tested on a colleague's PC against the company's server, and I get this error:
Error: Requests can only be made in the LoggedIn state, not the LoggedInSendingInitialSql state
Error: Requests can only be made in the LoggedIn state, not the SentLogin7WithStandardLogin state
Here's the code we tested (the same in my last question), it works in my PC, but not in the other:
// tedious (SQL Server) connection settings; contents elided in the question.
var config = {
...
};
// Fetches weather data for office `index` and inserts it via a stored proc.
// BUG (root cause of the "Requests can only be made in the LoggedIn state"
// errors): execSql() is reached from the fetch .then(), which can fire BEFORE
// the connection's "connect" event — tedious only accepts requests once logged
// in. The request should be issued from inside the "connect" callback.
function insertOffice(index) {
var connection = new Connection(config);
connection.on("connect", function (err) {
console.log("Successful connection");
});
connection.connect();
let url = `https://api.openweathermap.org/data/2.5/weather?lat=${offices[index].latjson}&lon=${offices[index].lonjson}&appid=${api_key}&units=metric&lang=sp`;
fetch(url)
.then((response) => { return response.json(); })
.then(function (data) {
var myObject = {
Id_Oficina: offices[index].IdOficina,
...
};
// NOTE(review): tedious parameter placeholders are written @Name — the
// "#IdOficina" below looks like scraping damage of "@IdOficina"; confirm.
const request = new Request(
"EXEC USP_BI_CSL_insert_reg_RegistroTemperaturaXidOdicina #IdOficina, ...",
function (err) {
if (err) {
console.log("Couldnt insert data (" + index + "), " + err);
} else {
console.log("Data with ID: " + myObject.Id_Oficina +" inserted succesfully(" + index + ").")
}
}
);
request.addParameter("IdOficina", TYPES.SmallInt, myObject.Id_Oficina);
...
request.on("row", function (columns) {
columns.forEach(function (column) {
if (column.value === null) {
console.log("NULL");
} else {
console.log("Product id of inserted item is " + column.value);
}
});
});
// Close the connection once the request finishes; tedious then emits "end".
request.on("requestCompleted", function () {
connection.close();
});
connection.execSql(request);
});
}
// Fires insertOffice for EVERY office immediately — all connections and
// inserts run in parallel, which is what the answers below serialize.
function functionLooper() {
for (let i = 0; i < offices.length; i++) {
let response = insertOffice(i);
}
}
functionLooper();
So, I thought it would be a good idea to use a setTimeOut, to:
Run functionLooper().
Open connection, insert and close.
Wait a few seconds.
Repeat.
So, I changed to this:
// Delays only the FIRST run of the whole loop by 2 s; the inserts inside the
// loop still all fire at once — nothing is serialized.
setTimeout(functionLooper, 2000);
function functionLooper() {
for (let i = 0; i < offices.length; i++) {
let response = insertOffice(i);
}
}
It works, but, as you can see, only waits when I first run it, so tried to make a function that runs setTimeout(functionLooper, 2000); like functionLooper() does, but it didn't work either.
// NOTE(review): every setTimeout here gets the SAME 500 ms delay, so after
// half a second the full loop runs offices.length times — offices.length^2
// inserts in total, still all in parallel. This does not serialize anything.
function TimerLooper() {
for (let i = 0; i < offices.length; i++) {
setTimeout(functionLooper, 500);
}
}
function functionLooper() {
for (let i = 0; i < offices.length; i++) {
let response = insertOffice(i);
}
}
TimerLooper();
This shows me this error:
Error: Validation failed for parameter 'Descripcion'. No collation was set by the server for the current connection.
file:///...:/.../.../node_modules/node-fetch/src/index.js:95
reject(new FetchError(request to ${request.url} failed, reason: ${error.message}, 'system', error));
^ FetchError: request to https://api.openweathermap.org/data/2.5/weather?lat=XX&lon=XX&appid=XX&units=metric&lang=sp failed, reason: connect ETIMEDOUT X.X.X.X:X
So, I have some questions
How can I use properly setTimeOut? I did this function based on what I watch here in SO, but I just can't get it and I don't know what I'm doing wrong.
Why it works in my PC and the other don't? Do we have to change some kind of config or something?
Using setTimeOut, is the correct way to solve this problem? if not, what would you suggest me?
Could you do something like:
//edit: not disconnect but end
// When the previous office's connection closes, kick off the next one.
connection.on("end", function () {
  // FIX: "functionLopper" was a typo (ReferenceError at runtime), and
  // `index++` evaluates to the OLD value, so the same office would be
  // re-inserted forever. Pass the next index explicitly.
  functionLooper(index + 1);
});
// Runs one insert at a time, stopping once every office has been processed.
function functionLooper(i) {
  if (i < offices.length) insertOffice(i);
}
Edit: according to the tedious docs
There is an end event emitted on connection.close()
Event: 'end'
function () { }
The connection has ended. This may be as a result of the client calling close(), the server closing the connection, or a network error.
My suggestion from above
var config = {
...
};
function insertOffice(index) {
var connection = new Connection(config);
connection.on("connect", function (err) {
console.log("Successful connection");
});
connection.connect();
let url = `...`;
fetch(url)
.then((response) => { return response.json(); })
.then(function (data) {
...
});
connection.on("end", function(){
functionLopper(index++)
})
}
function functionLooper(i) {
if(i<offices.length) insertOffice(i)
}
``

error in writing text to a file using node js fs?

I have a folder inside my public folder called errorlogs. I want to append text to a file in that folder without overwriting the file. However, for some reason, I am unable to do that using fs.appendFile. Please can someone spot my error? Here is what I tried. It actually creates a file called errorlogsapikey.txt outside my public folder, which is not what I want.
// Directory that should hold the error log files.
var filepath = __dirname + '/errorlogs';
// FIX: 0744 is a legacy octal literal — a syntax error in strict/ESM code;
// use the modern 0o744 form.
ensureExists(filepath, 0o744, function (err) {
  if (err) {
    // FIX: the original branch was empty, silently swallowing folder-creation
    // errors; at least log them.
    console.error(err);
  } else {
    console.log("we are all good");
    var data = "url:" +" "+url+" "+"urlstatus:"+" "+urlstatus+" "+"time:"+ " "+formatted;
    // BUG FIX (the question): join with '/' so the file lands INSIDE the
    // errorlogs folder instead of creating "<dirname>/errorlogs<apikey>.txt"
    // next to it.
    fs.appendFile(filepath + '/' + apikey + '.txt', data, function (err) {
      if (err) throw err;
    });
  }
});
// Creates `path` with the given permission mask, treating "already exists" as
// success. Calls cb(null) on success (or EEXIST), cb(err) on any other error.
function ensureExists(path, mask, cb) {
  // `mask` is optional: support the two-argument form ensureExists(path, cb).
  if (typeof mask === 'function') {
    cb = mask;
    // FIX: 0777 is a legacy octal literal — a SyntaxError in strict mode and
    // in ES modules; the modern spelling is 0o777.
    mask = 0o777;
  }
  fs.mkdir(path, mask, function (err) {
    if (err) {
      if (err.code === 'EEXIST') cb(null); // folder already exists: fine
      else cb(err); // a real failure — propagate it
    } else {
      cb(null); // successfully created folder
    }
  });
}

Express and Mongodb insert same data multiple times

I am quite new to Express and Mongodb. The project that I am working on requires me to:
Take an object that contains multiple url
Download the content of the url and save it to a cloud storage
Generate links for each of the file saved
Save these links into Mongodb as individual documents
The incoming object looks something like this:
{
"id" : 12345678,
"attachments" : [
{
"original_url" : "https://example.com/1.png",
},
{
"original_url" : "https://example.com/2.png",
},
{
"original_url" : "https://example.com/3.png",
}
]
}
the end goal is to have 3 separate document like this saved on mongodb:
{
"id" : 87654321,
"some_other_data": "etc",
"new_url" : "https://mycloudstorage.com/name_1.png"
}
I have a simple loop like this:
// BUG (the question): every async `download` callback mutates and saves the
// SAME shared `new_data` object, so all saved documents end up with whichever
// new_url was assigned last. The callback parameter `new_url` shadows the
// loop's variable, which is why console.log still prints distinct urls.
// NOTE(review): `original_url` is not defined in this snippet — presumably it
// should come from original_data.attachments[i].original_url; confirm.
for(var i = 0; i < original_data.attachments.length; i++){
var new_url = "https://example.com/" + i + ".png";
download(original_url, new_url, function(new_url){
console.log(new_url)
new_data.new_url = new_url;
save_new_url_to_mongodb(new_data);
});
}
and the saving function looks like this:
// Inserts `data` into Mongo and reports the stored item via cb(err, item).
// `getCollection`, `collection` (the name), and `fromMongo` are defined
// elsewhere in the project.
// NOTE(review): the loop in the question calls this WITHOUT a `cb`, so every
// cb(...) below would throw "cb is not a function" — confirm call sites.
function save_new_url_to_mongodb (data, cb) {
getCollection(collection, function (err, collection) {
if (err) {
return cb(err);
}
// w: 1 requests acknowledgement from the primary; ordered: false lets the
// driver continue past individual failures in a batch.
collection.insert(data, {w: 1, ordered: false}, function (err, result) {
if (err) {
return cb(err);
}
var item = fromMongo(result.ops);
cb(null, item);
});
});
}
// Streams the resource at `original_url` into cloud storage under `new_url`,
// then invokes callback(new_url) when the upload finishes.
// `request` (HTTP client) and `bucket` (cloud storage) come from elsewhere.
var download = function(original_url, new_url, callback){
request.head(original_url, function(err, res, body){
if(res === undefined){
// NOTE(review): `callback` is never invoked on this failure path, so the
// caller silently stalls for this attachment.
console.log(err);
} else {
var localUrlStream = request(original_url);
var file = bucket.file(new_url);
var remoteWriteStream = file.createWriteStream();
var stream = localUrlStream.pipe(remoteWriteStream);
stream.on('error', function (err) {
// NOTE(review): `next` is not defined in this snippet — presumably an
// Express middleware `next`; confirm it is actually in scope here.
next(err);
});
stream.on('finish', function(){
callback(new_url);
});
}
});
};
The downloading part is fine, I get 3 different image files in my cloud storage. The console.log also gives me 3 different new urls.
The problem is that the newly saved mongodb document all have the same new_url. And sometimes if there are more original_url in the original data, some of the new documents would fail to save.
Thanks a lot
It's a scoping issue in your assignment of new_url in the for loop. See here: JavaScript closure inside loops – simple practical example
A solution is to use Array.Prototype.forEach which inherently solves the scope issue since each iteration creates a closure for the callback
// For each attachment, upload it under a generated name and save a document
// carrying its own new_url. forEach gives each iteration its own closure,
// which is what fixes the asker's shared-variable problem.
original_data.attachments.forEach(function (attachment, i) {
  // FIX: the original callback received the ELEMENT as `i` (stringifying an
  // object into the URL) and referenced an undefined `original_url`; use the
  // real index and the attachment's own url.
  var new_url = "https://example.com/" + i + ".png";
  download(attachment.original_url, new_url, function (new_url) {
    console.log(new_url);
    // FIX: save a per-attachment copy — mutating the single shared `new_data`
    // object would reintroduce the "same url in every document" bug.
    var doc = Object.assign({}, new_data, { new_url: new_url });
    save_new_url_to_mongodb(doc);
  });
});

async.waterfall inside a for loop escapes the for loop

On a Form Action of type POST, we fetch all the values in Node.JS/Express and try saving it into MongoDB.
A hidden field determines the length of a property from the frontend javascript and it's value is updated as the hidden field's value.
This length is used in the backend (Node) to iterate over a list of items.
I have a async.waterfall function and a for loop running inside it like this.
// Saves N destinations, then a brand. BUG (the question): both waterfall
// callbacks fire before the asynchronous save() calls complete, so
// destinationsArray is still empty when step 2 runs.
async.waterfall([
function(callback){
var itemLength = req.body.itemLength;
var itemProp,itemComponent;
var destination;
var destinationsArray =[];
for(var k=1; k<=itemLength; k++){
destination = new Destination({
name: req.body['destinationName'+k],
});
itemComponent = {
"itemCompProp" : req.body['itemCompProp'+k]
};
itemProp = new ItemProp({
itemComponent: itemComponent
});
// save() is asynchronous: the for loop schedules these and moves on
// without waiting for any of the nested callbacks.
itemProp.save(function(err,itemPropSaved){
// NOTE(review): `err` is not checked before itemPropSaved._id is read —
// a failed save would throw here.
destination.newProperty = itemPropSaved._id
destination.save(function(err,destinationSaved){
if(err){
console.log("Error== " + err);
}
else{
destinationsArray.push(destinationSaved._id);
}
});
});
}// End of For
// Runs immediately after SCHEDULING the saves — the array is still empty.
callback(null,destinationsArray);
},
function(destinationsArray,callback){
var brand = new Brand({
name : req.body.brandName,
});
brand.save(function(err,brandSaved){
if(err){
console.log("Error== " + err);
}else{
console.log('Brand Saved');
}
});
// Also invoked before brand.save() has finished.
callback(null);
}
], function (err, status) {
if(err){
req.flash('error', {
msg: 'Error Saving Brands'
});
console.log("Error : " + err);
}
else{
console.log("Brand Saved.");
req.flash('success', {
msg: 'Brand Successfully Added!'
});
}
});
// Redirects immediately, without waiting for the waterfall to complete.
res.redirect('/redirectSomewhere');
When we run this, The destinationsArray is returned first as null, as opposed to going through the for loop and then returning the proper value of destinationsArray over a length (itemLength) of destinations.
We want the process to be synchronous. We also tried using a closure wrapping the for Loop but to no avail.
We can't use a async.eachSeries instead of the for Loop as I am just iterating over a numeric property and we don't have any documents to iterate over
Any feasible solution to run a for Loop inside a async.waterfall?
Cheers and Thanks in Advance.
There are few problems with the code you have there:
where the callbacks got called.
where res.redirect() got call.
the for loop.
save() is asynchronous. Regular for loop will just continue without waiting for all save() calls to finish. That's why destinationsArray is empty. As you said, you cannot use async.eachSeries() since you're iterating through numeric property. However, you're on the right track there. Async.whilst() does just that. Here is the revised code with Async.whilst() and proper calling locations of the callbacks:
// Answer: replace the plain for loop with async.whilst so each iteration
// waits for its save() callbacks before advancing, and move callback() /
// res.redirect() to the points where the async work has actually finished.
async.waterfall([
function(callback){
var itemLength = req.body.itemLength;
var itemProp,itemComponent;
var destination;
var destinationsArray =[];
var k = 1; // 1st part of for loop: for(k=1; k<=itemLength; k++)
async.whilst(
function() {
return k <= itemLength; // 2nd part of for loop: for(k=1; k<=itemLength; k++)
},
function(whilstCb) {
destination = new Destination({
name: req.body['destinationName'+k]
});
itemComponent = {
"itemCompProp" : req.body['itemCompProp'+k]
};
itemProp = new ItemProp({
itemComponent: itemComponent
});
itemProp.save(function(err,itemPropSaved){
// NOTE(review): `err` is still unchecked before itemPropSaved._id is
// read — a failed save would throw inside this callback.
destination.newProperty = itemPropSaved._id
destination.save(function(err,destinationSaved){
if(err){
console.log("Error== " + err);
} else {
destinationsArray.push(destinationSaved._id);
}
k++; // 3rd part of for loop: for(k=1; k<=itemLength; k++)
whilstCb(null); // advance the whilst loop only after the save completed
});
});
},
function(err) {
// It gets here once the loop is done
console.log(destinationsArray); // This array should have all the values pushed
callback(null, destinationsArray);
}
);
},
function(destinationsArray,callback){
var brand = new Brand({
name : req.body.brandName
});
brand.save(function(err,brandSaved){
if(err){
console.log("Error== " + err);
} else {
console.log('Brand Saved');
}
// callback moved INSIDE brand.save's callback, so the waterfall only
// finishes after the brand is persisted.
callback(null);
});
}
], function (err, status) {
if(err){
req.flash('error', {
msg: 'Error Saving Brands'
});
console.log("Error : " + err);
} else {
console.log("Brand Saved.");
req.flash('success', {
msg: 'Brand Successfully Added!'
});
}
// Redirect only after all saves have completed.
res.redirect('/redirectSomewhere');
});
It's not so much the for loop that is causing you problems; rather, save is an asynchronous operation. The for loop completes, and the callback is executed, before any of the save callbacks have had a chance to complete.
What you want to do is call the async.waterfall callback after all the destination save callbacks have been executed. Something like:
// Answer sketch: invoke the waterfall callback only once the last destination
// save has completed.
destination.save(function(err,destinationSaved){
if(err){
console.log("Error== " + err);
} else {
destinationsArray.push(destinationSaved._id);
// NOTE(review): `k` is declared with var and shared — by the time any save
// callback runs, the loop has finished and k === itemLength for EVERY
// callback; counting pushes (destinationsArray.length) would be safer.
if (k === itemLength) {
// all destination callbacks have completed successfully
callback(null, destinationsArray);
}
}
});
The issue has to do with callback(null, destinationsArray); getting called outside the for loop without checking first to see the loop has been finished.
Try replacing callback(null, destinationsArray); with something like this:
// Answer sketch: succeed only when the expected number of saves completed.
// NOTE(review): `k - 1` assumes the shared loop counter has reached its final
// value, and callback(true) passes a bare boolean (not an Error) as the
// waterfall's error value — both worth confirming.
if (itemLength > 0 && destinationsArray.length === k - 1) {
callback(null, destinationsArray);
} else {
callback(true);
}
The above checks to make sure the destination.save() gets completed the proper number of times successfully.
I actually prefer the method proposed by djskinner. However, because of the console.log() that occurs when there is a save() error, the callbacked destinationsArray could possibly hold the incorrect number of items. To fix this, you could make sure to replace the console.log("Error== " + err); with something like callback(err) to end the waterfall with the error returned. In addition, the k === itemLength check doesn't properly account for the correct number of items that should be saved. This should be replaced with k === destinationsArray.length.
I made modifications to fix this and posted an updated version below.
// Revised sketch: propagate save errors via callback(err) and compare the
// count of successful saves instead of relying on the loop counter alone.
destination.save(function(err, destinationSaved){
if (err) {
// End the waterfall with the error instead of just logging it.
callback(err);
}
else {
destinationsArray.push(destinationSaved._id);
// NOTE(review): after a for(k=1; k<=itemLength; k++) loop finishes,
// k === itemLength + 1, so this equality may never hold — verify the
// intended off-by-one behaviour.
if (k === destinationsArray.length) {
callback(null, destinationsArray);
}
}
});
--EDIT-- I really like the solution that Ben posted using whilst(). This allows the creation of a loop where the iterations runs serially. For more info, view the npm page here.

Node.js: How to call a function again from its callback

I want to create a directory in server. But want to check if one exists already with the same name. If no directory exists then create one with the name provided. Otherwise append a random string to the provided name and recheck if one exists with the new name.
So far, I'm able to write a function that perform the initial check and create one if it doesn't exists. But no idea to run the check again if a directory exists.
// Target directory; `values.boxname` is supplied elsewhere (form input).
var outputDir = __dirname + '/outputfiles/' + values.boxname;
// Attempts to create `path` with permissions `mask` and reports the outcome
// through cb: cb(err) on any mkdir failure (including EEXIST), cb(null) when
// the folder was created.
function ensureExists(path, mask, cb) {
  // Two-argument call form: `mask` was omitted, so shift the arguments and
  // fall back to the default mask (484 decimal == 0o744).
  if (typeof mask === 'function') {
    cb = mask;
    mask = 484;
  }
  fs.mkdir(path, mask, function (err) {
    // Forward the error untouched; a falsy error becomes an explicit null.
    cb(err ? err : null);
  });
}
and calling the function
// The asker's calling code: creates the folder once, but cannot yet retry
// with a new name on a clash (that is what the answers below add).
ensureExists(outputDir, 484, function(err) {
if (err) {
if (err.code == 'EEXIST') {
// BUG: `var` declares a NEW local that shadows the outer `outputDir`;
// because of hoisting, the right-hand `outputDir` is undefined here.
var outputDir = outputDir + '-' + Date.now();
// NEED Help here to call this function again to run the check again
return console.log("A Folder with same name already exists");
} else {
// NOTE(review): console.err is not a function — should be console.error.
console.err(err);
};
} else {
console.log("Folder created");
}
});
So, In short I want to create directories in server with unique names..please help me to fix this problem..thanks
// Retry callback for ensureExists: on a name clash, derive a new unique name
// and try again; otherwise report the final outcome.
function callback(err) {
  if (err) {
    if (err.code == 'EEXIST') {
      // FIX: the original wrote `var outputDir = outputDir + ...`, declaring a
      // hoisted local that shadows the outer variable — its right-hand side is
      // undefined at that point. Reassign the OUTER variable instead.
      outputDir = outputDir + '-' + Date.now();
      ensureExists(outputDir, 484, callback); // Call again with the new name
      console.log("A Folder with same name already exists");
    } else {
      console.error(err); // FIX: console.err is not a function
    }
  } else {
    console.log("Folder created");
  }
}
ensureExists(outputDir, 484, callback); // Call first
Or you could merge the functionality into one:
// Creates a uniquely-named folder: if `path` already exists, appends a
// timestamp and retries until a fresh name succeeds. On success, cb receives
// the path that was actually created.
function ensureExists(path, mask, cb) {
  // `mask` is optional: support the two-argument form ensureExists(path, cb).
  if (typeof mask == 'function') {
    cb = mask;
    mask = 484; // decimal for 0o744
  }
  fs.mkdir(path, mask, function(err) {
    if (err) {
      if (err.code == 'EEXIST') {
        // Name taken: derive a new candidate and recurse with the same cb.
        var newpath = path + '-' + Date.now();
        ensureExists(newpath, mask, cb); // rerun with new path
        console.log("A Folder with same name already exists");
      } else {
        console.error(err); // FIX: console.err is not a function
        // NOTE(review): on non-EEXIST errors `cb` is never invoked, so callers
        // waiting on it will hang — consider cb(err); confirm the contract.
      };
    } else cb(path); // successfully created folder — report the final name
  });
}

Categories

Resources