jsftp uploading crappy Files - javascript

Today I've got a problem with jsftp.
When I run my script and upload.txt is empty,
jsftp correctly uploads an empty file to the server.
When upload.txt is a UTF-8 JavaScript file, for example const x = "Hello World!";,
jsftp uploads a file named .pureftpd-upload.5e35d750.15.79c5.1b3bbd87 with 0 bytes (i.e. no data).
Here's my script:
var jsftp = require("jsftp");

var ftp = new jsftp({
  host: "ftp.example.url",
  port: 21,
  user: user.name,
  pass: user.pass
});

ftp.auth(user.name, user.pass, function(err) {
  if (err) {
    openDialog();
    console.error(err);
    throw err;
  }
  else {
    ftp.put(path_datafile, '/directory/hello.js', function(err) {
      if (err) {
        console.error(err);
        throw err;
      }
    });
  }
});
(There are no errors in the output)
For the people saying you need to read the file first:
var jsftp = require("jsftp");
var fs = require("fs");

var ftp = new jsftp({
  host: "ftp.example.url",
  port: 21,
  user: user.name,
  pass: user.pass
});

fs.readFile(path_datafile, 'utf8', function(err, buff) {
  if (err) {
    console.log(err);
  }
  else {
    ftp.put(buff, '/directory/hello.js', function(err) {
      if (err) {
        console.error(err);
        throw err;
      }
    });
  }
});
That didn't work at all.
It then shows the error:
Local file doesn't exist
The error here is that fs is decoding the file as UTF-8:
fs.readFile(file, 'utf8', callback) hands you a string, and jsftp treats a plain string as a path to a local file, hence "Local file doesn't exist".
To solve that problem, just write fs.readFile(file, callback) without an encoding,
so that the file comes back as a raw Buffer.
The script will log false but work fine.
So Ragnar is right.
His answer is correct as long as you aren't using the utf8 encoding with fs.
Leave the encoding out and it will work.
Ragnar:
https://stackoverflow.com/users/1052928/ragnar
His Answer:
https://stackoverflow.com/a/26939160/12470434
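For reference, a minimal sketch of the working version might look like the one below. It reuses user, path_datafile and the remote path from the question, and relies on jsftp accepting a Buffer as the source for put:

var jsftp = require("jsftp");
var fs = require("fs");

var ftp = new jsftp({
  host: "ftp.example.url",
  port: 21,
  user: user.name,
  pass: user.pass
});

// Read the file without an encoding so the callback receives a raw Buffer.
fs.readFile(path_datafile, function(err, buff) {
  if (err) {
    return console.error(err);
  }
  // jsftp accepts a Buffer as the source, so the bytes are uploaded
  // as-is instead of being interpreted as a local file path.
  ftp.put(buff, '/directory/hello.js', function(err) {
    if (err) {
      return console.error(err);
    }
    console.log("Upload finished");
  });
});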

Related

How to retrieve a file using node.js fs readFile function without specifying the name?

I'm currently stuck trying to retrieve a file from the file system in order to send it through an API to the client. For my backend I'm using Express.
I'm using the fs library, and currently I'm trying to do it with the readFile function, but I want to do it without specifying the file name, or at least without the extension, because it will depend on which file was uploaded from the client.
What I've tried so far (unsuccessfully) is shown below:
router.get("/info/pic", async (req, res) => {
  const file = await fs.readFile("./images/profile/me.*", (err, data) => {
    if (err) {
      console.log(err); // Error: ENOENT: no such file or directory, open './images/profile/me.*'
      return;
    }
    console.log(data);
  });
});

const file = await fs.readFile("./images/profile/*.*", (err, data) => {
  if (err) {
    console.log(err); // Error: ENOENT: no such file or directory, open './images/profile/*.*'
    return;
  }
  console.log(data);
});

const file = await fs.readFile("./images/profile/*", (err, data) => {
  if (err) {
    console.log(err); // Error: ENOENT: no such file or directory, open './images/profile/*'
    return;
  }
  console.log(data);
});
If I specify the file name, everything works fine, e.g. fs.readFile("./images/profile/me.jpg"), but as I said, I don't know the right extension of that file for sure.
Important info: in that directory there will be only one file!
Please help me!
Thank you in advance!
If there is only one file in the directory, the following loop will have only one iteration:
for await (const file of fs.opendirSync("./images/profile")) {
  var image = fs.readFileSync("./images/profile/" + file.name);
  ...
}
const fs = require('fs');

fs.readdir('./images/profile', function (err, files) {
  // handling error
  if (err) {
    return console.log(err);
  }
  files.forEach(function (file) {
    // Do whatever you want to do with the file
  });
});
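Applied to the route from the question, a rough sketch could look like this. The ./images/profile directory is the one from the question; the use of res.sendFile and the path module is an assumption about how you want to return the image:

const express = require("express");
const fs = require("fs");
const path = require("path");

const router = express.Router();

router.get("/info/pic", (req, res) => {
  const dir = path.join(__dirname, "images", "profile");
  fs.readdir(dir, (err, files) => {
    if (err || files.length === 0) {
      return res.status(404).send("No profile picture found");
    }
    // The directory is expected to hold exactly one file,
    // so take the first entry regardless of its extension.
    res.sendFile(path.join(dir, files[0]));
  });
});

module.exports = router;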

Copy file to new location

I'm trying to copy a file, including the data inside it, to a new file in a folder.
I tried doing this but it didn't work:
copyFile(`./data/guilddata/guilds/default/GUILDID.json`, `./data/guilddata/guilds/${guild.id}/GUILDID.json`, (err) => {
  if (err) throw err;
});
https://www.npmjs.com/package/fs-copy-file
Does anyone know what to do? (${guild.id} just means the guild id; the folder is already there.) I also get no errors. Thank you
const fs = require('fs');

fs.copyFile('./data/guilddata/guilds/default/GUILDID.json', './data/guilddata/guilds/' + guild.id + '/GUILDID.json', (err) => {
  if (err) throw err;
  console.log('All done! The file is copied!');
});
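If the guild folder might not already exist, a small sketch using the promise-based fs API could create it first. The paths are the ones from the question; { recursive: true } needs Node 10.12 or newer:

const fs = require("fs").promises;

async function copyGuildTemplate(guildId) {
  const src = "./data/guilddata/guilds/default/GUILDID.json";
  const destDir = `./data/guilddata/guilds/${guildId}`;

  // Create the destination folder if it isn't there yet.
  await fs.mkdir(destDir, { recursive: true });
  await fs.copyFile(src, `${destDir}/GUILDID.json`);
  console.log("All done! The file is copied!");
}

copyGuildTemplate(guild.id).catch(console.error);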

Nodejs - catch upload to ftp error (ftp module)

How can I catch an upload error in Node.js via FTP, for example when my computer loses the connection? I am using the ftp module and none of these (error, close or end) throws any errors. Or maybe there are other solutions, I mean another npm module?
var Client = require('ftp');

var c = new Client();
c.on('ready', callback);
c.on('error', function(err) {
  console.log(err);
});
c.on('close', function(err) {
  console.log(err);
});
c.on('end', function() {
  console.log("END");
});
c.connect({
  host: host,
  port: port,
  user: username,
  password: password
});
...
c.put(uploadfile, uploadpath, onupload);
I know it's not the best idea to edit the library, but I solved my problem by adding this code
if (err.indexOf("ECONNRESET") > -1) {
  this._reset();
}
after
sock.once('error', function(err) {
  self.emit('error', err);
at line 965 in connection.js of the ftp library.
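If you'd rather not patch the library, a rough sketch of handling failures from the outside is shown below. Whether the 'error' and 'close' events actually fire for your failure mode depends on the library version; host, port, credentials and the upload paths are the placeholders from the question:

var Client = require('ftp');

function uploadWithErrorHandling(localPath, remotePath, done) {
  var c = new Client();
  var finished = false;

  function finish(err) {
    if (finished) return;
    finished = true;
    c.end();
    done(err);
  }

  // Connection-level problems (e.g. ECONNRESET) should surface here.
  c.on('error', finish);
  // 'close' passes true when the connection ended because of an error.
  c.on('close', function(hadError) {
    if (hadError) finish(new Error('Connection closed after an error'));
  });

  c.on('ready', function() {
    c.put(localPath, remotePath, function(err) {
      finish(err);
    });
  });

  c.connect({ host: host, port: port, user: username, password: password });
}

uploadWithErrorHandling(uploadfile, uploadpath, function(err) {
  if (err) console.error('Upload failed:', err);
  else console.log('Upload finished');
});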

Check what files are present in remote directory with grunt

I'm looking for a way to check which files are present in a remote directory that I want to access via SSH or similar, and to write the filenames into an array.
So far I've had no luck. Unix rsync has an -n flag which can print every file present at the destination location, but I don't get how to use the rsync output in grunt.
Here's how you might do it via sftp with ssh2:
var SSH2 = require('ssh2');
var conn = new SSH2();
conn.on('ready', function() {
  conn.sftp(function(err, sftp) {
    if (err) throw err;
    sftp.readdir('/tmp', function(err, list) {
      if (err) throw err;
      console.dir(list);
      conn.end();
    });
  });
}).connect({
  host: '192.168.100.100',
  port: 22,
  username: 'frylock',
  // password: 'foobarbaz',
  privateKey: require('fs').readFileSync('/here/is/my/key')
});
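To get just the filenames into an array inside a grunt task, something along these lines could work. The connection settings and the remote directory are placeholders, and each readdir entry exposes its name as filename:

var SSH2 = require('ssh2');

module.exports = function(grunt) {
  grunt.registerTask('remote-files', 'Collect remote filenames via sftp', function() {
    var done = this.async();
    var conn = new SSH2();

    conn.on('ready', function() {
      conn.sftp(function(err, sftp) {
        if (err) return done(err);
        sftp.readdir('/tmp', function(err, list) {
          if (err) return done(err);
          // Each entry exposes the file name as `filename`.
          var filenames = list.map(function(entry) { return entry.filename; });
          grunt.config.set('remoteFiles', filenames);
          grunt.log.writeln(filenames.join('\n'));
          conn.end();
          done();
        });
      });
    }).connect({
      host: '192.168.100.100',
      port: 22,
      username: 'frylock',
      privateKey: require('fs').readFileSync('/here/is/my/key')
    });
  });
};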

NodeJS: Uploading a remote file to S3 with request and knox

I'm trying to devise a way to upload a file from a URL to S3 using request and knox. Currently, my code looks like this:
request(item.productImage, function(err, res, body) {
  if (!err && res.statusCode == 200) {
    fs.writeFile('/tmp/' + filename, body, 'base64', function(err, data) {
      if (err) {
        return console.log(err);
      }
      client.putFile('/tmp/' + filename, '/item/' + item._id + '/' + filename, function(err, res) {
        if (err) {
          return console.log(err);
        }
      });
    });
  }
});
This doesn't work, as it downloads about 652 bytes of a 4 KB file before it stops. Strangely, if I don't provide a callback to fs.writeFile(), it downloads the entire 4 KB locally.
What's the best way of accomplishing this?
There are a number of questions about this here on Stack Overflow, but I can't seem to find one that answers your question. The solution below should work; however, I'm having trouble getting knox to work at all on my machine right now. I hope you'll have more luck!
UPDATE: I seem to have had some problems with S3 here; the code below works. I did change one thing: you need to specify the encoding as null to request, so you get a Buffer back. Otherwise, binary data won't work so well.
request(item.productImage, {encoding: null}, function(err, res, body) {
  if (!err && res.statusCode == 200) {
    var req = client.put('/item/' + item._id + '/' + filename, {
      'Content-Type': res.headers['content-type'],
      'Content-Length': res.headers['content-length']
    });
    req.on('response', function(res) {
      console.log('response from s3, status:', res.statusCode, 'url:', req.url);
    });
    req.on('error', function(err) {
      console.error('Error uploading to s3:', err);
    });
    req.end(body);
  }
});
Note: With this solution, you avoid having to buffer the files to disk - that's why I chose to use the lower-level put method of the knox client.
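For completeness, the client used above would be a knox client created roughly like this (the key, secret and bucket values are placeholders):

var knox = require('knox');
var request = require('request');

// Placeholder credentials: replace with your own S3 settings.
var client = knox.createClient({
  key: 'YOUR_AWS_KEY',
  secret: 'YOUR_AWS_SECRET',
  bucket: 'your-bucket-name'
});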
