Node.js: Uploading a remote file to S3 with request and knox

I'm trying to devise a way to upload a file from a URL to S3 using request and knox. Currently, my code looks like this:
request(item.productImage, function(err, res, body) {
  if (!err && res.statusCode == 200) {
    fs.writeFile('/tmp/' + filename, body, 'base64', function(err, data) {
      if (err) {
        return console.log(err);
      }
      client.putFile('/tmp/' + filename, '/item/' + item._id + '/' + filename, function(err, res) {
        if (err) {
          return console.log(err);
        }
      });
    });
  }
});
This doesn't work as it downloads about 652 bytes of a 4kb file before it stops. Strangely, if I don't provide a callback to fs.writeFile() it downloads the entire 4kb locally.
What's the best way of accomplishing this?

There are a number of questions about this here on Stack Overflow, but I can't seem to find one that answers your question. The solution below should work; however, I'm having trouble getting knox to work at all on my machine right now. I hope you will have better luck!
UPDATE: I seem to have had some problems with S3 here; the code below works. I did change one thing: you need to specify encoding as null to request so that you get a Buffer back. Otherwise, binary data won't work so well.
request(item.productImage, {encoding: null}, function(err, res, body) {
  if (!err && res.statusCode == 200) {
    var req = client.put('/item/' + item._id + '/' + filename, {
      'Content-Type': res.headers['content-type'],
      'Content-Length': res.headers['content-length']
    });
    req.on('response', function(res) {
      console.log('response from s3, status:', res.statusCode, 'url:', req.url);
    });
    req.on('error', function(err) {
      console.error('Error uploading to s3:', err);
    });
    req.end(body);
  }
});
Note: With this solution, you avoid having to buffer the files to disk; that's why I chose to use the lower-level put method of the knox client.
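If you would rather not buffer the whole body in memory either, knox also exposes a putStream method, so a variation on the answer above (just a sketch, reusing the same client, item, and filename variables) could pipe the download straight through to S3:
request(item.productImage)
  .on('error', function(err) {
    console.error('Error downloading image:', err);
  })
  .on('response', function(res) {
    var headers = {
      'Content-Type': res.headers['content-type'],
      'Content-Length': res.headers['content-length']
    };
    // stream the HTTP response directly into S3 instead of buffering it first
    client.putStream(res, '/item/' + item._id + '/' + filename, headers, function(err, s3res) {
      if (err) {
        return console.error('Error uploading to s3:', err);
      }
      console.log('response from s3, status:', s3res.statusCode);
    });
  });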

Related

Why does downloading data from MongoDB using GridFS fail?

I am trying to write an application whose functionality includes storing files in MongoDB. I succeeded in uploading files there with the GridFS library, but I have failed many times trying to get access to the data. I constantly get an internal server error response while loading file metadata with this request:
router.get('/uploads', async (req, res) => {
  try {
    gfs.files.find().toArray((err, files) => {
      if (!files || files.length === 0) {
        return res.status(404).json({ message: 'No files exisits' })
      }
    })
    res.status(200).json(files);
  } catch (error) {
    return res.status(500).json({ message: "Could not find files, please try again" });
  }
})
The error response comes back with content-type: application/json; charset=utf-8.
The other request, for downloading a certain file's data, breaks my whole backend, and I get this error:
Proxy error: Could not proxy request /api/user/getuser from localhost:3000 to http://localhost:4000/ (ECONNREFUSED).
After that none of my requests work properly on any page.
And here is the Node.js code for that request:
router.get('/uploads/:filename', async (req, res) => {
  try {
    gfs.files.findOne({ filename: req.params.filename }, (err, file) => {
      if (!file || file.length === 0) {
        return res.status(404).json({ message: 'No file exisits' })
      }
    })
    res.status(200).json(file);
  } catch (error) {
    return res.status(500).json({ message: "Could not find files, please try again" });
  }
})
The GridFS configuration is as follows:
const conn = mongoose.createConnection(config.get('mongoURI'));
conn.once('open', () => {
  gfs = Grid(conn.db, mongoose.mongo);
  gfs.collection('uploads');
})
const storage = new GridFsStorage({
  url: config.get('mongoURI'),
  file: (req, file) => {
    return new Promise((resolve, reject) => {
      crypto.randomBytes(15, (err, buf) => {
        if (err) {
          return reject(err);
        }
        const filename = buf.toString('hex') + path.extname(file.originalname);
        const fileInfo = {
          filename: filename,
          bucketName: 'uploads'
        };
        resolve(fileInfo);
      });
    });
  }
});
I assume that I missed something important in the documentation. I'm not even trying to download a whole image yet, and I'm already stuck. Useful advice would be highly appreciated!
I found the issue that caused the error! While emulating my request I got this error:
[0] Error [ERR_HTTP_HEADERS_SENT]: Cannot set headers after they are sent to the client
Therefore the problem was in that part of my code:
res.status(200).json(files);
Node.js does not allow setting the status or any other header after the response body has already been sent.
So the only fix I had to make was changing:
res.status(200).json(files) --> res.json(files);
I hope this solution will be useful for someone.
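For reference, here is a minimal sketch of the same route with the response kept inside the query callback, so the handler can only ever send one response (assuming the same gfs setup as in the question):
router.get('/uploads', (req, res) => {
  gfs.files.find().toArray((err, files) => {
    if (err) {
      return res.status(500).json({ message: 'Could not find files, please try again' });
    }
    if (!files || files.length === 0) {
      return res.status(404).json({ message: 'No files exist' });
    }
    // files is only available inside this callback
    res.json(files);
  });
});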

How to delete a folder or file through S3 requests in a DigitalOcean Space

I'm trying to work with a great service called Spaces on DigitalOcean. Here is a pretty nice explanation about uploading files, but it only covers GET requests and says nothing about DELETE. Maybe someone has experience with the S3 API together with DigitalOcean Spaces on Node.js?
Using a localhost server, I found in the Space's settings that under "Advanced CORS Options" you can provide an "Origin" and the only allowed method is GET.
PUT, DELETE, POST, and HEAD are disabled.
Here is what I'm trying:
export default {
  upload: (req, res) => {
    const storage = multerS3({
      s3,
      bucket,
      contentType: multerS3.AUTO_CONTENT_TYPE,
      acl: 'public-read',
      key: function(req, file, callback) {
        const { email } = req.user;
        callback(null, email + '/' + file.originalName);
      },
    });
    const upload = multer({ storage }).array('upload', 3);
    upload(req, res, err => {
      if (err) {
        return res.status(422).send({
          errors: [{ title: 'Image Upload Error', detail: err.message }],
        });
      } else {
        console.log('Success upload file');
      }
      res.end();
    });
  },
  delete: (req, res) => {
    const params = { Bucket: bucket, Key: 'some-folder-name-here' };
    s3.deleteObjects(params, function(err, data) {
      if (err) {
        return res.send({ error: err });
      }
      res.send({ data });
    });
  }
}
Actually, it would be great if it were possible to delete and upload files from my localhost using the plain S3 API.
Thank you.
UPDATE:
The mistake was here: s3.deleteObjects. I had to use s3.deleteObject, and yes, the Key has to be the whole object path without the bucket name.
It's explained perfectly here. Thanks everyone.
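For reference, a sketch of the corrected call (the key shown here is just a hypothetical example; the point is that it is the full object path, with no bucket name in front):
const params = {
  Bucket: bucket,
  Key: 'user@example.com/avatar.png' // full path inside the bucket, without the bucket name
};
s3.deleteObject(params, function(err, data) {
  if (err) {
    return res.send({ error: err });
  }
  res.send({ data });
});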

How to write a program to download a bunch of images from URLs and upload them to AWS S3

Can someone please help me write a program that downloads image files from URLs, uploads each file to AWS S3, and then deletes the downloaded image?
var download = function(uri, filename, callback) {
  request.head(uri, function(err, res, body) {
    console.log('content-type:', res.headers['content-type']);
    console.log('content-length:', res.headers['content-length']);
    request(uri).pipe(fs.createWriteStream(filename)).on('close', callback);
  });
};
download('https://www.google.com/images/srpr/logo3w.png', 'google.png', function() {
  console.log('done');
  fs.readFile('google.png', function (err, data) {
    if (err) {
      console.log("Read file failed: " + err)
    }
    let params = {
      Bucket: waftBucket,
      Key: 'google.png',
      Body: data,
      ContentType: 'image/png',
      ACL: 'public-read'
    };
    let s3 = new AWS.S3();
    s3.putObject(params, function(err, data) {
      if (err) {
        return console.log('There was an error uploading image: ' + err.message);
      }
      console.log('Successfully Uploaded.');
      fs.unlink('google.png', function(err) {
        if (err) console.log(err);
      });
    });
  });
});
The above code works for one file, but it does not work inside a loop:
var download = function(uri, filename, callback) {
  request.head(uri, function(err, res, body) {
    console.log('content-type:', res.headers['content-type']);
    console.log('content-length:', res.headers['content-length']);
    request(uri).pipe(fs.createWriteStream(filename)).on('close', callback);
  });
};
var getFileName = function(url) {
  return url.split("/")[5];
}
var data = {
  frags: [
    {url: "https://fimgs.net/images/perfume/375x500.39678.jpg", fileName: getFileName("https://fimgs.net/images/perfume/375x500.39678.jpg")},
    {url: "https://fimgs.net/images/perfume/375x500.4506.jpg", fileName: getFileName("https://fimgs.net/images/perfume/375x500.4506.jpg")},
    {url: "https://fimgs.net/images/perfume/375x500.29601.jpg", fileName: getFileName("https://fimgs.net/images/perfume/375x500.29601.jpg")},
    {url: "https://fimgs.net/images/perfume/375x500.32597.jpg", fileName: getFileName("https://fimgs.net/images/perfume/375x500.32597.jpg")}
  ]
};
for (var i = 0; i < data.frags.length; i++) {
  download(data.frags[i].url, data.frags[i].fileName, function() {
    fs.readFile(data.frags[i].fileName, function (err, data) {
      if (err) {
        console.log("Read file failed: " + err)
      }
      let params = {
        Bucket: waftBucket,
        Key: 'images/' + data.frags[i].fileName,
        Body: data,
        ContentType: 'image/jpeg',
        ACL: 'public-read'
      };
      let s3 = new AWS.S3();
      s3.putObject(params, function(err, data) {
        if (err) {
          return console.log('There was an error uploading image: ' + err.message);
        }
        console.log('Successfully Uploaded.');
        fs.unlink(data.frags[i].fileName);
      });
    });
  });
}
Is there any other way to do this? Is it possible to read the contents of a URL and save it directly to an AWS S3 bucket, without downloading the file to a temp folder and deleting it afterwards?
Thanks in advance.
Without knowing much about the exact use case, I would recommend checking out the AWS CLI for this. It provides a sync option, which is described as follows:
Syncs directories and S3 prefixes. Recursively copies new and updated
files from the source directory to the destination. Only creates
folders in the destination if they contain one or more files.
So, you need to download your files to one temp folder, sync it with S3 using the CLI and then delete the folder (using rimraf or fs).
s3.putObject only supports uploading a single object per HTTP request.
Since all of these functions are asynchronous, using a plain for loop will create a lot of mess.
So you can either use promises to make the requests one after another, or use a better solution as pointed out by @kmukkamala.
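For the "without a temp file" part of the question, here is a rough sketch (assuming the request and aws-sdk packages, and the waftBucket and data variables from the question) that fetches each image into a Buffer and passes it straight to putObject, one file at a time:
const request = require('request');
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

function fetchBuffer(uri) {
  return new Promise((resolve, reject) => {
    // encoding: null makes request hand back a raw Buffer instead of a string
    request(uri, { encoding: null }, (err, res, body) => {
      if (err || res.statusCode !== 200) {
        return reject(err || new Error('HTTP ' + res.statusCode));
      }
      resolve({ body, contentType: res.headers['content-type'] });
    });
  });
}

async function uploadAll(frags) {
  for (const frag of frags) {
    const { body, contentType } = await fetchBuffer(frag.url);
    await s3.putObject({
      Bucket: waftBucket, // defined elsewhere, as in the question
      Key: 'images/' + frag.fileName,
      Body: body,
      ContentType: contentType,
      ACL: 'public-read'
    }).promise();
    console.log('Uploaded', frag.fileName);
  }
}

uploadAll(data.frags).catch(console.error);
Because each iteration awaits both the download and the upload, there is no shared loop counter for the callbacks to trample on, which is what breaks the original for loop.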
Install S3 tools on the server and run the following command to sync the entire directory.
s3cmd sync myserverDir s3://bucket/
You can schedule a cron job to run the script every 10-20 minutes and then delete the folder or files.

Dropbox API - Force Direct download

I'm using the Dropbox API to upload files into Dropbox, then generate shareable links and send them to users.
The issue is that I want to force the file to download instead of showing a preview via the Dropbox share link.
I know I can force a download by appending ?dl=1 to the link, but the shareable link generated by Dropbox comes back with ?dl=0.
Code:
request.put('https://api-content.dropbox.com/1/files_put/auto/reports/' + req.body.Name + '.pdf', {
  headers: {
    Authorization: 'TOKEN',
    'Content-Type': 'application/pdf'
  },
  body: content
}, function optionalCallback(err, httpResponse, bodymsg) {
  if (err) {
    console.log(err);
  }
  else {
    console.log("File uploaded to dropbox successfully!");
    fs.unlink(temp_dir + 'report.pdf', function(err) {
      if (err)
        throw err;
      else {
        console.log("file deleted from server!");
      }
    });
    request.post('https://api.dropboxapi.com/1/shares/auto/reports/' + req.body.Name + '.pdf' + '?short_url=false?dl=1', {
      headers: {
        Authorization: 'TOKEN'
      }
    }, function optionalCallback(err, httpResponse, bodymsg) {
      if (err) {
        console.log(err);
      }
      else {
        console.log('Shared link 2 ' + JSON.parse(httpResponse.body).url);
        res.json(JSON.parse(httpResponse.body).url);
      }
    });
  }
});
I am using v1 of the Dropbox API, which will be deprecated soon, but for now I need to use it.
Using the dl=1 URL parameter is the correct way to force downloads on these links. You can find more information on this here:
https://www.dropbox.com/help/desktop-web/force-download
To do this properly, you should use an actual URL parser to parse the URL. Then, if it already has a dl parameter, set it to 1; if it doesn't, add a dl parameter set to 1.
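For example, here is a small sketch using Node's built-in URL class to force the parameter on whatever link the shares endpoint returns (variable names follow the question's code):
const sharedUrl = JSON.parse(httpResponse.body).url; // typically ends in ?dl=0
const parsed = new URL(sharedUrl);
parsed.searchParams.set('dl', '1'); // adds the parameter, or overwrites an existing dl value
res.json(parsed.toString());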
You can take advantage of streams and just pipe the download stream straight to the client. Here is an example using my minimalistic dropbox-v2-api wrapper and a hapi.js route configuration:
{
  path: '/getFile',
  method: 'GET',
  handler: (request, response) => {
    dropbox({
      resource: 'files/download',
      parameters: {
        path: '/dropbox/image.jpg'
      }
    }, (err, result) => {
      //download completed
    }).pipe(response); //piping file stream
  }
}

Node.js server-side script to update a JSON file

I have partially written a Node.js file to update a JSON file with data received from the client. The POST works successfully; the GET does not. I was wondering if there's a better way to do this? I have about six different callbacks to write for, all different. Is there a Node.js script already written that has all of the things I need, or a different language that would make this easier?
Here's the Node.js code:
var http = require('http');
var fs = require('fs');
http.createServer(function (req, res) {
  console.log('Request received: ');
  if (req.method == 'POST') {
    req.on('data', function (chunk) {
      fs.writeFile("comments-data.json", chunk, function(err) {
        if (err) {
          return console.log(err);
        }
        console.log("The file was saved!");
      })
    });
    res.end('{"msg": "success"}');
  };
  if (req.method == 'GET') {
    req.on('data', function (chunk) {
      fs.readFile('comments-data.json', 'utf8', function (err, data) {
        if (err) throw err;
        obj = JSON.parse(data);
        return data;
      });
    });
    res.end(data);
  };
}).listen(8080, '127.0.0.1');
console.log('Server running at http://127.0.0.1:8080/');
Here's the AJAX call:
postComment: function(commentJSON, success, error) {
  $.ajax({
    type: 'post',
    url: 'http://127.0.0.1:8080',
    data: commentJSON,
    success: function(comment) {
      success(comment)
    },
    error: error
  });
},
But there's an AJAX call for all sorts of things in the jQuery plugin that I'm using. I need GET, POST, PUT, DELETE, and sometimes multiple calls within one callback.
Here's a full list of all of the callbacks I'm using:
http://viima.github.io/jquery-comments/#link-3-6
Using Express you can do this much more easily.
const express = require('express');
const fs = require('fs');
const app = express.Router();

// Parse incoming request bodies so req.body is populated
// (jQuery's $.ajax sends form-encoded data by default, hence urlencoded as well)
app.use(express.json());
app.use(express.urlencoded({ extended: true }));

// POST request: save the posted comment data to disk
app.post('/', (req, res, next) => {
  fs.writeFile("comments-data.json", JSON.stringify(req.body), function(err) {
    if (err) {
      console.log(err);
      return res.status(500).json({'status': 'Error'});
    }
    console.log("The file was saved!");
    res.json({'status': 'Success'})
  })
})

// GET request: read the file back and return its contents
app.get('/', (req, res, next) => {
  fs.readFile('comments-data.json', 'utf8', function (err, data) {
    if (err) {
      console.log(err);
      return res.status(500).json({'status': 'Error'});
    }
    res.json({'status': 'Success', 'data': JSON.parse(data)})
  });
})

module.exports = app;
As for your question about writing this in a different module: that depends on the pattern you adopt. There are various Node.js patterns available, e.g. controller-based or class-based. It all depends on what you find comfortable.
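For completeness, here is a sketch of how the router above could be mounted in a full Express app (the comments.js file name is hypothetical and assumes the router is exported with module.exports, as in the snippet above):
// server.js
const express = require('express');
const app = express();

app.use('/', require('./comments'));

app.listen(8080, '127.0.0.1', () => {
  console.log('Server running at http://127.0.0.1:8080/');
});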
