Download file from Amazon S3 using REST API - javascript

I have my own REST API to call in order to download a file. (In the end, the file could be stored on different kinds of servers... Amazon S3, locally, etc.)
To get a file from s3, I should use this method:
var url = s3.getSignedUrl('getObject', params);
This will give me a downloadable link to call.
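For reference, the params for that call would be something like this (the bucket, key and expiry below are just placeholders):
var AWS = require('aws-sdk');
var s3 = new AWS.S3();

var params = {
    Bucket: 'my-bucket',       // placeholder bucket name
    Key: 'path/to/file.pdf',   // placeholder object key
    Expires: 60                // how long the signed link stays valid, in seconds
};
var url = s3.getSignedUrl('getObject', params);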
Now, my question is, how can I use my own REST API to download a file when it comes from that link? Is there a way to redirect the call?
I'm using Hapi for my REST server.
{
    method: "GET", path: "/downloadFile",
    config: {auth: false},
    handler: function (request, reply) {
        // TODO
        reply({})
    }
},

Instead of using a redirect to download the desired file, return an unbuffered stream from S3. An unbuffered stream can be obtained from the HttpResponse within the AWS SDK. This means there is no need to download the file from S3 to disk, read it back in, and then have the requester download it.
FYI, I use this getObject() approach with Express and have never used Hapi, so the route definition may not be exact, but hopefully it captures the essence of what I'm trying to achieve.
Hapi.js route
const getObject = require('./getObject');
{
    method: "GET", path: "/downloadFile",
    config: {auth: false},
    handler: function (request, reply) {
        let key = '';    // get key from request
        let bucket = ''; // get bucket from request

        return getObject(bucket, key)
            .then((response) => {
                reply.statusCode(response.statusCode);
                Object.keys(response.headers).forEach((header) => {
                    reply.header(header, response.headers[header]);
                });

                return reply(response.readStream);
            })
            .catch((err) => {
                // handle err
                reply.statusCode(500);
                return reply('error');
            });
    }
},
getObject.js
const AWS = require('aws-sdk');
const S3 = new AWS.S3(<your-S3-config>);

module.exports = function getObject(bucket, key) {
    return new Promise((resolve, reject) => {
        // Get the file from the bucket
        S3.getObject({
            Bucket: bucket,
            Key: key
        })
        .on('error', (err) => {
            return reject(err);
        })
        .on('httpHeaders', (statusCode, headers, response) => {
            // If the Key was found inside Bucket, prepare a response object
            if (statusCode === 200) {
                let responseObject = {
                    statusCode: statusCode,
                    headers: {
                        'Content-Disposition': 'attachment; filename=' + key
                    }
                };

                if (headers['content-type'])
                    responseObject.headers['Content-Type'] = headers['content-type'];
                if (headers['content-length'])
                    responseObject.headers['Content-Length'] = headers['content-length'];

                responseObject.readStream = response.httpResponse.createUnbufferedStream();

                return resolve(responseObject);
            }
        })
        .send();
    });
}

Return an HTTP 303 redirect with the Location header set to the object's public URL in the S3 bucket.
If your bucket is private then you need to proxy the request instead of performing a redirect, unless your clients also have access to the bucket.
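For example, a rough sketch of such a redirect in a hapi route, assuming the same reply-style handler as the question and a presigned URL from getSignedUrl (bucket and key are placeholders):
{
    method: "GET", path: "/downloadFile",
    config: {auth: false},
    handler: function (request, reply) {
        // generate a short-lived presigned URL for the object (placeholder bucket/key)
        var url = s3.getSignedUrl('getObject', { Bucket: 'my-bucket', Key: 'my-key', Expires: 60 });
        // 303 See Other: tell the client to fetch the file from S3 directly
        return reply.redirect(url).code(303);
    }
},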


Upload file to google drive after http get request

I have two functions in separate files to split up the workflow.
const download = function(url) {
    const file = fs.createWriteStream("./test.png");
    const request = https.get(url, function(response) {
        response.pipe(file);
    });
}
This function in my fileHelper.js is supposed to take a URL with an image in it and then save it locally to test.png
function uploadFile(filePath) {
    fs.readFile('credentials.json', (err, content) => {
        if (err) return console.log('Error loading client secret file:', err);
        // Authorize a client with credentials, then call the Google Drive API.
        authorize(JSON.parse(content), function (auth) {
            const drive = google.drive({version: 'v3', auth});
            const fileMetadata = {
                'name': 'testphoto.png'
            };
            const media = {
                mimeType: 'image/png',
                body: fs.createReadStream(filePath)
            };
            drive.files.create({
                resource: fileMetadata,
                media: media,
                fields: 'id'
            }, (err, file) => {
                if (err) {
                    // Handle error
                    console.error(err);
                } else {
                    console.log('File Id: ', file.id);
                }
            });
        });
    });
}
This function in my googleDriveHelper.js is supposed to take the file path and upload that stream into my Google Drive. These two functions work on their own, but https.get works asynchronously, so if I call googleDriveHelper.uploadFile(filePath) right after the download, the full file isn't there yet and a blank file gets uploaded to my Drive instead.
I want to find a way so that when the fileHelper.download(url) is called, it automatically uploads into my drive.
I also don't know if there is a way to create a readStream directly from the download function to the upload function, so I can avoid having to save the file locally to upload it.
I believe your goal is as follows.
You want to upload a file retrieved from a URL to Google Drive.
When you download the file from the URL, you want to upload it to Google Drive without creating the file.
You want to achieve this using googleapis with Node.js.
You have already been able to upload a file using Drive API.
For this, how about this answer?
Modification points:
In the download function, the retrieved buffer is converted to a stream, and the stream is returned.
In the uploadFile function, the retrieved stream is used for uploading.
When the file ID is retrieved from the response value of the Drive API, please use file.data.id instead of file.id.
With the above modifications, the file downloaded from the URL can be uploaded to Google Drive without creating a local file.
Modified script:
Please modify your script as follows.
download()
// Note: this modified version uses the "request" package (npm install request).
const request = require("request");

const download = function (url) {
    return new Promise(function (resolve, reject) {
        request(
            {
                method: "GET",
                url: url,
                encoding: null, // return the body as a raw Buffer
            },
            (err, res, body) => {
                if (err || res.statusCode != 200) {
                    reject(err || new Error("Request failed with status " + res.statusCode));
                    return;
                }
                const stream = require("stream");
                const bs = new stream.PassThrough();
                bs.end(body);
                resolve(bs);
            }
        );
    });
};
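Note that with encoding: null the whole body arrives as a single Buffer, and the PassThrough stream simply wraps that buffer so the Drive client can consume it as a stream; the file is still held in memory, it just never touches the local filesystem.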
uploadFile()
function uploadFile(data) { // <--- Modified
    fs.readFile("drive_credentials.json", (err, content) => {
        if (err) return console.log("Error loading client secret file:", err);
        authorize(JSON.parse(content), function (auth) {
            const drive = google.drive({ version: "v3", auth });
            const fileMetadata = {
                name: "testphoto.png",
            };
            const media = {
                mimeType: "image/png",
                body: data, // <--- Modified
            };
            drive.files.create(
                {
                    resource: fileMetadata,
                    media: media,
                    fields: "id",
                },
                (err, file) => {
                    if (err) {
                        console.error(err);
                    } else {
                        console.log("File Id: ", file.data.id); // <--- Modified
                    }
                }
            );
        });
    });
}
For testing
For example, the above scripts can be tested with the following script.
async function run() {
    const url = "###";
    const data = await fileHelper.download(url);
    googleDriveHelper.uploadFile(data);
}
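For this test to run, the two helpers also need to be imported; a minimal sketch, assuming fileHelper.js exports download and googleDriveHelper.js exports uploadFile (hypothetical wiring, not shown in the question):
const fileHelper = require("./fileHelper");               // assumes module.exports = { download }
const googleDriveHelper = require("./googleDriveHelper");  // assumes module.exports = { uploadFile }

run();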
References:
Class: stream.PassThrough
google-api-nodejs-client

Capturing an image with ngx-webcam and sending it to face recognition api

I am currently trying to send an image captured with ngx-webcam straight to a Face Detection API via my Node.js backend, without saving it to my backend server. The problem is that I keep getting a header error in my Node.js file. How can I resolve this issue?
I noticed that the image url being passed is quite long. Could that be an issue?
Image url:
"data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAMCAgICAgMCAgIDAwMDBAYEBAQEBAgGBgUGCQgKCgkICQkKDA8MCgsOCwkJDRENDg8QEBEQCgwSExIQEw8QEBD/2wBDAQMDAwQDBAgEBAgQCwkLEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBD/wAARCAHgAoADASIAAhEBAxE..."
My error is:
TypeError [ERR_HTTP_INVALID_HEADER_VALUE]: Invalid value "undefined" for header "Content-Length"
at ClientRequest.setHeader (_http_outgoing.js:473:3)
at FormData.<anonymous> (C:\Users\Roger\Documents\GitHub\angular-face-recognition-app\back-end\node_modules\form-data\lib\form_data.js:321:13)
at C:\Users\Roger\Documents\GitHub\angular-face-recognition-app\back-end\node_modules\form-data\lib\form_data.js:265:7
at C:\Users\Roger\Documents\GitHub\angular-face-recognition-app\back-end\node_modules\form-data\node_modules\async\lib\async.js:251:17
at done (C:\Users\Roger\Documents\GitHub\angular-face-recognition-app\back-end\node_modules\form-data\node_modules\async\lib\async.js:126:15)
at C:\Users\Roger\Documents\GitHub\angular-face-recognition-app\back-end\node_modules\form-data\node_modules\async\lib\async.js:32:16
at C:\Users\Roger\Documents\GitHub\angular-face-recognition-app\back-end\node_modules\form-data\node_modules\async\lib\async.js:248:21
at C:\Users\Roger\Documents\GitHub\angular-face-recognition-app\back-end\node_modules\form-data\node_modules\async\lib\async.js:572:34
at C:\Users\Roger\Documents\GitHub\angular-face-recognition-app\back-end\node_modules\form-data\lib\form_data.js:105:13
at FSReqWrap.oncomplete (fs.js:153:21)
Front end: Angular
Component file:
// captures image function
public handleImage(webcamImage: WebcamImage): void {
    // stores it into the webcamImage variable
    this.webcamImage = webcamImage;
    // uses fda.sendImage function to send webcamImage to the API via a service
    this.fda.sendImage(this.webcamImage.imageAsDataUrl).subscribe(res => {});
}
Service file
sendImage(imgUrl) {
    console.log(imgUrl);
    const obj = {
        url: imgUrl
    };
    return this.http.post(`${this.uri}`, obj);
}
Backend: node.js
Route file
facedetAPIRoutes.route("/").post(function (req, res) {
    let imageUrl = req.body.url;
    myFaceDetAPI.recognizeImg(imageUrl).then(function(result) {
        // here is your response back
        res.json(result);
    });
});
Function file for api call: uses a promise
//I believe problem lies here somewhere
this.recognizeImg = (url) => {
    let requestString = "https://lambda-face-recognition.p.rapidapi.com/recognize";
    let req = unirest("POST", requestString);
    let imgURL = url;

    let promise = new Promise(function(resolve, reject) {
        unirest.post(requestString)
            .header("X-RapidAPI-Key", API_KEY)
            .attach("files", fs.createReadStream(imgURL))
            .field("album", ALBUM_NAME)
            .field("albumkey", ALBUM_KEY)
            .end(result => {
                console.log("successfully recognized image");
                resolve(result.body) // giving response back
            });
    });
    return promise;
}
You should try adding x-rapidapi-host and content-type headers.
.headers({
    "content-type": "application/x-www-form-urlencoded",
    "x-rapidapi-host": "lambda-face-recognition.p.rapidapi.com",
    "x-rapidapi-key": "",
    "useQueryString": true
})
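In the asker's recognizeImg function, this .headers({...}) call would be chained onto unirest.post(requestString), before the .attach(...) and .field(...) calls.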

How do I get a presigned url from the javascript aws-sdk

I am trying to generate a presigned url, but when I call the following, I get
s3.createPresignedPost() is not a function
I am running aws-sdk#2.3.14 and in the docs it clearly shows that createPresignedPost() is a function. Here is my code:
getPresignedURL(bucket: string, key: string) {
    let s3 = new AWS.S3()
    let params = {
        Bucket: bucket,
        Fields: {
            key: key
        }
    }
    return s3.createPresignedPost(params, (err, data) => {
        if (err) {
            console.log(err)
        } else {
            console.log(data)
        }
    })
}
createPresignedPost was introduced at version 2.19.0 and the current version is 2.222.1.
aws-sdk-js CHANGELOG
feature: S3: Added an instance method to S3 clients to create POST
form data with presigned upload policies
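In other words, the likely fix is simply upgrading the SDK to 2.19.0 or later (for example with npm install aws-sdk@latest), after which the getPresignedURL code above should find the method.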

Send a file from mobile to Node js server

I'm building an application with React Native. Now I'm trying to send an image from the mobile device to the server (Node.js). For this I'm using react-native-image-picker. The problem is that when I send the image, the server saves a file, but it's empty and doesn't contain the photo. I think the problem is probably that the server can't access the image's path because it is on a different device. But I don't know how to solve it.
React-Native:
openImagePicker() {
    const options = {
        title: 'Select Avatar',
        storageOptions: {
            skipBackup: true,
            path: 'images'
        }
    }

    ImagePicker.showImagePicker(options, (imagen) => {
        if (imagen.didCancel) {
            console.log('User cancelled image picker');
        }
        else if (imagen.error) {
            console.log('ImagePicker Error: ', imagen.error);
        }
        else if (imagen.customButton) {
            console.log('User tapped custom button: ', imagen.customButton);
        }
        else {
            let formdata = new FormData();
            formdata.append("file[name]", imagen.fileName);
            formdata.append("file[path]", imagen.path);
            formdata.append("file[type]", imagen.type);
            fetch('http://X/user/photo/58e137dd5d45090d0b000006', {
                method: 'PUT',
                headers: {
                    'Content-Type': 'multipart/form-data'
                },
                body: formdata
            })
            .then(response => {
                console.log("ok");
            })
            .catch(function(err) {
                console.log(err);
            })
        }
    })
}
Node Js:
addPhotoUser = function (req, res) {
    User.findById(req.params.id, function(err, user) {
        fs.readFile(req.body.file.path, function (err, data) {
            var pwd = 'home/ubuntu/.../';
            var newPath = pwd + req.body.file.name;
            fs.writeFile(newPath, data, function (err) {
                imageUrl: URL + req.body.file.name;
                user.save(function(err) {
                    if (!err) {
                        console.log('Updated');
                    } else {
                        console.log('ERROR: ' + err);
                    }
                    res.send(user);
                });
            });
        });
    });
};
Yes, the problem is that the file path is on the local device and not the server. You want to send the actual data returned to you by react-native-image-picker, not the URI. It looks like that library encodes the data with base64, so you're going to want to send that to your server, not the URI returned from the library, because it won't be accessible on a remote server.
What this means is that you won't be reading any files on your server, but instead just decoding a base64 string from the request body and writing that to your filesystem.
For the client side:
let formdata = new FormData();
formdata.append("file[name]", imagen.fileName);
formdata.append("file[data]", imagen.data); // this is base64 encoded!
formdata.append("file[type]", imagen.type);
fetch('http://X/user/photo/58e137dd5d45090d0b000006', {
    method: 'PUT',
    headers: {
        'Content-Type': 'multipart/form-data'
    },
    body: formdata
})
On the server side, use atob to decode from base64 before writing to the filesystem:
let decoded = atob(req.body.data)
// now this is binary and can be written to the filesystem
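(As an aside, older Node.js versions don't expose a global atob; an equivalent sketch using Buffer, assuming the file[data] field name from the client snippet above, would be:)
let decoded = Buffer.from(req.body.file.data, 'base64'); // same binary data, no atob needed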
From there:
fs.writeFile(newPath, decoded, function (err) {
    imageUrl: newPath;
    user.save(function(err) {
        if (!err) {
            console.log('Updated');
        } else {
            console.log('ERROR: ' + err);
        }
        res.send(user);
    });
});
Note, you don't need the fs.readFile that's in your code, because you're decoding the image that was sent as a base64 string in your request.
There also seem to be some oddities with how you're using that user object. You seem to be only passing a function that handles errors and not any actual data. I don't know what ORM you're using, so it's hard to say how it should work. Maybe something like this?
user.save({imageUrl:uriReturnedByFsWrite}, (err, data)=>{...})
Good luck :)
Make an object, then send that object to the server. The object will consist of name, path and type, like this:
var imageData = {name: 'image1', path: uri, type: 'image/jpeg'}
Above is one way to send the image data. The other way is to convert it into a blob so that the server-side programmer doesn't have to do this task on their end. You can make a blob by using react-native-fetch-blob.
One more way is to upload the images directly to Amazon S3 and send the link to the backend..
Function that returns base64 string:
var RNFetchBlob = require('react-native-fetch-blob').default;

getImageAttachment: function(uri_attachment, mimetype_attachment) {
    return new Promise((RESOLVE, REJECT) => {
        // Fetch attachment
        RNFetchBlob.fetch('GET', config.apiRoot + '/app/' + uri_attachment)
            .then((response) => {
                let base64Str = response.data;
                var imageBase64 = 'data:' + mimetype_attachment + ';base64,' + base64Str;

                // Return base64 image
                RESOLVE(imageBase64)
            })
            .catch((error) => {
                // error handling
                console.log("Error: ", error)
                REJECT(error)
            });
    });
},
Cheers :)

File Upload Angular 2 & Sails Js

I am trying to upload multiple files from an Angular 2 app to a Sails.js server. I want to place the files inside the public folder of the Sails.js app.
The file is uploaded from the Angular 2 app by getting the file from a fired event. The code for single file upload is as follows:
Angular 2 service:
fileChange(event: any): Promise<string> {
    let fileList: FileList = event.target.files;
    if (fileList.length > 0) {
        let file: File = fileList[0];
        let formData: FormData = new FormData();
        formData.append('myFile', file, file.name);

        let headers = new Headers();
        let cToken = this.cookie.getCookie("token");
        headers.append('Authorization', 'Bearer ' + cToken);
        headers.append('Content-Type', undefined);
        //headers.append('Content-Type', 'multipart/form-data');
        headers.append('Accept', 'application/json');
        let options: RequestOptionsArgs = { headers: headers, withCredentials: true }

        return new Promise((resolve, reject) => {
            this.http.post(this.apiEndpoint + "project/reffile/add/all", formData, options).toPromise()
                .then(response => {
                    // The promise is resolved once the HTTP call is successful.
                    let jsonData = response.json();
                    if (jsonData.apiStatus == 1) {
                        resolve(jsonData);
                    }
                    else reject(jsonData.message);
                })
                // The promise is rejected if there is an error with the HTTP call.
                // if we don't get any answers the proxy/api will probably be down
                .catch(reason => reject(reason.statusText));
        });
    }
}
SailsJs method:
/**
 * `FileController.upload()`
 *
 * Upload file(s) to the server's disk.
 */
addAll: function (req, res) {
    // e.g.
    // 0 => infinite
    // 240000 => 4 minutes (240,000 milliseconds)
    // etc.
    //
    // Node defaults to 2 minutes.
    res.setTimeout(0);

    console.log("req.param('filename')");
    console.log(req.param('filename'));

    req.file('myFile')
        .upload({
            // You can apply a file upload limit (in bytes)
            maxBytes: 1000000
        }, function whenDone(err, uploadedFiles) {
            if (err) return res.serverError(err);
            else return res.json({
                files: uploadedFiles,
                textParams: req.allParams()
            });
        });
},
After posting the form, I don't get the file in the HTTP call response, and I'm also not able to console.log(req.param('filename')).
Please help me figure out what I am doing wrong here. I also tried changing/removing the header, but it still isn't working.
Some experts say that Angular's Http service currently can't upload files and a native XHR request is needed for this; please view Thierry Templier's answer here.
Try specifying a directory for file upload:
req.file('file').upload({
    dirname: '../../assets/uploads'
}, function (err, files) {
    if (err) return res.serverError(err);

    var fileNameArray = files[0].fd.split("/");
    var fileName = fileNameArray[fileNameArray.length - 1];
    console.log("fileName: ", fileName);
});
To access the uploaded file, you can append the fileName to the upload directory that you have specified; the file will then be accessible there.
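Note that files[0].fd in the callback above already contains the full path where the file was saved on disk, so you can also use it directly instead of reconstructing the path from dirname and fileName.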
