Uploading file from local file system in node giving 400 error - javascript

I am trying to upload a file to an API using multipart/form-data.
I am able to do this successfully from Insomnia as follows: the Content-Type header is set to multipart/form-data, and basic auth is also set.
When I try to replicate this in my code I receive a 400 error.
Here is what I have at the moment:
import fs from 'fs';
import axios from 'axios';

const URL = 'https://api.app.uploadplace.com/v1/';
const token = 'randomkey';

async function readFile(path: string) {
  return new Promise((resolve, reject) => {
    fs.readFile(path, 'utf-8', function (err, data) {
      if (err) {
        reject(err);
      }
      resolve(data);
    });
  });
}

export const Upload = async (path: string) => {
  const FormData = require('form-data');
  const form = new FormData();
  let file = await readFile(path);
  form.append('file', file);
  const config = {
    headers: {
      'content-type': 'multipart/form-data'
    },
    auth: {
      username: token,
      password: ''
    }
  };
  await axios.post(URL, form, config).then(() => {
    console.log('success')
  }).catch(err => {
    console.log(err);
  })
};

The problem is that you are appending the file contents as a string to your form data.
Instead, append a readable stream for the file:
form.append("file", fs.createReadStream(path));

const config = {
  headers: form.getHeaders(), // this includes the correct MIME boundary tokens
  auth: {
    username: token,
    password: ''
  }
};
See https://github.com/axios/axios#form-data
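Putting it together, a minimal sketch of the corrected upload function (assuming the same URL, token, and TypeScript setup as above); the 'utf-8' readFile helper is dropped entirely, since streaming the file avoids corrupting binary content:
import fs from 'fs';
import axios from 'axios';
import FormData from 'form-data';

export const Upload = async (path: string) => {
  const form = new FormData();
  // stream the file instead of appending its contents as a string
  form.append('file', fs.createReadStream(path));
  const config = {
    headers: form.getHeaders(), // multipart/form-data with the correct boundary
    auth: { username: token, password: '' }
  };
  await axios.post(URL, form, config);
  console.log('success');
};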

Related

Upload file to s3 using presigned post url in the server

TL;DR: Using an S3 presigned POST URL to upload a file to S3. Works fine in the browser but fails on the server.
I have a simple lambda function that generates a presigned POST URL that can be consumed either in the browser or on the server.
During testing I noticed that the upload works fine in the browser but fails if I try to upload a file from a server, even though the code is nearly identical.
The error I get is:
You must provide the Content-Length HTTP header
Detailed error:
<?xml version="1.0" encoding="UTF-8"?>
<Error>
<Code>MissingContentLength</Code>
<Message>You must provide the Content-Length HTTP header.</Message>
<RequestId>JP75YMFARK0G3X5Z</RequestId>
<HostId>toHsKmxmVYYAtac94cQoy8wXoregKG3PNBm97c3gQewEmKxLggcumTAP882T/pJNWx/lxRgH98A=</HostId>
</Error>
Request failed with status code 411
I checked online and found many threads about this issue but unfortunately not a single suggestion helped me.
Code I am running in the server
const axios = require('axios');
const { createReadStream, readFileSync } = require('fs');
const FormData = require('form-data');

const getPostPresignedUrl = async () => {
  var config = {
    method: 'post',
    url: LAMBDA_GET_URL,
    headers: {
      'Content-Type': 'application/json',
    },
    data: JSON.stringify({
      key: 'test-2.jpg',
      fileType: 'image/jpeg',
    }),
  };
  const {
    data: { data },
  } = await axios(config);
  return data;
};

const uploadFileToS3 = async (fields, url) => {
  const formData = new FormData();
  Object.entries(fields).map(([key, value]) => {
    formData.append(key, value);
  });
  const file = createReadStream('./test-1.jpg');
  formData.append('file', file);
  try {
    const { data } = await axios({
      url,
      method: 'post',
      headers: {
        'Content-Type': 'multipart/form-data',
      },
      data: formData,
    });
  } catch (error) {
    if (error instanceof axios.AxiosError) {
      console.log(error.response.data);
    }
    console.log(error.message);
  }
};

const init = async () => {
  const { fields, url } = await getPostPresignedUrl();
  await uploadFileToS3(fields, url);
};

init();
Code I am running in the browser:
const form = document.getElementById('form');
const input = document.getElementById('file');

const getPostPresignedUrl = async (name) => {
  var config = {
    method: 'post',
    url: LAMBDA_GET_URL,
    headers: {
      'Content-Type': 'application/json',
    },
    data: JSON.stringify({
      key: name,
      fileType: 'image/jpeg',
    }),
  };
  const {
    data: { data },
  } = await axios(config);
  return data;
};

const uploadFileToS3 = async (fields, url, file) => {
  const formData = new FormData();
  Object.entries(fields).map(([key, value]) => {
    formData.append(key, value);
  });
  formData.append('file', file);
  try {
    const { data } = await axios({
      url,
      method: 'post',
      headers: {
        'Content-Type': 'multipart/form-data',
      },
      data: formData,
    });
  } catch (error) {
    if (error instanceof axios.AxiosError) {
      console.log(error.response.data);
    }
    console.log(error.message);
  }
};

const handleSubmit = async (e) => {
  e.preventDefault();
  const file = input.files[0];
  const data = await getPostPresignedUrl(file.name);
  await uploadFileToS3(data.fields, data.url, file);
};

form.onsubmit = handleSubmit;
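No answer is recorded here, but the 411 points at a known difference between the two environments: the browser's native FormData lets the HTTP stack compute Content-Length, while Node's form-data package produces a stream whose total length axios does not know. A minimal sketch of a likely fix (assuming the same fields/url shape as above), computing the length with form-data's getLength() and sending it explicitly:
const uploadFileToS3 = async (fields, url) => {
  const formData = new FormData();
  Object.entries(fields).forEach(([key, value]) => formData.append(key, value));
  formData.append('file', createReadStream('./test-1.jpg'));

  // form-data can compute the total multipart body length (it stats file streams)
  const contentLength = await new Promise((resolve, reject) => {
    formData.getLength((err, length) => (err ? reject(err) : resolve(length)));
  });

  await axios.post(url, formData, {
    headers: {
      ...formData.getHeaders(), // multipart/form-data with the correct boundary
      'Content-Length': contentLength,
    },
  });
};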

Upload files VueJS to NodeJS backend by axios

I would like to upload a file from Vue.js to my Node.js server. However, the file never arrives in the backend. I have tried several things but still can't get it to work. If anyone can help me I would be very grateful :-)
VueJS Upload Page Code
<input id="input-file" name="input-file" type="file" ref="file" @change="newFile($event)"/>
const formData = new FormData();
formData.append("file", document.getElementById("input-file"));
const createUserResponse = await RequestManager.executePostRequest("/users/upload", formData, {
  headers: {
    'Content-Type': 'multipart/form-data'
  }
});
RequestManager executePostRequest Function
static async executePostRequest(url: string, params: any, specialConfig?: AxiosRequestConfig) {
  const postToken = Utils.buildHmacSha256Signature(params);
  let headers: AxiosRequestHeaders = {
    "x-access-token": config.server.backendAccessToken,
  }
  if (RequestManager.token) {
    headers = {
      "x-access-token": config.server.backendAccessToken,
      "x-token-data": RequestManager.token,
    }
  }
  if (specialConfig && specialConfig.headers) {
    headers = Utils.mergeObjects(headers, specialConfig.headers) as AxiosRequestHeaders;
  }
  const instance = axios.create({
    baseURL: config.server.host,
    headers: headers
  });
  console.log(headers);
  const paramsPost = {
    data: params,
    token: postToken
  }
  return new Promise<any>((resolve, reject) => {
    instance
      .post(url, paramsPost)
      .then(response => {
        resolve(response.data);
      })
      .catch(error => {
        reject(error);
      });
  });
}
Backend NodeJS
UserRouter.get("/upload", (req, res) => {
  console.log(req.files);
  res.send("ok");
});
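No answer is recorded here, but three problems stand out. First, the front end appends the <input> element itself instead of the selected File (input.files[0]). Second, executePostRequest wraps the FormData inside a JSON object (paramsPost), so the multipart body never reaches the server; a FormData payload has to be sent as the request body itself. Third, the backend registers a GET route and reads req.files, which stays empty without a multipart parser. A minimal sketch of the fixes, assuming Express with the express-fileupload middleware (any multipart parser such as multer would work as well):
// Front end: append the File object, not the DOM element
const input = document.getElementById('input-file') as HTMLInputElement;
formData.append('file', input.files[0]);

// Back end: parse multipart bodies and accept POST
const fileUpload = require('express-fileupload');
app.use(fileUpload()); // populates req.files from multipart requests

UserRouter.post('/upload', (req, res) => {
  console.log(req.files); // { file: { name, data, mimetype, ... } }
  res.send("ok");
});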

Copy images from one folder to another in digitalocean spaces

I am working with DigitalOcean Spaces and have uploaded images into a temporary folder. Now I want to move an image from the temporary folder to a permanent folder. I have searched all over but found nothing satisfying. Is it possible to do so? And if yes, please help me with the JavaScript code.
First I generated a signed URL, and with the help of that signed URL I uploaded the image to DigitalOcean Spaces. The following is the code for generating the signed URL and uploading images.
const getSignedUrl = async () => {
  const body = {
    fileName: 'temp/' + file.name,
    fileType: file.type,
  }
  const response = await fetch(`${API_URL}/presigned_url`, {
    method: 'POST',
    body: JSON.stringify(body),
    headers: { 'Content-Type': 'application/json' }
  })
  const { signedUrl } = await response.json()
  return signedUrl
}

const uploadFile = async signedUrl => {
  const res = await fetch(signedUrl, {
    method: 'PUT',
    body: file,
    headers: {
      'Content-Type': file.type,
      'x-amz-acl': 'public-read',
    }
  })
  return res
}
Please help me move my image from the temporary folder to the permanent folder.
So finally, after searching, I got the answer.
From the front end I call an API to copy my image:
const copyFile = async (file) => {
  try {
    const body = {
      fileName: file.name
    }
    const res = await fetch(`${API_URL}/copy_file`, {
      method: 'PUT',
      body: JSON.stringify(body),
      headers: { 'Content-Type': 'application/json' }
    })
    return res
  } catch (error) {
    console.log(error);
  }
}
And on the backend side, the API I made is:
app.put('/copy_file', (req, res) => {
  const fileName = req.body.fileName
  console.log("body", fileName);
  const params = {
    Bucket: config.spaces.spaceName,
    CopySource: `/bucketName/temp/${fileName}`,
    Key: `original/${fileName}`,
  }
  spaces.copyObject(params, function (err, data) {
    if (err) {
      console.log("Error", err)
      // console.log(err, err.stack); // an error occurred
    } else {
      res.json({ data })
    }
  });
});
This will copy your image into the original folder.
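Note that copyObject only copies; to complete a true move, the original object also has to be deleted afterwards. A minimal sketch using the same spaces client (deleteObject is the standard aws-sdk call; the temp/ key layout is assumed from the code above):
spaces.copyObject(params, function (err, data) {
  if (err) return console.log("Error", err);
  // remove the original to complete the move
  spaces.deleteObject({
    Bucket: config.spaces.spaceName,
    Key: `temp/${fileName}`,
  }, function (delErr) {
    if (delErr) return console.log("Delete error", delErr);
    res.json({ data });
  });
});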

Trying to send a generated PDF to a google cloud function to send an email with nodemailer

The goal is to have users enter some information into a form and turn that into a PDF. I'm using jsPDF to parse and create the PDF. I've successfully gotten my code to make a printable PDF, but in an effort to not have paper floating around the office, I made a cloud function to email that PDF to the customer instead.
Here is my code on the front end. mailDoc is the PDF that I've made; it hasn't been printed or anything, so it only exists in memory.
mailDoc = mailDoc.output('datauri');
mailFunction += "&?data=" + mailDoc;

// axios request to the cloud function
axios.get(mailFunction).then(function (response) {
  console.log(response);
}).catch(function (error) {
  console.log(error)
})
And here is my code in the cloud function:
exports.sendMail = functions.https.onRequest((req, res) => {
  cors(req, res, () => {
    // getting dest email by query string
    // ?dest=DestinationEmail
    const dest = req.query.dest;
    const data = req.query.data;
    const mailOptions = {
      from: 'whatever <whatever@hoobashaka.com>',
      to: dest,
      subject: "You're Equipment Return to HBCI", // email subject
      attachments: [
        {
          filename: 'return.pdf',
          contentType: 'application/pdf',
          path: data,
        }
      ],
    };
    return transporter.sendMail(mailOptions, (erro, info) => {
      if (erro) {
        return res.send(erro.toString());
      }
      return res.send('Sended');
    });
  });
});
If I try and send the data via URI, I get a 413 error, probably because that URI is enormous. But I can't think of another way of sending that generated PDF to the function.
On your client, instead of uploading the file as a datauri, I'd instead use POST and send the PDF inside the request body (just as if you had submitted a file using a form).
mailDocBlob = mailDoc.output('blob');

const data = new FormData();
data.set('dest', someEmail);
data.append('file', mailDocBlob, 'return.pdf');

axios({
  method: 'post',
  url: 'https://your-cloud-function.here/sendMail',
  data: data,
  headers: {
    'Content-Type': `multipart/form-data; boundary=${data._boundary}`,
  }
})
.then(function (response) {
  console.log(response);
})
.catch(function (error) {
  console.log(error);
});
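One caveat on the client snippet above: _boundary is an internal of the Node form-data package, not of the browser's native FormData, so in a browser it is generally safer to omit the Content-Type header entirely and let the browser set it, boundary included:
axios.post('https://your-cloud-function.here/sendMail', data)
  .then(function (response) { console.log(response); })
  .catch(function (error) { console.log(error); });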
On the server you would handle the multipart form data using the busboy package.
const BusBoy = require('busboy');

exports.sendMail = functions.https.onRequest((req, res) => {
  cors(req, res, (err) => {
    if (err) {
      // CORS failed. Abort.
      console.log("CORS failed/rejected");
      res.sendStatus(403); // 403 FORBIDDEN
      return;
    }
    if (req.method !== 'POST') {
      res.set('Allow', 'POST, OPTIONS').sendStatus(405); // 405 METHOD_NOT_ALLOWED
      return;
    }
    let busboy = new BusBoy({ headers: req.headers, limits: { files: 1 } }); // limited to only a single file
    const mailOptions = {
      from: 'whatever <whatever@hoobashaka.com>',
      to: undefined, // filled in from the 'dest' form field below
      subject: "Your Equipment Return to HBCI", // email subject - fixed typo
      attachments: []
    };
    busboy.on('field', (fieldname, value) => {
      if (fieldname === 'dest') {
        mailOptions.to = value; // the client sent this via data.set('dest', someEmail)
      }
    })
    .on('file', (fieldname, file, filename, encoding, mimetype) => {
      // add new file attachment
      mailOptions.attachments.push({
        filename: 'return.pdf',
        contentType: 'application/pdf',
        content: file, // file is a stream
      });
    })
    .on('finish', () => {
      if (mailOptions.attachments.length == 0) {
        // not enough attachments
        res.status(400).send('Error: not enough attachments');
        return;
      }
      return transporter.sendMail(mailOptions, (erro, info) => {
        if (erro) {
          return res.status(500).send('Error: ' + erro.toString());
        }
        return res.send('Sent');
      });
    })
    .on('error', (err) => {
      console.error(err);
      res.status(500).send('Error: ' + err.code);
    });
    req.pipe(busboy);
  });
});

Amazon S3 Remote File Upload with Axios

I am trying to write a function that would:
Take a remote URL as a parameter,
Get the file using axios,
Upload the stream to Amazon S3,
And finally, return the uploaded URL.
I found help here on Stack Overflow. So far, I have this:
/*
 * Method to pipe the stream
 */
const uploadFromStream = (file_name, content_type) => {
  const pass = new stream.PassThrough();
  const obj_key = generateObjKey(file_name);
  const params = { Bucket: config.bucket, ACL: config.acl, Key: obj_key, ContentType: content_type, Body: pass };
  s3.upload(params, function (err, data) {
    if (!err) {
      return data.Location;
    } else {
      console.log(err, data);
    }
  });
  return pass;
}

/*
 * Method to upload remote file to s3
 */
const uploadRemoteFileToS3 = async (remoteAddr) => {
  axios({
    method: 'get',
    url: remoteAddr,
    responseType: 'stream'
  }).then((response) => {
    if (response.status === 200) {
      const file_name = remoteAddr.substring(remoteAddr.lastIndexOf('/') + 1);
      const content_type = response.headers['content-type'];
      response.data.pipe(uploadFromStream(file_name, content_type));
    }
  });
}
But uploadRemoteFileToS3 does not return anything (the promise chain inside it is never returned or awaited). How can I get the uploaded URL?
UPDATE
I have further improved the code and wrote a class. Here is what I have now:
const config = require('../config.json');
const stream = require('stream');
const axios = require('axios');
const AWS = require('aws-sdk');

class S3RemoteUploader {
  constructor(remoteAddr) {
    this.remoteAddr = remoteAddr;
    this.stream = stream;
    this.axios = axios;
    this.config = config;
    this.AWS = AWS;
    this.AWS.config.update({
      accessKeyId: this.config.api_key,
      secretAccessKey: this.config.api_secret
    });
    this.spacesEndpoint = new this.AWS.Endpoint(this.config.endpoint);
    this.s3 = new this.AWS.S3({ endpoint: this.spacesEndpoint });
    this.file_name = this.remoteAddr.substring(this.remoteAddr.lastIndexOf('/') + 1);
    this.obj_key = this.config.subfolder + '/' + this.file_name;
    this.content_type = 'application/octet-stream';
    this.uploadStream();
  }
  uploadStream() {
    const pass = new this.stream.PassThrough();
    this.promise = this.s3.upload({
      Bucket: this.config.bucket,
      Key: this.obj_key,
      ACL: this.config.acl,
      Body: pass,
      ContentType: this.content_type
    }).promise();
    return pass;
  }
  initiateAxiosCall() {
    axios({
      method: 'get',
      url: this.remoteAddr,
      responseType: 'stream'
    }).then((response) => {
      if (response.status === 200) {
        this.content_type = response.headers['content-type'];
        response.data.pipe(this.uploadStream());
      }
    });
  }
  dispatch() {
    this.initiateAxiosCall();
  }
  async finish() {
    //console.log(this.promise); /* returns Promise { Pending } */
    return this.promise.then((r) => {
      console.log(r.Location);
      return r.Location;
    }).catch((e) => {
      console.log(e);
    });
  }
  run() {
    this.dispatch();
    this.finish();
  }
}
But I still have no clue how to catch the result when the promise resolves. So far, I tried these:
testUpload = new S3RemoteUploader('https://avatars2.githubusercontent.com/u/41177');
testUpload.run();
//console.log(testUpload.promise); /* Returns Promise { Pending } */
testUpload.promise.then(r => console.log); // does nothing
But none of the above works. I have a feeling I am missing something very subtle. Any clue, anyone?
After the upload you can call the getSignedUrl function in the S3 SDK to get the URL; you can also specify the expiry of the URL. You need to pass the object key to that function. I'm travelling now, will update with an example later.
To generate a simple pre-signed URL that allows any user to view the contents of a private object in a bucket you own, you can use the following call to getSignedUrl():
var s3 = new AWS.S3();
var params = { Bucket: 'myBucket', Key: 'myKey' };
s3.getSignedUrl('getObject', params, function (err, url) {
  console.log("The URL is", url);
});
Official documentation link: http://docs.amazonaws.cn/en_us/AWSJavaScriptSDK/guide/node-examples.html
The code should look something like this:
function uploadFileToS3AndGenerateUrl(cb) {
  // I have generated a stream from a file. Using this since it is what you used; it must be a valid stream.
  const pass = new stream.PassThrough();
  var params = {
    Bucket: "your-bucket", // required
    Key: key, // required
    Body: pass,
    ContentType: 'your content type',
  };
  s3.upload(params, function (s3Err, data) {
    if (s3Err) {
      return cb(s3Err);
    }
    console.log(`File uploaded successfully at ${data.Location}`);
    const params = {
      Bucket: 'your-bucket',
      Key: data.key,
      Expires: 180
    };
    s3.getSignedUrl('getObject', params, (urlErr, urlData) => {
      if (urlErr) {
        console.log('There was an error getting your files: ' + urlErr);
        cb(urlErr);
      } else {
        console.log(`url: ${urlData}`);
        cb(null, urlData);
      }
    });
  });
}
Please check, I have updated your code; it might help you. Note that a Promise cannot be used as a pipe() destination, so uploadFromStream has to hand back the PassThrough stream as well as the promise:
/*
 * Method to pipe the stream
 */
const uploadFromStream = (file_name, content_type) => {
  const pass = new stream.PassThrough();
  const obj_key = generateObjKey(file_name);
  const params = { Bucket: config.bucket, ACL: config.acl, Key: obj_key, ContentType: content_type, Body: pass };
  const promise = new Promise((resolve, reject) => {
    s3.upload(params, function (err, data) {
      if (!err) {
        console.log(data);
        resolve(data.Location);
      } else {
        console.log(err);
        reject(err);
      }
    });
  });
  return { pass, promise }; // pipe into pass, await promise for the URL
};

/*
 * Method to upload remote file to s3
 */
const uploadRemoteFileToS3 = async (remoteAddr) => {
  const response = await axios({
    method: 'get',
    url: remoteAddr,
    responseType: 'stream'
  });
  if (response.status !== 200) {
    throw new Error('Request failed with status ' + response.status);
  }
  const file_name = remoteAddr.substring(remoteAddr.lastIndexOf('/') + 1);
  const content_type = response.headers['content-type'];
  const { pass, promise } = uploadFromStream(file_name, content_type);
  response.data.pipe(pass);
  return promise; // resolves with the uploaded URL
};

// call uploadRemoteFileToS3
uploadRemoteFileToS3(remoteAddr)
  .then((finalResponse) => {
    console.log(finalResponse);
  })
  .catch((err) => {
    console.log(err);
  });
