Is it possible to send data and files in the same request? - javascript

I have an API that receives file and image uploads from my app.
To send them from the app to the API I use fetch:
const data = new FormData();
let i = 0;

export const dataPush = (fileUri, fileType, fileName) => {
  data.append('file' + i, {
    uri: fileUri,
    type: fileType,
    name: fileName
  });
  i++;
};

export const uploadFiles = () => {
  console.log(data);
  fetch('http://192.168.0.23/apiapp/public/api/annex', {
    method: 'post',
    body: data
  }).then(res => {
    console.log(res)
  });
}
But I'd also like to send data obtained from a form in the same request.
I couldn't find a way to do it; I always end up sending either just the data or just the files.
Is it possible to send everything in the same request? And if so, how?

You just append whatever non-file data you want to the FormData object:
data.append("not_a_file", "This is a string");

I did so based on Quentin's response and it worked:
const formData = new FormData();
let i = 0; // must be `let`, not `const`, since it gets incremented below

export const filePush = (fileUri, fileType, fileName) => {
  formData.append('file' + i, {
    uri: fileUri,
    type: fileType,
    name: fileName
  });
  i++;
};

export const dataPush = (name, content) => {
  formData.append(name, content);
};

export const uploadFiles = () => {
  fetch('http://192.168.0.23/apiapp/public/api/annex', {
    method: 'post',
    body: formData
  }).then(res => {
    console.log(res._bodyText)
  }).catch(error => {
    console.log(error.message)
  });
}

Related

Upload file to s3 using presigned post url in the server

TL;DR: Using an S3 presigned POST URL to upload a file to S3. Works fine in the browser but fails on the server.
I have a simple lambda function that generates presigned post url that can be consumed either in the browser or in the server.
During testing I noticed that the upload works fine in the browser but fails if I try to upload a file from a server, even though the code is identical.
The error I get is:
You must provide the Content-Length HTTP header
Detailed error:
<?xml version="1.0" encoding="UTF-8"?>
<Error>
<Code>MissingContentLength</Code>
<Message>You must provide the Content-Length HTTP header.</Message>
<RequestId>JP75YMFARK0G3X5Z</RequestId>
<HostId>toHsKmxmVYYAtac94cQoy8wXoregKG3PNBm97c3gQewEmKxLggcumTAP882T/pJNWx/lxRgH98A=</HostId>
</Error>
Request failed with status code 411
I checked online and found many threads about this issue but unfortunately not a single suggestion helped me.
Code I am running on the server:
const axios = require('axios');
const { createReadStream, readFileSync } = require('fs');
const FormData = require('form-data');

const getPostPresignedUrl = async () => {
  var config = {
    method: 'post',
    url: LAMBDA_GET_URL,
    headers: {
      'Content-Type': 'application/json',
    },
    data: JSON.stringify({
      key: 'test-2.jpg',
      fileType: 'image/jpeg',
    }),
  };
  const {
    data: { data },
  } = await axios(config);
  return data;
};

const uploadFileToS3 = async (fields, url) => {
  const formData = new FormData();
  Object.entries(fields).map(([key, value]) => {
    formData.append(key, value);
  });
  const file = createReadStream('./test-1.jpg');
  formData.append('file', file);
  try {
    const { data } = await axios({
      url,
      method: 'post',
      headers: {
        'Content-Type': 'multipart/form-data',
      },
      data: formData,
    });
  } catch (error) {
    if (error instanceof axios.AxiosError) {
      console.log(error.response.data);
    }
    console.log(error.message);
  }
};

const init = async () => {
  const { fields, url } = await getPostPresignedUrl();
  await uploadFileToS3(fields, url);
};

init();
Code I am running in the browser:
const form = document.getElementById('form');
const input = document.getElementById('file');

const getPostPresignedUrl = async (name) => {
  var config = {
    method: 'post',
    url: LAMBDA_GET_URL,
    headers: {
      'Content-Type': 'application/json',
    },
    data: JSON.stringify({
      key: name,
      fileType: 'image/jpeg',
    }),
  };
  const {
    data: { data },
  } = await axios(config);
  return data;
};

const uploadFileToS3 = async (fields, url, file) => {
  const formData = new FormData();
  Object.entries(fields).map(([key, value]) => {
    formData.append(key, value);
  });
  formData.append('file', file);
  try {
    const { data } = await axios({
      url,
      method: 'post',
      headers: {
        'Content-Type': 'multipart/form-data',
      },
      data: formData,
    });
  } catch (error) {
    if (error instanceof axios.AxiosError) {
      console.log(error.response.data);
    }
    console.log(error.message);
  }
};

const handleSubmit = async (e) => {
  e.preventDefault();
  const file = input.files[0];
  const data = await getPostPresignedUrl(file.name);
  await uploadFileToS3(data.fields, data.url, file);
};

form.onsubmit = handleSubmit;
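For context, the 411 usually means S3 never received a Content-Length header: in Node the form-data body is streamed with no known length, while the browser's native FormData lets the request size be computed. A minimal sketch of one possible workaround, assuming the Node form-data package (the Buffer/getLengthSync approach is my assumption, not the poster's code):
const axios = require('axios');
const { readFileSync } = require('fs');
const FormData = require('form-data');

const uploadFileToS3 = async (fields, url) => {
  const formData = new FormData();
  Object.entries(fields).forEach(([key, value]) => formData.append(key, value));

  // A Buffer (with an explicit filename) lets form-data measure the body,
  // unlike a read stream of unknown length.
  formData.append('file', readFileSync('./test-1.jpg'), 'test-1.jpg');

  await axios.post(url, formData, {
    headers: {
      ...formData.getHeaders(),                    // includes the multipart boundary
      'Content-Length': formData.getLengthSync(),  // the header S3 is complaining about
    },
  });
};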

including image in request body as binary data

I need to include the image as binary data in my upload request using multipart form data, but it does not seem to work. Any advice will be appreciated.
My code:
const [selectedFile, setSelectedFile] = useState(null);

const handleSubmit = async (event) => {
  event.preventDefault();
  const formData = new FormData();
  formData.append('selectedFile', new Blob([selectedFile], { type: 'application/octet-stream' }));
  const data = {
    uploadLink,
    formData,
  };
  const headers = {
    'Content-Type': 'application/octet-stream',
    Accept: 'application/vnd.vimeo.*+json;version=3.4',
  };
  try {
    await axios
      .post(`${backendPostPath}/thumbnail-upload`, data, {
        headers,
      })
      .then((response) => {
        applyThumbnial();
        console.log(response);
      });
  } catch (error) {
    console.log(error);
  }
};

const handleFileSelect = (event) => {
  setSelectedFile(event.target.files[0]);
};
Include formData as the axios data parameter instead of your data object; that way you can also include uploadLink in the formData:
const formData = new FormData();
formData.append('selectedFile', new Blob([selectedFile], { type: 'application/octet-stream' }));
formData.append('uploadLink', uploadLink);
//...
await axios
  .post(`${backendPostPath}/thumbnail-upload`, formData, {
    headers,
  })
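One extra caveat worth flagging (my assumption, not part of the answer above): if the headers object still carries 'Content-Type': 'application/octet-stream', the multipart boundary cannot be set. A hedged variant that drops the Content-Type and lets axios derive it from the FormData (selectedFile, uploadLink and backendPostPath come from the question, everything else is illustrative):
const formData = new FormData();
formData.append('selectedFile', selectedFile);   // the File object itself is sent as binary data
formData.append('uploadLink', uploadLink);

await axios.post(`${backendPostPath}/thumbnail-upload`, formData, {
  headers: { Accept: 'application/vnd.vimeo.*+json;version=3.4' },  // no explicit Content-Type
});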

Why am I getting a 500 when uploading file via the browser but not via Postman? [duplicate]

Using raw HTML when I post a file to a flask server using the following I can access files from the flask request global:
<form id="uploadForm" action='upload_file' role="form" method="post" enctype=multipart/form-data>
<input type="file" id="file" name="file">
<input type=submit value=Upload>
</form>
In flask:
def post(self):
    if 'file' in request.files:
        ....
When I try to do the same with Axios the flask request global is empty:
<form id="uploadForm" enctype="multipart/form-data" v-on:change="uploadFile">
<input type="file" id="file" name="file">
</form>
uploadFile: function (event) {
const file = event.target.files[0]
axios.post('upload_file', file, {
headers: {
'Content-Type': 'multipart/form-data'
}
})
}
If I use the same uploadFile function above but remove the headers object from the axios.post call, the form key of my Flask request object contains a CSV list of string values (the file is a .csv).
How can I get a file object sent via axios?
Add the file to a formData object, and set the Content-Type header to multipart/form-data.
var formData = new FormData();
var imagefile = document.querySelector('#file');
formData.append("image", imagefile.files[0]);
axios.post('upload_file', formData, {
  headers: {
    'Content-Type': 'multipart/form-data'
  }
})
Sample application using Vue. Requires a backend server running on localhost to process the request:
var app = new Vue({
  el: "#app",
  data: {
    file: ''
  },
  methods: {
    submitFile() {
      let formData = new FormData();
      formData.append('file', this.file);
      console.log('>> formData >> ', formData);
      // You should have a server side REST API
      axios.post('http://localhost:8080/restapi/fileupload',
        formData, {
          headers: {
            'Content-Type': 'multipart/form-data'
          }
        }
      ).then(function () {
        console.log('SUCCESS!!');
      })
      .catch(function () {
        console.log('FAILURE!!');
      });
    },
    handleFileUpload() {
      this.file = this.$refs.file.files[0];
      console.log('>>>> 1st element in files array >>>> ', this.file);
    }
  }
});
https://codepen.io/pmarimuthu/pen/MqqaOE
If you don't want to use a FormData object (e.g. your API takes specific content-type signatures and multipart/form-data isn't one of them) then you can do this instead:
uploadFile: function (event) {
  const file = event.target.files[0]
  axios.post('upload_file', file, {
    headers: {
      'Content-Type': file.type
    }
  })
}
Sharing my experience with React & HTML input
Define input field
<input type="file" onChange={onChange} accept ="image/*"/>
Define onChange listener
const onChange = (e) => {
  let url = "https://<server-url>/api/upload";
  let file = e.target.files[0];
  uploadFile(url, file);
};

const uploadFile = (url, file) => {
  let formData = new FormData();
  formData.append("file", file);
  axios.post(url, formData, {
    headers: {
      "Content-Type": "multipart/form-data",
    },
  }).then((response) => {
    fnSuccess(response);
  }).catch((error) => {
    fnFail(error);
  });
};

const fnSuccess = (response) => {
  //Add success handling
};

const fnFail = (error) => {
  //Add failed handling
};
This works for me; I hope it helps someone.
var frm = $('#frm');
let formData = new FormData(frm[0]);
axios.post('your-url', formData)
  .then(res => {
    console.log({res});
  }).catch(err => {
    console.error({err});
  });
This is my way:
var formData = new FormData(formElement);
// formData.append("image", imgFile.files[0]);
const res = await axios.post(
  "link-handle",
  formData,
  {
    headers: {
      "Content-Type": "multipart/form-data",
    },
  }
);
How to post a file using an object in memory (like a JSON object):
import axios from 'axios';
import * as FormData from 'form-data';

async function sendData(jsonData) {
  // const payload = JSON.stringify({ hello: 'world'});
  const payload = JSON.stringify(jsonData);
  const bufferObject = Buffer.from(payload, 'utf-8');
  const file = new FormData();
  file.append('upload_file', bufferObject, "b.json");
  // axios.post already returns a promise (no .toPromise() needed), and the
  // options argument must be an object wrapping the headers property
  const response = await axios.post(
    lovelyURL,
    file,
    { headers: file.getHeaders() }
  );
  console.log(response?.data);
}
There is an issue with Axios versions 0.25.0 through 0.27.2 where a FormData object in a PUT request is not handled correctly if you have appended more than one field, but it is fine with one field containing a file; POST works fine.
Also, Axios 0.25.0+ automatically sets the correct headers, so there is no need to specify Content-Type.
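A minimal sketch of what that note implies (the endpoint, fileInput and field names are made up for illustration): hand the FormData to axios as-is and let it add the multipart Content-Type header, boundary included.
// Illustrative only; '/api/upload', 'file' and 'title' are assumed names.
const formData = new FormData();
formData.append('file', fileInput.files[0]);
formData.append('title', 'My upload');

axios.post('/api/upload', formData)   // no explicit Content-Type header
  .then((res) => console.log(res.status))
  .catch((err) => console.error(err));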
For me the error was the actual parameter name in my controller... Took me a while to figure out, perhaps it will help someone. I'm using Next.js / .NET 6.
Client:
export const test = async (event: any) => {
  const token = useAuthStore.getState().token;
  console.log(event + 'the event')
  if (token) {
    const formData = new FormData();
    formData.append("img", event);
    const res = await axios.post(baseUrl + '/products/uploadproductimage', formData, {
      headers: {
        'Authorization': `bearer ${token}`
      }
    })
    return res
  }
  return null
}
Server:
[HttpPost("uploadproductimage")]
public async Task<ActionResult> UploadProductImage([FromForm] IFormFile image)
{
    return Ok();
}
The error here is because the server expects the param "image" and not "img":
formData.append("img", event);
public async Task<ActionResult> UploadProductImage([FromForm] IFormFile image)
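Spelled out, the fix is just making the two names match (shown here with the controller's parameter name, "image"):
// Field name now matches the controller parameter [FromForm] IFormFile image
formData.append("image", event);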

"Multipart: Boundary not found": File upload issue with Reactjs, Express, Multer and S3

I am trying to upload an image from my React frontend but am running into an Unprocessable Entity error. The server route works, as I have successfully uploaded an image through Postman.
const handleFileUpload = async (file: any) => {
  const imageData = new FormData();
  imageData.append("image", file[0]);
  const config = {
    method: "POST",
    headers: {
      "Content-Type": "multipart/form-data",
      Accept: "application/json",
    },
    body: imageData,
  };
  try {
    const req = await fetch(url, config);
    if (req.ok) {
      const res = await req.json();
      console.log(res);
    }
  } catch (err) {
    console.log(err);
  }
};
Input:
<input
  type="file"
  accept="image/png, image/jpeg"
  onChange={(e: any) => { handleFileUpload(e.target.files ? e.target.files : url); }}
/>
Here is my working example:
// the input
<input
  type="file"
  accept="image/x-png,image/jpeg,image/gif"
  className={classes.uploadInput}
  ref={ref => upload = ref}
  onChange={e => uploadAvatar(e.target.files[0])}
/>
// redux action
const uploadAvatar = avatar => ({type: ActionType.TRY_CHANGE_AVATAR, avatar})
// redux-saga
const data = new FormData()
data.append('avatar', avatar)
// axios request
const result = yield axios.post(API_LINK, data)
// in my php I have this headers
header("Access-Control-Allow-Origin: *");
header("Content-Type: application/json; charset=UTF-8");
// and
if (!move_uploaded_file($_FILES['avatar']['tmp_name'], $uploadFolder . $filename))
...
// to upload it.
This is just an example, not really an answer to your question, but I still hope it helps in some way.
To solve it, I removed the headers from the POST request so that fetch would generate them automatically. Server-side, I needed to raise the bodyParser limit to 50mb. Here are my edits:
const handleFileUpload = async (file: any) => {
  const imageData = new FormData();
  imageData.append("image", file);
  const config = {
    method: "POST",
    body: imageData,
  };
  try {
    const req = await fetch(url, config);
    if (req.ok) {
      const res = await req.json();
      console.log(res);
      if (res.success) {
        setURL(res.user.profilePicture);
      }
    }
  } catch (err) {
    console.log(err);
  }
};
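The server-side half of that fix isn't shown above; a minimal sketch of what it might look like, assuming an Express app with body-parser (the middleware setup is illustrative, not the poster's exact code):
const express = require('express');
const bodyParser = require('body-parser');

const app = express();
// Raise the default body size limit so larger uploads are not rejected.
app.use(bodyParser.json({ limit: '50mb' }));
app.use(bodyParser.urlencoded({ limit: '50mb', extended: true }));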

Amazon S3 Remote File Upload with Axios

I am trying to write a function that would:
Take a remote URL as a parameter,
Get the file using axios,
Upload the stream to Amazon S3,
And finally, return the uploaded URL.
I found help here on stackoverflow. So far, I have this:
/*
 * Method to pipe the stream
 */
const uploadFromStream = (file_name, content_type) => {
  const pass = new stream.PassThrough();
  const obj_key = generateObjKey(file_name);
  const params = { Bucket: config.bucket, ACL: config.acl, Key: obj_key, ContentType: content_type, Body: pass };
  s3.upload(params, function(err, data) {
    if (!err) {
      return data.Location;
    } else {
      console.log(err, data);
    }
  });
  return pass;
}

/*
 * Method to upload remote file to s3
 */
const uploadRemoteFileToS3 = async (remoteAddr) => {
  axios({
    method: 'get',
    url: remoteAddr,
    responseType: 'stream'
  }).then((response) => {
    if (response.status === 200) {
      const file_name = remoteAddr.substring(remoteAddr.lastIndexOf('/') + 1);
      const content_type = response.headers['content-type'];
      response.data.pipe(uploadFromStream(file_name, content_type));
    }
  });
}
But uploadRemoteFileToS3 does not return anything (because it's an asynchronous function). How can I get the uploaded URL?
UPDATE
I have further improved the code and written a class. Here is what I have now:
const config = require('../config.json');
const stream = require('stream');
const axios = require('axios');
const AWS = require('aws-sdk');

class S3RemoteUploader {
  constructor(remoteAddr) {
    this.remoteAddr = remoteAddr;
    this.stream = stream;
    this.axios = axios;
    this.config = config;
    this.AWS = AWS;
    this.AWS.config.update({
      accessKeyId: this.config.api_key,
      secretAccessKey: this.config.api_secret
    });
    this.spacesEndpoint = new this.AWS.Endpoint(this.config.endpoint);
    this.s3 = new this.AWS.S3({endpoint: this.spacesEndpoint});
    this.file_name = this.remoteAddr.substring(this.remoteAddr.lastIndexOf('/') + 1);
    this.obj_key = this.config.subfolder + '/' + this.file_name;
    this.content_type = 'application/octet-stream';
    this.uploadStream();
  }

  uploadStream() {
    const pass = new this.stream.PassThrough();
    this.promise = this.s3.upload({
      Bucket: this.config.bucket,
      Key: this.obj_key,
      ACL: this.config.acl,
      Body: pass,
      ContentType: this.content_type
    }).promise();
    return pass;
  }

  initiateAxiosCall() {
    axios({
      method: 'get',
      url: this.remoteAddr,
      responseType: 'stream'
    }).then((response) => {
      if (response.status === 200) {
        this.content_type = response.headers['content-type'];
        response.data.pipe(this.uploadStream());
      }
    });
  }

  dispatch() {
    this.initiateAxiosCall();
  }

  async finish() {
    //console.log(this.promise); /* return Promise { Pending } */
    return this.promise.then((r) => {
      console.log(r.Location);
      return r.Location;
    }).catch((e) => {
      console.log(e);
    });
  }

  run() {
    this.dispatch();
    this.finish();
  }
}
But I still have no clue how to catch the result when the promise is resolved. So far, I have tried these:
testUpload = new S3RemoteUploader('https://avatars2.githubusercontent.com/u/41177');
testUpload.run();
//console.log(testUpload.promise); /* Returns Promise { Pending } */
testUpload.promise.then(r => console.log); // does nothing
But none of the above works. I have a feeling I am missing something very subtle. Any clue, anyone?
After the upload you can call the getSignedUrl function in the S3 SDK to get the URL; you can also specify the expiry of the URL. You need to pass the key to that function. I'm travelling right now; I will update with an example later.
To generate a simple pre-signed URL that allows any user to view the contents of a private object in a bucket you own, you can use the following call to getSignedUrl():
var s3 = new AWS.S3();
var params = {Bucket: 'myBucket', Key: 'myKey'};
s3.getSignedUrl('getObject', params, function (err, url) {
  console.log("The URL is", url);
});
Official documentation link
http://docs.amazonaws.cn/en_us/AWSJavaScriptSDK/guide/node-examples.html
The code must be something like this:
function uploadFileToS3AndGenerateUrl(cb) {
  const pass = new stream.PassThrough(); // I have generated streams from file. Using this since this is what you have used. Must be a valid one.
  var params = {
    Bucket: "your-bucket", // required
    Key: key, // required
    Body: pass,
    ContentType: 'your content type',
  };
  s3.upload(params, function(s3Err, data) {
    if (s3Err) {
      cb(s3Err)
    }
    console.log(`File uploaded successfully at ${data.Location}`)
    const params = {
      Bucket: 'your-bucket',
      Key: data.key,
      Expires: 180
    };
    s3.getSignedUrl('getObject', params, (urlErr, urlData) => {
      if (urlErr) {
        console.log('There was an error getting your files: ' + urlErr);
        cb(urlErr);
      } else {
        console.log(`url: ${urlData}`);
        cb(null, urlData);
      }
    })
  })
}
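A hypothetical call to the helper above (the (err, url) callback shape is the one defined there; the bucket name, key and body stream still need to be filled in):
uploadFileToS3AndGenerateUrl((err, signedUrl) => {
  if (err) return console.error(err);
  console.log('Pre-signed URL:', signedUrl);  // valid for the 180s Expires window set above
});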
Please check; I have updated your code, it might help you.
/*
 * Method to upload remote file to s3
 */
const uploadRemoteFileToS3 = async (remoteAddr) => {
  const response = await axios({
    method: 'get',
    url: remoteAddr,
    responseType: 'stream'
  });
  if (response.status === 200) {
    const file_name = remoteAddr.substring(remoteAddr.lastIndexOf('/') + 1);
    const content_type = response.headers['content-type'];
    // Pipe the download into the pass-through stream and return the promise
    // that resolves with the uploaded URL once s3.upload finishes.
    const { pass, promise } = uploadFromStream(file_name, content_type);
    response.data.pipe(pass);
    return promise;
  }
  throw new Error(`Unexpected response status ${response.status}`);
};

/*
 * Method to pipe the stream
 */
const uploadFromStream = (file_name, content_type) => {
  const pass = new stream.PassThrough();
  const obj_key = generateObjKey(file_name);
  const params = { Bucket: config.bucket, ACL: config.acl, Key: obj_key, ContentType: content_type, Body: pass };
  const promise = new Promise((resolve, reject) => {
    s3.upload(params, function(err, data) {
      if (!err) {
        console.log(data);
        return resolve(data.Location);
      } else {
        console.log(err);
        return reject(err);
      }
    });
  });
  // Return both the writable stream (to pipe into) and the promise (for the result).
  return { pass, promise };
}

//call uploadRemoteFileToS3
uploadRemoteFileToS3(remoteAddr)
  .then((finalResponse) => {
    console.log(finalResponse);
  })
  .catch((err) => {
    console.log(err);
  });
