Is there an easier way to make these axios POST requests? - javascript

I'm trying to upload multiple images to Cloudinary via its API in my React app using axios.
I'm new to promises, so I'm not sure if I'm using the right approach here.
I am able to upload all the images to Cloudinary; the confusing part is the response. I need to collect the images' URLs into an object so I can send them in an email via Mailgun.
Here is my code to upload the images:
if (payment === "success") {
  axios
    .all([
      props.imgForProduct.map((img) => {
        const fd = new FormData();
        fd.append("file", img);
        fd.append("upload_preset", "sublimation");
        return axios
          .post("https://api.cloudinary.com/v1_1/ahkji7ation/image/upload", fd)
          .then(async (res) => {
            return (imgUrl = res.data.secure_url);
          })
          .catch((err) => console.log(err));
      }),
      props.screenshot.map((img) => {
        const fd = new FormData();
        fd.append("file", img);
        fd.append("upload_preset", "sublimation");
        return axios
          .post("https://api.cloudinary.com/v1_1/ahkji7ation/image/upload", fd)
          .then(async (res) => {
            return (imgScrSht = res.data.secure_url);
          })
          .catch((err) => console.log(err));
      }),
    ])
    .then(
      axios.spread(async (...res) => {
        imgUrl.push(res[0][0]);
        imgScrSht.push(res[1][0]);
        let dataObj = {
          email: props.email,
          img: imgUrl,
          screenshot: imgScrSht,
        };
        console.log(dataObj);
        new Promise((resolve, reject) => {
          axios.post("/email_to_ayp_sublimation", dataObj);
          resolve((res) => {
            console.log(res);
          });
          reject((err) => {
            console.log(err);
          });
        });
      })
    )
    .catch((err) => console.error(err));
}
When I console.log the dataObj, this is what I'm sending in the last call (the new Promise):
screenshot: Array(1)
  0: Promise {<resolved>: "https://res.cloudinary.com/ahkji7ation/image/u…590871936/ahkji7ation/kqebmkjfj0prmyygls2y.jpg"}
  length: 1
  __proto__: Array(0)
I'm receiving [object Object] on the backend instead of the URL I need. Can anybody help me sort this out?
Thanks in advance

I think the issue is with your axios.all(...) call. You are passing it two values, but each value is the result of a .map(), i.e. an array of promises rather than a promise. The axios.post() calls inside both .map()s still upload the images, but axios.all() receives nested arrays of promises instead of the resolved URLs. You can try something like this:
async function uploadImages(imgForProduct, screenshots) {
  const URL = "https://api.cloudinary.com/v1_1/ahkji7ation/image/upload";
  const imgUrl = [];
  const imgScrSht = [];

  // Collect the form data for the images and the screenshots
  const imgForProductFormData = imgForProduct.map((img) => {
    const fd = new FormData();
    fd.append("file", img);
    fd.append("upload_preset", "sublimation");
    return fd;
  });
  const screenShotsFormData = screenshots.map((img) => {
    const fd = new FormData();
    fd.append("file", img);
    fd.append("upload_preset", "sublimation");
    return fd;
  });

  // Start every upload; a failed upload resolves to null instead of rejecting
  const imgForProductRequests = imgForProductFormData.map((fd) =>
    axios.post(URL, fd).catch((err) => null)
  );
  const screenshotsRequests = screenShotsFormData.map((fd) =>
    axios.post(URL, fd).catch((err) => null)
  );

  try {
    const imgForProductResponses = await axios.all(imgForProductRequests);
    imgForProductResponses.forEach((res) => {
      if (res) imgUrl.push(res.data.secure_url);
    });
    const screenshotsResponses = await axios.all(screenshotsRequests);
    screenshotsResponses.forEach((res) => {
      if (res) imgScrSht.push(res.data.secure_url);
    });

    const dataObj = {
      email: props.email,
      img: imgUrl,
      screenshot: imgScrSht,
    };
    console.log(dataObj);
    // Post the collected URLs; no need to wrap axios in a new Promise
    const emailRes = await axios.post("/email_to_ayp_sublimation", dataObj);
    console.log(emailRes);
  } catch (err) {
    console.log(err);
  }
}
Hope this works!
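For what it's worth, newer axios releases deprecate axios.all/axios.spread in favour of the native Promise.all. A minimal sketch of the same flow with a shared upload helper, assuming the same props and endpoints as the question (the upload and uploadAndEmail names are just for illustration):

const upload = (img) => {
  const fd = new FormData();
  fd.append("file", img);
  fd.append("upload_preset", "sublimation");
  return axios
    .post("https://api.cloudinary.com/v1_1/ahkji7ation/image/upload", fd)
    .then((res) => res.data.secure_url);
};

async function uploadAndEmail() {
  // Run both batches in parallel and wait until every URL has resolved
  const [img, screenshot] = await Promise.all([
    Promise.all(props.imgForProduct.map(upload)),
    Promise.all(props.screenshot.map(upload)),
  ]);
  await axios.post("/email_to_ayp_sublimation", { email: props.email, img, screenshot });
}

Because each inner Promise.all resolves to an array of plain strings, img and screenshot reach the backend as URL arrays rather than promises.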

Related

Wait for a JavaScript function to finish executing and fetch the response from it

I have an array of images and I am uploading them to Firebase Storage.
data = {
  ...data,
  downloadedUrl: [],
};
if (data?.image?.length) {
  for (const image of data?.image) {
    await uploadFile(image, data);
  }
}
uploadFile handles the logic for uploading the image to Firebase.
const uploadFile = useCallback((file, data) => {
  if (!file) return;
  const storageRef = ref(storage, `/images/${file.name}`);
  const uploadTask = uploadBytesResumable(storageRef, file);
  uploadTask.on(
    "state_changed",
    (snap_shot) => {},
    (err) => console.log(err),
    async () => {
      await getDownloadURL(uploadTask.snapshot.ref).then((url) => {
        data.downloadedUrl.push(url);
      });
    }
  );
}, []);
It takes a few seconds to get the downloadedUrl from uploadTask, and I want to store the URLs in Firebase Firestore once I have all of them.
The issue with the current approach is that the code after the loop starts executing before the URLs arrive, so I am unable to upload the data to Firestore with the downloadedUrl values.
Here's the full function that runs when someone submits the form:
const handleFormSubmit = useCallback(
  async (data) => {
    setLoading(true);
    data = {
      ...data,
      downloadedUrl: [],
    };
    if (data?.image?.length) {
      for (const image of data?.image) {
        await uploadFile(image, data);
      }
    }
    if (data.downloadedUrl.length) {
      uploadDataToFirestore(data);
    }
    if (!data.downloadedUrl?.length) {
      dispatch(handleAlertState({ message: "Error Occured!!!" }));
      router.push("/services");
      return;
    }
    setLoading(false);
    router.push("/");
  },
  [dispatch, router, uploadDataToFirestore, uploadFile]
);
const uploadDataToFirestore = useCallback(
  async (data) => {
    await setDoc(doc(db, "form-responses"), data)
      .then((response) => {
        console.log("response", response);
        dispatch(
          handleAlertState({
            message: "Success. Your request has been sent. Thank You.",
          })
        );
      })
      .catch((error) => {
        console.log("error", error);
      });
  },
  [dispatch]
);
The block of code below executes while the images are still being uploaded to cloud storage.
I want to wait for the downloadedUrl values and only then upload the URLs to Firebase Firestore.
if (!data.downloadedUrl?.length) {
  dispatch(handleAlertState({ message: "Error Occured!!!" }));
  router.push("/services");
  return;
}
Create an array of promises.
Use Promise.all to wait for every promise.
const uploadFile = useCallback((file, data) => {
  return new Promise((resolve, reject) => {
    if (!file) return reject();
    const storageRef = ref(storage, `/images/${file.name}`);
    const uploadTask = uploadBytesResumable(storageRef, file);
    uploadTask.on(
      'state_changed',
      snap_shot => {},
      err => reject(err),
      () => resolve(getDownloadURL(uploadTask.snapshot.ref)),
    );
  });
}, []);
let allPromises = [];
if (data?.image?.length) {
  for (const image of data?.image) {
    allPromises.push(uploadFile(image, data));
  }
}
let uploadedUrls = await Promise.all(allPromises);
console.log(uploadedUrls);
Promise.all takes an array of promises. Here we build a new array holding the promise returned by each uploadFile call; when all of them resolve, Promise.all resolves as well and returns an array of URLs. Because we await Promise.all, the next line does not execute until everything has resolved.
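One caveat: Promise.all rejects as soon as any single upload fails. If you would rather keep the URLs that did succeed, Promise.allSettled is an option; a sketch reusing the allPromises array from above:

const results = await Promise.allSettled(allPromises);
// Fulfilled entries carry a download URL; rejected entries carry the error
const uploadedUrls = results
  .filter((result) => result.status === "fulfilled")
  .map((result) => result.value);
const failed = results.filter((result) => result.status === "rejected");
if (failed.length) console.log(`${failed.length} upload(s) failed`);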

Why doesn't promise return from Lambda work while returning stream to handler for uploading to S3?

I want to return a stream from a function for the main Lambda handler to create a pipe. This works:
const { S3Client } = require("@aws-sdk/client-s3")
const { Upload } = require('@aws-sdk/lib-storage')
const stream = require('stream')
const s3Region = 'us-east-1'
const bucketname = "my_bucket"
exports.handler = function (event, context, callback) {
  let streamfrom = stream.Readable.from(["four"])
  getS3Stream()
    .then(streamto => {
      stream.pipeline(
        streamfrom,
        streamto,
        () => {
          callback(null, { 'statusCode': 200 })
        })
    })
}
function getS3Stream() {
  return new Promise((resolve, reject) => {
    const pass = new stream.PassThrough()
    const upload = new Upload({
      client: new S3Client({ region: s3Region }),
      params: {
        Bucket: bucketname,
        Key: "test/test.txt",
        Body: pass
      }
    })
    upload.done().then((res, error) => {
      if (error) { reject(error) }
      console.log("s3 uploaded")
    })
    resolve(pass)
  })
}
But I want the handler function to return a promise instead of using a callback, at which point it no longer works:
exports.handler = async function (event, context) {
  return new Promise(resolve => {
    let streamfrom = stream.Readable.from(["five5"])
    getS3Stream()
      .then(streamto => {
        stream.pipeline(
          streamfrom,
          streamto,
          () => {
            resolve({ 'statusCode': 200 })
          })
      })
  })
}
It returns {"statusCode":200}, but "s3 uploaded" is not printed, and the file does not appear in S3.
Am I misunderstanding something about how to use promises here?
If you want your handler to return a Promise, you have to mark it as async.
I have not found any documentation on this, but I believe the runtime caller first checks the handler definition so it knows how it should call it.
Something like:
if (handler.constructor.name === 'AsyncFunction') {
  result = await handler(event, context)
  handleResult(null, result)
} else if (handler.constructor.name === 'Function') {
  handler(event, context, handleResult)
} else {}
So, going back to your original code, it can be simply
exports.handler = async function (event, context) {
  return new Promise(resolve => {
    let streamfrom = stream.Readable.from(["five5"])
    getS3Stream()
      .then(streamto => {
        stream.pipeline(
          streamfrom,
          streamto,
          () => resolve({ 'statusCode': 200 })
        )
      })
  })
}

function getS3Stream() {
  return new Promise((resolve, reject) => {
    const pass = new stream.PassThrough()
    const upload = new Upload({
      client: new S3Client({ region: s3Region }),
      params: {
        Bucket: bucketname,
        Key: "test/test.txt",
        Body: pass
      }
    })
    upload.done().then((res, error) => {
      if (error) {
        reject(error)
      }
      console.log("s3 uploaded")
      resolve(pass)
    })
  })
}
Inspired by this, I came up with a solution that almost meets my requirements. The function doesn't return just the stream; it also returns the promise. I am still hoping someone will come up with something that works without that crutch.
const { S3Client } = require("@aws-sdk/client-s3")
const { Upload } = require('@aws-sdk/lib-storage')
const stream = require('stream');
const s3Region = 'us-east-1'
const bucketname = "my_bucket"

exports.handler = async function (event, context) {
  return new Promise(resolve => {
    let streamfrom = stream.Readable.from(["five5"])
    getS3Stream()
      .then(({ streamto, promise }) => {
        stream.pipeline(
          streamfrom,
          streamto,
          () => promise.then(() => resolve({ 'statusCode': 200 }))
        )
      })
  })
}

async function getS3Stream() {
  const streamto = new stream.PassThrough()
  const s3Client = new S3Client({ region: s3Region })
  const upload = new Upload({
    client: s3Client,
    params: {
      Bucket: bucketname,
      Key: "test/test.txt",
      Body: streamto
    }
  })
  let promise = upload.done()
  return ({ streamto, promise })
}
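As an aside, the stream-plus-promise juggling can be sidestepped by keeping the Upload object inside the handler and awaiting upload.done() after wiring up the source stream. A sketch under the same setup (same s3Region, bucketname, and key as above):

exports.handler = async function (event, context) {
  const pass = new stream.PassThrough()
  const upload = new Upload({
    client: new S3Client({ region: s3Region }),
    params: { Bucket: bucketname, Key: "test/test.txt", Body: pass },
  })
  // Start feeding the PassThrough that Upload is consuming
  stream.Readable.from(["five5"]).pipe(pass)
  // done() resolves once S3 has the whole object, so the handler
  // cannot return before the upload has actually finished
  await upload.done()
  console.log("s3 uploaded")
  return { statusCode: 200 }
}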

Read images with Node.js from a URL

Please excuse my ignorance on node.
I need to read an image from a URL and resize it with sharp.
Currently I have it set up like this to read a local file.
For example.
I want to read this image
url= "https://miami.pfsrealty.com/wp-content/uploads/2020/02/Miami-y-su-bahia-con-nubes-al-atardecer-Compressed.jpg"
And my current code is this.
return new Promise(async (resolve, reject) => {
  const fileSystem = require('fs');
  const image = fileSystem.readFileSync(directoryPath, 'base64');
  const sharp = require('sharp');
  const height: number = parseInt(heightString); // parameter
  const width: number = parseInt(widthString); // parameter
  let img = new Buffer(image, 'base64');
  await sharp(img)
    .resize(height, width)
    .toBuffer()
    .then(resizedImageBuffer => {
      const resizedImageData = resizedImageBuffer.toString('base64');
      resolve(resizedImageData);
    })
    .catch(error => {
      // error handling
      reject(error);
    });
});
How should the call be?
Thanks !
Try this:
const sharp = require('sharp');
const fs = require('fs');

function readAndSave(url, width = 300, height = 300) {
  const filename = url.replace(/^.*[\\\/]/, '');
  require('axios').get(url, { responseType: 'arraybuffer' })
    .then((response) => {
      return Buffer.from(response.data, "utf-8")
    }).then((buffer) => {
      return new Promise((resolve, reject) => {
        sharp(buffer)
          .resize(width, height) // note: sharp's resize takes width first, then height
          .toBuffer()
          .then(resizedImageBuffer => {
            const resizedImageData = resizedImageBuffer.toString('base64');
            const buf = Buffer.from(resizedImageData, 'base64');
            fs.writeFile(`./${filename}`, buf, function (err) {
              if (err) throw err;
            });
            resolve()
          })
          .catch(error => {
            // error handling
            reject(error);
          });
      })
    }).catch(error => {
      console.log('error', error)
    });
}

readAndSave('https://miami.pfsrealty.com/wp-content/uploads/2020/02/Miami-y-su-bahia-con-nubes-al-atardecer-Compressed.jpg');
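As a side note, if the goal is only to write the resized image to disk, the Base64 round-trip in the middle is unnecessary: sharp can write the resized buffer straight to a file with toFile. A trimmed sketch of the same function:

const sharp = require('sharp');
const axios = require('axios');

async function readAndSave(url, width = 300, height = 300) {
  const filename = url.replace(/^.*[\\\/]/, '');
  // Fetch the raw image bytes
  const response = await axios.get(url, { responseType: 'arraybuffer' });
  // Resize and write directly, skipping the buffer -> base64 -> buffer detour
  await sharp(Buffer.from(response.data))
    .resize(width, height)
    .toFile(`./${filename}`);
}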

How to access data from ReadableStream response?

I made an API call to an endpoint and it returns this:
const test = () => {
  fetch(endpoint)
    .then((response) => {
      console.log(response.body)
    })
    .catch((err) => {
      console.log(err);
    });
};
How can I obtain the ReadableStream in Base64 format? The endpoint returns a PNG file.
Using this answer, you can get the blob and convert it to base64.
const test = () => {
  fetch(endpoint)
    .then((response) => {
      return response.blob();
    })
    .then((blob) => {
      var reader = new FileReader();
      reader.readAsDataURL(blob);
      reader.onloadend = function () {
        var base64data = reader.result;
        console.log(base64data);
      }
    })
    .catch((err) => {
      console.log(err);
    });
};
MDN on .blob()
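Note that readAsDataURL yields a data URL (data:image/png;base64,...). If you want only the raw Base64 string, a sketch using response.arrayBuffer() instead (fine for small images; very large files would need chunked conversion):

const test = () => {
  fetch(endpoint)
    .then((response) => response.arrayBuffer())
    .then((buffer) => {
      // Convert the raw bytes to a Base64 string
      const bytes = new Uint8Array(buffer);
      let binary = "";
      bytes.forEach((b) => (binary += String.fromCharCode(b)));
      console.log(btoa(binary));
    })
    .catch((err) => {
      console.log(err);
    });
};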

Send ArrayBuffer to S3 put to signedURL

I am progressively loading a file into a buffer. The buffer is valid, but the browser crashes when the ArrayBuffer finishes loading the file into it. What I need is to send the pieces of the buffer (buf = this.concatBuffers(buf, buffer);) to the axios PUT request as they arrive, so I can progressively upload the file to S3 rather than load it into a single variable returned by the promise (which exceeds memory).
How do I modify the link between readFileAsBuffer and the uploadFileToS3 method to do this?
This is my code so you can follow the process.
concatTypedArrays = (a, b) => {
  const c = new a.constructor(a.length + b.length);
  c.set(a, 0);
  c.set(b, a.length);
  return c;
};

concatBuffers = (a, b) =>
  this.concatTypedArrays(
    new Uint8Array(a.buffer || a),
    new Uint8Array(b.buffer || b),
  ).buffer;

readFileAsBuffer = file =>
  new Promise((resolve, reject) => {
    const fileReader = new FileReader();
    fileReader.file = file;
    let buf = new ArrayBuffer();
    const fileChunks = new FileChunker(file, 2097152);
    fileReader.readAsArrayBuffer(fileChunks.blob());
    fileReader.onload = e => {
      this.onProgress(fileChunks);
      const buffer = e.target.result;
      buf = this.concatBuffers(buf, buffer);
      if (fileChunks.hasNext()) {
        fileChunks.next();
        fileReader.readAsArrayBuffer(fileChunks.blob());
        return;
      }
      resolve(buf);
    };
    fileReader.onerror = err => {
      reject(err);
    };
  });

uploadFileToS3 = fileObject => {
  new Promise((resolve, reject) => {
    const decodedURL = decodeURIComponent(fileObject.signedURL);
    this.readFileAsBuffer(fileObject.fileRef).then(fileBuffer => {
      console.log(fileBuffer);
      axios
        .put(decodedURL, fileBuffer, {
          headers: {
            'Content-Type': fileObject.mime,
            'Content-MD5': fileObject.checksum,
            'Content-Encoding': 'UTF-8',
            'x-amz-acl': 'private',
          },
          onUploadProgress: progressEvent => {
            const { loaded, total } = progressEvent;
            const uploadPercentage = parseInt(
              Math.round((loaded * 100) / total),
              10,
            );
            this.setState({ uploadProgress: uploadPercentage });
            console.log(`${uploadPercentage}%`);
            if (uploadPercentage === 100) {
              console.log('complete');
            }
          },
        })
        .then(response => {
          resolve(response.data);
        })
        .catch(error => {
          reject(error);
        });
    });
  });
};

uploadAllFilesToS3 = () => {
  const { files } = this.state;
  new Promise((resolve, reject) => {
    Object.keys(files).map(idx => {
      this.uploadFileToS3(files[idx])
        .then(response => {
          this.setState({ files: [] });
          resolve(response.data);
        })
        .catch(error => {
          reject(error);
        });
    });
  });
};

calcFileMD5 = file =>
  new Promise((resolve, reject) => {
    const fileReader = new FileReader();
    fileReader.file = file;
    const spark = new SparkMD5.ArrayBuffer();
    const fileChunks = new FileChunker(file, 2097152);
    fileReader.readAsArrayBuffer(fileChunks.blob());
    fileReader.onload = e => {
      this.onProgress(fileChunks);
      const buffer = e.target.result;
      spark.append(buffer);
      if (fileChunks.hasNext()) {
        fileChunks.next();
        fileReader.readAsArrayBuffer(fileChunks.blob());
        return;
      }
      const hash = spark.end();
      const checksumAWS = Buffer.from(hash, 'hex').toString('base64');
      resolve(checksumAWS);
    };
    fileReader.onerror = err => {
      reject(err);
    };
  });
I ended up not needing to create my own Buffer of the file. Instead, if I post the fileReference returned by the input directly to axios (or XHR), the request automatically chunks the upload.
Initially I could only make it work with XMLHttpRequest, but I quickly found a way to wrap this in axios, which neatens the logic.
XMLHttpRequest
const xhr = createCORSRequest('PUT', url);
if (!xhr) {
  console.log('CORS not supported');
} else {
  xhr.onload = function () {
    if (xhr.status == 200) {
      console.log('completed');
    } else {
      console.log('Upload error: ' + xhr.status);
    }
  };
  xhr.onerror = function (err) {
    console.log(err)
  };
  xhr.upload.onprogress = function (progressEvent) {
    console.log(progressEvent);
  };
  xhr.setRequestHeader('Content-Type', file.type);
  xhr.setRequestHeader('Content-MD5', md5_base64_binary);
  xhr.setRequestHeader('Content-Encoding', 'UTF-8');
  xhr.setRequestHeader('x-amz-acl', 'private');
  xhr.send(file);
}
Or using axios:
uploadFileToS3 = fileObject => {
  return new Promise((resolve, reject) => {
    const { enqueueSnackbar } = this.props;
    const decodedURL = decodeURIComponent(fileObject.signedURL);
    axios
      .put(decodedURL, fileObject.fileRef, {
        headers: {
          'Content-Type': fileObject.mime,
          'Content-MD5': fileObject.checksum,
          'Content-Encoding': 'UTF-8',
          'x-amz-acl': 'private',
        },
        onUploadProgress: progressEvent => {
          const { loaded, total } = progressEvent;
          const uploadPercentage = parseInt(
            Math.round((loaded * 100) / total),
            10,
          );
          this.setState({ uploadProgress: uploadPercentage });
        },
      })
      .then(response => {
        resolve(response.data);
      })
      .catch(error => {
        reject(error);
      });
  });
};
Have you tried uploading your file using FormData? Let the browser deal with the file reading.
const data = new FormData()
data.append('file', file)
axios.put(decodedURL, data, ....)
Another option is to use axios' transformRequest property (https://github.com/axios/axios#request-config) and do the file reading there.
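A rough sketch of that transformRequest option, assuming the same fileObject shape as in the snippets above (transformRequest receives the outgoing data and headers and must return the body to send):

axios.put(decodedURL, fileObject.fileRef, {
  transformRequest: [
    (data, headers) => {
      // Adjust headers here if needed; returning the File unchanged
      // lets the browser handle reading and streaming the body
      headers['Content-Type'] = fileObject.mime;
      return data;
    },
  ],
});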
