Send ArrayBuffer to S3 PUT to signed URL - JavaScript

I am progressively loading a file into a buffer. The buffer is valid, but the browser crashes once the ArrayBuffer holds the whole file, because memory is exhausted. What I need is to send the pieces of the buffer (buf = this.concatBuffers(buf, buffer);) to the axios PUT request as they arrive, so I can progressively upload the file to S3 rather than accumulate it in a single variable resolved by the promise.
How do I modify the link between readFileAsBuffer and the uploadFileToS3 method to do this?
This is my code so you can follow the process.
concatTypedArrays = (a, b) => {
  const c = new a.constructor(a.length + b.length);
  c.set(a, 0);
  c.set(b, a.length);
  return c;
};

concatBuffers = (a, b) =>
  this.concatTypedArrays(
    new Uint8Array(a.buffer || a),
    new Uint8Array(b.buffer || b),
  ).buffer;
readFileAsBuffer = file =>
  new Promise((resolve, reject) => {
    const fileReader = new FileReader();
    fileReader.file = file;
    let buf = new ArrayBuffer(0);
    const fileChunks = new FileChunker(file, 2097152); // 2 MiB chunks
    fileReader.readAsArrayBuffer(fileChunks.blob());
    fileReader.onload = e => {
      this.onProgress(fileChunks);
      const buffer = e.target.result;
      buf = this.concatBuffers(buf, buffer);
      if (fileChunks.hasNext()) {
        fileChunks.next();
        fileReader.readAsArrayBuffer(fileChunks.blob());
        return;
      }
      resolve(buf);
    };
    fileReader.onerror = err => {
      reject(err);
    };
  });
uploadFileToS3 = fileObject => {
  // the Promise must be returned, otherwise callers cannot chain on it
  return new Promise((resolve, reject) => {
    const decodedURL = decodeURIComponent(fileObject.signedURL);
    this.readFileAsBuffer(fileObject.fileRef).then(fileBuffer => {
      console.log(fileBuffer);
      axios
        .put(decodedURL, fileBuffer, {
          headers: {
            'Content-Type': fileObject.mime,
            'Content-MD5': fileObject.checksum,
            'Content-Encoding': 'UTF-8',
            'x-amz-acl': 'private',
          },
          onUploadProgress: progressEvent => {
            const { loaded, total } = progressEvent;
            const uploadPercentage = parseInt(
              Math.round((loaded * 100) / total),
              10,
            );
            this.setState({ uploadProgress: uploadPercentage });
            console.log(`${uploadPercentage}%`);
            if (uploadPercentage === 100) {
              console.log('complete');
            }
          },
        })
        .then(response => {
          resolve(response.data);
        })
        .catch(error => {
          reject(error);
        });
    });
  });
};
uploadAllFilesToS3 = () => {
  const { files } = this.state;
  return new Promise((resolve, reject) => {
    Object.keys(files).map(idx => {
      this.uploadFileToS3(files[idx])
        .then(response => {
          // uploadFileToS3 already resolves with response.data
          this.setState({ files: [] });
          resolve(response);
        })
        .catch(error => {
          reject(error);
        });
    });
  });
};
calcFileMD5 = file =>
  new Promise((resolve, reject) => {
    const fileReader = new FileReader();
    fileReader.file = file;
    const spark = new SparkMD5.ArrayBuffer();
    const fileChunks = new FileChunker(file, 2097152);
    fileReader.readAsArrayBuffer(fileChunks.blob());
    fileReader.onload = e => {
      this.onProgress(fileChunks);
      const buffer = e.target.result;
      spark.append(buffer);
      if (fileChunks.hasNext()) {
        fileChunks.next();
        fileReader.readAsArrayBuffer(fileChunks.blob());
        return;
      }
      const hash = spark.end();
      const checksumAWS = Buffer.from(hash, 'hex').toString('base64');
      resolve(checksumAWS);
    };
    fileReader.onerror = err => {
      reject(err);
    };
  });

I ended up not needing to build my own buffer of the file: if I pass the file reference returned by the input directly to axios (or XHR), the request chunks the upload automatically.
Initially I could only make it work with XMLHttpRequest, but I quickly found a way to wrap the same approach in axios, which neatens the logic.
XMLHttpRequest
const xhr = createCORSRequest('PUT', url);
if (!xhr) {
  console.log('CORS not supported');
} else {
  xhr.onload = function () {
    if (xhr.status == 200) {
      console.log('completed');
    } else {
      console.log('Upload error: ' + xhr.status);
    }
  };
  xhr.onerror = function (err) {
    console.log(err)
  };
  xhr.upload.onprogress = function (progressEvent) {
    console.log(progressEvent);
  };
  xhr.setRequestHeader('Content-Type', file.type);
  xhr.setRequestHeader('Content-MD5', md5_base64_binary);
  xhr.setRequestHeader('Content-Encoding', 'UTF-8');
  xhr.setRequestHeader('x-amz-acl', 'private');
  xhr.send(file);
}
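For reference, createCORSRequest isn't shown above; a common definition (an assumption on my part, based on typical CORS upload examples) is:
// Assumed helper (not part of the original post): build an XHR for a
// cross-origin request, or return null when the browser lacks CORS support.
function createCORSRequest(method, url) {
  let xhr = new XMLHttpRequest();
  if ('withCredentials' in xhr) {
    xhr.open(method, url, true); // standard CORS-capable XMLHttpRequest
  } else {
    xhr = null;
  }
  return xhr;
}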
Or, using axios:
uploadFileToS3 = fileObject => {
  return new Promise((resolve, reject) => {
    const { enqueueSnackbar } = this.props;
    const decodedURL = decodeURIComponent(fileObject.signedURL);
    axios
      .put(decodedURL, fileObject.fileRef, {
        headers: {
          'Content-Type': fileObject.mime,
          'Content-MD5': fileObject.checksum,
          'Content-Encoding': 'UTF-8',
          'x-amz-acl': 'private',
        },
        onUploadProgress: progressEvent => {
          const { loaded, total } = progressEvent;
          const uploadPercentage = parseInt(
            Math.round((loaded * 100) / total),
            10,
          );
          this.setState({ uploadProgress: uploadPercentage });
        },
      })
      .then(response => {
        resolve(response.data);
      })
      .catch(error => {
        reject(error);
      });
  });
};

Have you tried uploading your file using FormData? Let the browser deal with reading the file.
const data = new FormData()
data.append('file', file)
axios.put(decodedURL, data, ....)
Another option is to use the axios transformRequest config property (https://github.com/axios/axios#request-config) and do the file reading there.
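A minimal sketch of that transformRequest idea, reusing the fileObject fields from the question (hypothetical, untested):
axios.put(decodedURL, fileObject.fileRef, {
  headers: { 'Content-Type': fileObject.mime },
  transformRequest: [
    (data, headers) => {
      // `data` is the File passed above; any last-moment reading or
      // wrapping of the file can happen in this hook. Returning it
      // unchanged sends the raw file body.
      return data;
    },
  ],
});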

Related

How to get image dimensions on Cloudflare Workers?

I'm trying to create a Cloudflare Worker that receives an image URL and returns its width and height, but I get the message ReferenceError: Image is not defined for new Image(). Is there a workaround to make the code below work?
addEventListener('fetch', event => {
  event.respondWith(handleRequest(event.request))
})

async function handleRequest(request) {
  let imageUrl = new URL(request.url).searchParams.get('imageUrl')
  let response = await fetch(imageUrl)
  let blob = await response.blob()
  let image = new Image() // <------ ReferenceError: Image is not defined
  return new Promise((resolve, reject) => {
    image.onload = function () {
      resolve(new Response(JSON.stringify({
        width: image.width,
        height: image.height
      }), {
        headers: {
          'content-type': 'application/json'
        }
      }))
    }
    image.onerror = reject
    image.src = URL.createObjectURL(blob)
  })
}
It seems other functions for images have a similar problem. Example: ReferenceError: createImageBitmap is not defined. So any possible solution would be great.
Image is only available in the browser.
If you want to get the width and height of an image in Node.js, try this:
const https = require('https'); // the file URL below is https, so use the https module
const fs = require('fs');
const gm = require('gm');

async function handleRequest() {
  const filename = "image.png"
  const fileURL = "https://any-page.dom/" + filename
  const file = fs.createWriteStream(filename);
  return new Promise((resolve, reject) => {
    const request = https.get(fileURL, function (response) {
      response.pipe(file);
      // after the download completes, close the file stream
      file.on("finish", () => {
        console.log("Download Completed");
        file.close();
        gm(filename)
          .size(function (err, size) {
            if (!err) {
              console.log('width = ' + size.width);
              console.log('height = ' + size.height);
              // Response is global in Node 18+; on older Node, return a plain object instead
              resolve(new Response(JSON.stringify({
                width: size.width,
                height: size.height
              }), {
                headers: {
                  'content-type': 'application/json'
                }
              }))
            } else {
              reject(err)
            }
          });
      });
    });
  })
}
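If the code has to run inside the Worker itself, one workaround (a sketch, assuming the fetched image is a PNG) is to skip the Image API and read the dimensions straight from the PNG header, which stores width and height as 32-bit big-endian integers at byte offsets 16 and 20:
addEventListener('fetch', event => {
  event.respondWith(handleRequest(event.request))
})

async function handleRequest(request) {
  const imageUrl = new URL(request.url).searchParams.get('imageUrl')
  const response = await fetch(imageUrl)
  const buffer = await response.arrayBuffer()
  const view = new DataView(buffer)
  // PNG layout: 8-byte signature, 4-byte chunk length, 4-byte "IHDR" tag,
  // then width and height as 32-bit big-endian integers.
  const width = view.getUint32(16)
  const height = view.getUint32(20)
  return new Response(JSON.stringify({ width, height }), {
    headers: { 'content-type': 'application/json' }
  })
}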

Why doesn't returning a promise from the Lambda handler work when returning a stream to the handler for uploading to S3?

I want to return a stream from a function for the main Lambda handler to create a pipe. This works:
const { S3Client } = require("@aws-sdk/client-s3")
const { Upload } = require('@aws-sdk/lib-storage')
const stream = require('stream')

const s3Region = 'us-east-1'
const bucketname = "my_bucket"

exports.handler = function (event, context, callback) {
  let streamfrom = stream.Readable.from(["four"])
  getS3Stream()
    .then(streamto => {
      stream.pipeline(
        streamfrom,
        streamto,
        () => {
          callback(null, { 'statusCode': 200 })
        })
    })
}

function getS3Stream() {
  return new Promise((resolve, reject) => {
    const pass = new stream.PassThrough()
    const upload = new Upload({
      client: new S3Client({ region: s3Region }),
      params: {
        Bucket: bucketname,
        Key: "test/test.txt",
        Body: pass
      }
    })
    upload.done().then((res, error) => {
      if (error) { reject(error) }
      console.log("s3 uploaded")
    })
    resolve(pass)
  })
}
But I want the handler function to return a promise instead of using a callback, at which point it no longer works:
exports.handler = async function (event, context) {
  return new Promise(resolve => {
    let streamfrom = stream.Readable.from(["five5"])
    getS3Stream()
      .then(streamto => {
        stream.pipeline(
          streamfrom,
          streamto,
          () => {
            resolve({ 'statusCode': 200 })
          })
      })
  })
}
It returns {"statusCode":200}, but "s3 uploaded" is not printed, and the file does not appear in S3.
Am I misunderstanding something about how to use promises here?
If you want your handler to return a Promise, you have to mark it as async.
I have not found any documentation on this, but I believe the runtime caller first checks the handler definition so it knows how to call it.
Something like:
if (handler.constructor.name === 'AsyncFunction') {
  result = await handler(event, context)
  handleResult(null, result)
} else if (handler.constructor.name === 'Function') {
  handler(event, context, handleResult)
} else {}
So, going back to your original code, it can be simply
exports.handler = async function (event, context) {
  return new Promise(resolve => {
    let streamfrom = stream.Readable.from(["five5"])
    getS3Stream()
      .then(streamto => {
        stream.pipeline(
          streamfrom,
          streamto,
          () => resolve({ 'statusCode': 200 })
        )
      })
  })
}

function getS3Stream() {
  return new Promise((resolve, reject) => {
    const pass = new stream.PassThrough()
    const upload = new Upload({
      client: new S3Client({ region: s3Region }),
      params: {
        Bucket: bucketname,
        Key: "test/test.txt",
        Body: pass
      }
    })
    upload.done().then((res, error) => {
      if (error) {
        reject(error)
      }
      console.log("s3 uploaded")
      resolve(pass)
    })
  })
}
Inspired by this, I came up with a solution that almost meets my requirements. The function doesn't return just the stream; it also returns the promise. I am still hoping someone will come up with something that works without that crutch.
const { S3Client } = require("@aws-sdk/client-s3")
const { Upload } = require('@aws-sdk/lib-storage')
const stream = require('stream');

const s3Region = 'us-east-1'
const bucketname = "my_bucket"

exports.handler = async function (event, context) {
  return new Promise(resolve => {
    let streamfrom = stream.Readable.from(["five5"])
    getS3Stream()
      .then(({ streamto, promise }) => {
        stream.pipeline(
          streamfrom,
          streamto,
          () => promise.then(() => resolve({ 'statusCode': 200 }))
        )
      })
  })
}

async function getS3Stream() {
  const streamto = new stream.PassThrough()
  const s3Client = new S3Client({ region: s3Region })
  const upload = new Upload({
    client: s3Client,
    params: {
      Bucket: bucketname,
      Key: "test/test.txt",
      Body: streamto
    }
  })
  let promise = upload.done()
  return ({ streamto, promise })
}
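For what it's worth, one possible way to drop the { streamto, promise } crutch (an untested sketch, reusing the s3Region and bucketname constants above) is to await the pipeline and the upload together via stream/promises:
const { S3Client } = require('@aws-sdk/client-s3')
const { Upload } = require('@aws-sdk/lib-storage')
const stream = require('stream')
const { pipeline } = require('stream/promises')

exports.handler = async function (event, context) {
  const body = new stream.PassThrough()
  const upload = new Upload({
    client: new S3Client({ region: s3Region }),
    params: { Bucket: bucketname, Key: 'test/test.txt', Body: body },
  })
  // The upload only settles after the pipeline ends the PassThrough,
  // so both promises are awaited together rather than sequentially.
  await Promise.all([
    pipeline(stream.Readable.from(['five5']), body),
    upload.done(),
  ])
  return { statusCode: 200 }
}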

Read images with Node.js from a URL

Please excuse my ignorance on Node.
I need to read an image from a URL and resize it with sharp.
Currently I have it set up to read a local file.
For example, I want to read this image:
url = "https://miami.pfsrealty.com/wp-content/uploads/2020/02/Miami-y-su-bahia-con-nubes-al-atardecer-Compressed.jpg"
And my current code is this:
return new Promise(async (resolve, reject) => {
  const fileSystem = require('fs');
  const image = fileSystem.readFileSync(directoryPath, 'base64');
  const sharp = require('sharp');
  const height: number = parseInt(heightString); // parameter
  const width: number = parseInt(widthString); // parameter
  let img = Buffer.from(image, 'base64'); // new Buffer() is deprecated
  await sharp(img)
    .resize(height, width)
    .toBuffer()
    .then(resizedImageBuffer => {
      const resizedImageData = resizedImageBuffer.toString('base64');
      resolve(resizedImageData);
    })
    .catch(error => {
      // error handling
      reject(error);
    });
});
What should the call look like?
Thanks !
Try this:
const sharp = require('sharp');
const fs = require('fs');

function readAndSave(url, width = 300, height = 300) {
  const filename = url.replace(/^.*[\\\/]/, '');
  require('axios').get(url, { responseType: 'arraybuffer' })
    .then((response) => {
      return Buffer.from(response.data) // response.data is an ArrayBuffer here
    }).then((buffer) => {
      return new Promise((resolve, reject) => {
        sharp(buffer)
          .resize(height, width)
          .toBuffer()
          .then(resizedImageBuffer => {
            const resizedImageData = resizedImageBuffer.toString('base64');
            const buf = Buffer.from(resizedImageData, 'base64');
            fs.writeFile(`./${filename}`, buf, function (err) {
              if (err) throw err;
            });
            resolve()
          })
          .catch(error => {
            // error handling
            reject(error);
          });
      })
    }).catch(error => {
      console.log('error', error)
    });
}

readAndSave('https://miami.pfsrealty.com/wp-content/uploads/2020/02/Miami-y-su-bahia-con-nubes-al-atardecer-Compressed.jpg');
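As a side note, sharp can also write the file itself, which would avoid the base64 round-trip above; a slimmer sketch of the same readAndSave (note that sharp's resize signature is (width, height)):
const sharp = require('sharp');
const axios = require('axios');

async function readAndSave(url, width = 300, height = 300) {
  const filename = url.replace(/^.*[\\\/]/, '');
  const response = await axios.get(url, { responseType: 'arraybuffer' });
  // resize and write straight to disk, no base64 step
  await sharp(Buffer.from(response.data))
    .resize(width, height)
    .toFile(`./${filename}`);
  return filename;
}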

How to access data from a ReadableStream response?

I made an API call to an endpoint and it returns this:
const test = () => {
  fetch(endpoint)
    .then((response) => {
      console.log(response.body)
    })
    .catch((err) => {
      console.log(err);
    });
};
How can I obtain the ReadableStream in Base64 format? The endpoint returns a PNG file.
Using this answer, you can get the blob and convert it to base64.
const test = () => {
  fetch(endpoint)
    .then((response) => {
      return response.blob();
    })
    .then((blob) => {
      var reader = new FileReader();
      reader.readAsDataURL(blob);
      reader.onloadend = function () {
        var base64data = reader.result;
        console.log(base64data);
      }
    })
    .catch((err) => {
      console.log(err);
    });
};
MDN on .blob()
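If an awaitable version is handier, the same FileReader logic can be wrapped in a Promise (a small sketch against the same endpoint):
const blobToBase64 = (blob) =>
  new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onloadend = () => resolve(reader.result); // "data:image/png;base64,..."
    reader.onerror = reject;
    reader.readAsDataURL(blob);
  });

const test = async () => {
  const response = await fetch(endpoint);
  const base64data = await blobToBase64(await response.blob());
  console.log(base64data);
};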

Is there an easier way to make these axios POST requests?

I'm trying to upload multiple images to Cloudinary via its API in my React app using axios.
I'm new to promises, so I'm not sure if I'm using the right approach here.
I am able to upload all the images to Cloudinary; however, the response I get is the confusing part, as I need to collect the images' URLs from it into an object so I can send them in an email via Mailgun.
Here is my code to upload the images:
if (payment === "success") {
  axios
    .all([
      props.imgForProduct.map((img) => {
        const fd = new FormData();
        fd.append("file", img);
        fd.append("upload_preset", "sublimation");
        return axios
          .post(
            "https://api.cloudinary.com/v1_1/ahkji7ation/image/upload",
            fd
          )
          .then(async (res) => {
            return (imgUrl = res.data.secure_url);
          })
          .catch((err) => console.log(err));
      }),
      props.screenshot.map((img) => {
        const fd = new FormData();
        fd.append("file", img);
        fd.append("upload_preset", "sublimation");
        return axios
          .post(
            "https://api.cloudinary.com/v1_1/ahkji7ation/image/upload",
            fd
          )
          .then(async (res) => {
            return (imgScrSht = res.data.secure_url);
          })
          .catch((err) => console.log(err));
      }),
    ])
    .then(
      axios.spread(async (...res) => {
        imgUrl.push(res[0][0]);
        imgScrSht.push(res[1][0]);
        let dataObj = {
          email: props.email,
          img: imgUrl,
          screenshot: imgScrSht,
        };
        console.log(dataObj);
        new Promise((resolve, reject) => {
          axios.post("/email_to_ayp_sublimation", dataObj);
          resolve((res) => {
            console.log(res);
          });
          reject((err) => {
            console.log(err);
          });
        });
      })
    )
    .catch((err) => console.error(err));
}
When I console.log the dataObj, this is what I'm sending in the last call (the new Promise):
screenshot: Array(1)
0: Promise {<resolved>: "https://res.cloudinary.com/ahkji7ation/image/u…
590871936/ahkji7ation/kqebmkjfj0prmyygls2y.jpg"}
length: 1
__proto__: Array(0)
I'm receiving [object Object] on the backend instead of the URL I need. Can anybody help me sort this out?
Thanks in advance.
I think the issue is with your axios.all(...) call. The two values you pass in are .map(...) results, i.e. arrays of promises, not promises themselves. The axios.post() calls inside still upload the images, but what axios.all() resolves with are those arrays returned by .map(). You can try something like this:
async function uploadImages(imgForProduct, screenshots) {
  const URL = "https://api.cloudinary.com/v1_1/ahkji7ation/image/upload";
  // collectors for the uploaded image urls
  const imgUrl = [];
  const imgScrSht = [];
  // Collect all the form datas for img and screenshots
  const imgForProductFormData = imgForProduct.map((img) => {
    const fd = new FormData();
    fd.append("file", img);
    fd.append("upload_preset", "sublimation");
    return fd;
  });
  const screenShotsFormData = screenshots.map((img) => {
    const fd = new FormData();
    fd.append("file", img);
    fd.append("upload_preset", "sublimation");
    return fd;
  });
  // a failed upload resolves to null instead of rejecting the whole batch
  const imgForProductRequests = imgForProductFormData.map(
    (fd) => axios.post(URL, fd).catch((err) => null)
  );
  const screenshotsRequests = screenShotsFormData.map(
    (fd) => axios.post(URL, fd).catch((err) => null)
  );
  try {
    const imgForProductResponses = await axios.all(imgForProductRequests);
    imgForProductResponses.map((res) => (res ? imgUrl.push(res.data.secure_url) : null));
    const screenshotsResponses = await axios.all(screenshotsRequests);
    screenshotsResponses.map((res) => (res ? imgScrSht.push(res.data.secure_url) : null));
    let dataObj = {
      email: props.email,
      img: imgUrl,
      screenshot: imgScrSht,
    };
    console.log(dataObj);
    // axios.post already returns a promise, so it can be awaited directly
    const res = await axios.post("/email_to_ayp_sublimation", dataObj);
    console.log(res);
  } catch (err) {
    console.log(err)
  }
}
Hope this works!
