Upload CSV file to AWS S3 bucket directly from a server - javascript

Happy weekend all,
I'm working on a task that fetches data from an API, stores it in a CSV file, and then uploads that file directly to an AWS S3 bucket. I've tried several ways, but I'm currently stuck at the very last step. Any help would be much appreciated.
The code below demonstrates most of the problem and what I've tried so far.
First, I fetch the data from an API:
async systems() {
  const endpoint = sampleEndPoints.SYSTEMS
  return this.aggregateEndpoint(endpoint)
}
Second, I take the fetched data and write it into a CSV file as a buffer (because I need to pass it to fs.createReadStream later on):
// generate JSON to Buffer
async generateCsvToBuffer(json) {
  const {aws} = this.config
  var ws = xlsx.utils.json_to_sheet(json)
  var wb = xlsx.utils.book_new();
  await xlsx.utils.book_append_sheet(wb, ws, 'Systems')
  const csvParsed = xlsx.write(wb, { type: 'buffer'})
  return csvParsed;
}
Third, I take the buffer from csvParsed and upload it to AWS S3. The problem is right here: Body: fileStream.path is supposed to contain the content of the file, but unfortunately it logs out like this, which comes from fs.createReadStream:
'{"type":"Buffer","data":[80,75,3,4,10,0,0,0,0,0,249,117,199,78,214,146,124
async uploadSample(file) {
  const {aws} = this.config
  AWS.config.update({
    secretAccessKey: aws.secretAccessKey,
    accessKeyId: aws.accessKeyId,
    region: 'us-east-2'
  })
  const bufferObject = new Buffer.from(JSON.stringify(file))
  /*** WE NEED THE FILE SYSTEM IN ORDER TO STORE */
  const fileStream = fs.createReadStream(bufferObject)
  const uploadParams = {Bucket: aws.bucket, Key: aws.key, Body: fileStream.path}
  const s3 = new AWS.S3()
  await s3.upload(uploadParams, null, function(error, file) {
    if (error) {
      console.log(error)
    } else {
      console.log('Successfully uploaded')
    }
  })
}
All of my functions are executed in server.js, so if you look at this you can get the whole picture of the problem:
app.get('/systems/parsed', async (req, res) => {
  const Sample = await Sample()
  // Fetch the data from an API
  const systems = await Cache.remember('systems', async () => {
    return Sample.systems()
  })
  const integration = await IntegrationInstance()
  /** GET THE RESPONSE DATA AND PUT THEM IN A CSV FILE */
  const result = await integration.generateCsvToBuffer(systems)
  const aws = await AwsInstance()
  /*** GET THE SYSTEMS FILE (CSV FILE) THEN UPLOAD THEM INTO THE AWS S3 BUCKET */
  const awsUpload = await aws.uploadWorkedWithBuffer(result)
  return res.send(awsUpload);
})
My only concern here is that the file is successfully uploaded to AWS S3, but its content is still a serialized Buffer. Any help with the existing functions, or any shorter way, would be much appreciated.
Here's my summary again: fetch data from a server -> put it in a CSV file as a buffer (but from a web browser) -> upload it from there to an AWS S3 bucket -> problem: the file is uploaded, but its content is still the serialized buffer.

It looks like you are making things more complicated than necessary here. According to the documentation for .upload, you can pass a buffer to it directly instead of creating a stream from the buffer. I suspect your underlying issue, though, is that you pass the stream's path as the Body instead of the stream (or buffer) itself.
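For example, a minimal sketch of that idea, assuming csvBuffer is the buffer returned by generateCsvToBuffer and the same config object as in the question:

// Sketch only: pass the buffer straight to s3.upload as Body.
// aws.bucket, aws.key and csvBuffer are taken from the question's code.
const AWS = require('aws-sdk')

async function uploadCsvBuffer(aws, csvBuffer) {
  const s3 = new AWS.S3({
    accessKeyId: aws.accessKeyId,
    secretAccessKey: aws.secretAccessKey,
    region: 'us-east-2'
  })
  const params = {
    Bucket: aws.bucket,
    Key: aws.key,
    Body: csvBuffer,          // a Buffer is accepted directly as Body
    ContentType: 'text/csv'   // optional, helps S3 and browsers treat it as CSV
  }
  // .promise() lets us await the upload instead of using a callback
  return s3.upload(params).promise()
}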

I actually solved it.
First, when you create the generateCsvToBuffer function, remember to set a bookType on your wb (workbook) so that S3 recognizes the file as CSV. The function should look something like this:
async generateCsvToBuffer(json) {
  const {aws} = this.config
  var ws = xlsx.utils.json_to_sheet(json)
  var wb = xlsx.utils.book_new();
  await xlsx.utils.book_append_sheet(wb, ws, 'Systems')
  const csvParsed = xlsx.write(wb, { type: 'buffer', bookType: 'csv'})
  return csvParsed;
}
Second, you have to include ContentDisposition: 'attachment' in the uploadParams for the AWS configuration:
async uploadSample(file) {
  const {aws} = this.config
  AWS.config.update({
    secretAccessKey: aws.secretAccessKey,
    accessKeyId: aws.accessKeyId,
    region: 'us-east-2'
  })
  const bufferObject = new Buffer.from(JSON.stringify(file))
  /*** WE NEED THE FILE SYSTEM IN ORDER TO STORE */
  const fileStream = fs.createReadStream(bufferObject)
  const uploadParams = {
    Bucket: aws.bucket,
    Key: aws.key,
    Body: fileStream.path,
    ContentDisposition: 'attachment' // mark the object as a downloadable attachment
  }
  const s3 = new AWS.S3()
  await s3.upload(uploadParams, null, function(error, file) {
    if (error) {
      console.log(error)
    } else {
      console.log('Successfully uploaded')
    }
  })
}

Related

AWS S3 V3 Error trying to get list of objects inside a bucket. SignatureDoesNotMatch

I have a React project created using create-react-app and an AWS S3 bucket in which I've saved some images that I want to display on my website.
I have created an aws.js file where I configure and make the call like this:
import { S3Client, ListObjectsV2Command } from "@aws-sdk/client-s3";

const REGION = 'eu-central-1'
const credentials = {
  accessKeyId: accessKeyId,
  privateKeyId: privateKeyId,
}
const config = {
  region: REGION,
  credentials: credentials,
}
const bucketName = {
  Bucket: bucketName,
}
const s3Client = new S3Client(config);

export const run = async () => {
  try {
    const command = new ListObjectsV2Command(bucketName);
    const data = await s3Client.send(command);
    console.log("SUCCESS\n", data);
  }
  catch (err) {
    console.log("ERROR\n", err);
  }
}
I have also created a .env file where I saved the keys, with and without the REACT_APP prefix, but the result is the same: the credentials are rejected as invalid.
As for the credentials, I've checked and rechecked them ten times, and I also created a new user and used those keys, but nothing changed. I also configured CORS to allow access from my localhost.
What am I doing wrong? And is there complete documentation, from A to Z, on how to use AWS services, including v3, the API docs, credential setup, and everything else?
P.S. It's my first time using AWS, so some docs would be much appreciated. Thanks in advance.
UPDATE:
I tried the AWS JavaScript SDK v2 and now it works. Here is the code that I used to list the objects inside a bucket.
But it only works when I use AWS.config.update; if I pass the configuration to the bucket instead, it still throws an error.
const AWS = require('aws-sdk');
AWS.config.update({
  region: region,
  accessKeyId: accessKeyId,
  secretAccessKey: secretAccessKey
});
let s3 = new AWS.S3()

export const testFnc = () => {
  s3.listObjects({
    Bucket: 'artgalleryszili.digital'
  }, (err, res) => { // note: the callback signature is (error, data)
    if (err) {
      console.log(err);
    } else {
      console.log(res);
    }
  })
}
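For reference, here is a minimal v3 sketch equivalent to the working v2 snippet above (assuming the same keys loaded from your .env file). Note that the v3 credentials object expects a field named secretAccessKey, not privateKeyId:

import { S3Client, ListObjectsV2Command } from "@aws-sdk/client-s3";

// Sketch only: same region and bucket as in the v2 snippet above.
const s3Client = new S3Client({
  region: 'eu-central-1',
  credentials: {
    accessKeyId: accessKeyId,         // from your .env
    secretAccessKey: secretAccessKey, // v3 expects this exact field name
  },
});

export const listBucket = async () => {
  const data = await s3Client.send(
    new ListObjectsV2Command({ Bucket: 'artgalleryszili.digital' })
  );
  console.log(data.Contents);
};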

How to Upload File to AWS S3 Bucket via pre signed URL in Node.js

I am uploading files to an S3 bucket using the S3 upload function in Node.js. The frontend is built with Angular. But now the client's requirement is that all uploads should go to the S3 bucket via a pre-signed URL. Is this because of a security concern? The code that I am currently using to upload files to the S3 bucket is:
async function uploadFile(object) {
  // object param contains two properties 'image_data' and 'path'
  return new Promise(async (resolve, reject) => {
    var obj = object.image_data;
    var imageRemoteName = object.path + '/' + Date.now() + obj.name;
    AWS.config.update({
      accessKeyId: ACCESS_KEY,
      secretAccessKey: SECRET_KEY,
      region: REGION
    })
    var s3 = new AWS.S3()
    s3.upload({
      Bucket: BUCKET,
      Body: obj.data,
      Key: imageRemoteName
    })
    .promise()
    .then(response => {
      console.log(`done! - `, response)
      resolve(response.Location)
    })
    .catch(err => {
      console.log('failed:', err)
    })
  })
}
Any help will be appreciated, thanks!
Security-wise it doesn't make a difference whether you call upload or first create a pre-signed URL, as long as the code you showed does not run inside your Angular application, i.e. on the client. If it did, every client of your application would have access to your AWS access key and secret key, and swapping upload for a pre-signed URL wouldn't solve that problem. However, if this code runs on a server such as Express, you're basically fine.
AWS provides instructions on how to upload objects using a pre-signed URL. The basic steps are:
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
import { S3Client, PutObjectCommand } from "@aws-sdk/client-s3";

const s3Client = new S3Client({
  region: REGION,
  // in SDK v3 the keys go inside a nested `credentials` object
  credentials: {
    accessKeyId: ACCESS_KEY,
    secretAccessKey: SECRET_KEY,
  },
});

/* ... */

const command = new PutObjectCommand({
  Bucket: BUCKET,
  Body: obj.data,
  Key: imageRemoteName
});

// upload image and return a new signed URL,
// with expiration to download image, if needed.
// Otherwise you can leave `signedUrl` unused.
const signedUrl = await getSignedUrl(s3Client, command, {
  expiresIn: 3600,
});
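The signed URL is then handed to the client, which uploads the file itself. A minimal sketch of the browser/Angular side, assuming signedUrl was returned by the endpoint above and file is a File taken from an <input type="file">:

// Sketch only: PUT the raw file bytes to the pre-signed URL.
async function uploadViaSignedUrl(signedUrl, file) {
  const response = await fetch(signedUrl, {
    method: 'PUT',
    headers: { 'Content-Type': file.type },
    body: file, // the browser sends the File/Blob as the request body
  });
  if (!response.ok) {
    throw new Error(`Upload failed with status ${response.status}`);
  }
}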

express.js: pass an uploaded image to s3

I am trying to pass an image uploaded from a React app through Express to a managed S3 bucket. The platform/host I am using creates and manages the S3 bucket and generates upload and access URLs. This all works fine (I have tested a generated upload URL in Postman with an image in a binary body and it worked perfectly).
My problem is passing the image through Express. I am using multer to get the image from the form, but I am assuming multer turns that image into some kind of file object while S3 expects some sort of blob or stream.
In the following code, the image in req.file exists, I get a 200 response from S3 with no errors, and when I visit the asset URL the URL works, but the image itself is missing.
import { Router } from 'express';
import multer from 'multer';
// `fetch` is global in Node 18+; otherwise node-fetch is used.
// getPresignedURLS is my helper that asks the host platform for upload/access URLs.

const router = Router();
const upload = multer()

router.post('/', upload.single('file'), async (req, res) => {
  console.log(req.file)
  const asset = req.file
  const assetPath = req.headers['asset-path']
  let s3URLs = await getPresignedURLS(assetPath)
  const sendAsset = await fetch(
    s3URLs.urls[0].upload_url, // the s3 upload url
    {
      method: 'PUT',
      headers: {
        "Content-Type": asset.mimetype
      },
      body: asset,
      redirect: 'follow'
    }
  )
  console.log("s3 response", sendAsset)
  res.status(200).json({"url": s3URLs.urls[0].access_url });
});

export default router;
I am not sure how to convert what multer gives me into something that AWS S3 will accept. I am also open to getting rid of multer if there is an easier way to upload binary files to Express.
Instead of multer, you can use multiparty to get the file data from the request object, and to upload the file to the S3 bucket you can use aws-sdk.
const AWS = require("aws-sdk");
const multiparty = require("multiparty");
/**
* Helper method which takes the request object and returns a promise with a data.
*/
const getDataFromRequest = (req) =>
new Promise(async(resolve, reject) => {
const form = new multiparty.Form();
await form.parse(req, (err, fields, files) => {
if (err) reject(err);
const bucketname = fields.bucketname[0];
const subfoldername = fields.subfoldername[0];
const file = files["file"][0]; // get the file from the returned files object
if (!file) reject("File was not found in form data.");
else resolve({
file,
bucketname,
subfoldername
});
});
});
/**
* Helper method which takes the request object and returns a promise with the AWS S3 object details.
*/
const uploadFileToS3Bucket = (
file,
bucketname,
subfoldername,
options = {}
) => {
const s3 = new AWS.S3();
// turn the file into a buffer for uploading
const buffer = readFileSync(file.path);
var originalname = file.originalFilename;
var attach_split = originalname.split(".");
var name = attach_split[0];
// generate a new random file name
const fileName = name;
// the extension of your file
const extension = extname(file.path);
console.log(`${fileName}${extension}`);
const params = {
Bucket: bucketname, //Bucketname
ACL: "private", //Permission
Key: join(`${subfoldername}/`, `${fileName}${extension}`), // File name you want to save as in S3
Body: buffer, // Content of file
};
// return a promise
return new Promise((resolve, reject) => {
return s3.upload(params, (err, result) => {
if (err) reject(err);
else resolve(result); // return the values of the successful AWS S3 request
});
});
};
router.post('/', upload.single('file'), async(req, res) => {
try {
// extract the file from the request object
const {
file,
bucketname,
subfoldername
} = await getDataFromRequest(req);
// Upload File to specified bucket
const {
Location,
ETag,
Bucket,
Key
} = await uploadFileToS3Bucket(
file,
bucketname,
subfoldername
);
let response = {};
res["Location"] = Location;
response["ETag"] = ETag;
response["Bucket"] = Bucket;
response["Key"] = Key;
res.status(200).json(response);
} catch (error) {
throw error;
}
});
The request body will be form data with the following fields (an example browser request is sketched after the list):
bucketname:
subfoldername:
file: FileData
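Here is a minimal sketch of how a browser client could call the route above. The '/upload' path, bucket name, and subfolder name are placeholders; adjust them to your own setup:

// Sketch only: send the multipart form data that the multiparty route expects.
async function sendFile(fileInput) {
  const formData = new FormData();
  formData.append('bucketname', 'my-bucket');    // your bucket name
  formData.append('subfoldername', 'images');    // your subfolder
  formData.append('file', fileInput.files[0]);   // fileInput is an <input type="file"> element
  const res = await fetch('/upload', { method: 'POST', body: formData });
  return res.json(); // { Location, ETag, Bucket, Key }
}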
For anyone who ever stumbles across this question: the solution was to create a custom multer storage engine. Inside the engine you get access to the file with a stream property that S3 accepted (with the correct headers).
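The answer doesn't include the engine itself, so here is a minimal sketch of what such a storage engine could look like. It assumes Node 18+ (global fetch), the getPresignedURLS helper from the question, and the asset-path header; the original apparently streamed the file directly, while this sketch buffers it first for simplicity:

const multer = require('multer');

// Sketch only: a custom multer storage engine that PUTs the incoming file
// to a pre-signed S3 URL instead of writing it to disk.
class PresignedS3Storage {
  _handleFile(req, file, cb) {
    const chunks = [];
    file.stream.on('data', (chunk) => chunks.push(chunk));
    file.stream.on('error', cb);
    file.stream.on('end', async () => {
      try {
        const body = Buffer.concat(chunks);
        // getPresignedURLS is the question's own helper (assumed available here)
        const s3URLs = await getPresignedURLS(req.headers['asset-path']);
        const response = await fetch(s3URLs.urls[0].upload_url, {
          method: 'PUT',
          headers: { 'Content-Type': file.mimetype },
          body,
        });
        if (!response.ok) return cb(new Error(`S3 responded with ${response.status}`));
        cb(null, { size: body.length, accessUrl: s3URLs.urls[0].access_url });
      } catch (err) {
        cb(err);
      }
    });
  }

  _removeFile(req, file, cb) {
    cb(null); // nothing was written locally, so nothing to clean up
  }
}

const upload = multer({ storage: new PresignedS3Storage() });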

Send binary response from Uint8Array in Express.js

I am using Express.js with TypeScript and I would like to send a Uint8Array as binary data.
This is what I use so far, and it works, but I would like to avoid saving the file first, because I think it wastes performance:
const filePath = path.resolve(__dirname, 'template.docx');
const template = fs.readFileSync(filePath);
const buffer: Uint8Array = await createReport({
  template,
  data: {
    productCode: data.productCode,
  },
});
fs.writeFileSync(path.resolve(__dirname, 'output.docx'), buffer);
res.sendFile(path.resolve(__dirname, 'output.docx'));
I am using docx-templates to generate the file by the way.
You can use a PassThrough stream for this purpose; it keeps the file in memory with no need to write to disk.
Something like this should do it:
const stream = require("stream");
const readStream = new stream.PassThrough();
// Pass your output.docx buffer to this
readStream.end(buffer);
res.set("Content-disposition", 'attachment; filename=' + "output.docx");
res.set("Content-Type", "application/vnd.openxmlformats-officedocument.wordprocessingml.document");
readStream.pipe(res);
The complete Node.js code:
const fs = require("fs");
const express = require("express");
const port = 8000;
const app = express();
const stream = require("stream");
app.get('/download-file', (req, res) => {
  const buffer = fs.readFileSync("./test.docx");
  console.log("/download-file: Buffer length:", buffer.length);
  const readStream = new stream.PassThrough();
  readStream.end(buffer);
  res.set("Content-disposition", 'attachment; filename=' + "test.docx");
  res.set("Content-Type", "application/vnd.openxmlformats-officedocument.wordprocessingml.document");
  readStream.pipe(res);
});
app.listen(port);
console.log(`Serving at http://localhost:${port}`);
To test, add a 'test.docx' file to the same directory, then point your browser to http://localhost:8000/download-file
Terry,
Thanks for updating your answer and providing the full code. However, it still does not help much. I am trying to understand how to handle this on the front-end side, in my case in Vue. Here is my code:
router.post('/chart/word', async (req, res, next) => {
  try {
    if (!req.body.chartImage) throw new BadRequest('Missing the chart image from the request body')
    const wordTemplate = await s3GetFile('folder', 'chart-templates-export/charts-template.docx')
    const template = wordTemplate.Body
    const buffer = await createReport({
      cmdDelimiter: ["{", "}"],
      template,
      additionalJsContext: {
        chart: () => {
          const dataUrl = req.body.chartImage.src
          const data = dataUrl.slice("data:image/jpeg;base64,".length);
          return { width: 18, height: 12, data, extension: '.jpeg' }
        }
      }
    })
    const stream = require('stream')
    const readStream = new stream.PassThrough()
    readStream.end(buffer)
    res.set("Content-disposition", 'attachment; filename=' + "output.docx")
    res.set("Content-Type", "application/vnd.openxmlformats-officedocument.wordprocessingml.document")
    readStream.pipe(res)
  } catch (err) {
    console.log(err)
    next(err)
  }
})
And here is my Vue code; I've tested various things, but nothing works:
async exportCharts() {
  console.log('this.$refs.test: ', this.$refs.test)
  let img = {
    src: this.$refs.test.getDataURL({
      type: 'jpeg',
      pixelRatio: window.devicePixelRatio || 1,
      backgroundColor: '#fff'
    }),
    width: this.$refs.test.getWidth(),
    height: this.$refs.test.getHeight()
  }
  const answersReq = await this.axios({
    method: 'post',
    url: '/pollAnswers/chart/word',
    data: {
      chartImage: img
    },
    responseType: 'arraybuffer' // 'blob' // 'document'
  })
  console.log('answersReq: ', answersReq)
  if (answersReq.data) {
    downloadURL(answersReq.data, 'report.docx')
  }
}
What I am basically doing is sending an image to the API (taken from an HTML vue-echart element), then inserting it into a docx template using the docx-templates library, which returns a Uint8Array that I want to export as a new Word document with the populated charts. Then the user (on the UI) should be able to choose the destination.
Here is the code for the download URL:
export function downloadURL(data, fileName) {
  const mimeType = 'application/vnd.openxmlformats-officedocument.wordprocessingml.document'
  const blob = new Blob([data], { type: mimeType })
  const url = URL.createObjectURL(blob)
  const element = document.createElement('a')
  element.href = url
  element.download = fileName
  element.style.display = 'none'
  document.body.appendChild(element)
  element.click()
  URL.revokeObjectURL(element.href)
  document.body.removeChild(element)
}
P.S. Just to mention: if I save the buffer (the Uint8Array returned from createReport) directly in the API, it works; the file is downloaded successfully and I can read it without any problems - it contains the correct chart.
UPDATE:
I figured it out, but I am not sure why it has to work this way and not the other. In the /chart/word endpoint I convert the Uint8Array buffer into a stream and send it as the response (the same way you did). Then, in Vue, I fetch it with responseType: 'arraybuffer', which gives me the Uint8Array buffer back, and I use the same download method, and it works. Initially I tried to send the buffer directly (without converting it to a stream as you suggested), but then on the front end the response arrived as an object containing the Uint8Array's values, which was not what I expected, and I could not create a valid docx file from it. So, for some reason, the buffer has to be converted to a stream in the API before it is sent as the response; on the front end I then have to read it back as an arraybuffer and, finally, trigger the docx download.
If you can explain to me why it works like that, I will be very happy.

Upload Image from Google Cloud Function to Cloud Storage

I'm attempting to handle file uploads using a Google Cloud Function. This function uses Busboy to parse the multipart form data and then uploads the files to Google Cloud Storage.
I keep receiving the same error when triggering the function: ERROR: { Error: ENOENT: no such file or directory, open '/tmp/xxx.png'.
The error seems to occur within the finish callback function when storage.bucket.upload(file) attempts to open the file path /tmp/xxx.png.
Note that I can't generate a signed upload URL as suggested in this question, since the application invoking this is an external, non-user application. I also can't upload directly to GCS, since I need to create custom filenames based on some request metadata. Should I just be using Google App Engine instead?
Function code:
const path = require('path');
const os = require('os');
const fs = require('fs');
const Busboy = require('busboy');
const Storage = require('#google-cloud/storage');
const _ = require('lodash');
const projectId = 'xxx';
const bucketName = 'xxx';
const storage = new Storage({
projectId: projectId,
});
exports.uploadFile = (req, res) => {
if (req.method === 'POST') {
const busboy = new Busboy({ headers: req.headers });
const uploads = []
const tmpdir = os.tmpdir();
busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
const filepath = path.join(tmpdir, filename)
var obj = {
path: filepath,
name: filename
}
uploads.push(obj);
var writeStream = fs.createWriteStream(obj.path);
file.pipe(writeStream);
});
busboy.on('finish', () => {
_.forEach(uploads, function(file) {
storage
.bucket(bucketName)
.upload(file.path, {name: file.name})
.then(() => {
console.log(`${file.name} uploaded to ${bucketName}.`);
})
.catch(err => {
console.error('ERROR:', err);
});
fs.unlinkSync(file.path);
})
res.end()
});
busboy.end(req.rawBody);
} else {
res.status(405).end();
}
}
I eventually gave up on using Busboy. The latest versions of Google Cloud Functions support both Python and Node 8. In Node 8, I just put everything into async/await functions and it works fine.
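The answer doesn't show the final code. Purely as an illustration of the async/await idea (not the author's actual solution, since they dropped Busboy), the 'finish' handler from the question could be restructured so each upload completes before the temp file is deleted and the response is sent:

// Sketch only: await each upload before unlinking the temp file and responding.
busboy.on('finish', async () => {
  try {
    for (const file of uploads) {
      await storage.bucket(bucketName).upload(file.path, { name: file.name });
      console.log(`${file.name} uploaded to ${bucketName}.`);
      fs.unlinkSync(file.path);
    }
    res.end();
  } catch (err) {
    console.error('ERROR:', err);
    res.status(500).end();
  }
});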
