I have the following AWS settings for uploading files. The upload succeeds, but no matter what I do I cannot view the file through the AWS object URL; it just gives a blank page.
I have tried changing the ACL to 'public-read', providing the file's 'Content-Type', and setting ContentDisposition: 'inline', but none of these solve the problem.
// config file
const AWS = require('aws-sdk');
const env = require('./s3.env.js');

const s3Client = new AWS.S3({
  accessKeyId: env.AWS_ACCESS_KEY,
  secretAccessKey: env.AWS_SECRET_ACCESS_KEY,
  region: env.REGION
});

const uploadParams = {
  Bucket: env.Bucket,
  ACL: 'public-read',
  ContentDisposition: 'inline'
};

const s3 = {};
s3.s3Client = s3Client;
s3.uploadParams = uploadParams;

module.exports = s3;

// upload controller
const s3 = require('../../s3.config.js');
const s3Client = s3.s3Client;

module.exports = {
  uploadStuff(req, res) {
    console.log(req.file);
    const params = s3.uploadParams;
    params.Key = req.file.filename;
    params.Body = req.file.filename;
    params.ContentType = req.file.mimetype;
    s3Client.upload(params, (err, data) => {});
  }
};
I expect that after the file is successfully uploaded, I should be able to preview it with the Object URL provided.
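For reference, a minimal sketch of the controller with the Body set to the file's contents rather than the filename string; it assumes multer's disk storage, so req.file.path points at the temporary file (that assumption is not stated in the original post):

// upload controller (sketch; assumes multer disk storage so req.file.path exists)
const fs = require('fs');
const s3 = require('../../s3.config.js');
const s3Client = s3.s3Client;

module.exports = {
  uploadStuff(req, res) {
    const params = Object.assign({}, s3.uploadParams);
    params.Key = req.file.filename;
    // Body must be the file's contents (a stream or Buffer), not the filename string
    params.Body = fs.createReadStream(req.file.path);
    params.ContentType = req.file.mimetype;
    s3Client.upload(params, (err, data) => {
      if (err) return res.status(500).json({ error: err.message });
      return res.json({ location: data.Location });
    });
  }
};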
I have an app which calls a cloud function endpoint:
import './App.css';
import React from 'react';
import axios from 'axios';

function App() {
  const [file, setFile] = React.useState(null);

  function fileSelected(e) {
    setFile(() => e.target.files[0]);
  }

  function uploadFile() {
    console.log(file);
    const fd = new FormData();
    fd.append('image', file, file.name);
    console.log(file);
    console.log(file.name);
    axios.post('https://us-central1-athelasapp.cloudfunctions.net/uploadFile', fd)
      .then(res => {
        console.log(res);
      });
  }

  return (
    <div className="App">
      <input type="file" onChange={fileSelected}/>
      <input type="submit" onClick={uploadFile}/>
    </div>
  );
}

export default App;
and the endpoint tries to parse it with Busboy; however, it throws a 500 error. I have Busboy imported, but the request fails with:
xhr.js:220 POST https://us-central1-athelasapp.cloudfunctions.net/uploadFile 500
const functions = require("firebase-functions");
const express = require("express");
const cors = require("cors");
const app = express();
const Busboy = require("busboy");
const os = require("os");
const path = require("path");
const fs = require("fs");

const gcconfig = {
  projectId: "athelasapp",
  keyFilename: "athelasapp-firebase-adminsdk-yojnp-1e9141a009.json",
};
const {Storage} = require("@google-cloud/storage");
const gcs = new Storage(gcconfig);

app.use(cors({origin: "http://localhost:3000"}));

// Create and Deploy Your First Cloud Functions
// https://firebase.google.com/docs/functions/write-firebase-functions
exports.uploadFile = functions.https.onRequest(app);

app.post("/", (req, res) => {
  if (req.method !== "POST") {
    return res.status(500).json({
      message: "Method Does Not Work",
    });
  }
  const busboy = new Busboy({headers: req.headers});
  let uploadData = null;

  busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
    const filepath = path.join(os.tmpdir(), filename);
    uploadData = {file: filepath, type: mimetype};
    file.pipe(fs.createWriteStream(filepath));
    res.status(200).json({
      imageDetails: uploadData,
    });
  });

  busboy.on("finish", () => {
    const bucket = gcs.bucket("athelasapp.appspot.com");
    bucket.upload(uploadData.file, {
      uploadType: "media",
      metadata: {
        metadata: {
          contentType: uploadData.type,
        },
      },
    });
  }).then(() => {
    res.status(200).json({
      message: "Method Works!",
    });
  }).catch((err) => {
    res.status(500).json({
      message: "Method Failed!",
    });
  });

  busboy.end(req.rawBody);
  res.status(200).json({
    message: "Method Works",
  });
});
I can't find any errors in my code or in how it's implemented. Could it be that I'm passing the wrong things in the request? I think it might have to do with how Busboy names its events and arguments.
Here's a working gist that streams directly to Cloud Storage instead of creating a temporary file: https://gist.github.com/PatrickHeneise/8f2c72c16c4e68e829e58ade64aba553#file-gcp-function-storage-file-stream-js
// requires: const Busboy = require('busboy'); const { Storage } = require('@google-cloud/storage')
function asyncBusboy(req, res) {
  return new Promise((resolve, reject) => {
    const storage = new Storage()
    const bucket = storage.bucket(process.env.BUCKET)
    const fields = []
    const busboy = Busboy({
      headers: req.headers,
      limits: {
        fileSize: 10 * 1024 * 1024
      }
    })

    busboy.on('field', (key, value) => {
      fields[key] = value
    })

    busboy.on('file', (name, file, fileInfo) => {
      const { mimeType } = fileInfo
      const destFile = bucket.file(fileInfo.filename)
      const writeStream = destFile.createWriteStream({
        metadata: {
          contentType: mimeType,
          metadata: {
            originalFileName: fileInfo.filename
          }
        }
      })
      file.pipe(writeStream)
    })

    busboy.on('close', function () {
      return resolve({ fields })
    })

    if (req.rawBody) {
      busboy.end(req.rawBody)
    } else {
      req.pipe(busboy)
    }
  })
}
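For context, a rough sketch of how this helper might be wired into the Express route from the question; the route path and response shape are my assumptions, not part of the gist:

// sketch: using asyncBusboy inside the existing Express route
app.post('/', async (req, res) => {
  try {
    const { fields } = await asyncBusboy(req, res);
    res.status(200).json({ message: 'Upload complete', fields });
  } catch (err) {
    console.error(err);
    res.status(500).json({ message: 'Upload failed' });
  }
});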
I have put together an upload.js script that reads a JPEG image from a local drive and uploads it to an AWS S3 bucket.
var fs = require('fs');
var AWS = require('aws-sdk');
AWS.config.update({region: 'us-east-1'});

const BUCKET_NAME = 'my-bucket-name';
let filepath = '/home/user/test-image.jpeg';
const content = fs.readFileSync(filepath, {encoding: 'base64'});

let params = {
  params: {
    Bucket: BUCKET_NAME,
    Key: 'test.jpeg',
    Body: content
  }
};

var upload = new AWS.S3.ManagedUpload(params);
var promise = upload.promise();
promise.then(
  function(data) {
    console.log("Successfully uploaded photo.");
  },
  function(err) {
    console.error("There was an error uploading: ", err.message);
  }
);
When I run it with node upload.js the image is uploaded. But when I download it back, the downloaded image is corrupted and cannot be opened with an image viewer. What am I doing wrong?
Add ContentType: 'image/jpeg' to your params object and lose the base64 encoding; the configuration below will work for you as well.
var fs = require('fs');
const AWS = require('aws-sdk');
// const s3 = new AWS.S3();
AWS.config.update({ region: 'us-east-1' });
const mime = require('mime');

const BUCKET_NAME = 'my-bucket-name';
let filepath = '/home/user/test-image.jpeg';
const content = fs.readFileSync(filepath);
console.log(mime.getType(filepath));

let params = {
  params: {
    Bucket: BUCKET_NAME,
    Key: 'cancel.jpeg',
    Body: content,
    ContentType: mime.getType(filepath),
  },
};

var upload = new AWS.S3.ManagedUpload(params);
var promise = upload.promise();
promise.then(
  function (data) {
    console.log('Successfully uploaded photo.');
  },
  function (err) {
    console.error('There was an error uploading: ', err.message);
  }
);
Consider the following S3 upload code:
const Category = require('../models/category');
const Link = require('../models/link');
const slugify = require('slugify');
const uuidv4 = require('uuid/v4');
const AWS = require('aws-sdk');

// s3
const s3 = new AWS.S3({
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  region: process.env.AWS_REGION,
});

// Using BASE 64
exports.create = (req, res) => {
  const {image, name, content} = req.body;
  const base64Data = new Buffer.from(
    image.replace(/^data:image\/\w+;base64,/, ''),
    'base64'
  );
  const type = image.split(';')[0].split('/')[1]; // get the png from "data:image/png;base64,"

  // upload image to s3 (the params will be passed to the refactored code)
  const params = {
    Bucket: 'categories-react',
    Key: `category/${uuidv4()}.${type}`,
    Body: base64Data,
    ACL: 'public-read',
    ContentEncoding: 'base64',
    ContentType: `image/${type}`,
  };

  s3.upload(params, (err, data) => {
    if (err) {
      console.log(err);
      res.status(400).json({error: 'Upload to s3 failed'});
    }
    // ... more logic
    return res.json({success: '....'});
  });
};
I want to move the AWS upload code to a different file and pass it the params without repeating the same code. The refactored code:
/**
 * Upload image to S3
 */
const AWS = require('aws-sdk');

const uploadImageToS3 = (params) => {
  const s3 = new AWS.S3({
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
    region: process.env.AWS_REGION,
  });

  // ... handle the upload
  s3.upload(params, (err, data) => {
    if (err) {
      console.log(err);
      res.status(400).json({error: 'Upload to s3 failed'});
    }
    // else handle the data ...
  });
};

export {uploadImageToS3 as default};
But how can I use the callback from S3 in any file that uses the refactored S3 code?
You must accept the callback as an argument to this function. As a suggestion, instead of callbacks, try using the promise API:
s3.upload(params).promise().then(data => {}).catch(err => {})
You will be able to return the promise from your function and tack on then/catch later.
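For illustration, a minimal sketch of the helper returning the upload promise, with response handling left to the caller; the file name s3.upload.js and the caller shown are assumptions, not from the original post:

// s3.upload.js (sketch): export a helper that returns the upload promise
const AWS = require('aws-sdk');

const s3 = new AWS.S3({
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  region: process.env.AWS_REGION,
});

const uploadImageToS3 = (params) => s3.upload(params).promise();

module.exports = uploadImageToS3;

// in the controller (sketch): chain then/catch where res is available
// uploadImageToS3(params)
//   .then((data) => res.json({ success: data.Location }))
//   .catch((err) => res.status(400).json({ error: 'Upload to s3 failed' }));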
I'm trying to upload my HTML result file to AWS S3 after my Protractor test suite execution is complete. I use JavaScript in my automation. Please help me resolve the error below:
static uploadtoS3() {
  const AWS = require('aws-sdk');
  var FILE_NAME_LOCAL;
  var crypt = require("crypto");

  fs.readdirSync("./reports/html/").forEach(file => {
    if (file.startsWith("execution_report")) {
      FILE_NAME_LOCAL = process.cwd() + "\\reports\\html\\" + file;
    }
  });
  console.log("File name: " + FILE_NAME_LOCAL);

  // Get file stream
  const fileStream = fs.createReadStream(FILE_NAME_LOCAL);
  var hash = crypt.createHash("md5")
    .update(new Buffer.from(FILE_NAME_LOCAL, 'binary'))
    .digest("base64");
  console.log("Hash: " + hash);

  // Call S3 to retrieve upload file to specified bucket
  const uploadParams = {
    Bucket: 'my.bucket',
    Key: 'automation_report.html',
    Body: fileStream,
    ContentType: "text/html",
    ContentMD5: hash,
    // CacheControl: "max-age=0,no-cache,no-store,must-revalidate",
    ACL: 'public-read',
  };

  const s3 = new AWS.S3({
    endpoint: "https://3site-abc-wip1.nam.nsroot.net",
    accessKeyId: <access_key_id>,         // placeholder, not the real key
    secretAccessKey: <secret_access_key>, // placeholder, not the real secret
    signatureVersion: 'v4',
    ca: fs.readFileSync('C:\\Users\\AB11111\\InternalCAChain_PROD.pem'),
    sslEnabled: true
  });

  // Create S3 service object and upload
  s3.upload(uploadParams, function (err, data) {
    console.log("Inside upload..");
    if (err) {
      throw err;
    }
    if (data) {
      console.log('Upload Success. File location:' + data.Location);
    }
  });
}
Error: unable to get local issuer certificate
    at TLSSocket.onConnectSecure (_tls_wrap.js:1049:34)
    at TLSSocket.emit (events.js:182:13)
    at TLSSocket.EventEmitter.emit (domain.js:442:20)
    at TLSSocket._finishInit (_tls_wrap.js:631:8)
I got it working. I needed to add the certificate in AWS.config. The full working code is below; it might help someone. Note: the credentials and URLs below are for representation purposes only and aren't real:
const AWS = require('aws-sdk');
const https = require('https');
const fs = require('fs');     // needed for readFileSync / createReadStream below
const path = require('path'); // needed for path.basename below
var FILE_NAME_LOCAL;

AWS.config.update({
  httpOptions: {
    agent: new https.Agent({
      // rejectUnauthorized: false, // Don't use this - it is insecure, just like --no-verify-ssl in the AWS CLI
      ca: fs.readFileSync('./support/InternalCAChain_PROD.pem')
    })
  }
});

const s3 = new AWS.S3({
  s3BucketEndpoint: true,
  endpoint: "https://my.bucket.3site-abc.nam.nsroot.net/",
  accessKeyId: "abABcdCD",
  secretAccessKey: "kjlJLlklkLlUYt",
});

// Get file stream
fs.readdirSync("./reports/html/").forEach(file => {
  if (file.startsWith("execution_report")) {
    FILE_NAME_LOCAL = process.cwd() + "\\reports\\html\\" + file;
  }
});
const fileStream = fs.readFileSync(FILE_NAME_LOCAL);

// Call S3 to retrieve upload file to specified bucket
const uploadParams = {
  Bucket: 'my.bucket',
  Key: path.basename(FILE_NAME_LOCAL),
  Body: fileStream,
  ContentType: "text/html",
  ContentEncoding: 'UTF-8',
  ACL: 'public-read',
};

// Create S3 service object and upload
s3.upload(uploadParams, function (err, data) {
  console.log("Inside upload..");
  if (err) {
    throw err;
  }
  if (data) {
    s3FileLocation = data.Location;
    console.log('Upload Success. File location:' + data.Location);
  }
});
I'm attempting to handle file uploads using a Google Cloud Function. This function uses Busboy to parse the multipart form data and then uploads it to Google Cloud Storage.
I keep receiving an ERROR: { Error: ENOENT: no such file or directory, open '/tmp/xxx.png' error when triggering the function.
The error seems to occur within the finish callback, when storage.bucket.upload(file) attempts to open the file path /tmp/xxx.png.
Example code
const path = require('path');
const os = require('os');
const fs = require('fs');
const Busboy = require('busboy');
const Storage = require('@google-cloud/storage');
const moment = require('moment');
const _ = require('lodash');

const projectId = 'xxx';
const bucketName = 'xxx';

const storage = new Storage({
  projectId: projectId,
});

exports.uploadFile = (req, res) => {
  if (req.method === 'POST') {
    const busboy = new Busboy({
      headers: req.headers
    });
    const uploads = [];
    const tmpdir = os.tmpdir();

    busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
      const filepath = path.join(tmpdir, filename);
      var obj = {
        path: filepath,
        name: filename
      };
      uploads.push(obj);
      var writeStream = fs.createWriteStream(obj.path);
      file.pipe(writeStream);
    });

    busboy.on('finish', () => {
      _.forEach(uploads, function (file) {
        storage
          .bucket(bucketName)
          .upload(file.path, {
            name: moment().format('/YYYY/MM/DD/x') + '-' + file.name
          })
          .then(() => {
            console.log(`${file.name} uploaded to ${bucketName}.`);
          })
          .catch(err => {
            console.error('ERROR:', err);
          });
        fs.unlinkSync(file.path);
      });
      res.end();
    });

    busboy.end(req.rawBody);
  } else {
    res.status(405).end();
  }
};
Solved this with a stream instead of a temporary file. Only handles a single file at the moment though.
https://gist.github.com/PatrickHeneise/8f2c72c16c4e68e829e58ade64aba553#file-gcp-function-storage-file-stream-js
// requires: const Busboy = require('busboy'); const { Storage } = require('@google-cloud/storage')
function asyncBusboy(req, res) {
  return new Promise((resolve, reject) => {
    const storage = new Storage()
    const bucket = storage.bucket(process.env.BUCKET)
    const fields = []
    const busboy = Busboy({
      headers: req.headers,
      limits: {
        fileSize: 10 * 1024 * 1024
      }
    })

    busboy.on('field', (key, value) => {
      fields[key] = value
    })

    busboy.on('file', (name, file, fileInfo) => {
      const { mimeType } = fileInfo
      const destFile = bucket.file(fileInfo.filename)
      const writeStream = destFile.createWriteStream({
        metadata: {
          contentType: mimeType,
          metadata: {
            originalFileName: fileInfo.filename
          }
        }
      })
      file.pipe(writeStream)
    })

    busboy.on('close', function () {
      return resolve({ fields })
    })

    if (req.rawBody) {
      busboy.end(req.rawBody)
    } else {
      req.pipe(busboy)
    }
  })
}