I recently completed a Node.js course and wanted to work on some projects to improve my skills. I started a dev.io challenge, an image uploader built with React and Express, and I am trying to implement drag and drop, but I get the error POST http://localhost:3000/upload 404 (Not Found) when trying to upload the file to the public/uploads folder. Here is my code.
Dropzone.js
import image from './images/image.svg';
import React, { useState } from 'react';
import axios from 'axios';
const Dropzone = () => {
  const [file, setFile] = useState('');
  const [uploadedFile, setUploadedFile] = useState({});

  const dropHandler = async (ev) => {
    // Prevent default behavior (Prevent file from being opened)
    ev.preventDefault();
    if (ev.dataTransfer.items) {
      // Use DataTransferItemList interface to access the file(s)
      for (let i = 0; i < ev.dataTransfer.items.length; i++) {
        // If dropped items aren't files, reject them
        if (ev.dataTransfer.items[i].kind === 'file') {
          let fileSync = ev.dataTransfer.items[i].getAsFile();
          setFile(ev.dataTransfer.files[i]);
        }
      }
    }
    const formData = new FormData();
    formData.append('file', file);
    try {
      const res = await axios.post('/upload', formData, {
        headers: {
          'Content-Type': 'multipart/form-data',
        },
      });
      const { fileName, filePath } = res.data;
      setUploadedFile({ fileName, filePath });
    } catch (err) {
      if (err.response.status === 500) {
        console.log('There was a problem with the server');
      } else {
        console.log(err.response.data.msg);
      }
    }
  };

  const dragOverHandler = (e) => {
    // Prevent default behavior (Prevent file from being opened)
    e.preventDefault();
  };

  return (
    // Drag and drop zone
    <div
      className="image-upload-area"
      onDrop={dropHandler}
      onDragOver={dragOverHandler}
    >
      <img className="upload-icon" src={image} />
      <p className="upload-info">Drag & Drop your image here</p>
    </div>
  );
};

export default Dropzone;
server.js
const express = require('express');
const fileUpload = require('express-fileupload');

const app = express();
app.use(fileUpload());

// Upload Endpoint
app.post('/upload', (req, res) => {
  if (req.files === null) {
    return res.status(400).json({ msg: 'No file uploaded' });
  }

  const file = req.files.file;

  file.mv(`${__dirname}/client/public/uploads/${file.name}`, (err) => {
    if (err) {
      console.error(err);
      return res.status(500).send(err);
    }
    res.json({ fileName: file.name, filePath: `/uploads/${file.name}` });
  });
});

app.listen(5000, () => console.log('Server Started...'));
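One detail worth flagging: the failing request goes to http://localhost:3000/upload (the React dev server), while this Express app listens on port 5000. Assuming the client is a Create React App project served on port 3000 (which the error URL suggests, but the question doesn't confirm), a minimal sketch of the usual way to route relative calls like axios.post('/upload', ...) to the API is a proxy entry in the client's package.json:

{
  "proxy": "http://localhost:5000"
}

Alternatively, the client could call the server directly, e.g. axios.post('http://localhost:5000/upload', formData, ...), with CORS enabled on the Express side. This is only a sketch of a common dev setup, not necessarily the cause in this particular project.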
Related
I tried to resize or compress an image before uploading it to Google Cloud Storage.
The upload works fine, but the resizing does not seem to work.
Here is my code:
const uploadImage = async (file) => new Promise((resolve, reject) => {
  let { originalname, buffer } = file

  sharp(buffer)
    .resize(1800, 948)
    .toFormat("jpeg")
    .jpeg({ quality: 80 })
    .toBuffer()

  const blob = bucket.file(originalname.replace(/ /g, "_"))
  const blobStream = blob.createWriteStream({
    resumable: false
  })

  blobStream.on('finish', () => {
    const publicUrl = format(
      `https://storage.googleapis.com/${bucket.name}/${blob.name}`
    )
    resolve(publicUrl)
  }).on('error', () => {
    reject(`Unable to upload image, something went wrong`)
  })
  .end(buffer)
})
I ran into the same issue with a project I was working on. After a lot of trial and error I found the following solution. It might not be the most elegant, but it worked for me.
In my upload route function I created a new thumbnail image object with the original file values and passed it as the file parameter to the uploadFile function for Google Cloud Storage.
Inside my upload image route function:
const file = req.file;
const thumbnail = {
  fieldname: file.fieldname,
  originalname: `thumbnail_${file.originalname}`,
  encoding: file.encoding,
  mimetype: file.mimetype,
  buffer: await sharp(file.buffer).resize({ width: 150 }).toBuffer()
}
const uploadThumbnail = await uploadFile(thumbnail);
My Google Cloud Storage upload file function:
const uploadFile = async (file) => new Promise((resolve, reject) => {
  const gcsname = file.originalname;
  const bucketFile = bucket.file(gcsname);

  const stream = bucketFile.createWriteStream({
    resumable: false,
    metadata: {
      contentType: file.mimetype
    }
  });

  stream.on('error', (err) => {
    reject(err);
  });

  stream.on('finish', (res) => {
    resolve({
      name: gcsname
    });
  });

  stream.end(file.buffer);
});
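For context, here is a rough sketch of how these two pieces could be wired together in an Express route. It assumes multer with memoryStorage() is what populates req.file.buffer (the answer doesn't show that part), and the route path and field name are placeholders:

const express = require('express');
const multer = require('multer');
const sharp = require('sharp');

// Hypothetical wiring; '/images' and 'image' are placeholder names.
const upload = multer({ storage: multer.memoryStorage() });
const app = express();

app.post('/images', upload.single('image'), async (req, res) => {
  try {
    const file = req.file;
    // Upload the original as-is.
    const original = await uploadFile(file);
    // Build the thumbnail object exactly as shown above.
    const thumbnail = {
      fieldname: file.fieldname,
      originalname: `thumbnail_${file.originalname}`,
      encoding: file.encoding,
      mimetype: file.mimetype,
      buffer: await sharp(file.buffer).resize({ width: 150 }).toBuffer(),
    };
    const thumb = await uploadFile(thumbnail);
    res.json({ original, thumbnail: thumb });
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});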
I think the problem is with toFormat(). That function does not appear in the docs. Can you try removing it and checking whether it works?
sharp(buffer)
  .resize(1800, 948)
  .jpeg({ quality: 80 })
  .toBuffer()
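Separately from toFormat(): in the original snippet the result of toBuffer() is never used, and .end(buffer) still uploads the untouched buffer, which would also explain why the resizing seems to have no effect. A minimal sketch of one way to feed the resized output into the stream instead, assuming the intent is to store the resized JPEG (bucket is the same GCS bucket handle used in the question):

const uploadImage = async (file) => {
  const { originalname, buffer } = file;

  // toBuffer() returns a promise that resolves with the processed image data.
  const resized = await sharp(buffer)
    .resize(1800, 948)
    .jpeg({ quality: 80 })
    .toBuffer();

  const blob = bucket.file(originalname.replace(/ /g, '_'));
  const blobStream = blob.createWriteStream({ resumable: false });

  return new Promise((resolve, reject) => {
    blobStream
      .on('finish', () =>
        resolve(`https://storage.googleapis.com/${bucket.name}/${blob.name}`)
      )
      .on('error', () => reject('Unable to upload image, something went wrong'))
      // Write the resized buffer, not the original one.
      .end(resized);
  });
};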
Modify the metadata once you have finished uploading the image.
import * as admin from "firebase-admin";
import * as functions from "firebase-functions";
import { log } from "firebase-functions/logger";
import * as sharp from "sharp";

export const uploadFile = functions.https.onCall(async (data, context) => {
  const bytes = data.imageData;
  const bucket = admin.storage().bucket();
  const buffer = Buffer.from(bytes, "base64");

  const bufferSharp = await sharp(buffer)
    .png()
    .resize({ width: 500 })
    .toBuffer();

  const nombre = "IMAGE_NAME";
  const fileName = `img/${nombre}.png`;
  const fileUpload = bucket.file(fileName);
  const uploadStream = fileUpload.createWriteStream();

  uploadStream.on("error", async (err) => {
    log("Error uploading image", err);
    throw new functions.https.HttpsError("unknown", "Error uploading image");
  });

  uploadStream.on("finish", async () => {
    await fileUpload.setMetadata({ contentType: "image/png" });
    log("Upload success");
  });

  uploadStream.end(bufferSharp);
});
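For completeness, a rough sketch of how this callable might be invoked from a web client, assuming the Firebase JS SDK v9 modular API and that the function above is deployed under the name uploadFile; the helper name and the way the file is read are just illustrative:

import { getFunctions, httpsCallable } from 'firebase/functions';

// Hypothetical client-side helper for the callable defined above.
async function sendImage(file) {
  const functions = getFunctions();
  const uploadFile = httpsCallable(functions, 'uploadFile');

  // Read the file as a data URL and strip the prefix to get raw base64,
  // since the function does Buffer.from(bytes, "base64").
  const dataUrl = await new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = () => resolve(reader.result);
    reader.onerror = reject;
    reader.readAsDataURL(file);
  });
  const imageData = dataUrl.split(',')[1];

  await uploadFile({ imageData });
}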
I'm using multer to upload images when creating courses in my application. Courses have images, and I have a form for getting the data from the user.
I use multer as middleware and add it to my route with upload.single('images'); the form field I get the image from is named images, so the image should reach the server.
When I click the save button, I don't get any errors, but the server hangs as if it is stuck in a middleware, and when I check my upload folder, no image has been saved.
Once the image is uploaded, the course is supposed to be saved in my MongoDB database, but I can't find the course there either.
I have checked that enctype="multipart/form-data" is set on my form.
And this is my multer middleware code:
const multer = require("multer");
const mkdirp = require("mkdirp");
const fs = require("fs");

const getDirImage = () => {
  let year = new Date().getFullYear();
  let month = new Date().getMonth() + 1;
  let day = new Date().getDay();
  return `./public/uploads/images/${year}/${month}/${day}`;
};

const ImageStorage = multer.diskStorage({
  destination: (req, file, cb) => {
    let dir = getDirImage();
    mkdirp(dir).then(made => {
      console.log(`File made on ${made}`);
    });
  },
  filename: (req, file, cb) => {
    let filePath = getDirImage() + "/" + file.originalname;
    if (!fs.existsSync(filePath)) cb(null, file.originalname);
    else cb(null, Date.now() + "-" + file.originalname);
  },
});

const uploadImage = multer({
  storage: ImageStorage,
  limits: {
    fileSize: 1024 * 1024 * 10,
  },
});

module.exports = uploadImage;
And these are the middlewares I attached to my route handler:
router.post("/courses/create", upload.single("images"), convertFileToField.handle, courseValidator.handle(), courseController.storeCourse);
And this is the convertFileToField code:
const middleware = require('./middleware');

class ConvertFiletoField extends middleware {
  handle(req, res, next) {
    if (!req.file)
      req.body.images = undefined;
    else
      req.body.images = req.file.filename;
    next();
  }
}

module.exports = new ConvertFiletoField();
And this is the courseValidator middleware code:
const validator = require('./validator');
const Course = require("app/models/Course");
const path = require("path");
const { check } = require("express-validator/check");

class courseValidator extends validator {
  handle() {
    return [
      check("title")
        .not()
        .isEmpty()
        .withMessage("فیلد عنوان نمیتواند خالی بماند")
        .custom(async (value) => {
          const course = await Course.findOne({ slug: this.slug(value)});
          if (course) {
            throw new Error('we have this course on our site !!!!')
          }
        }),
      check('images')
        .custom(async value => {
          if (! value) {
            throw new Error('You need to enter a course !');
          }
          let fileExt = ['.png', '.jpg', 'jpeg', '.svg'];
          if (! fileExt.includes(path.extname(value)))
            throw new Error('the course extention is not valid !');
        }),
    ];
  }

  slug(title) {
    return title.replace(/([^۰-۹آ-یa-z0-9]|-)+/g, "-");
  }
}

module.exports = new courseValidator();
And finally, this is the post route handler:
const controller = require("app/http/controllers/controller");
const Course = require("app/models/Course");
const fs = require('fs');
const path = require("path");
const sharp = require("sharp");

class courseController extends controller {
  showCourses(req, res) {
    const courses = Course.find({}).sort({ createdAt: -1 });
    res.render("admin/courses/index", { courses: courses });
  }

  createCourse(req, res) {
    res.render("admin/courses/create");
  }

  async storeCourse(req, res) {
    let status = await this.validationData(req);
    if (!status) {
      // For Deleting the saved image because of having validation error
      if (req.file)
        fs.unlink(req.file.path, (err) => {
          console.log(err);
        });
      return this.back(req, res);
    }

    // Create the Course
    let images = this.imageResize(req.file);
    const { title, type, body, price, tags } = req.body;

    const newCourse = new Course({
      user: req.user._id,
      title,
      type,
      slug: this.slug(),
      body,
      images: JSON.stringify(images),
      price,
      tags,
    });

    await newCourse.save();
    return res.redirect("/admin/courses");
  }

  imageResize(image) {
    let imageInfo = path.parse(image.path);
    let addressImage = {};
    addressImage["original"] = `${imageInfo}/${image.filename}`;

    const resize = (size) => {
      let imageName = `${imageInfo.name}-${size}${imageInfo.ext}`;
      addressImage[size] = this.getUrlImage(`${image.destination}/${imageName}`);
      sharp(image.path)
        .resize(size, null)
        .toFile(`${image.destination}/${imageName}`)
    };

    [1080, 720, 480].map(resize);
  }

  getUrlImage(dir) {
    return dir.substr(8);
  }

  slug(title) {
    return title.replace(/([^۰-۹آ-یa-z0-9]|-)+/g, "-");
  }
}

module.exports = new courseController();
I have tried everything I could and all the solutions I found for loading the image, but I get an error at my courseValidator middleware.
If you know of any solution related to multer, please share it and I will try it out.
In the destination section of diskStorage you must return the name of the directory through the callback function. Your middleware gets stuck at this point because you never call cb. The callback takes an error as its first argument, so pass null and then the directory:
destination: (req, file, cb) => {
  let dir = getDirImage();
  mkdirp(dir).then(made => {
    console.log(`File made on ${made}`);
    cb(null, made);
  });
}
Here the middleware function needs to be defined separately and passed to the route:
router.post("/courses/create", multerMiddleWare,..{...});

const multerMiddleWare = (req, res, next) => {
  uploadImage(req, res, (error) => {
    if (!error) return next();
    return next('error');
  });
};

const uploadImage = multer({
  storage: ImageStorage,
  limits: {
    fileSize: 1024 * 1024 * 10,
  },
}).single("images");
I'm trying to create an app where a user can upload a zipped file, the app unzips the file and saves it to disk, and the path to the file is saved to MongoDB for later retrieval.
I'm having a hard time getting the upload from the form, unzipping it, saving it to disk, and writing the path of the unzipped file to the database all in one flow. I'm really new to this and am still learning about callbacks and such, and I can't find a working solution for what I'm trying to do.
This is what my functions currently look like:
// Multer is a form handling middleware
var storage = multer.diskStorage({
  destination: function (req, file, cb) {
    console.log(file)
    cb(null, './uploads/unzip')
  },
  filename: function (req, file, cb) {
    cb(null, file.fieldname + '-' + Date.now() + path.extname(file.originalname))
  },
})

const upload = multer({ storage }).single('file'); //this is the 1st func in the route

const unzipp = async (req, res, next) => { //second func in route
  try {
    const dir = 'uploads/unzipped/';
    var stream = fs.createReadStream(req.file.path)
    stream.pipe(unzip.Extract({path: dir}))
      .on('entry', function () {
        var fileName = entry.path;
        var type = entry.type;
        var size = entry.size;
        console.log(fileName, type, size)
        if (type.isDirectory) {
          postfile() //TRYING TO CALL POSTFILE() HERE
          console.log('unzipped and path saved')
        } else {
          res.error('Failed unzipping')
        }
        fs.unlink(req.file.path, function (e) {
          if (e) throw e;
          console.log('successfully deleted '+req.file.path);
        });
      })
  } catch (e) {
    console.error(e)
  }
  next();
}

//Upload is a mongoDB cluster Schema
async function postfile () {
  try {
    let newUpload = new Upload(req.body); //new instance of upload based on the model based on req.body
    newUpload.title = req.body.title;
    newUpload.description = req.body.description;
    newUpload.labels = req.body.labels;
    newUpload.filePath = fileName; //ASSIGN FILEPATH IN DB SCHEMA TO UNZIPPED FILE PATH
    console.log("filePath saved")
    newUpload.save()
      .then(newUpload => {
        res.status(200).json({file: "File added successfully"})
      })
      .catch(err => {
        res.status(400).send('File upload failed to save to DB :(')
      })
  } catch (e) {
    console.error(e);
  }
}
As you can see, I'm trying to call the function that saves the Mongo document from inside the unzipp function. This is the post route, which lives in a separate folder:
router.post('/upload', FileCtrl.upload, FileCtrl.unzipp)
I've also tried saving the entry path of the unzipped file as a global var (fileName) and assigning the path in the Schema as fileName, but it doesn't work either:
const unzipp = async (req, res, next) => {
  try {
    const dir = 'uploads/unzipped/';
    var stream = fs.createReadStream(req.file.path)
    stream.pipe(unzip.Extract({path: dir}))
      .on('entry', function () {
        fileName = entry.path;
        type = entry.type;
        size = entry.size;
        console.log(fileName, type, size)
        // if (type.isDirectory) {
        //   console.log('unzipped and path saved')
        // } else {
        //   res.error('Failed unzipping')
        // }
        result = {
          file: fileName,
          message: "File has been extracted"
        };
        //var file = req.file
        fs.unlink(req.file.path, function (e) {
          if (e) throw e;
          console.log('successfully deleted '+req.file.path);
        });
        res.json(result);
      })
  } catch (e) {
    console.error(e)
  }
  next();
}
const postfile = async (req, res) => {
  try {
    console.log("Posting to DB")
    let newUpload = new Upload(req.body); //new instance of upload based on the model based on req.body
    newUpload.title = req.body.title;
    newUpload.description = req.body.description;
    newUpload.labels = req.body.labels;
    newUpload.filePath = fileName;
    console.log("Ok so far")
    newUpload.save()
      .then(newUpload => {
        res.status(200).json({file: "File added successfully"})
      })
      .catch(err => {
        res.status(400).send('File upload failed to save to DB :(')
      })
  } catch (e) {
    console.error(e);
  }
}
This gives the error "ReferenceError: fileName is not defined".
The new route looks like this:
router.post('/upload', FileCtrl.upload, FileCtrl.unzipp, FileCtrl.postfile)
I've been trying to solve this for a really long time and would really appreciate some advice.
EDIT:
For testing purposes I hardcoded the filepath and it saved to the DB perfectly...
const postfile = async (req, res) => {
  try {
    console.log("Posting to DB")
    //var stream = fs.readdirSync('./uploads/unzipped/Nancy_Collins_118226967_v2')
    let newUpload = new Upload(req.body); //new instance of upload based on the model based on req.body
    newUpload.title = req.body.title;
    newUpload.description = req.body.description;
    newUpload.labels = req.body.labels;
    newUpload.filePath = './uploads/unzipped/Nancy_Collins_118226967_v2';
    console.log("Ok so far")
    newUpload.save()
      .then(newUpload => {
        res.status(200).json({file: "File added successfully"})
      })
      .catch(err => {
        res.status(400).send('File upload failed to save to DB :(')
      })
  } catch (e) {
    console.error(e);
  }
}
Obviously this isn't practical or dynamic, but it's possible.
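A rough sketch of one way to make that dynamic rather than hardcoded: instead of a global variable, the unzip middleware can stash the extraction path on req and only call next() once the stream has finished, so the next handler can read it. This assumes the unzipper-style Extract stream used above emits 'close' when extraction is done; req.extractedPath is just a made-up property name:

const unzipp = (req, res, next) => {
  const dir = 'uploads/unzipped/';
  fs.createReadStream(req.file.path)
    .pipe(unzip.Extract({ path: dir }))
    .on('close', () => {
      // Extraction finished; remember where it went for the next middleware.
      req.extractedPath = dir;
      fs.unlink(req.file.path, () => {});
      next();
    })
    .on('error', next);
};

const postfile = async (req, res) => {
  try {
    const newUpload = new Upload(req.body);
    newUpload.title = req.body.title;
    newUpload.description = req.body.description;
    newUpload.labels = req.body.labels;
    newUpload.filePath = req.extractedPath; // set by unzipp above
    await newUpload.save();
    res.status(200).json({ file: 'File added successfully' });
  } catch (err) {
    res.status(400).send('File upload failed to save to DB :(');
  }
};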
I am trying to figure this out with axios. I have made the direct API call with superagent and now want to know how to do the same with axios, since the rest of my project uses axios. I know there is cloudinary-react, but this is the way I prefer to do it.
Here is what I have so far.
import React, { Component } from 'react';
import Dropzone from 'react-dropzone';
import sha1 from 'sha1';
import superagent from 'superagent';
import axios from 'axios';

class Images extends Component {
  uploadFile(files) {
    console.log('uploadFile: ');
    const image = files[0];
    const cloudName = 'tbaustin';
    const url = `https://api.cloudinary.com/v1_1/${cloudName}/image/upload`;
    const timestamp = Date.now()/1000;
    const uploadPreset = 'cnh7rzwp';
    const paramsStr = `timestamp=${timestamp}&upload_preset=${uploadPreset}secret`;
    const signature = sha1(paramsStr);
    const params = {
      'api_key': 'api_key',
      'timestamp': timestamp,
      'upload_preset': uploadPreset,
      'signature': signature
    }

    let uploadRequest = superagent.post(url)
    uploadRequest.attach('file', image);
    Object.keys(params).forEach((key) => {
      uploadRequest.field(key, params[key]);
    });
    uploadRequest.end((err, res) => {
      if(err) {
        alert(err);
        return
      }
      console.log('UPLOAD COMPLETE: '+JSON.stringify(res.body));
    });

    //AXIOS CONTENT HERE
    // let request = axios.post(url, {file: image});
    // request.then((response) => {
    //   Object.keys(params).forEach((key) => {
    //     uploadRequest.field(key, params[key]);
    //   });
    //   console.log('UPLOAD COMPLETE: '+JSON.stringify(response.body));
    // }).catch((err) => { alert(err); });
  }

  render() {
    return (
      <div>
        <Dropzone onDrop={this.uploadFile.bind(this)}/>
      </div>
    )
  }
}

export default Images;
This worked for me.
let formData = new FormData();
formData.append("api_key", '');
formData.append("file", image);
formData.append("public_id", "sample_image");
formData.append("timestamp", timeStamp);
formData.append("upload_preset", uploadPreset);

axios
  .post(url, formData)
  .then((result) => {
    console.log(result);
  })
  .catch((err) => {
    console.log(err);
  })
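For reference, this snippet assumes url, image, timeStamp and uploadPreset are already defined. Borrowing the values from the question above, they might be set up roughly like this (the cloud name and preset are placeholders):

const cloudName = 'tbaustin'; // placeholder cloud name from the question
const url = `https://api.cloudinary.com/v1_1/${cloudName}/image/upload`;
const image = files[0];            // the dropped file
const timeStamp = Date.now() / 1000;
const uploadPreset = 'cnh7rzwp';   // placeholder upload preset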
This is part of a project where I upload an avatar. I have set my upload preset to unsigned on Cloudinary.
const uploadPhotoHandler = async (e) => {
  const file = e.target.files[0];
  const formData = new FormData();
  formData.append("file", file);
  formData.append("upload_preset", "pa*****");
  try {
    setPictureProfile("./assets/img/giphy.gif");
    const res = await axios.post(`https://api.cloudinary.com/v1_1/${cloud_name}/upload`, formData);
    console.log(res.data.url);
    setPictureProfile(res.data.url);
  } catch (error) {}
};
I currently upload single objects to S3 like so:
var options = {
  Bucket: bucket,
  Key: s3Path,
  Body: body,
  ACL: s3FilePermissions
};

S3.putObject(options,
  function (err, data) {
    //console.log(data);
  });
But when I have a large resources folder, for example, I use the AWS CLI tool instead.
I was wondering: is there a native way to do the same thing with the AWS SDK (upload entire folders to S3)?
Old-school recursive way I whipped up in a hurry. Only uses core node modules and standard AWS sdk.
var AWS = require('aws-sdk');
var path = require("path");
var fs = require('fs');

const uploadDir = function(s3Path, bucketName) {
  let s3 = new AWS.S3();

  function walkSync(currentDirPath, callback) {
    fs.readdirSync(currentDirPath).forEach(function (name) {
      var filePath = path.join(currentDirPath, name);
      var stat = fs.statSync(filePath);
      if (stat.isFile()) {
        callback(filePath, stat);
      } else if (stat.isDirectory()) {
        walkSync(filePath, callback);
      }
    });
  }

  walkSync(s3Path, function(filePath, stat) {
    let bucketPath = filePath.substring(s3Path.length+1);
    let params = {Bucket: bucketName, Key: bucketPath, Body: fs.readFileSync(filePath) };
    s3.putObject(params, function(err, data) {
      if (err) {
        console.log(err)
      } else {
        console.log('Successfully uploaded '+ bucketPath +' to ' + bucketName);
      }
    });
  });
};

uploadDir("path to your folder", "your bucket name");
Special thanks to Ali from this post for helping get the filenames.
async/await + TypeScript
If you need a solution that uses modern JavaScript syntax and is compatible with TypeScript, I came up with the following code. The recursive getFiles is borrowed from this answer (after all these years, recursion still gives me a headache, lol).
import { promises as fs, createReadStream } from 'fs';
import * as path from 'path';
import { S3 } from 'aws-sdk';

async function uploadDir(s3Path: string, bucketName: string) {
  const s3 = new S3();

  // Recursive getFiles from
  // https://stackoverflow.com/a/45130990/831465
  async function getFiles(dir: string): Promise<string | string[]> {
    const dirents = await fs.readdir(dir, { withFileTypes: true });
    const files = await Promise.all(
      dirents.map((dirent) => {
        const res = path.resolve(dir, dirent.name);
        return dirent.isDirectory() ? getFiles(res) : res;
      })
    );
    return Array.prototype.concat(...files);
  }

  const files = (await getFiles(s3Path)) as string[];
  const uploads = files.map((filePath) =>
    s3
      .putObject({
        Key: path.relative(s3Path, filePath),
        Bucket: bucketName,
        Body: createReadStream(filePath),
      })
      .promise()
  );
  return Promise.all(uploads);
}

await uploadDir(path.resolve('./my-path'), 'bucketname');
Here is a cleaned up, debugged, working version of #Jim's solution:
function uploadArtifactsToS3() {
  const artifactFolder = `logs/${config.log}/test-results`;
  const testResultsPath = './test-results';

  const walkSync = (currentDirPath, callback) => {
    fs.readdirSync(currentDirPath).forEach((name) => {
      const filePath = path.join(currentDirPath, name);
      const stat = fs.statSync(filePath);
      if (stat.isFile()) {
        callback(filePath, stat);
      } else if (stat.isDirectory()) {
        walkSync(filePath, callback);
      }
    });
  };

  walkSync(testResultsPath, async (filePath) => {
    let bucketPath = filePath.substring(testResultsPath.length - 1);
    let params = {
      Bucket: process.env.SOURCE_BUCKET,
      Key: `${artifactFolder}/${bucketPath}`,
      Body: fs.readFileSync(filePath)
    };

    try {
      await s3.putObject(params).promise();
      console.log(`Successfully uploaded ${bucketPath} to s3 bucket`);
    } catch (error) {
      console.error(`error in uploading ${bucketPath} to s3 bucket`);
      throw new Error(`error in uploading ${bucketPath} to s3 bucket`);
    }
  });
}
I was just contemplating this problem the other day, and was thinking something like this:
...
var async = require('async'),
    fs = require('fs'),
    path = require("path");

var directoryName = './test',
    directoryPath = path.resolve(directoryName);

var files = fs.readdirSync(directoryPath);

async.map(files, function (f, cb) {
  var filePath = path.join(directoryPath, f);

  var options = {
    Bucket: bucket,
    Key: s3Path,
    Body: fs.readFileSync(filePath),
    ACL: s3FilePermissions
  };

  S3.putObject(options, cb);
}, function (err, results) {
  if (err) console.error(err);
  console.log(results);
});
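One caveat about this sketch: every file is sent with the same Key (s3Path), so later uploads would overwrite earlier ones. A per-file key, which I assume was the intent, would look something like:

var options = {
  Bucket: bucket,
  Key: s3Path + '/' + f,  // one key per file instead of a fixed s3Path
  Body: fs.readFileSync(filePath),
  ACL: s3FilePermissions
};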
Here's a version that wraps the upload method in a Promise. This version allows you to perform an action when all uploads are complete, via Promise.all().then(...):
const path = require('path');
const fs = require('fs');
const AWS = require('aws-sdk');

const s3 = new AWS.S3();
const directoryToUpload = 'directory-name-here';
const bucketName = 'name-of-s3-bucket-here';

// get file paths
const filePaths = [];
const getFilePaths = (dir) => {
  fs.readdirSync(dir).forEach(function (name) {
    const filePath = path.join(dir, name);
    const stat = fs.statSync(filePath);
    if (stat.isFile()) {
      filePaths.push(filePath);
    } else if (stat.isDirectory()) {
      getFilePaths(filePath);
    }
  });
};
getFilePaths(directoryToUpload);

// upload to S3
const uploadToS3 = (dir, path) => {
  return new Promise((resolve, reject) => {
    const key = path.split(`${dir}/`)[1];
    const params = {
      Bucket: bucketName,
      Key: key,
      Body: fs.readFileSync(path),
    };
    s3.putObject(params, (err) => {
      if (err) {
        reject(err);
      } else {
        console.log(`uploaded ${params.Key} to ${params.Bucket}`);
        resolve(path);
      }
    });
  });
};

const uploadPromises = filePaths.map((path) =>
  uploadToS3(directoryToUpload, path)
);

Promise.all(uploadPromises)
  .then((result) => {
    console.log('uploads complete');
    console.log(result);
  })
  .catch((err) => console.error(err));
You might try the node-s3-client package.
UPDATE: Available on npm here.
From the "sync a directory to S3" docs:
UPDATE: Added client initialization code.
var client = s3.createClient({
  maxAsyncS3: 20,     // this is the default
  s3RetryCount: 3,    // this is the default
  s3RetryDelay: 1000, // this is the default
  multipartUploadThreshold: 20971520, // this is the default (20 MB)
  multipartUploadSize: 15728640,      // this is the default (15 MB)
  s3Options: {
    accessKeyId: "YOUR ACCESS KEY",
    secretAccessKey: "YOUR SECRET ACCESS KEY"
  }
});

var params = {
  localDir: "some/local/dir",
  deleteRemoved: true, // default false, whether to remove s3 objects
                       // that have no corresponding local file.
  s3Params: {
    Bucket: "s3 bucket name",
    Prefix: "some/remote/dir/",
    // other options supported by putObject, except Body and ContentLength.
    // See: http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#putObject-property
  },
};

var uploader = client.uploadDir(params);
uploader.on('error', function(err) {
  console.error("unable to sync:", err.stack);
});
uploader.on('progress', function() {
  console.log("progress", uploader.progressAmount, uploader.progressTotal);
});
uploader.on('end', function() {
  console.log("done uploading");
});
This works for me (you'll need to add the walkSync package):
async function asyncForEach(array, callback) {
  for (let index = 0; index < array.length; index++) {
    await callback(array[index], index, array);
  }
}

const syncS3Directory = async (s3Path, endpoint) => {
  await asyncForEach(walkSync(s3Path, {directories: false}), async (file) => {
    const filePath = Path.join(s3Path, file);
    const fileContent = fs.readFileSync(filePath);
    const params = {
      Bucket: endpoint,
      Key: file,
      Body: fileContent,
      ContentType: "text/html",
    };
    let s3Upload = await s3.upload(params).promise();
    s3Upload ? undefined : Logger.error("Error synchronizing the bucket");
  });

  console.log("S3 bucket synchronized!");
};
const AWS = require("aws-sdk");
const fs = require("fs");
const path = require("path");
const async = require("async");
const readdir = require("recursive-readdir");

// AWS CRED
const ID = "<accessKeyId>";
const SECRET = "<secretAccessKey>";

const rootFolder = path.resolve(__dirname, "../");
const uploadFolder = "./sources";

// The name of the bucket that you have created
const BUCKET_NAME = "<Bucket_Name>";

const s3 = new AWS.S3({
  accessKeyId: ID,
  secretAccessKey: SECRET
});

function getFiles(dirPath) {
  return fs.existsSync(dirPath) ? readdir(dirPath) : [];
}

async function uploadToS3(uploadPath) {
  const filesToUpload = await getFiles(path.resolve(rootFolder, uploadPath));
  console.log(filesToUpload);
  return new Promise((resolve, reject) => {
    async.eachOfLimit(
      filesToUpload,
      10,
      async.asyncify(async file => {
        const Key = file.replace(`${rootFolder}/`, "");
        console.log(`uploading: [${Key}]`);
        return new Promise((res, rej) => {
          s3.upload(
            {
              Key,
              Bucket: BUCKET_NAME,
              Body: fs.readFileSync(file)
            },
            err => {
              if (err) {
                return rej(new Error(err));
              }
              res({ result: true });
            }
          );
        });
      }),
      err => {
        if (err) {
          return reject(new Error(err));
        }
        resolve({ result: true });
      }
    );
  });
}

uploadToS3(uploadFolder)
  .then(() => {
    console.log("upload complete!");
    process.exit(0);
  })
  .catch(err => {
    console.error(err.message);
    process.exit(1);
  });