I'm trying to write a program that unzips a zip file, reads the images inside it, and applies a grayscale filter to them.
Right now I have these two functions:
var fs = require('fs'),
PNG = require('pngjs').PNG
const unzipper = require('unzipper')
const dir = __dirname + "/";
const myFile = (fileName) => {
let createdFile = dir + fileName
fs.createReadStream(createdFile)
.pipe(unzipper.Extract({ path: 'myfile' }));
console.log('file unzipped')
}
myFile("myfile.zip")
function applyFilter(Name) {
fs.readdir(Name, 'utf-8', (err, data) => {
if (err) {
console.log(err)
} else {
data.forEach(function (file) {
if (file.includes('png')) {
let greyPNG = (__dirname + '/' + 'myfile' + '/' + file)
console.log (greyPNG)
fs.createReadStream(greyPNG)
.pipe(new PNG({
colorType: 0,
}))
.on('parsed', function () {
this.pack().pipe(fs.createWriteStream(__dirname + "/" + "myfile" + "/" + file));
});
}
})
}
})
}
applyFilter ('myfile')
These two functions work fine individually, but they will not run together as written: if I comment out applyFilter, the zip file is unzipped, and if the files are already in the directory, applyFilter applies grayscale to those pictures. I know this is because both functions run at the same time, which causes the problem. So how do I implement promises to solve this issue? I know that I could use the "Sync" versions of the functions; I just want to know how to do it with promises.
There are examples in the official documentation for util.promisify:
https://nodejs.org/dist/latest-v12.x/docs/api/util.html#util_util_promisify_original
It gives you a "promised" version of the same function (as long as the original function has a standard signature, or a custom promisified definition).
const util = require('util');
const fs = require('fs');
const stat = util.promisify(fs.stat);
async function callStat() {
const stats = await stat('.');
console.log(`This directory is owned by ${stats.uid}`);
}
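Since Node 10 you can also skip promisify for fs entirely, because fs ships a built-in promise API. A minimal sketch of the same stat example:
const fsp = require('fs').promises;

async function callStat() {
  const stats = await fsp.stat('.');
  console.log(`This directory is owned by ${stats.uid}`);
}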
You can also implement your own: just return a promise (this example reads an HTTP request):
function read_request(request) {
request.setEncoding("utf-8");
return new Promise((resolve, reject) => {
var cache = "";
request.on("data", (chunk) => {
cache += chunk;
}).on("end", () => {
resolve(cache);
}).on("error", reject);
});
}
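Usage would then look something like this inside a request handler (a minimal sketch; the surrounding http server is an assumption, not part of the original snippet):
const http = require('http');

http.createServer(async (request, response) => {
  const body = await read_request(request); // resolves once the stream ends
  response.end(`received ${body.length} characters`);
}).listen(3000);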
You can use the stream finish event to determine when the file unzip is complete. We can then use promises and async / await to ensure we don't try to apply the filter before the files are ready.
const fs = require('fs');
const PNG = require('pngjs').PNG;
const unzipper = require('unzipper');
const dir = __dirname + "/";
function unzipFile(fileName, outputPath) {
return new Promise((resolve, reject) => {
let createdFile = dir + fileName
let stream = fs.createReadStream(createdFile)
.pipe(unzipper.Extract({ path: outputPath }));
stream.on('finish', () => {
console.log('file unzipped');
resolve(outputPath);
});
stream.on('error', reject); // reject the promise if extraction fails
});
}
function applyFilter(Name) {
fs.readdir(dir + Name, 'utf-8', (err, data) => {
if (err) {
console.log(err)
} else {
data.filter(file => file.includes("png")).forEach(file => {
let greyPNG = (__dirname + '/' + Name + '/' + file)
console.log (greyPNG)
fs.createReadStream(greyPNG)
.pipe(new PNG({
colorType: 0,
}))
.on('parsed', function () {
this.pack().pipe(fs.createWriteStream(greyPNG));
});
})
}
})
}
async function unzipAndApplyFilter(zipFile, outputPath) {
await unzipFile(zipFile, outputPath); // Wait until unzip is complete.
applyFilter(outputPath);
}
unzipAndApplyFilter('myfile.zip', 'myfile');
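Note that applyFilter above still returns before the PNG streams finish; if you also need to await the filtering itself, here is a sketch (my own variant, not part of the original answer) that makes it awaitable with fs.promises and one Promise per file:
const fsp = require('fs').promises;

async function applyFilterAsync(name) {
  const folder = dir + name;
  const files = await fsp.readdir(folder);
  const jobs = files
    .filter((file) => file.endsWith('.png'))
    .map((file) => new Promise((resolve, reject) => {
      const target = folder + '/' + file;
      fs.createReadStream(target)
        .pipe(new PNG({ colorType: 0 }))
        .on('parsed', function () {
          // 'parsed' fires after the full read, so writing back to the same path is safe
          this.pack()
            .pipe(fs.createWriteStream(target))
            .on('finish', resolve)
            .on('error', reject);
        })
        .on('error', reject);
    }));
  await Promise.all(jobs); // resolves once every file has been rewritten
}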
As seen in the title, I am currently using Sails.js + skipper-better-s3 for S3 uploads. I started with uploading one file, which works great; then, because of a change request, I needed to upload multiple files at once, so I added a for loop. But by doing this, all the keys end up the same, so only one file is actually uploaded: the last uploaded file, under the first upload's filename.
I did read some articles, and people say the problem is that the for loop is synchronous while the file upload is asynchronous, and that the answer is recursion. I tried that too, but no luck; the same thing happens.
My recursive code is below:
s3_upload_multi: async (req, res) => {
const generatePath = (rootPath, fieldName) => {
let path;
// this is just a switch statement here to check which fieldName is provided then value of path will depend on it
// as for the other two variable is just checking if upload content type is correct
return { path };
};
const processUpload = async ({
fieldName,
awsOp,
fileExtension,
rootPath,
fileName,
}) => {
return new Promise(function (resolve, reject) {
req.file(fieldName).upload(awsOp, async (err, filesUploaded) => {
if (err) return reject(err);
const filesUploadedF = filesUploaded[0]; // F = first file
const response = {
status: true,
errCode: 200,
msg: 'OK',
response: {
url: filesUploadedF.extra.Location,
size: filesUploadedF.size,
type: fileExtension,
filename: filesUploadedF.filename,
key: filesUploadedF.extra.Key,
field: fieldName,
}
};
resolve(response);
});
});
}
const process_recur = async (files, fieldName) => {
if (files.length <= 0) return;
const fileUpload = files[0].stream;
const rootPath = `${sails.config.aws.upload.path.root}`;
const fileCType = fileUpload.headers['content-type'];
// console.log(fileCType, 'fileCType');
const { path } = generatePath(rootPath, fieldName);
const fileName = fileUpload.filename;
const fileExtension = fileUpload.filename.split('.').pop();
const genRan = await UtilsService.genRan(8);
const fullPath = `${path}${genRan}-${fileName}`;
const awsOp = {
adapter: require('skipper-better-s3'),
key: sails.config.aws.access_key,
secret: sails.config.aws.secret_key,
saveAs: fullPath,
bucket: sails.config.aws.bucket,
s3params: {
ACL: 'public-read'
},
};
const config = {
fieldName,
awsOp,
fileExtension,
rootPath,
fileName,
}
const proceed = await processUpload(config);
files.shift();
await process_recur(files, fieldName);
};
try {
const fieldName = req._fileparser.upstreams[0].fieldName;
const files = req.file(fieldName)._files;
await process_recur(files, fieldName);
} catch (e) {
console.log(e, 'inside UploadService');
return false;
}
}
Below is my code using the for loop, which is quite similar to the above:
s3_upload_multi: async (req, res) => {
const generatePath = (rootPath, fieldName) => {
let path;
// this is just a switch statement here to check which fieldName is provided then value of path will depend on it
// as for the other two variable is just checking if upload content type is correct
return { path };
};
const processUpload = async ({
fieldName,
awsOp,
fileExtension,
rootPath,
fileName,
}) => {
return new Promise(function (resolve, reject) {
req.file(fieldName).upload(awsOp, async (err, filesUploaded) => {
if (err) return reject(err);
const filesUploadedF = filesUploaded[0]; // F = first file
const response = {
status: true,
errCode: 200,
msg: 'OK',
response: {
url: filesUploadedF.extra.Location,
size: filesUploadedF.size,
type: fileExtension,
filename: filesUploadedF.filename,
key: filesUploadedF.extra.Key,
field: fieldName,
}
};
resolve(response);
});
});
}
try {
const fieldName = req._fileparser.upstreams[0].fieldName;
const files = req.file(fieldName)._files;
for (const file of files) {
const fileUpload = file.stream;
const rootPath = `${sails.config.aws.upload.path.root}`;
const fileCType = fileUpload.headers['content-type'];
// console.log(fileCType, 'fileCType');
const fileName = fileUpload.filename;
const { path } = generatePath(rootPath, fieldName);
const fileExtension = fileUpload.filename.split('.').pop();
// using a variable here because if this is an image, a thumbnail will be created with the same name as the original one
const genRan = await UtilsService.genRan(8);
const fullPath = `${path}${genRan}-${fileName}`;
const awsOp = {
adapter: require('skipper-better-s3'),
key: sails.config.aws.access_key,
secret: sails.config.aws.secret_key,
saveAs: fullPath,
bucket: sails.config.aws.bucket,
s3params: {
ACL: 'public-read'
},
};
const config = {
fieldName,
awsOp,
fileExtension,
rootPath,
fileName,
}
const proceed = await processUpload(config);
console.log(proceed, 'proceed');
}
} catch (e) {
console.log(e, 'inside UploadService');
return false;
}
}
Which part of this am I getting wrong that causes such behavior? When I console.log, my path is totally correct, with the correct filename too.
Thanks in advance for any suggestions and help.
Took me quite a lot of time to figure this out ages ago.
You are using skipper-better-s3, whose documentation is not as detailed as skipper's. Going back to the skipper documentation: the saveAs field doesn't only take a string, it also accepts a function, which you can use to get each file's filename and return the key as needed. So you actually don't need recursion or a for loop at all.
For example, with some of your code:
const awsOp = {
adapter: require('skipper-better-s3'),
key: sails.config.aws.access_key,
secret: sails.config.aws.secret_key,
saveAs: (__newFileStream, next) => {
// generatePath is what you wrote
// __newFileStream.filename would be the filename of each file before uploading
// the path is pretty much the s3 key which includes your filename too
const { path } = generatePath(rootPath, __newFileStream.filename, fieldName);
return next(undefined, path);
},
bucket: sails.config.aws.bucket,
s3params: {
ACL: 'public-read'
},
};
Skipper documentation: https://www.npmjs.com/package/skipper#customizing-at-rest-filenames-for-uploads
I'm trying to download multiple files using the request library. I need to download them one by one and also show a progress bar. The file links are stored in an array, which is passed to a function that starts the download:
const request = require('request')
const fs = require('fs')
const ProgressBar = require('progress')
async function downloadFiles(links) {
for (let link of links) {
let file = request(link)
file.on('response', (res) => {
var len = parseInt(res.headers['content-length'], 10);
console.log();
bar = new ProgressBar(' Downloading [:bar] :rate/bps :percent :etas', {
complete: '=',
incomplete: ' ',
width: 20,
total: len
});
file.on('data', (chunk) => {
bar.tick(chunk.length);
})
file.on('end', () => {
console.log('\n');
})
})
file.pipe(fs.createWriteStream('./downloads/' + Math.random().toString(36).substring(2, 15) + Math.random().toString(36).substring(2, 15)))
}
}
let links = ['https://speed.hetzner.de/100MB.bin', 'https://speed.hetzner.de/100MB.bin', 'https://speed.hetzner.de/100MB.bin', 'https://speed.hetzner.de/100MB.bin']
downloadFiles(links)
This is what I've got so far. The problem is that the request is asynchronous; I tried to use async/await, but that way I couldn't get the progress bar to work.
How can I make it so that the files are downloaded one at a time while also showing a progress bar?
Based on my comment about async.queue, this is how I would write that up.
You can call dl.downloadFiles([]) as often as you want and it will just fetch everything that you have added to the queue one after another.
const request = require('request')
const async = require('async')
const fs = require('fs')
const ProgressBar = require('progress')
class Downloader {
constructor() {
this.q = async.queue(this.singleFile, 1);
// assign a callback
this.q.drain(function() {
console.log('all items have been processed');
});
// assign an error callback
this.q.error(function(err, task) {
console.error('task experienced an error', task);
});
}
downloadFiles(links) {
for (let link of links) {
this.q.push(link);
}
}
singleFile(link, cb) {
let file = request(link);
let bar;
file.on('response', (res) => {
const len = parseInt(res.headers['content-length'], 10);
console.log();
bar = new ProgressBar(' Downloading [:bar] :rate/bps :percent :etas', {
complete: '=',
incomplete: ' ',
width: 20,
total: len
});
file.on('data', (chunk) => {
bar.tick(chunk.length);
})
file.on('end', () => {
console.log('\n');
cb();
})
})
file.pipe(fs.createWriteStream('./downloads/' + Math.random().toString(36).substring(2, 15) + Math.random().toString(36).substring(2, 15)))
}
}
const dl = new Downloader();
dl.downloadFiles([
'https://speed.hetzner.de/100MB.bin',
'https://speed.hetzner.de/100MB.bin'
]);
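If you'd rather avoid the async library, the same sequencing can be done with plain async/await by wrapping one download in a Promise (my own sketch, reusing the question's request/progress setup):
function downloadOne(link) {
  return new Promise((resolve, reject) => {
    const file = request(link);
    let bar;
    file.on('response', (res) => {
      const len = parseInt(res.headers['content-length'], 10);
      bar = new ProgressBar(' Downloading [:bar] :rate/bps :percent :etas', {
        complete: '=',
        incomplete: ' ',
        width: 20,
        total: len
      });
    });
    file.on('data', (chunk) => bar && bar.tick(chunk.length));
    file.on('error', reject);
    file.pipe(fs.createWriteStream('./downloads/' + Math.random().toString(36).substring(2, 15)))
      .on('finish', resolve); // the file is fully written at this point
  });
}

async function downloadFiles(links) {
  for (const link of links) {
    await downloadOne(link); // strictly one at a time
  }
}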
I am using fs.copyFile to copy files from one location to another. I am doing this twice in order to copy two files. It's redundant, and I would like to make my code better by maybe copying both files to the destination with a single call. How can I achieve this?
fs.copyFile('src/blah.txt', 'build/blah.txt', (err) => {
if (err) throw err;
});
fs.copyFile('src/unk.txt', 'build/unk.txt', (err) => {
if (err) throw err;
});
You can simply create a function of your own that takes the src and dest path and an array of filenames as arguments:
const util = require('util');
const fs = require('fs');
const path = require('path');
const copyFilePromise = util.promisify(fs.copyFile);
function copyFiles(srcDir, destDir, files) {
return Promise.all(files.map(f => {
return copyFilePromise(path.join(srcDir, f), path.join(destDir, f));
}));
}
// usage
copyFiles('src', 'build', ['unk.txt', 'blah.txt']).then(() => {
console.log("done");
}).catch(err => {
console.log(err);
});
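On Node 10+, you can skip promisify and use the built-in promise API directly (same function, minimal sketch):
const fsp = require('fs').promises;
const path = require('path');

function copyFiles(srcDir, destDir, files) {
  return Promise.all(files.map(f =>
    fsp.copyFile(path.join(srcDir, f), path.join(destDir, f))
  ));
}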
Probably the best option is to use fs-extra:
const fse = require('fs-extra');
const srcDir = `path/to/file`;
const destDir = `path/to/destination/directory`;
// To copy a folder (fse.copySync is synchronous and throws on error, so it takes no callback)
try {
  fse.copySync(srcDir, destDir);
  console.log("success!");
} catch (err) {
  console.error(err);
}
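fs-extra also returns a promise when the callback is omitted, so the asynchronous variant composes with async/await (a minimal sketch):
const fse = require('fs-extra');

async function copyFolder(srcDir, destDir) {
  await fse.copy(srcDir, destDir); // recursive copy, resolves when done
  console.log('success!');
}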
const fs = require('fs');
const path = require('path');
const files = ['/files/a.js', '/files/b.js', '/files/c.txt'];
files.forEach(file => {
fs.copyFile(path.join(__dirname, file), path.join(__dirname, '/files/backup/', path.basename(file)), err => {
if(!err){
console.log(file + " has been copied!");
}
})
});
Use the following code if you don't want existing files to be overwritten.
const fs = require('fs');
const path = require('path');
const files = ['/files/a.js','/files/b.js','/files/c.txt'];
files.forEach(file => {
let basename = path.basename(file);
let oldFile = path.join(__dirname, file);
let newFile = path.join(__dirname, '/files/backup/', basename);
if (!fs.existsSync(newFile)) {
fs.copyFile(oldFile, newFile, err=>{
if(!err){
console.log(basename+" has been copied!");
}
});
}else{
console.log(basename+" already existed!");
}
});
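Alternatively, fs.copyFile itself accepts a mode flag: with fs.constants.COPYFILE_EXCL the copy fails if the destination already exists, which avoids the check-then-copy race of existsSync (minimal sketch):
const fs = require('fs');
const path = require('path');

const files = ['/files/a.js', '/files/b.js', '/files/c.txt'];
files.forEach(file => {
  const oldFile = path.join(__dirname, file);
  const newFile = path.join(__dirname, '/files/backup/', path.basename(file));
  fs.copyFile(oldFile, newFile, fs.constants.COPYFILE_EXCL, err => {
    if (err && err.code === 'EEXIST') {
      console.log(path.basename(file) + ' already existed!');
    } else if (!err) {
      console.log(path.basename(file) + ' has been copied!');
    }
  });
});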
This is how I implemented the function above with the walk npm package to get all the files:
https://www.npmjs.com/package/walk
This gets all files within subfolders as well. It worked for copying 16,000 images from my GoPro into one single folder on my desktop.
const util = require('util');
const fs = require('fs');
const path = require('path');
const copyFilePromise = util.promisify(fs.copyFile);
const walk = require('walk');
let files = [];
let source_folder = '/Volumes/Untitled/DCIM';
let destination_folder = '/Users/dave/Desktop/pics';
let walker = walk.walk(source_folder, {
followLinks: false
});
walker.on('file', function(root, stat, next) {
let file_path = root + '/' + stat.name;
files.push({
src: file_path,
des: destination_folder + '/' + stat.name
});
next();
});
walker.on('end', function() {
copyFiles(files).then(() => {
console.log("done");
}).catch(err => {
console.log(err);
});
});
function copyFiles(files) {
return Promise.all(files.map(f => {
return copyFilePromise(f.src, f.des);
}));
}
I am trying to upload some images to my Node backend with the formidable middleware.
I want to upload multiple files from the same form. Each file should be stored in the media/temp_files folder, then processed by Jimp into multiple resolutions and stored in a path containing its resolution; finally, all images should be returned to the next middleware. But I can't get it to work: after uploading both images to the temp folder, it only processes and returns the first image.
I wrote this middleware for formidable:
var fs = require('fs');
var path = require('path');
var Jimp = require('jimp');
var mkdirp = require('mkdirp');
const formidable = require('formidable');
function parse(opts) {
return (req, res, next) => {
const form = new formidable.IncomingForm({
uploadDir: 'media/temp_images',
keepExtensions: true,
});
Object.assign(form, opts);
form.on('file', function(field, file) {
var date = new Date();
var newName = date.getTime() + '.' + file.type.split('/')[1];
var newPath = form.uploadDir + '/' + newName;
fs.rename(file.path, newPath);
file.path = newPath;
});
form.parse(req, (err, fields, files) => {
var processImage = function(image, options) {
options.sizes.map((item) => {
var clone = image.clone();
var uploadPath = path.join(options.folder, 'w' + item.width);
var filepath = path.join(uploadPath, options.name);
!fs.existsSync(uploadPath) && mkdirp.sync(uploadPath);
clone.resize(item.width, item.height).write(filepath);
});
};
var promises = [];
for (const file in files) {
var options = {
sizes: [{ width: 100, height: 200 }, { width: 200, height: 400 }, { width: 600, height: 1200 }],
folder: path.join('media/images', file.toLowerCase()),
name: files[file].name,
};
var promise = new Promise((resolve, reject) => {
Jimp.read(files[file].path)
.then((image) => {
processImage(image, options);
})
.catch((err) => {
console.error(err);
});
});
promises.push(promise);
}
Promise.all(promises);
if (err) {
next(err);
return;
}
Object.assign(req, { fields, files });
next();
});
};
}
module.exports = parse;
exports.parse = parse;
I put it in a file, require it with var formidableMiddleware = require('./formidableMiddleware'), and then call app.use(formidableMiddleware()), and that's all.
The problem is that this logic returns only the first item, and I can't make it return both of them. I think it is related to the way I'm using the Promises, but I can't find the bug.
Any advice will be welcome!!!
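One thing worth noting: the Promise created in the loop never calls resolve or reject (the Jimp.read chain runs processImage, but nothing resolves), Promise.all(promises) is never awaited, and options is declared with var, so every .then callback sees the last iteration's options. A sketch of that section with those three issues addressed, keeping the question's names:
const promises = [];
for (const file in files) {
  const options = { // const, so each .then callback keeps its own options
    sizes: [{ width: 100, height: 200 }, { width: 200, height: 400 }, { width: 600, height: 1200 }],
    folder: path.join('media/images', file.toLowerCase()),
    name: files[file].name,
  };
  // Jimp.read already returns a promise, so there is no need to wrap it
  promises.push(
    Jimp.read(files[file].path).then((image) => processImage(image, options))
  );
}
Promise.all(promises)
  .then(() => {
    Object.assign(req, { fields, files });
    next(); // continue only after every image has been processed
  })
  .catch(next);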
I currently upload single objects to S3 like so:
var options = {
Bucket: bucket,
Key: s3Path,
Body: body,
ACL: s3FilePermissions
};
S3.putObject(options,
function (err, data) {
//console.log(data);
});
But when I have a large resources folder for example, I use the AWS CLI tool.
I was wondering, is there a native way to do the same thing with the aws sdk (upload entire folders to s3)?
An old-school recursive way I whipped up in a hurry. It only uses core Node modules and the standard AWS SDK.
var AWS = require('aws-sdk');
var path = require("path");
var fs = require('fs');
const uploadDir = function(s3Path, bucketName) {
let s3 = new AWS.S3();
function walkSync(currentDirPath, callback) {
fs.readdirSync(currentDirPath).forEach(function (name) {
var filePath = path.join(currentDirPath, name);
var stat = fs.statSync(filePath);
if (stat.isFile()) {
callback(filePath, stat);
} else if (stat.isDirectory()) {
walkSync(filePath, callback);
}
});
}
walkSync(s3Path, function(filePath, stat) {
let bucketPath = filePath.substring(s3Path.length+1);
let params = {Bucket: bucketName, Key: bucketPath, Body: fs.readFileSync(filePath) };
s3.putObject(params, function(err, data) {
if (err) {
console.log(err)
} else {
console.log('Successfully uploaded '+ bucketPath +' to ' + bucketName);
}
});
});
};
uploadDir("path to your folder", "your bucket name");
Special thanks to Ali from this post for helping get the filenames.
async/await + TypeScript
If you need a solution that uses modern JavaScript syntax and is compatible with TypeScript, I came up with the following code. The recursive getFiles is borrowed from this answer (after all these years, recursion still gives me a headache, lol).
import { promises as fs, createReadStream } from 'fs';
import * as path from 'path';
import { S3 } from 'aws-sdk';
async function uploadDir(s3Path: string, bucketName: string) {
const s3 = new S3();
// Recursive getFiles from
// https://stackoverflow.com/a/45130990/831465
async function getFiles(dir: string): Promise<string | string[]> {
const dirents = await fs.readdir(dir, { withFileTypes: true });
const files = await Promise.all(
dirents.map((dirent) => {
const res = path.resolve(dir, dirent.name);
return dirent.isDirectory() ? getFiles(res) : res;
})
);
return Array.prototype.concat(...files);
}
const files = (await getFiles(s3Path)) as string[];
const uploads = files.map((filePath) =>
s3
.putObject({
Key: path.relative(s3Path, filePath),
Bucket: bucketName,
Body: createReadStream(filePath),
})
.promise()
);
return Promise.all(uploads);
}
await uploadDir(path.resolve('./my-path'), 'bucketname');
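Note that the top-level await in the last line only works inside an ES module; in a plain CommonJS script you would wrap the call instead:
uploadDir(path.resolve('./my-path'), 'bucketname')
  .then(() => console.log('upload complete'))
  .catch((err) => console.error(err));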
Here is a cleaned-up, debugged, working version of @Jim's solution:
function uploadArtifactsToS3() {
const artifactFolder = `logs/${config.log}/test-results`;
const testResultsPath = './test-results';
const walkSync = (currentDirPath, callback) => {
fs.readdirSync(currentDirPath).forEach((name) => {
const filePath = path.join(currentDirPath, name);
const stat = fs.statSync(filePath);
if (stat.isFile()) {
callback(filePath, stat);
} else if (stat.isDirectory()) {
walkSync(filePath, callback);
}
});
};
walkSync(testResultsPath, async (filePath) => {
let bucketPath = filePath.substring(testResultsPath.length - 1);
let params = {
Bucket: process.env.SOURCE_BUCKET,
Key: `${artifactFolder}/${bucketPath}`,
Body: fs.readFileSync(filePath)
};
try {
await s3.putObject(params).promise();
console.log(`Successfully uploaded ${bucketPath} to s3 bucket`);
} catch (error) {
console.error(`error in uploading ${bucketPath} to s3 bucket`);
throw new Error(`error in uploading ${bucketPath} to s3 bucket`);
}
});
}
I was just contemplating this problem the other day, and was thinking something like this:
...
var async = require('async'),
fs = require('fs'),
path = require("path");
var directoryName = './test',
directoryPath = path.resolve(directoryName);
var files = fs.readdirSync(directoryPath);
async.map(files, function (f, cb) {
var filePath = path.join(directoryPath, f);
var options = {
Bucket: bucket,
Key: s3Path + '/' + f, // one key per file, otherwise each upload overwrites the last
Body: fs.readFileSync(filePath),
ACL: s3FilePermissions
};
S3.putObject(options, cb);
}, function (err, results) {
if (err) console.error(err);
console.log(results);
});
Here's a version where the upload method returns a Promise, which lets you perform an action once all uploads are complete via Promise.all(...).then(...):
const path = require('path');
const fs = require('fs');
const AWS = require('aws-sdk');
const s3 = new AWS.S3();
const directoryToUpload = 'directory-name-here';
const bucketName = 'name-of-s3-bucket-here';
// get file paths
const filePaths = [];
const getFilePaths = (dir) => {
fs.readdirSync(dir).forEach(function (name) {
const filePath = path.join(dir, name);
const stat = fs.statSync(filePath);
if (stat.isFile()) {
filePaths.push(filePath);
} else if (stat.isDirectory()) {
getFilePaths(filePath);
}
});
};
getFilePaths(directoryToUpload);
// upload to S3
const uploadToS3 = (dir, filePath) => {
  return new Promise((resolve, reject) => {
    const key = filePath.split(`${dir}/`)[1];
    const params = {
      Bucket: bucketName,
      Key: key,
      Body: fs.readFileSync(filePath),
    };
    s3.putObject(params, (err) => {
      if (err) {
        reject(err);
      } else {
        console.log(`uploaded ${params.Key} to ${params.Bucket}`);
        resolve(filePath);
      }
    });
  });
};
const uploadPromises = filePaths.map((filePath) =>
  uploadToS3(directoryToUpload, filePath)
);
Promise.all(uploadPromises)
.then((result) => {
console.log('uploads complete');
console.log(result);
})
.catch((err) => console.error(err));
You might try the node-s3-client.
UPDATE: Available on npm here
From the sync a directory to s3 docs:
UPDATE: Added client initialization code.
var client = s3.createClient({
maxAsyncS3: 20, // this is the default
s3RetryCount: 3, // this is the default
s3RetryDelay: 1000, // this is the default
multipartUploadThreshold: 20971520, // this is the default (20 MB)
multipartUploadSize: 15728640, // this is the default (15 MB)
s3Options: {
accessKeyId: "YOUR ACCESS KEY",
secretAccessKey: "YOUR SECRET ACCESS KEY"
}
});
var params = {
localDir: "some/local/dir",
deleteRemoved: true, // default false, whether to remove s3 objects
// that have no corresponding local file.
s3Params: {
Bucket: "s3 bucket name",
Prefix: "some/remote/dir/",
// other options supported by putObject, except Body and ContentLength.
// See: http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#putObject-property
},
};
var uploader = client.uploadDir(params);
uploader.on('error', function(err) {
console.error("unable to sync:", err.stack);
});
uploader.on('progress', function() {
console.log("progress", uploader.progressAmount, uploader.progressTotal);
});
uploader.on('end', function() {
console.log("done uploading");
});
This works for me (you'll need to add the walk-sync package):
const fs = require("fs");
const Path = require("path");
const AWS = require("aws-sdk");
const walkSync = require("walk-sync");
const s3 = new AWS.S3();

async function asyncForEach(array, callback) {
for (let index = 0; index < array.length; index++) {
await callback(array[index], index, array);
}
}
const syncS3Directory = async (s3Path, endpoint) => {
await asyncForEach(walkSync(s3Path, {directories: false}), async (file) => {
const filePath = Path.join(s3Path, file);
const fileContent = fs.readFileSync(filePath);
const params = {
Bucket: endpoint,
Key: file,
Body: fileContent,
ContentType: "text/html",
};
try {
  await s3.upload(params).promise();
} catch (err) {
  console.error("Error synchronizing the bucket", err);
}
});
console.log("S3 bucket synchronized!");
};
const AWS = require("aws-sdk");
const fs = require("fs");
const path = require("path");
const async = require("async");
const readdir = require("recursive-readdir");
// AWS CRED
const ID = "<accessKeyId>";
const SECRET = "<secretAccessKey>";
const rootFolder = path.resolve(__dirname, "../");
const uploadFolder = "./sources";
// The name of the bucket that you have created
const BUCKET_NAME = "<Bucket_Name>";
const s3 = new AWS.S3({
accessKeyId: ID,
secretAccessKey: SECRET
});
function getFiles(dirPath) {
return fs.existsSync(dirPath) ? readdir(dirPath) : [];
}
async function uploadToS3(uploadPath) {
const filesToUpload = await getFiles(path.resolve(rootFolder, uploadPath));
console.log(filesToUpload);
return new Promise((resolve, reject) => {
async.eachOfLimit(
filesToUpload,
10,
async.asyncify(async file => {
const Key = file.replace(`${rootFolder}/`, "");
console.log(`uploading: [${Key}]`);
return new Promise((res, rej) => {
s3.upload(
{
Key,
Bucket: BUCKET_NAME,
Body: fs.readFileSync(file)
},
err => {
if (err) {
return rej(new Error(err));
}
res({ result: true });
}
);
});
}),
err => {
if (err) {
return reject(new Error(err));
}
resolve({ result: true });
}
);
});
}
uploadToS3(uploadFolder)
.then(() => {
console.log("upload complete!");
process.exit(0);
})
.catch(err => {
console.error(err.message);
process.exit(1);
});