Delete files from folders on S3 bucket - javascript

After doing a lot of investigation on Amazon S3, I understand that emptying an S3 bucket is feasible. My requirement is to delete only selected folders from the S3 bucket, rather than emptying the whole bucket or deleting individual objects one at a time. I hope my understanding of AWS is right. I would like to know what options there are to achieve this. Please let me know if there is a solution in JavaScript.
function deleteObj(x) {
  var params = { Bucket: bucketName, Key: x.fileName };
  bucket.deleteObject(params, function(err, data) {
    if (err) console.log(err, err.stack); // an error occurred
  });
}

If S3 is anything like DynamoDB, you will have to get a list of all of the items in the bucket and then loop over them, deleting them individually. There's a gist here with an example script.
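Since the gist itself isn't reproduced here, a minimal sketch of that list-then-delete approach with the aws-sdk v2 client might look like the following; the bucket name and prefix are placeholders.
var AWS = require('aws-sdk');
var s3 = new AWS.S3();

// List everything under a prefix, then delete each listed object individually
s3.listObjectsV2({ Bucket: 'my-bucket', Prefix: 'my-folder/' }, function(err, data) {
  if (err) return console.log(err, err.stack);
  data.Contents.forEach(function(obj) {
    s3.deleteObject({ Bucket: 'my-bucket', Key: obj.Key }, function(err) {
      if (err) console.log(err, err.stack);
    });
  });
});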

I was in the same situation and didn't find a satisfying answer, so I came up with my own solution. You just need to modify the value of MaxKeys. In the code below, I am showing how we can delete a folder (including any files under it).
Just remove "exports" from the code below in case you don't need it.
exports.EmptyS3Folder = function(folderName) {
  return new Promise((resolve, reject) => {
    exports.s3.listObjectsV2({
      Bucket: process.env.AWS_S3_BUCKET,
      Prefix: folderName,
      MaxKeys: 10000000
    }, function(err, data) {
      if (err) {
        console.log(err, err.stack); // an error occurred
        reject(err)
      } else {
        if (data.Contents.length > 0) {
          const deleteParams = {
            Bucket: process.env.AWS_S3_BUCKET,
            Delete: {
              Objects: [],
              Quiet: false
            }
          };
          data.Contents.forEach(function(content) {
            deleteParams.Delete.Objects.push({ Key: content.Key });
          });
          deleteParams.Delete.Objects.push({ Key: folderName });
          exports.s3.deleteObjects(deleteParams, function(err, data) {
            if (err) {
              console.log(err, err.stack); // an error occurred
              reject(err)
            } else {
              resolve(data)
            }
          });
        } else {
          resolve(data)
        }
      }
    });
  })
}
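One caveat with the snippet above: listObjectsV2 returns at most 1,000 keys per call regardless of the MaxKeys value, and deleteObjects accepts at most 1,000 keys per request, so larger folders need to be paginated. A rough sketch of a paginated variant, assuming the same bucket environment variable and using the aws-sdk v2 .promise() helpers:
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

async function emptyS3Folder(folderName) {
  let continuationToken;
  do {
    // List the next page of keys under the prefix (at most 1,000 per call)
    const listParams = { Bucket: process.env.AWS_S3_BUCKET, Prefix: folderName };
    if (continuationToken) listParams.ContinuationToken = continuationToken;
    const listed = await s3.listObjectsV2(listParams).promise();

    if (listed.Contents.length > 0) {
      // Delete this page of keys in a single batched request
      await s3.deleteObjects({
        Bucket: process.env.AWS_S3_BUCKET,
        Delete: { Objects: listed.Contents.map(c => ({ Key: c.Key })) }
      }).promise();
    }

    continuationToken = listed.IsTruncated ? listed.NextContinuationToken : undefined;
  } while (continuationToken);
}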

Related

Error: ENOENT: no such file or directory

I am trying to get the folder path from the user's selection, and for each file I read it and return the data. However, upon getting the file I cannot read the data, for some reason that I haven't been able to understand yet. The directory I am trying to read and render does exist. I have referred to the other similar posts as well.
readFolder() {
  dialog.showOpenDialog({ properties: ['openDirectory'] }, (dirFiles) => {
    console.log(dirFiles);
    if (dirFiles === undefined) {
      console.log('No file ');
      return;
    }
    const pathName = dirFiles[0];
    fs.readdir(pathName, (err, files) => {
      files.forEach(file => {
        fs.readFile(file, 'utf-8', (err, data) => {
          if (err) {
            console.log(`something went wrong ${err}`);
          } else {
            console.log(data);
          }
        });
      });
    });
  });
}
readdir returns bare filenames such as "a", "b", "c", etc. You want pathName + '/' + file to get "/path/to/file/a".
The mistake I made was that I hadn't realised the values in 'file' are just the names of the files as strings, not paths. Assigning let filePath = `${pathName}/${file}`; and reading from that solved the problem.
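For reference, a minimal sketch of the corrected directory read, with the Electron dialog part omitted and the chosen directory path passed in directly:
const fs = require('fs');
const path = require('path');

function readFolder(pathName) {
  fs.readdir(pathName, (err, files) => {
    if (err) return console.log(err);
    files.forEach(file => {
      // readdir returns bare names, so join each one with the directory path
      const filePath = path.join(pathName, file);
      fs.readFile(filePath, 'utf-8', (err, data) => {
        if (err) return console.log(`something went wrong ${err}`);
        console.log(data);
      });
    });
  });
}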

delete all files in folder

How do I delete files in the project catalogue during the Yeoman process?
initializing() {
  this.sourceRoot('./generators/templates');
  this.destinationRoot('./generators/project');
  console.log(this.destinationPath());
  console.log(this.sourceRoot());
  this.fs.delete(this.destinationPath());
  this.fs.delete(this.destinationPath('**/*'));
  this.fs.delete('project');
  this.fs.delete('./generators/project');
  this.fs.delete('generators/project');
  this.fs.delete('generators/project/**/*');
}
None of these seems to work :(
const fs = require('fs');
const path = require('path');

fs.readdir('path here', (err, files) => {
  if (err) console.log(err);
  for (const file of files) {
    fs.unlink(path.join('path here', file), err => {
      if (err) console.log(err);
    });
  }
});
If you want to delete a file using fs, you should use fs.unlink(path, callback) or fs.unlinkSync(path).
// Asynchronous version
fs.unlink('file.txt', function(err) {
  if (!err) {
    console.log('file deleted');
  }
});

// Synchronous version
fs.unlinkSync('file.txt');
Make sure you have a recent version of Node installed so that this is available.
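As a side note, if the goal is simply to remove a whole folder and its contents (rather than going through Yeoman's in-memory filesystem), newer Node versions (14.14+) can do it in one call; 'generators/project' here is just the path from the question.
const fs = require('fs');

// Recursively remove the folder and everything under it
fs.rm('generators/project', { recursive: true, force: true }, err => {
  if (err) console.log(err);
});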

Trying to upload to S3 bucket. After 5-10 uploads it hangs

I've created a promise that uses the aws-sdk to upload to my S3 bucket. My application is a simple command-line script to add images to S3 and update the database. I have anywhere between 300-1000 images to upload every run.
The problem I am having is that it uploads 5-10 images but then seems to hang. I've confirmed this by placing a console.log(data) after the error checking in the promise below.
The first five images upload quickly, the sixth takes about a minute, the seventh takes a lot longer, and at that point it just hangs.
s3-upload-promise.js
'use strict'

let AWS = require('aws-sdk')
let s3 = new AWS.S3()

module.exports = function(params) {
  return new Promise(function(resolve, reject) {
    s3.upload(params, function(err, data) {
      if (err) return reject(Error(err))
      resolve(data)
    })
  })
}
And here is the code that calls the promise:
let s3UploadPromise = require('../src/s3-upload-promise/s3-upload-promise')

// Get all PNG files from given path and upload to bucket
listpng(process.argv[2]).then(function(files) {
  let promises = []
  files.forEach(function(el, i) {
    promises.push(el, s3UploadPromise({
      Bucket: process.env.S3_BUCKET,
      Key: 'templates/' + randomstring.generate() + '.png',
      Body: fs.createReadStream(process.argv[2] + el),
      ACL: 'public-read'
    }))
  });
  Promise.all(promises).then(function(values) {
    return console.log(values)
  })
})
Any ideas what I'm doing wrong? Does this have anything to do with me not closing createReadStream?
Edit
I've tried closing the stream within the s3.upload callback. It didn't make any difference and it still hung:
s3.upload(params, function(err, data) {
  params.Body.close()
  if (err) return reject(Error(err))
  resolve(data)
})
Edit 2
I added some error checking and I am getting the following error:
{
  message: 'Your socket connection to the server was not read from or written to within the timeout period. Idle connections will be closed.',
  code: 'RequestTimeout',
  region: null,
  time: 2016-06-19T13:14:39.223Z,
  requestId: 'F7E64E8F99E774F3',
  extendedRequestId: 'PW/mPy6t3w9U1uJc8xYKhUGi/KiSY+6yK6nq0RB21Ke1KqRmTWjjm3KXEp0qAEPDadypw+kiwCEP3upER1uecEP4Sl9Tk/lt',
  cfId: undefined,
  statusCode: 400,
  retryable: true
}
A comment on an issue on Github mentions:
I've been noticing this a lot when using concurrency > 1, on several systems. Most of the time an uploaded folder will begin to get 400's due to timeouts after the first 10 or so requests.
Maybe adding a Content-Length somewhere will help. Will try.
Edit 3
I decided to try a different library called knox. I get the same problem! This is crazy. It must surely be an Amazon issue if two different libraries are facing the same problem?
s3-upload-promise.js
'use strict'

let knox = require('knox')
let process = require('process')

var client = knox.createClient({
  key: process.env.AWS_KEY,
  secret: process.env.AWS_SECRET,
  bucket: process.env.S3_BUCKET,
});

module.exports = function(params) {
  return new Promise(function(resolve, reject) {
    let headers = {
      'Content-Length': params.contentLength,
      'Content-Type': params.contentType,
      'x-amz-acl': params.permissions
    }
    client.putBuffer(params.buffer, params.key, headers, function(err, res) {
      if (err) return reject(err)
      resolve(res)
    });
  })
}
Calling code ...
// Get all PNG files from given path
listpng(process.argv[2]).then(function(files) {
  let promises = []
  files.forEach(function(el, i) {
    let file = process.argv[2] + el;
    fs.readFile(file, function(err, data) {
      if (err) return console.log(err)
      fs.stat(process.argv[2] + el, function(err, stats) {
        if (err) return console.log(err)
        let key = process.env.S3_TEMPLATES + '/'
        let buffer = fs.createReadStream(process.argv[2] + el)
        let params = {
          buffer: data,
          key: key + randomstring.generate() + '.png',
          contentLength: stats.size,
          contentType: 'image/png',
          permissions: 'public-read',
        }
        promises.push(s3Uploader(params))
        Promise.all(promises).then(function(values) {
          return console.log(values)
        })
      })
    })
  })
})
Not sure what else to do now.
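For what it's worth, here is a sketch of the Content-Length idea from Edit 2 applied to the aws-sdk wrapper above: stat each file first and pass ContentLength alongside the stream. uploadWithLength is a hypothetical helper name, and the filePath argument stands in for process.argv[2] + el from the calling code.
let fs = require('fs')
let randomstring = require('randomstring')
let s3UploadPromise = require('../src/s3-upload-promise/s3-upload-promise')

function uploadWithLength(filePath) {
  return new Promise(function(resolve, reject) {
    // Size the file up front so the SDK knows the request length
    fs.stat(filePath, function(err, stats) {
      if (err) return reject(err)
      resolve(s3UploadPromise({
        Bucket: process.env.S3_BUCKET,
        Key: 'templates/' + randomstring.generate() + '.png',
        Body: fs.createReadStream(filePath),
        ContentLength: stats.size, // tell S3 the request size explicitly
        ACL: 'public-read'
      }))
    })
  })
}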

Error: Path must be a string, NodeJS Read/Write

What I'm trying to do is read/write to multiple files at once. Once a file is created, only the data inside the file would be changed.
code:
var files = fs.readdirSync(__dirname + "/")

function readWrite(files) {
  fs.readFile(files[i], 'utf-8', function(err, data) {
    if (err) {
      console.log(err)
    }
    fs.writeFile(files[i], 'test string', 'utf-8', function(err) {
      if (err) {
        console.log("completed")
      }
    })
  })
}

for (i in files) {
  readWrite(files[i])
}
The error is pretty obvious, "path must be a string", but how do I go about writing to multiple files in the same directory at once?
I'm pretty new to Node, so sorry if this seems like a bonehead question; any help would be appreciated.
You're passing the filename to the readWrite function, so you should not use [i]:
function readWrite(file) {
  fs.readFile(file, 'utf-8', function(err, data) {
    if (err) {
      console.log(err)
    }
    fs.writeFile(file, 'test string', 'utf-8', function(err) {
      if (err) {
        console.log(err)
      } else {
        console.log("completed")
      }
    })
  })
}

for (i in files) {
  readWrite(files[i])
}
Try replacing files[i] with files inside your function. You should be using the name of your variable, files (and probably rename it to filepath).
After that, do you really want to read from and write to the same file at the same time? That is what your code is doing.
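If the intent really is to read each file and then rewrite it, a sketch of doing that one file at a time with fs.promises (keeping the 'test string' payload from the question) could look like this; readWriteAll is a hypothetical name.
const fs = require('fs');
const path = require('path');

async function readWriteAll(dir) {
  const files = await fs.promises.readdir(dir);
  for (const file of files) {
    const filePath = path.join(dir, file);
    // Read first, then write, so the two operations on one file never overlap
    const data = await fs.promises.readFile(filePath, 'utf-8');
    console.log(data);
    await fs.promises.writeFile(filePath, 'test string', 'utf-8');
  }
}

readWriteAll(__dirname).catch(console.log);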

Async confusion in nodejs function

I always have multiple operations in one route or endpoint. Take the example below: when a user deletes an item, I want the related file to be deleted from S3 too, besides deleting the related collection from the database.
So is the code below OK? Does it matter if I put the first function (delete file from S3) inside the DeleteItem function?
router.post('/item/delete', function(req, res) {
  if (req.body.dlt_item) {
    var tempArray = [];
    tempArray.push({ "Key": req.body.dlt_item });
    s3Bucket.deleteObjects({
      Bucket: 'myS3',
      Delete: {
        Objects: req.body.dlt_item
      }
    }, function(err, data) {
      if (err)
        return console.log(err);
    });
  }
  Item.DeleteItem(req.body.item_id, function(err, result) {
    if (err) { console.log(err) }
    res.send({ result: 1 });
  })
});
You should organise your code like this. This will ensure that the S3 deletion starts only when the MongoDB deletion has finished.
In your code both things happen simultaneously, which may cause issues in some cases.
If one fails and the other succeeds, there will be trouble. Suppose the S3 files get deleted successfully but the Mongo deletion fails. Then you will have many references to non-existing resources.
router.post('/item/delete', function(req, res) {
  if (req.body.dlt_item) {
    var tempArray = [];
    tempArray.push({ "Key": req.body.dlt_item });
    Item.DeleteItem(req.body.item_id, function(err, result) {
      if (err) {
        console.log(err)
        res.send(err);
      } else {
        // deletion from mongodb is successful, now delete from s3
        s3Bucket.deleteObjects({
          Bucket: 'myS3',
          Delete: {
            Objects: tempArray // deleteObjects expects an array of { Key } objects
          }
        }, function(err, data) {
          if (err) {
            // deletion from s3 failed, you should handle this case
            res.send({ result: 1 });
            return console.log(err);
          } else {
            // successful deletion from both s3 and mongo.
            // If you do not want to wait for this then send the response before this function.
            res.send({ result: 1 });
          }
        });
      }
    })
  }
});
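The same ordering can be written more compactly with async/await and the SDK's .promise() helper; this is only a sketch. Item.DeleteItem and s3Bucket are the same handles used above, and promisifying DeleteItem assumes it has a standard (err, result) callback.
const util = require('util');
const deleteItem = util.promisify(Item.DeleteItem.bind(Item));

router.post('/item/delete', async function(req, res) {
  try {
    await deleteItem(req.body.item_id); // 1. remove the database record first
    await s3Bucket.deleteObjects({      // 2. then remove the file from S3
      Bucket: 'myS3',
      Delete: { Objects: [{ Key: req.body.dlt_item }] }
    }).promise();
    res.send({ result: 1 });
  } catch (err) {
    console.log(err);
    res.send(err);
  }
});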
