aws-sdk get file information - javascript

I am trying to get the file information for a file stored on Amazon S3 using the aws-sdk Node module.
What I want to get out is the file name, file type, and size.
I have attempted the following methods without luck:
s3.headObject(params, function (err, data) {
    if (err) {
        console.log(err, err.stack);
    } else {
        d.resolve(data);
    }
});
And
s3.getObject(params, function (err, data) {
    if (err) {
        console.log(err, err.stack);
    } else {
        d.resolve(data);
    }
});
Looking through the documentation I can't seem to find any other method that will give me the information I need.
So my question to you is: how do I get the above information?
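For reference, headObject already returns the metadata you are after, and the file name is simply the object key you pass in. A minimal sketch, assuming params holds your Bucket and Key:
s3.headObject(params, function (err, data) {
    if (err) {
        return console.log(err, err.stack);
    }
    console.log(params.Key);         // file name (the object key)
    console.log(data.ContentType);   // file type, e.g. "text/html"
    console.log(data.ContentLength); // file size in bytes
});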

Here is the code to get the file name, size, and content type of all the objects present in a bucket.
Change the bucket name and load your access keys from config.json accordingly.
Code:
var AWS = require('aws-sdk');
// Load credentials and set region from JSON file
AWS.config.loadFromPath('./config.json');
// Create S3 service object
var s3 = new AWS.S3({ apiVersion: '2006-03-01' });
var bucketName = 'yourBucketName';
var params = {
    Bucket: bucketName
};
listAllKeys();
function listAllKeys() {
    s3.listObjectsV2(params, function (err, data) {
        if (err) {
            console.log(err, err.stack); // an error occurred
        } else {
            var contents = data.Contents;
            contents.forEach(function (content) {
                // Build a fresh params object per key; reusing one shared
                // object would let later iterations overwrite Key before
                // the asynchronous headObject call reads it.
                var headParams = {
                    Bucket: bucketName,
                    Key: content.Key
                };
                s3.headObject(headParams, function (err, headObjectData) {
                    if (err) {
                        console.log(err, err.stack);
                    } else {
                        console.log("1. File name :" + content.Key + ";" + " 2. File size :" + content.Size + ";" + " 3. Content-Type :" + headObjectData.ContentType);
                    }
                });
            });
            if (data.IsTruncated) {
                params.ContinuationToken = data.NextContinuationToken;
                console.log("get further list...");
                listAllKeys();
            }
        }
    });
}
Sample output:
1. File name :index.html; 2. File size :48; 3. Content-Type :text/html
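Note that listObjectsV2 returns at most 1,000 keys per response, which is why the code above recurses with ContinuationToken whenever IsTruncated is set.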

s3.headObject works fine. You can find sample code below:
let primaryBucket = primarys3bucketname;
var headParams = {
    Bucket: primaryBucket,
    Key: "/sample/path/to/filename.pdf"
};
let size = '';
s3.headObject(headParams).promise().then((headObjectData) => {
    size = bytesToSize(headObjectData.ContentLength);
});
function bytesToSize(bytes) {
    var sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
    if (bytes == 0) return '0 Byte';
    var i = parseInt(Math.floor(Math.log(bytes) / Math.log(1024)));
    // Math.round takes a single argument; round to the nearest whole unit
    return Math.round(bytes / Math.pow(1024, i)) + ' ' + sizes[i];
}
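For example, bytesToSize(153600) returns '150 KB'.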


how to exclude current log file while archiving log files inside a directory

I have to archive a directory in which log files are present (file names are in a format like 2017-12-06.log). I have to archive all files except 2017-12-06.log.
I have a logs directory where all log files are present, and a script create-zip.js. When I run the script with node create-zip.js, it creates directories as /archive/<year>/<month_name> and saves logs.zip there.
Everything is fine except that when I extract logs.zip, my archived log files end up inside /archive/<year>/<month_name>/home/hotam/nodejs_archive_example/logs, but I want them directly inside /archive/<year>/<month_name>. I googled a lot but couldn't find a solution. Thanks in advance.
I have the following script (create-zip.js):
'use strict';
var fs = require('fs'),
    path = require('path'),
    archiver = require('archiver'),
    currDate = new Date(),
    year = currDate.getFullYear(),
    month = currDate.toLocaleString("en-us", { month: "long" }),
    dir = path.join(__dirname + '/archive/' + year + '/' + month),
    ignoredFile = currDate.getFullYear() + '-' + ('0' + (currDate.getMonth() + 1)).slice(-2) + '-' + ('0' + currDate.getDate()).slice(-2) + '.log';
// Function to create directories recursively
exports.createDir = function (dir) {
    const splitPath = dir.split('/');
    splitPath.reduce(function (dirPath, subPath) {
        let currentPath;
        if (subPath != '.') {
            currentPath = dirPath + '/' + subPath;
            if (!fs.existsSync(currentPath)) {
                fs.mkdirSync(currentPath);
            }
        } else {
            currentPath = subPath;
        }
        return currentPath;
    }, '');
};
exports.createDir(dir);
var output = fs.createWriteStream(path.join(dir, 'logs.zip'));
var archive = archiver('zip', {});
var logPath = __dirname + '/logs';
output.on('close', function () {
    if (fs.existsSync(logPath)) {
        fs.readdirSync(logPath).forEach(function (file, index) {
            var curPath = logPath + "/" + file;
            // Check the entry itself, not the parent directory
            if (fs.lstatSync(curPath).isFile()) {
                // delete every archived file except the current log
                if (!(file == ignoredFile)) {
                    fs.unlinkSync(curPath);
                }
            }
        });
    }
});
output.on('end', function () {
    console.log('Data has been drained');
});
archive.on('warning', function (err) {
    if (err.code === 'ENOENT') {
        console.log(err);
    } else {
        console.log(err);
        throw err;
    }
});
archive.on('error', function (err) {
    console.error(err); // `logger` was undefined here
    throw err;
});
archive.pipe(output);
// ignoring 2017-12-06.log
archive
    .glob(__dirname + '/logs/**', {
        ignore: [__dirname + '/logs/' + ignoredFile]
    })
    .finalize();
I got the solution for this scenario. I changed archive.glob() to use patterns relative to the working directory, and it worked for me. archiver stores entries under the paths you pass to glob(), so the absolute __dirname pattern was baking the full /home/... directory structure into the zip.
// ignoring 2017-12-06.log
archive
    .glob('./logs/**/*', {
        ignore: ['./logs/**/*' + ignoredFile]
    })
    .finalize();

NodeJS - output JSON array as multiple JSON files to disk

I am trying to use NodeJS to read a JSON array from a JSON file, and then output each JSON object as a separate JSON file on disk.
However, I got the error EMFILE: too many open files.
The array has 20,000 objects.
The code:
function main() {
    var fs = require('fs');
    var clusters_statistics = require("cluster_whole_1.json");
    for (var i = 0; i < clusters_statistics.length; i++) {
        var outputFilename = 'cut_json/' + i + '.json';
        fs.writeFile(outputFilename, JSON.stringify(clusters_statistics[i], null, 4), function (err) {
            if (err) {
                console.log(err);
            }
        });
    }
}
Update:
1. I tried to use the close() function as suggested by Gustavo; unfortunately, it still says "there are too many files open" ("open" this time).
2. Then I tried recursion inside the close(), and it works now.
The code:
var fs = require("fs");
var clusters_statistics;
function main() {
    clusters_statistics = require("cluster_whole_1.json");
    call_close(clusters_statistics.length);
}
function call_close(i) {
    var path = 'cut_json/' + i + '.json';
    fs.open(path, "w+", function (error, fd) {
        if (error) {
            console.error("open error: " + error.message);
        } else {
            fs.writeFile(path, JSON.stringify(clusters_statistics[i], null, 4), function (err) {
                if (err) {
                    console.log(err);
                }
            });
            fs.close(fd, function (error) {
                if (error) {
                    console.log(error);
                } else {
                    if (i <= 0) {
                        return;
                    } else {
                        if (i % 100 == 0) {
                            console.log(i);
                        }
                        call_close(i - 1);
                    }
                }
            });
        }
    });
}
Close the file after you finish writing the JSON into it.
Right now you are keeping the files open and filling up memory; if you don't close them manually, they will only be closed when your program finishes.
var fs = require('fs');
function main() {
    var clusters_statistics = require("cluster_whole_1.json");
    for (var i = 0; i < clusters_statistics.length; i++) {
        (function (i) { // capture i for the asynchronous callbacks
            var outputFilename = 'cut_json/' + i + '.json';
            fs.open(outputFilename, 'w+', function (err, my_file) {
                if (err) return console.log(err);
                var buffer = new Buffer(JSON.stringify(clusters_statistics[i], null, 4));
                fs.write(my_file, buffer, 0, buffer.length, null, function (err, written, buffer) {
                    if (err) {
                        console.log(err);
                    } else {
                        console.log('OK!');
                    }
                    fs.close(my_file, function () {});
                });
            });
        })(i);
    }
}
Opening and closing the file descriptor is not necessary when using fs.writeFile, but you'll probably want to write the files sequentially so you don't open too many files at once.
function write_cluster_statistics(clusters_statistics, callback, index) {
    index = index || 0;
    if (index >= clusters_statistics.length) {
        return callback();
    }
    var fs = require('fs');
    var path = 'cut_json/' + index + '.json';
    var content = JSON.stringify(clusters_statistics[index], null, 4);
    fs.writeFile(path, content, function (err) {
        if (err) {
            callback(err);
        } else {
            write_cluster_statistics(clusters_statistics, callback, index + 1);
        }
    });
}
function main() {
    var clusters_statistics = require("cluster_whole_1.json");
    write_cluster_statistics(clusters_statistics, function (err) {
        if (err) {
            console.error(err);
        } else {
            console.log('done');
        }
    });
}
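Because each fs.writeFile is issued only from the previous write's callback, at most one file is open at any moment, so the EMFILE limit is never hit regardless of how many objects the array holds.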

XMPP File Transfer via JavaScript Strophe.js in Openfire

I am trying to get Strophe.js-based XMPP file transfer to work. I can log in to my Openfire server, and I can send and receive messages fine, but I am having trouble with file transfer.
HTML:
<form name='file_form' class="panel-body">
    <input type="file" id="file" name="file[]" />
    <input type='button' id='btnSendFile' value='sendFile' />
    <output id="list"></output>
</form>
JavaScript file:
// file
var sid = null;
var chunksize;
var data;
var file = null;
var aFileParts, mimeFile, fileName;

function sendFileClick() {
    file = $("#file")[0].files[0];
    sendFile(file);
    readAll(file, function (data) {
        log("handleFileSelect:");
        log(" >data=" + data);
        log(" >data.len=" + data.length);
    });
}

function sendFile(file) {
    var to = $('#to').get(0).value;
    var filename = file.name;
    var filesize = file.size;
    var mime = file.type;
    chunksize = filesize;
    sid = connection._proto.sid;
    log('sendFile: to=' + to);
    // send a stream initiation
    connection.si_filetransfer.send(to, sid, filename, filesize, mime, function (err) {
        fileTransferHandler(file, err);
    });
}

function fileTransferHandler(file, err) {
    log("fileTransferHandler: err=" + err);
    if (err) {
        return console.log(err);
    }
    var to = $('#to').get(0).value;
    chunksize = file.size;
    chunksize = 20 * 1024;
    // successfully initiated the transfer, now open the band
    connection.ibb.open(to, sid, chunksize, function (err) {
        log("ibb.open: err=" + err);
        if (err) {
            return console.log(err);
        }
        readChunks(file, function (data, seq) {
            sendData(to, seq, data);
        });
    });
}

function readAll(file, cb) {
    var reader = new FileReader();
    // If we use onloadend, we need to check the readyState.
    reader.onloadend = function (evt) {
        if (evt.target.readyState == FileReader.DONE) { // DONE == 2
            cb(evt.target.result);
        }
    };
    reader.readAsDataURL(file);
}

function readChunks(file, callback) {
    var fileSize = file.size;
    var chunkSize = 20 * 1024; // bytes
    var offset = 0;
    var block = null;
    var seq = 0;
    var foo = function (evt) {
        if (evt.target.error === null) {
            offset += chunkSize; //evt.target.result.length;
            seq++;
            callback(evt.target.result, seq); // callback for handling read chunk
        } else {
            console.log("Read error: " + evt.target.error);
            return;
        }
        if (offset >= fileSize) {
            console.log("Done reading file");
            return;
        }
        block(offset, chunkSize, file);
    };
    block = function (_offset, length, _file) {
        log("_block: length=" + length + ", _offset=" + _offset);
        var r = new FileReader();
        var blob = _file.slice(_offset, length + _offset);
        r.onload = foo;
        r.readAsDataURL(blob);
    };
    block(offset, chunkSize, file);
}

function sendData(to, seq, data) {
    // stream is open, start sending chunks of data
    connection.ibb.data(to, sid, seq, data, function (err) {
        log("ibb.data: err=" + err);
        if (err) {
            return console.log(err);
        }
        // ... repeat calling data
        // keep sending until you've reached the end of the file
        connection.ibb.close(to, sid, function (err) {
            log("ibb.close: err=" + err);
            if (err) {
                return console.log(err);
            }
            // done
        });
    });
}

$('#btnSendFile').bind('click', function () {
    console.log('File clicked:');
    sendFileClick();
});
The full code is based on:
Complete example of Strophe.js file transfer
http://plnkr.co/edit/fYpXo1mFRWPxrLlgr123 (the source can be downloaded there; it has errors). I changed the sendFileClick function.
I am getting:
ibb.open: err=Error: feature-not-implemented. Why am I getting this error?

Uploading video without 777 does not work

I am locally testing my Node video upload. My upload class looks like this:
var videoExtensions = ['mp4', 'webm', 'mov'];
var audioExtensions = [];

//Media object
function Media(file, targetDirectory) {
    this.file = file;
    this.targetDir = targetDirectory;
}

Media.prototype.isVideo = function () {
    return this.file.mimetype.indexOf('video') >= 0;
};

Media.prototype.isAudio = function () {
    return this.file.mimetype.indexOf('audio') >= 0;
};

Media.prototype.getName = function () {
    return this.file.originalname.substr(0, this.file.originalname.indexOf('.'));
};

router.route('/moduleUpload')
    .post(function (request, response) {
        var media = new Media(request.files.file, '../user_resources/module/' + request.body.module_id + '/');
        if (!fs.existsSync(media.targetDir)) {
            // fs.mkdirSync is synchronous and takes no callback; it throws on failure
            try {
                fs.mkdirSync(media.targetDir, 0777);
            } catch (err) {
                console.log(err);
                response.send("ERROR! Can't make the directory! \n"); // echo the result back
            }
        }
        if (media.isVideo()) {
            convertVideos(media);
        } else if (media.isAudio()) {
            convertAudio(media);
        } else {
            moveFile(media);
        }
        response.status(200).json('user_resources/module/' + request.body.module_id + '/' + media.getName());
    });

router.route('/retrieveFile')
    .post(function (request, response) {
        var path = '../' + request.body.data;
        var file = fs.createReadStream(path);
        file.pipe(response);
    });

function convertVideos(media) {
    var ffmpeg = require('fluent-ffmpeg');
    videoExtensions.forEach(function (extension) {
        var proc = new ffmpeg({ source: media.file.path, nolog: false })
            .withVideoCodec('libx264')
            .withVideoBitrate(800)
            .withAudioCodec('libvo_aacenc')
            .withAudioBitrate('128k')
            .withAudioChannels(2)
            .toFormat(extension)
            .saveToFile(media.targetDir + media.getName() + '.' + extension,
                function (retcode, error) {
                    console.log('file has been converted successfully');
                });
    });
}

function convertAudio(media) {
    var ffmpeg = require('fluent-ffmpeg');
    audioExtensions.forEach(function (extension) {
        var proc = new ffmpeg({ source: media.file.path, nolog: false })
            .withVideoCodec('libx264')
            .withVideoBitrate(800)
            .withAudioCodec('libvo_aacenc')
            .withAudioBitrate('128k')
            .withAudioChannels(2)
            .toFormat(extension)
            .saveToFile(media.targetDir + media.getName() + '.' + extension,
                function (retcode, error) {
                    console.log('file has been converted successfully');
                });
    });
}
When a video file is uploaded, it is converted into 3 different files.
Now, the file I wish to upload is in my /Video folder; at first this did not have any permissions, which resulted in an upload that could not play. However, as soon as I changed the permissions of the file to 777, the video plays without a problem.
My question is: why? Am I missing something in my upload, and is chmod 777 wise?
Also note I'm using Ubuntu 14.04.
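For comparison, a hedged sketch of a less permissive alternative (the path below is hypothetical; substitute the actual uploaded file): mode 0644 lets the owner read and write while everyone else, including the web server user if it differs, can only read, which is all that is needed to serve the video.
var fs = require('fs');
// Hypothetical path - substitute the real uploaded file
var uploadedFile = '../user_resources/module/1/video.mp4';
// 0644 = owner read/write, group/others read-only; enough for the
// server process to stream the file, unlike 777, which also grants
// write and execute to every user on the machine
fs.chmod(uploadedFile, 0644, function (err) {
    if (err) {
        console.log(err);
    }
});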

node.js async function in loop?

I am having some problems with node.js. What I'm trying to do is get an array of the directories in "./" + req.user.email and loop through them, finding out their size and adding a table row to the output, as you can see in the code. At the end I want to send all the table rows using res.send().
However, the only output I am getting is:
<tr></tr>
for each file in the array. It seems that the forEach loop is not waiting for readSizeRecursive at all. The readSizeRecursive function is asynchronous, and I believe that is what's causing the problem, but I don't know how I can fix this.
Any help would be greatly appreciated; I have included the readSizeRecursive function too. Thank you!
var output = "";
fs.readdir("./" + req.user.email, function (err, files) {
files.forEach(function(file){
output += "<tr>";
readSizeRecursive("./"+req.user.email+"/"+file, function (err, total){
output += '<td>' + file + '</td><td>' + total + '</td>';
});
output += "</tr>"
});
res.send(output)
});
readSizeRecursive():
// Function to find the size of a directory
function readSizeRecursive(item, cb) {
    fs.lstat(item, function (err, stats) {
        var total = stats.size;
        if (!err && stats.isDirectory()) {
            fs.readdir(item, function (err, list) {
                async.forEach(
                    list,
                    function (diritem, callback) {
                        readSizeRecursive(path.join(item, diritem), function (err, size) {
                            total += size;
                            callback(err);
                        });
                    },
                    function (err) {
                        cb(err, total);
                    }
                );
            });
        } else {
            cb(err, total);
        }
    });
}
Please use the async module for this kind of pattern. Using async.each will let you compute the size of each folder asynchronously, and then report the sizes once everything has been computed individually.
var output = [];
fs.readdir('./' + req.user.email, function (err, files) {
    // each entry in files is passed to compute; report runs once all are done
    async.each(files, compute, report);
});

function compute(file, done) {
    // calculate size, then callback to signal completion
    // produce a result like below, then invoke done()
    var obj = {
        files: [
            { name: file, size: size },
            { name: file, size: size },
            { name: file, size: size }
        ]
    };
    output.push(obj);
    done();
}

// doesn't need to be this awful
function format(list) {
    var result = [];
    list.forEach(function (item) {
        var description = item.files.map(function (file) {
            return util.format('<td>%s</td><td>%s</td>', file.name, file.size);
        });
        result.push(description);
    });
    result.unshift('<tr>');
    result.push('</tr>');
    return result.join('</tr><tr>');
}

function report(err) {
    if (err) { return next(err); }
    var result = format(output);
    res.send(result);
}
This way you can easily swap out the different pieces of functionality, changing the formatting without altering the computing of the file size tree, for example.
Your main issue was control flow: you return with res.send while you are still asynchronously looping and figuring out the sizes.
var fs = require ("fs");
var createTableContent = function (p, cb){
var read = function (p, cb){
//Prevent recursion if error
if (err) return cb ();
fs.stat (p, function (error, stats){
if (error){
err = error;
return cb ();
}
if (stats.isDirectory ()){
var dirSize = 0;
fs.readdir (p, function (error, entries){
if (error){
err = error;
return cb ();
}
var pending = entries.length;
//Empty dir
if (!pending) return cb (0);
entries.forEach (function (entry){
read (p + "/" + entry, function (entrySize){
dirSize += entrySize;
if (!--pending) return cb (dirSize);
});
});
});
}else{
cb (stats.size);
}
});
};
//A lot of errors can be produced, return only the first one
var err = null;
//Suppose p is a dir
fs.readdir (p, function (error, entries){
if (error) return cb (error);
var content = "";
var pending = entries.length;
if (!pending) return cb (null, content);
entries.forEach (function (entry){
read (p + "/" + entry, function (totalSize){
if (err) return cb (err);
content += "<tr><td>" + entry + "</td><td>" + totalSize + "</td></tr>";
if (!--pending){
//End
cb (null, content);
}
});
});
});
};
//Here goes the "email" path
createTableContent (".", function (error, content){
if (error) return console.error (error);
console.log (content);
});
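Since createTableContent only invokes its callback once every entry has been sized, the res.send(content) call from the original problem belongs inside that callback rather than after the loop.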
