I want to create an object with fileName and fileStat, so in the code below I am checking stats with async, and for the filename I used readdir. Once I have the values, how can I create an array of objects? I am trying to get each filename and its created date and send them to the client as objToReturn. Any idea how I can achieve that?
app.js
function readDirectory(callback) {
    var dirPath = './logs/ditLogs';
    // this gets the list of all files in the directory
    var files = fs.readdirSync(dirPath);
    var objToReturn = [{
        fileName: '',
        fileStat: ''
    }];
    // then using async do like this
    async.eachSeries(files, function(file, callback) {
        var filePath = path.join(dirPath, file);
        fs.stat(filePath, function(err, stats) {
            objToReturn.fileStat = stats;
            // write stats data into objToReturn
            fs.readdir(path, function(err, items) {
                objToReturn.filename = items;
            });
            callback();
        });
    }, function(err) {
        // final callback when all files are completed; send objToReturn to the client
        callback(objToReturn);
    });
}
Okay, I think I see what you're trying to do here. You want to read all of the file paths in the given directory and then, for each file, collect information before returning it to a given callback. Right now you are setting these props on the array objToReturn itself instead of on a per-file object that you push into objToReturn, so each async read overwrites the previous values.
You also call fs.readdir(path, function(err, items) { ... }) where I think you mean the filePath you declared earlier — but fs.readdir only works on directories, and file already holds the name you want, so the nested call can be dropped entirely.
Try something like:
async.eachSeries(files, function(file, callback) {
    var filePath = path.join(dirPath, file);
    var fileInfo = {};
    fs.stat(filePath, function(err, stats) {
        // write this file's data into its own object, then collect it
        fileInfo.fileName = file;
        fileInfo.fileStat = stats;
        objToReturn.push(fileInfo);
        callback();
    });
}, function(err) {
    // final callback when all files are completed; send objToReturn to the client
    callback(objToReturn);
});
You can try this:
function readDirectory(callback) {
    var dirPath = './logs/ditLogs';
    // this gets the list of all files in the directory
    var files = fs.readdirSync(dirPath);
    var objToReturn = [];
    // then using async do like this
    async.eachSeries(files, function(file, callback) {
        var filePath = path.join(dirPath, file);
        fs.stat(filePath, function(err, stats) {
            // push one entry per file into objToReturn
            objToReturn.push({
                fileName: file,
                fileStat: stats
            });
            callback();
        });
    }, function(err) {
        // final callback when all files are completed; send objToReturn to the client
        callback(objToReturn);
    });
}
Hope this works.
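For completeness, here is a rough sketch of how readDirectory might be wired up to actually send the result — the Express-style route and res object are assumptions, not part of the original code. Note that the creation date the question asks for is available as fileStat.birthtime on each entry.
// hypothetical Express route; 'res' is an assumed Express response object
app.get('/logs', function (req, res) {
    readDirectory(function (objToReturn) {
        res.json(objToReturn); // each entry looks like { fileName, fileStat }
    });
});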
I wrote this code that checks image file sizes in a folder; if a file is bigger than 30000 bytes, it moves it to a temporary folder called 'before-compress'. The compressImages() function iterates over the 'before-compress' folder and returns the compressed images to the original folder. My question is: how can I await the process of moving the oversized files and only then call compressImages()? As you can see in the code, I currently handle this with a setTimeout once the forEach reaches the last item. Thanks in advance.
const fs = require('fs');
const path = require('path');
const imagemin = require("imagemin");
const imageminMozjpeg = require("imagemin-mozjpeg");
const imageminPngquant = require("imagemin-pngquant");
const imageminGifsicle = require('imagemin-gifsicle');

const directoryPath = path.join(__dirname, 'uploads');

fs.readdir(`${directoryPath}/products`, function (err, files) {
    if (err) {
        return console.log('Unable to scan directory: ' + err);
    }
    files.forEach(function (file) {
        console.log(`File: ${file} - Size: ${getFilesizeInBytes(file)} bytes`);
        if (getFilesizeInBytes(file) > 30000) {
            moveFile(file);
        }
        if (files.indexOf(file) == files.length - 1) {
            //console.log('last index');
            setTimeout(() => compressImages(), 4000);
        }
    });
});

function getFilesizeInBytes(fileName) {
    var stats = fs.statSync(`${directoryPath}/products/${fileName}`);
    var fileSizeInBytes = stats.size;
    return fileSizeInBytes;
}

function moveFile(file) {
    var oldPath = `${directoryPath}/products/${file}`;
    var newPath = `${directoryPath}/before-compress/${file}`;
    fs.rename(oldPath, newPath, function (err) {
        if (err) throw err;
        console.log(`File ${file} moved!`);
    });
}

function compressImages() {
    fs.readdir(`${directoryPath}/before-compress`, function (err, files) {
        if (err) {
            return console.log('Unable to scan directory: ' + err);
        }
        files.forEach(function (file) {
            console.log(`File to compress: ${file}`);
            let fileExt = file.split('.')[1];
            let compressPlugin = fileExt == 'jpg' || fileExt == 'jpeg' ? imageminMozjpeg({quality: 40}) :
                fileExt == 'png' ? imageminPngquant({quality: [0.5, 0.6]}) :
                fileExt == 'gif' ? imageminGifsicle() : 0;
            (async () => {
                const files = await imagemin([`./uploads/before-compress/${file}`], {
                    destination: './uploads/products',
                    plugins: [compressPlugin]
                });
                fs.unlink(`${directoryPath}/before-compress/${file}`, err => err ? console.log(err) : 0);
            })();
        });
    });
}
This kind of code would become much more readable if you converted all the functions from callbacks to promises with async/await.
If you want to keep using callbacks, however, there are two options:
Make moveFile() use fs.renameSync() instead of fs.rename(). Normally I would advise against that, but since you are already using fs.statSync(), and I suppose you run this as a script with nothing in parallel, maybe that would be an acceptable solution.
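For the first option, a minimal sketch that keeps the same paths as the question:
function moveFile(file) {
    var oldPath = `${directoryPath}/products/${file}`;
    var newPath = `${directoryPath}/before-compress/${file}`;
    fs.renameSync(oldPath, newPath); // blocks until the move completes; throws on error
    console.log(`File ${file} moved!`);
}
Because every rename now finishes before forEach moves on, compressImages() can be called directly after the loop, without the setTimeout.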
Or make moveFile() accept a callback:
function moveFile(file, callback) {
    // [...]
    fs.rename(oldPath, newPath, callback);
}
Now you can use this callback to detect when the file has been moved, for example like this:
// [...]
var done = 0;
var error = false;
function checkDone() {
    // only compress once every file has been handled
    if (!error && done == files.length) {
        compressImages();
    }
}
files.forEach(function (file) {
    if (error) return;
    if (getFilesizeInBytes(file) > 30000) {
        moveFile(file, function (err) {
            if (err) { console.log(err); error = true; return; }
            done++;
            checkDone();
        });
    } else {
        done++;
        checkDone();
    }
});
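For reference, a rough sketch of the async/await route suggested above — it assumes Node's built-in fs.promises API and reuses getFilesizeInBytes() and compressImages() from the question:
const fsp = require('fs').promises;

async function moveOversizedThenCompress(files) {
    for (const file of files) {
        if (getFilesizeInBytes(file) > 30000) {
            // awaiting each rename guarantees all moves finish before compressing
            await fsp.rename(`${directoryPath}/products/${file}`,
                             `${directoryPath}/before-compress/${file}`);
            console.log(`File ${file} moved!`);
        }
    }
    compressImages(); // nothing is still being moved at this point
}

// e.g. inside the fs.readdir callback: moveOversizedThenCompress(files).catch(console.log);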
I want to know how to hash the download stream of a file using Node.js, because I want to hash the file before I store it into MongoDB in order to avoid duplicates. I am using mongo GridFS, by the way: https://github.com/aheckmann/gridfs-stream
Downloading the file:
var download = function (url, dest, callback) {
    request.get(url)
        .on('error', function (err) { console.log(err); })
        .pipe(fs.createWriteStream(dest))
        .on('close', callback);
};

final_list.forEach(function (str) {
    var filename = str.split('/').pop();
    console.log('Downloading ' + filename);
    download(str, filename, function () { console.log('Finished downloading ' + filename); });
});
function getHash(dest, filename) {
    let crypto = require('crypto');
    let hash = crypto.createHash('sha256').setEncoding('hex');
    let fileHash = "";
    let filePath = `${dest}/${filename}`;
    fs.createReadStream(filePath)
        .pipe(hash)
        .on('finish', function () {
            fileHash = hash.read();
            console.log(`Filehash calculated for ${filename} is ${fileHash}.`);
            // insert into mongo db here
        });
}
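If you want to hash the stream while it downloads, rather than re-reading the file from disk afterwards, one sketch looks like this — it reuses the request module from the download code above, and the (err, hash) callback shape is my assumption:
var crypto = require('crypto');

var downloadWithHash = function (url, dest, callback) {
    var hash = crypto.createHash('sha256');
    var res = request.get(url).on('error', function (err) { callback(err); });
    // every chunk written to disk also feeds the hash
    res.on('data', function (chunk) { hash.update(chunk); });
    res.pipe(fs.createWriteStream(dest))
        .on('close', function () {
            // compare this digest against stored hashes before inserting into GridFS
            callback(null, hash.digest('hex'));
        });
};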
I'm trying to upload images to an S3 bucket as part of the application.
index.js
function upImg(req) {
    if (req.files.img) {
        var img = req.files.image;
        var name = Math.round(Math.random() * 10000).toString(); // Returns a random 5 digit number
        if (myDB.uploadImg(img, name)) {
            return name;
        } else {
            return "";
        }
    } else {
        return "";
    }
}

app.post('/newEV*', isLoggedIn, function (req, res) {
    var myURL = req.path.replace('/newEV', '');
    var imgPath = upImg(req);
    fetch(myURL).then(function (events) {
        var myID;
        var x = 0;
        while (!myID) {
            if (!events[x]) {
                myID = x;
            } else {
                x++;
            }
        }
        myDB.newEvent(myURL, req.body.name, req.body.desc, req.body.loc, imgPath, req.body.link, req.body.cap, req.body.date, req.body.time, myID, events);
        res.redirect('/edit' + myURL);
    });
});
myDB file
function signs3(file, name) {
    devs3();
    const s3 = new aws.S3();
    const s3Params = {
        Body: file,
        Bucket: S3_BUCKET,
        Key: name
    };
    s3.putObject(s3Params, function (err, data) {
        if (err) {
            throw err;
        } else {
            console.log("Data from putObject: " + JSON.stringify(data));
        }
    });
}

module.exports = {
    uploadImg: function (file, name) {
        var nName = "imgs/" + name;
        console.log(nName);
        signs3(file, nName);
        return true;
    }
};
I know that the signs3 function works because I can use it in other parts of my application to upload JSON files. Whenever I post to the URL, oddly enough, I can see the 'data from putObject' in the console, but I can't see the nName. I don't understand this, as the console.log(nName) line should run before the other one. When I look at the bucket, the image hasn't been uploaded (despite my getting an ETag in the console), and the page does not display it (I know the display logic also works, because it can show images already uploaded to the bucket).
You want to do something like this, subscribing to events on the AWS.Request object created when you call putObject:
const req = s3.putObject(s3Params);
req.on('success', res => {
    console.log('upload complete!');
});
req.on('error', res => {
    console.error(res.error);
});
req.send();
Why does this appear to work differently for small files (JSON files) and large files (images)? Because the large files take longer to upload.
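Applied to the uploadImg wrapper from the question, that might look roughly like this — the added callback parameter is an assumption, since the synchronous return true can never report the real outcome of the upload:
module.exports = {
    uploadImg: function (file, name, callback) {
        var nName = "imgs/" + name;
        console.log(nName);
        devs3();
        const s3 = new aws.S3();
        const req = s3.putObject({ Body: file, Bucket: S3_BUCKET, Key: nName });
        req.on('success', () => callback(null, nName)); // the object is really in the bucket now
        req.on('error', res => callback(res.error));
        req.send();
    }
};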
I'm setting up a program that will check two different folders and copy all the files from them into a third one. The problem for me here is how to copy them without hard-coding their names.
var fs = require("fs");
fs.renameSync("/home/oem/git/test/folder1/test1.js", "/home/oem/git/test/folder1/test1CHANGED.js")
console.log("file renamed");
fs.rename("/home/oem/git/test/folder1/test2", "/home/oem/git/test/folder2", function(err){
if(err)
{
console.log(err);
}
else
{
console.log("file moved successfully");
}
});
With the code above I can move files whose names I write manually; I want it to automatically scan the folders and move everything it finds to the other one.
Here is a function that could help you do this:
const fs = require('fs');
const path = require('path');

function copyFiles() {
    const firstFolder = 'firstFolder';
    const secondFolder = 'secondFolder';
    const destinationFolder = 'destinationFolder';
    const firstDir = path.join(__dirname, firstFolder);
    const secondDir = path.join(__dirname, secondFolder);
    const destDir = path.join(__dirname, destinationFolder);

    fs.readdir(firstDir, (err, files) => {
        if (err) {
            throw err;
        }
        for (let i = 0; i < files.length; i += 1) {
            fs.copyFile(firstDir + '/' + files[i], destDir + '/' + files[i], function (err) {
                if (err)
                    throw err;
            });
        }
    });

    fs.readdir(secondDir, (err, files) => {
        if (err) {
            throw err;
        }
        for (let i = 0; i < files.length; i += 1) {
            fs.copyFile(secondDir + '/' + files[i], destDir + '/' + files[i], function (err) {
                if (err)
                    throw err;
            });
        }
    });
}

copyFiles();
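One caveat, assuming the folders are laid out as above: fs.copyFile fails if destinationFolder does not exist yet, so you may want to create it before copying:
const destDir = path.join(__dirname, 'destinationFolder');
if (!fs.existsSync(destDir)) {
    fs.mkdirSync(destDir); // create the target folder before copying into it
}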
You should take a look at the Node docs, where this is covered in more detail.
If I may assume there are two folders, folder1 and folder2, where folder1 contains a file word.txt and folder2 is empty:
In the script file (assuming it also lives alongside the two folders) you can write:
const fs = require('fs');

// destination.txt will be created or overwritten by default.
// you can do the renaming here
fs.copyFile('./folder1/word.txt', './folder2/destination.txt', (err) => {
    if (err) throw err;
    console.log('word.txt was copied to destination.txt');
});
The fastest way to copy files:
const fs = require('fs');

function copies(fs, files, destination) {
    for (let i = 0; i < files.length; i++) {
        // the destination file will be created or overwritten by default
        fs.copyFile(files[i], destination + '/' + files[i].replace(/^.*[\\\/]/, ''), (err) => {
            if (err) throw err;
            console.log(files[i] + ' was copied to ' + destination);
        });
    }
}

var files = ['/path/to/source/files.txt', '/sources/files/files2.txt'];
var destination = '/file/would/copy/to';
copies(fs, files, destination);
I'm creating a job search portal using Node.js, MongoDB, and the EJS view engine:
collection "filters" = 'username', 'tags'
collection "alljobs" = 'category', 'jobsdata'
In the following code I'm fetching 'tags' from the "filters" collection and comparing them with all 'category' values in the "alljobs" collection. For every 'tags' entry that matches a 'category', the respective 'jobsdata' should be rendered with the EJS view engine.
Problem: the code works, but in the browser I can't see the jobs for all matched categories — only one category's jobs data is shown. I am not able to understand where the problem is.
Code:
function processRequest(req, res) {
    var tags, data, jobsdata = [];
    var len;
    if (condition) {
        // ....
    } else {
        var db = new Db('askhere', new Server('localhost', '27017'));
        db.open(function (err, db) {
            db.authenticate('', '', function (err, result) {
                var url = 'mongodb://localhost:27017/askhere';
                client.connect(url, function (err, db) {
                    var col = db.collection('filters');
                    // find tags of session user from collection filters
                    col.find({username: req.session.userName}).toArray(function (err, items) {
                        console.log('items: ' + JSON.stringify(items));
                        items.forEach(function (doc) {
                            tags = doc.tags; // save tags
                        });
                        // find all categories' jobs matched with tags data in collection alljobs
                        var col = db.collection('alljobs');
                        for (var i = 0; i < tags.length; i++) {
                            col.find({category: tags[i]}).toArray(function (err, items1) {
                                if (items1 == false) {
                                    res.render('mainqa', {uname: req.session.userName, tags: 'No Tags Selected', jobsdata: 'No Tags Matched !!!', len: 0});
                                }
                                items1.forEach(function (doc1) {
                                    jobsdata = doc1.jobsdata;
                                    var html = ["url : ", "Posted Date : ", "Job Title : ", "Company : ", "Location : "];
                                    // nested loops convert the 2D array into a 1D array
                                    for (var i = 0; i < 25; i++) {
                                        for (var j = 0; j < 5; j++) {
                                            data.push(html[j] + jobsdata[i][j]);
                                        }
                                    }
                                    res.render('mainqa', {uname: req.session.userName, tags: tags, jobsdata: data, len: len});
                                }); // forEach
                            }); // col.find collection=alljobs
                        } // for loop
                    }); // col.find collection=filters
                }); // client.connect
            }); // db.authenticate
        }); // db.open
    } // else end
} // function processRequest end
You should stop nesting callbacks across multiple methods unless you want to end up in callback hell. Use the async module to simplify this task; in particular you'd need async.waterfall(), since you want to run multiple tasks that each depend on the previous one.
The async.waterfall() method allows you to pass values between the functions in a trickle-down manner. It is quite handy when you need the results of a previous function to perform an operation with the next function in the series.
Consider restructuring following this example:
var async = require("async"),
userName = req.session.userName,
locals = {},
url = 'mongodb://localhost:27017/askhere',
db = new Db('askhere', new Server('localhost', '27017'));
async.waterfall(
[
// Open db
function(callback) {
db.open(function (err, db) {
if (err) return callback(err);
callback(null, db);
});
},
// Authenticate and connect
function(db, callback) {
db.authenticate('', '', function (err, result) {
if (err) return callback(err);
client.connect(url, function (err, res) {
if (err) return callback(err);
locals.db = db;
callback(null, locals);
});
});
},
// Query filters collection for tags of session user
function(arg, callback) {
var collection = arg.db.collection("filters");
collection.find({"username": userName}).toArray(function (err, result) {
if (err) return callback(err);
locals.tags = result.map(function(item){ return item.tags; });
callback(null, locals);
});
},
// Query alljobs collection for categories jobs matched with tags data
function(arg, callback) {
var collection = arg.db.collection("alljobs");
collection.find({"category": {"$in": arg.tags}}).toArray(function (err, result) {
if (err) return callback(err);
locals.jobsdata = result.map(function (doc){ return doc.jobsdata; });
callback(null, locals);
});
}
], function(err, result) {
if (err) { /* handle err */ };
res.render("mainqa", {
"uname": userName,
"tags": result.tags,
"jobsdata": result.jobsdata
});
});
);