The project that I am working on (Node.js) involves a lot of file system operations (copying, reading, writing, etc.).
Which methods are the fastest?
Use the standard built-in way fs.copyFile:
const fs = require('fs');

// File destination.txt will be created or overwritten by default.
fs.copyFile('source.txt', 'destination.txt', (err) => {
  if (err) throw err;
  console.log('source.txt was copied to destination.txt');
});
If you have to support old, end-of-life versions of Node.js, here is how to do it in versions that do not support fs.copyFile:
const fs = require('fs');
fs.createReadStream('test.log').pipe(fs.createWriteStream('newLog.log'));
Same mechanism, but this adds error handling:
function copyFile(source, target, cb) {
  var cbCalled = false;

  var rd = fs.createReadStream(source);
  rd.on("error", function(err) {
    done(err);
  });

  var wr = fs.createWriteStream(target);
  wr.on("error", function(err) {
    done(err);
  });
  wr.on("close", function(ex) {
    done();
  });

  rd.pipe(wr);

  function done(err) {
    if (!cbCalled) {
      cb(err);
      cbCalled = true;
    }
  }
}
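On Node 10 and later, the built-in stream.pipeline utility handles this cleanup (error forwarding and stream teardown) for you; a minimal sketch, using the same file names as above:

const fs = require('fs');
const { pipeline } = require('stream');

// pipeline wires the streams together, forwards the first error to the callback,
// and destroys both streams if anything fails.
pipeline(
  fs.createReadStream('source.txt'),
  fs.createWriteStream('destination.txt'),
  (err) => {
    if (err) {
      console.error('Copy failed:', err);
    } else {
      console.log('Copy succeeded');
    }
  }
);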
Since Node.js 8.5.0 we have the new fs.copyFile and fs.copyFileSync methods.
Usage example:
var fs = require('fs');

// File "destination.txt" will be created or overwritten by default.
fs.copyFile('source.txt', 'destination.txt', (err) => {
  if (err)
    throw err;
  console.log('source.txt was copied to destination.txt');
});
I was not able to get the createReadStream/createWriteStream method working for some reason, but using the fs-extra npm module it worked right away. I am not sure of the performance difference though.
npm install --save fs-extra
var fs = require('fs-extra');
var path = require('path');

fs.copySync(path.resolve(__dirname, './init/xxx.json'), 'xxx.json');
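If you prefer promises, fs-extra's asynchronous copy returns one when no callback is passed; a small sketch under that assumption:

var fse = require('fs-extra');
var path = require('path');

// copy() returns a promise when called without a callback.
fse.copy(path.resolve(__dirname, './init/xxx.json'), 'xxx.json')
  .then(() => console.log('copied'))
  .catch((err) => console.error(err));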
Fast to write and convenient to use, with promise and error management:
function copyFile(source, target) {
  var rd = fs.createReadStream(source);
  var wr = fs.createWriteStream(target);
  return new Promise(function(resolve, reject) {
    rd.on('error', reject);
    wr.on('error', reject);
    wr.on('finish', resolve);
    rd.pipe(wr);
  }).catch(function(error) {
    rd.destroy();
    wr.end();
    throw error;
  });
}
The same with async/await syntax:
async function copyFile(source, target) {
  var rd = fs.createReadStream(source);
  var wr = fs.createWriteStream(target);
  try {
    return await new Promise(function(resolve, reject) {
      rd.on('error', reject);
      wr.on('error', reject);
      wr.on('finish', resolve);
      rd.pipe(wr);
    });
  } catch (error) {
    rd.destroy();
    wr.end();
    throw error;
  }
}
Well, usually it is good to avoid asynchronous file operations. Here is a short synchronous example (i.e. with no error handling):
var fs = require('fs');
fs.writeFileSync(targetFile, fs.readFileSync(sourceFile));
If you don't care about it being async, and aren't copying gigabyte-sized files, and would rather not add another dependency just for a single function:
function copySync(src, dest) {
  var data = fs.readFileSync(src);
  fs.writeFileSync(dest, data);
}
Mike Schilling's solution with error handling, plus a shortcut for the error event handler.
function copyFile(source, target, cb) {
  var cbCalled = false;

  var rd = fs.createReadStream(source);
  rd.on("error", done);

  var wr = fs.createWriteStream(target);
  wr.on("error", done);
  wr.on("close", function(ex) {
    done();
  });

  rd.pipe(wr);

  function done(err) {
    if (!cbCalled) {
      cb(err);
      cbCalled = true;
    }
  }
}
You may want to use async/await; since Node v10.0.0 it's possible with the built-in fs Promises API.
Example:
const fs = require('fs')

const copyFile = async (src, dest) => {
  await fs.promises.copyFile(src, dest)
}
Note:
As of Node v11.14.0 and v10.17.0, the API is no longer experimental.
More information:
Promises API
Promises copyFile
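For reference, on newer Node versions (14+) the promise-based API can also be required directly from 'fs/promises'; a minimal sketch:

const { copyFile } = require('fs/promises');

async function copy() {
  // Rejects if the copy fails; destination.txt is created or overwritten by default.
  await copyFile('source.txt', 'destination.txt');
  console.log('source.txt was copied to destination.txt');
}

copy().catch(console.error);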
const fs = require("fs");
fs.copyFileSync("filepath1", "filepath2"); //fs.copyFileSync("file1.txt", "file2.txt");
This is what I personally use to copy a file and replace another file using Node.js :)
For fast copies you should use the fs.constants.COPYFILE_FICLONE flag. On filesystems that support it, the content of the file is not actually copied; only a new file entry is created, pointing to a copy-on-write "clone" of the source file.
To do nothing/less is the fastest way of doing something ;)
https://nodejs.org/api/fs.html#fs_fs_copyfile_src_dest_flags_callback
let fs = require("fs");

fs.copyFile(
  "source.txt",
  "destination.txt",
  fs.constants.COPYFILE_FICLONE,
  (err) => {
    if (err) {
      // TODO: handle error
      console.log("error");
      return; // without this return, "success" would be logged even after an error
    }
    console.log("success");
  }
);
Using promises instead:
let fs = require("fs");
let util = require("util");
let copyFile = util.promisify(fs.copyFile);

copyFile(
  "source.txt",
  "destination.txt",
  fs.constants.COPYFILE_FICLONE
)
  .then(() => console.log("success"))
  .catch(() => console.log("error"));
Use Node.js's built-in copy function
It provides both async and sync versions:
const fs = require('fs');

// File "destination.txt" will be created or overwritten by default.
fs.copyFile('source.txt', 'destination.txt', (err) => {
  if (err)
    throw err;
  console.log('source.txt was copied to destination.txt');
});
fs.copyFileSync(src, dest[, mode])
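A short synchronous sketch using the optional mode argument, here fs.constants.COPYFILE_EXCL so the call fails if the destination already exists:

const fs = require('fs');

try {
  // Throws if destination.txt already exists, because of COPYFILE_EXCL.
  fs.copyFileSync('source.txt', 'destination.txt', fs.constants.COPYFILE_EXCL);
  console.log('source.txt was copied to destination.txt');
} catch (err) {
  console.error(err);
}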
You can do it using the fs-extra module very easily:
const fse = require('fs-extra');

let srcDir = 'path/to/file';
let destDir = 'path/to/destination/directory';

// To move a file permanently from a directory
// (moveSync is synchronous, so errors are caught with try/catch rather than a callback)
try {
  fse.moveSync(srcDir, destDir);
  console.log("success!");
} catch (err) {
  console.error(err);
}
Or
// To copy a file from a directory (copySync is synchronous as well)
try {
  fse.copySync(srcDir, destDir);
  console.log("success!");
} catch (err) {
  console.error(err);
}
I wrote a little utility to test the different methods:
https://www.npmjs.com/package/copy-speed-test
run it with
npx copy-speed-test --source someFile.zip --destination someNonExistentFolder
It does a native copy using child_process.exec(), a copy using fs.copyFile, and it uses createReadStream with a variety of buffer sizes (you can change the buffer sizes by passing them on the command line; run npx copy-speed-test -h for more info).
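For context, varying the buffer size of a stream copy comes down to the highWaterMark option of createReadStream; a rough sketch (file names are placeholders):

const fs = require('fs');

// Copy with a 1 MiB read buffer instead of the default 64 KiB.
fs.createReadStream('someFile.zip', { highWaterMark: 1024 * 1024 })
  .pipe(fs.createWriteStream('copyOfFile.zip'))
  .on('close', () => console.log('done'));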
Mike's solution, but with promises:
const FileSystem = require('fs');

exports.copyFile = function copyFile(source, target) {
  return new Promise((resolve, reject) => {
    const rd = FileSystem.createReadStream(source);
    rd.on('error', err => reject(err));
    const wr = FileSystem.createWriteStream(target);
    wr.on('error', err => reject(err));
    wr.on('close', () => resolve());
    rd.pipe(wr);
  });
};
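Usage might look like this, assuming the module above is saved as copyFile.js:

// './copyFile' is the (hypothetical) path to the module exported above.
const { copyFile } = require('./copyFile');

copyFile('source.txt', 'destination.txt')
  .then(() => console.log('copied'))
  .catch((err) => console.error(err));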
An improvement on another answer.
Features:
If the destination folders do not exist, it will automatically create them. The other answer only throws errors.
It returns a promise, which makes it easier to use in a larger project.
It allows you to copy multiple files, and the promise will be done when all of them are copied.
Usage:
var onePromise = copyFilePromise("src.txt", "dst.txt");
var anotherPromise = copyMultiFilePromise(new Array(new Array("src1.txt", "dst1.txt"), new Array("src2.txt", "dst2.txt")));
Code:
const fs = require('fs');
const path = require('path');

function copyFile(source, target, cb) {
  console.log("CopyFile", source, target);

  var ensureDirectoryExistence = function (filePath) {
    var dirname = path.dirname(filePath);
    if (fs.existsSync(dirname)) {
      return true;
    }
    ensureDirectoryExistence(dirname);
    fs.mkdirSync(dirname);
  }
  ensureDirectoryExistence(target);

  var cbCalled = false;
  var rd = fs.createReadStream(source);
  rd.on("error", function (err) {
    done(err);
  });
  var wr = fs.createWriteStream(target);
  wr.on("error", function (err) {
    done(err);
  });
  wr.on("close", function (ex) {
    done();
  });
  rd.pipe(wr);

  function done(err) {
    if (!cbCalled) {
      cb(err);
      cbCalled = true;
    }
  }
}
function copyFilePromise(source, target) {
  return new Promise(function (accept, reject) {
    copyFile(source, target, function (data) {
      if (data === undefined) {
        accept();
      } else {
        reject(data);
      }
    });
  });
}
function copyMultiFilePromise(srcTgtPairArr) {
  var copyFilePromiseArr = new Array();
  srcTgtPairArr.forEach(function (srcTgtPair) {
    copyFilePromiseArr.push(copyFilePromise(srcTgtPair[0], srcTgtPair[1]));
  });
  return Promise.all(copyFilePromiseArr);
}
Related
I'm trying to use async.series to execute 2 functions in different files in sequential mode.
The use case is to download log files to the local machine and then move them into a folder.
The functions work fine individually.
Now my next step is to require these 2 files in index.js and call both the functions in sequence.
This is the code to write the files from webdav into local machine:
const fs = require('fs');
const stream = require('stream');
const util = require('util');
const pipeline = util.promisify(stream.pipeline);
const { createClient } = require("webdav");
const path = require("path");

const client = createClient(
  "https:/*/webdav/Sites/Logs", {
    username: "*",
    password: "*"
  }
);

async function writeFiles() {
  console.log('inside write functions');
  let directoryData = await client.getDirectoryContents("/");
  console.log('after directorydata' + directoryData);
  await Promise.all(directoryData.map(start));
}

async function start(dir) {
  let fileName = dir.filename;
  console.log('inside start function');
  var directory = path.join(__dirname, '/logs/')
  if (fileName.includes('.log')) {
    try {
      let fileName = dir.filename;
      //console.log(fileName);
      await pipeline(
        client.createReadStream(fileName),
        fs.createWriteStream(directory + fileName)
      );
      return directory;
      console.log('done');
    } catch (e) {
      console.log(e);
    }
  }
}

module.exports = writeFiles;
This is the code to create folders and move the log files into them according to the name:
const testFolder = './logs/';
const fs = require('fs');
const path = require("path");
var directory = path.join(__dirname, '/logs/');

async function sort() {
  console.log('inside sort function sorting file');
  await fs.readdir(directory, (err, files) => {
    //console.log('sdcfe');
    if (files) {
      console.log('filesss')
    } else {
      console.log('no filesss')
    }
    files.forEach(file => {
      let fileFolder = file.split('-')[0];
      var folder = fileFolder.replace('.', "");
      var dir = directory + folder;
      if (!fs.existsSync(dir)) {
        //console.log('inside if' + dir);
        fs.mkdirSync(dir);
      }
      const currentPath = path.join(directory, file);
      const destinationPath = path.join(directory, folder, file);
      fs.rename(currentPath, destinationPath, function (err) {
        if (err) {
          throw err
        } else {
          return destinationPath;
          console.log("Successfully moved the file!");
        }
      });
    });
  });
}

module.exports = sort;
This is the index.js file that requires both of the above files and calls them in sequence (once the write function is done, I need to move the log files into folders):
'use strict';

// Initialize constants
const async = require('async');
const writeFiles = require('./webdavConnect2');
const sort = require('./sortingFiles');

// Initialize the program
let program = require('commander');

// Initialize the version
program.version('0.5.0');

program
  .command('say')
  .description('Batch command that can be used to zip, upload, import, and re-index for a given site import.')
  .action(function () {
    function webdavConnect() {
      console.log('inside webdav function');
      // write the files into local machine
      writeFiles();
    }

    // move the files into folders
    function sortingFiles() {
      console.log('inside sort function');
      sort();
    }

    async.series([
      function(callback) {
        webdavConnect(function(directory) {
          callback(null, directory);
        })
      },
      function(callback) {
        sortingFiles(function(destinationPath) {
          callback(null, destinationPath);
        })
      }
    ], function(asyncErrorObj) {
      // Was an error defined? If so, then throw it
      if (asyncErrorObj !== null) { throw asyncErrorObj; }
      console.log('END');
    });
  });

// Parse the command-line arguments
program.parse();
The issue with this is that currently the first function executes and not the second one.
Posting a solution a friend suggested:
I pinpointed some issues regarding the usage of async.series. Try replacing your index.js with the following code. Note the async functions that are now being passed to async.series, and note how those functions now accept the callback that Async.js is supposed to pass to them, which is missing in the current code. I couldn't test this because the project is not reproducible (at least without major prep). If anything breaks, share another message and/or tag me directly. Hope it helps.
"use strict";
const async = require("async");
const writeFiles = require("./webdavConnect2");
const sort = require("./sortingFiles");

let program = require("commander");

program.version("0.5.0");

program
  .command("say")
  .description(
    "Batch command that can be used to zip, upload, import, and re-index for a given site import."
  )
  .action(function () {
    async function webdavConnect(callback) {
      callback(await writeFiles());
    }

    async function sortingFiles(callback) {
      callback(await sort());
    }

    async.series(
      [
        function (callback) {
          webdavConnect(function (directory) {
            callback(null, directory);
          });
        },
        function (callback) {
          sortingFiles(function (destinationPath) {
            callback(null, destinationPath);
          });
        }
      ],
      function (asyncErrorObj, results) {
        if (asyncErrorObj !== null) {
          throw asyncErrorObj;
        }
        console.log(results);
      }
    );
  });

program.parse();
It seems that both functions are async, and they should be called in an async function with await. The first function did not complete its execution, and the second started executing but did not find the files, so you can't see its output. This is what I would do to fix the execution issue:
"use strict";
const writeFiles = require("./webdavConnect2");
const sort = require("./sortingFiles");

// Initialize the program
let program = require("commander");

// Initialize the version
program.version("0.5.0");

program
  .command("say")
  .description(
    "Batch command that can be used to zip, upload, import, and re-index for a given site import."
  )
  .action(async function() {
    await writeFiles();
    await sort();
  });

// Parse the command-line arguments
program.parse();
Ok, so I know how to program in C# fairly well, and I have started programming in JS (Node.js) recently. To be honest, async calls were a bit of a shock.
Let's say I have this code in C#:
var t_1 = SomeAsyncTask();
var t_2 = SomeOtherAsyncTask();
Task.WaitAll(t_1, t_2);
var res_1 = t_1.Result;
var res_2 = t_2.Result;
Is there a JS equivalent of this? So far I have managed this:
In User.js:
var express = require("express");
var router = express.Router();
var sqlDo = require("../../js_help/DatabasReq/sqlDo.js");

router.get("/", async function(req, res){
  var json = sqlDo.ExecCommand("select * from Users");
  res.send(json); //json.recordset
});

module.exports = router;
In sqlDo.js:
module.exports = {
  ExecCommand: function(command){
    // sql and config are defined before.
    sql.connect(config, function () {
      var request = new sql.Request();
      request.query(command, function (err, recordset) {
        if (err) console.log(err)
        console.log(recordset.recordset);
        return recordset;
      });
    });
  }
};
My problem is that this code runs asynchronously. I have tried putting await in different places, but nothing worked. So when I start my server it returns nothing. I can tell that it is completing the call, because it logs the results to the console.
Thanks for any help!
Btw: I have tried Googling/Stack Overflowing, but I was not able to find anything that looks like the C# equivalent. Is it even possible to write it like in C#? Again, thanks for every answer.
To make your ExecCommand function async, you have to make it return a Promise. Read about Promises for instance here
module.exports = {
  ExecCommand: function(command){
    return new Promise((resolve, reject) => { //return a Promise from the function
      sql.connect(config, function () {
        var request = new sql.Request();
        request.query(command, function (err, recordset) {
          if (err) {
            reject(err); //if there is an error, reject the Promise
          } else {
            resolve(recordset); //if no error, resolve the Promise with the result
          }
        });
      });
    });
  }
};
Depending on your SQL library, it may also support promises already, instead of callbacks:
module.exports = {
  ExecCommand: function(command) {
    return sql.connect(config)
      .then(() => {
        return new sql.Request().query(command);
      })
  }
};
or with async/await
module.exports = {
  ExecCommand: async function(command) {
    await sql.connect(config);
    return await new sql.Request().query(command);
  }
};
Then you can call this function in the request handler either like this:
router.get("/", async function(req, res){
  try {
    var json = await sqlDo.ExecCommand("select * from Users");
    res.send(json);
  } catch (err) {
    console.log(err);
    res.sendStatus(500);
  }
});
or like this
router.get("/", function(req, res){
  sqlDo.ExecCommand("select * from Users")
    .then(json => { //the promise resolved
      res.send(json);
    })
    .catch(err => { //the promise rejected
      res.sendStatus(500);
      console.log(err);
    });
});
I prefer the second variant. But that may be just my personal opinion ...
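Coming back to the original C# snippet: once ExecCommand returns a promise, the closest analogue to Task.WaitAll is Promise.all. A sketch (the second query is just a made-up example):

const sqlDo = require("../../js_help/DatabasReq/sqlDo.js");

async function run() {
  // Start both tasks without awaiting them, like creating t_1 and t_2 in C#.
  const t1 = sqlDo.ExecCommand("select * from Users");
  const t2 = sqlDo.ExecCommand("select * from Orders"); // hypothetical second query

  // Promise.all waits for both, like Task.WaitAll; the array holds the .Result values.
  const [res1, res2] = await Promise.all([t1, t2]);
  console.log(res1, res2);
}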
I am getting the error below while writing a function to monitor a path. I am new to Node.js:
TypeError: Cannot read property 'map' of undefined
at C:\Users\a\Desktop\DL\file\filemonitor.js:15:14
at FSReqWrap.oncomplete (fs.js:149:20)
const Promise = require('bluebird');
var fs = Promise.promisifyAll(require("fs"));

monitordir(monitorpath) {
  var fileList = [];
  return new Promise((resolve, reject) => {
    fs.readdir(monitorpath, function(err, items) {
      items.map((file) => {
        fileList.push(file);
      });
      resolve(fileList);
    });
  })
}
Note: I don't see a package.json file either. Should I have a successful run to see it?
When you run var fs = Promise.promisifyAll(require("fs")); it returns a promise to you, so you can't execute a map on a promise.
I believe that you don't need a Promise to resolve the fs module; my suggestion is to write something like this:
const Promise = require('bluebird');
const fs = require("fs");

const monitordir = path => {
  return new Promise((resolve, reject) => {
    fs.readdir(path, (error, items) => {
      if (error) return reject(error)
      return resolve(items);
    })
  })
}
Try following fix, see if it fits your needs:
monitordir(monitorpath) {
  var fileList = [];
  // With bluebird's promisifyAll, the promise-returning method is readdirAsync,
  // and it resolves with the items only (there is no error argument in .then).
  return fs.readdirAsync(monitorpath)
    .then(function(items) {
      items.map((file) => {
        fileList.push(file); // fileList is ready here! do whatever you want before it resolves to caller
      });
      return fileList;
    })
    .catch(function(e) {
      // something bad happened; throw error or handle it as per your needs
      throw new Error(e);
    });
}
For package.json, you can run the npm init command in your project directory; it will create one for you.
I'm looping through files in a directory and storing the file details to an array data. The following code populates the array if I don't attempt to run fs.stat to get things like the file create/edit date:
fs.readdir('../src/templates', function (err, files) {
  if (err) {
    throw err;
  }
  var data = [];
  files.forEach(function (file) {
    try {
      fs.stat('../src/templates/' + file, (error, stats) => {
        data.push({ Name: file, Path: path.join(query, file) });
      });
    } catch (e) {
      console.log(e);
    }
  });
  res.json(data);
});
If I move the data.push(...) outside the fs.stat the array returns with the file data. Inside the fs.stat it returns empty. I assume this is an asynchronous issue in that the for loop is running and finishing before fs.stat runs.
I'm thinking I need to use a promise here but unsure.
If you want or need to be asynchronous:
const fs = require("fs");
const path = require("path");
const { promisify } = require("util");
const asyncStat = promisify(fs.stat);

fs.readdir('../src/templates', async function(err, files) {
  if (err) {
    throw err;
  }
  const data = await Promise.all(files.map(async function(file) {
    try {
      const stats = await asyncStat('../src/templates/' + file);
      return { Name: file, Path: path.join(query, file), stats };
    } catch (e) {
      console.log(e);
    }
  }));
  res.json(data);
});
Note that I used map instead of forEach and then awaited all Promises (async makes function return a promise).
I also needed to change fs.stat to use promise with util.promisify.
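If you are on Node 10 or later, a variation of the same idea could use the built-in fs.promises API instead of util.promisify; a sketch, with the directory and query made into parameters here:

const fs = require('fs');
const path = require('path');

// Returns an array of { Name, Path, stats } objects for every file in dir.
async function listTemplates(dir, query) {
  const files = await fs.promises.readdir(dir);
  return Promise.all(files.map(async (file) => {
    const stats = await fs.promises.stat(path.join(dir, file));
    return { Name: file, Path: path.join(query, file), stats };
  }));
}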
You're right about the issue being in the asynchronous call. You could use a promise, or you could use fs.statSync(...), which returns an fs.Stats object and operates synchronously.
files.forEach(function (file) {
  try {
    var fileStats = fs.statSync('../src/templates/' + file);
    data.push({
      Name: file,
      Path: path.join(query, file),
      Stats: fileStats // include the stats so the create/edit dates are available
    });
  } catch (e) {
    console.log(e);
  }
});
I'm trying to refactor some Node code that is a whole mess of callbacks. I thought it would be nice to give promises a try for this purpose. I'm trying to convert an XML string to JSON with the xml2js Node module. The original code was:
"use strict";
var xml2jsParser = require('xml2js').parseString;

var string = "<container><tag3>option3</tag3></container>";

xml2jsParser(string, function(err, result) {
  console.log(result);
});
and this displays:
{ container: { tag3: [ 'option3' ] } }
Following the first answer on this question How do I convert an existing callback API to promises? I tried to wrap the xml2jsParser function using promises in the following way:
"use strict";
var xml2jsParser = require('xml2js').parseString;

function promisesParser(string) {
  return new Promise(function(resolve, reject) {
    xml2jsParser(string, resolve);
  });
}

var string = "<container><tag3>option3</tag3></container>";

promisesParser(string).then(function(err, result) {
  console.log(result);
});
This displays undefined on the console instead of the JSON object as expected. I don't understand why this happens, as I was able to successfully do the same with other functions. I know something similar can be achieved with Bluebird's promisify functionality, but I'd like to do this in plain JavaScript without any third-party libraries.
Another option is to use the native util module's promisify method, available from Node 8.0:
const xml2js = require('xml2js');
const util = require('util');
xml2js.parseStringPromise = util.promisify(xml2js.parseString);
// await xml2js.parseStringPromise(.. your xml ..);
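Usage could then look like this (the XML string is just the example from the question):

const xml2js = require('xml2js');
const util = require('util');

xml2js.parseStringPromise = util.promisify(xml2js.parseString);

async function main() {
  const xml = "<container><tag3>option3</tag3></container>";
  const result = await xml2js.parseStringPromise(xml);
  console.log(result); // { container: { tag3: [ 'option3' ] } }
}

main().catch(console.error);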
You are going to need to wrap it up like this:
return new Promise(function(resolve, reject) {
  xml2jsParser(string, function(err, result) {
    if (err) {
      reject(err);
    } else {
      resolve(result);
    }
  });
});
Then use it like this:
promisesParser(string).then(function(result) {
  console.log(result);
}).catch(function(err) {
  //error here
});
There are two issues:
You have to resolve with a value when the parse succeeds and reject with an error when it fails.
You need to add a catch block to your promise-handling chain to catch errors.
var xml2jsParser = require('xml2js').parseString;

function promisesParser(string) {
  return new Promise(function(resolve, reject) {
    xml2jsParser(string, function(err, result) {
      if (err) {
        return reject(err);
      } else {
        return resolve(result);
      }
    });
  });
}
var string = "<container><tag3>option3</tag3></container>";

promisesParser(string)
  .then(console.log)
  .catch(console.log);
I might be too late with this answer, but I thought I'd share what I have been using.
You can use the parseStringPromise method of xml2js with the await keyword inside an async function.
import { parseStringPromise } from 'xml2js'

export const main = async () => {
  // docBody holds the XML string to be parsed
  const leadsData = await parseStringPromise(docBody)
  console.log(leadsData)
}