I'm trying to create a function that pulls a remote repository, navigates into the cloned repo, and installs its dependencies. Somehow it fails to install the dependencies inside the cloned repo and installs them outside instead:
const clui = require("clui");
const spinner = clui.Spinner;
const git = require("simple-git/promise");
const path = require("path");
const fs = require("fs");
const { install } = require("pkg-install");
// `inquirer` below is a local prompt helper exposing DirectoryName(); its require is omitted here.
async function pullRepo() {
const pulling = new spinner("Initializing project...");
const installing = new spinner("Installing dependencies...");
const rep = await inquirer.DirectoryName();
const package = path.join(rep.project, "package.json");
pulling.start();
await git()
.silent(true)
.clone("git#github.com:blacklane/create-blacklane-app.git", rep.project)
.then(async () => {
pulling.stop();
console.log(`working directory:`, process.cwd());
// check file exist asynchronously
fs.access(package, fs.constants.F_OK, err => {
console.log(`${package} ${err ? "does not exist" : "exists"}`);
});
installing.start();
const obj = JSON.parse(fs.readFileSync(package, "utf8"));
const dependencies = { ...obj.dependencies, ...obj.devDependencies };
process.chdir(rep.project); // navigate to the directory to install dependencies
console.log(`new working directory from git:`, process.cwd());
const { stdout } = await install(dependencies, {
dev: true,
prefer: "npm"
});
console.log(stdout);
installing.stop();
})
.catch(error => console.error("failed: ", error));
// progress.finish();
}
The reason this can happen is that install is somehow not able to get to the cloned repo, maybe because the underlying shell running the Node process is still the same.
The following is the working code:
const clui = require('clui');
const spinner = clui.Spinner;
const git = require("simple-git/promise");
const path = require("path");
const fs = require('fs');
const fsPromises = fs.promises;
const { spawn } = require('child_process');
async function pullRepo(repo, dirName) {
const pulling = new spinner("Initializing project...");
const installing = new spinner("Installing dependencies...");
pulling.start();
await git().silent(true).clone(repo, dirName);
pulling.stop();
const package = path.join(dirName, "package.json");
// check file exist asynchronously
await fsPromises.access(package, fs.constants.F_OK);
installing.start();
const npmInstall = spawn('npm', ['i'], { cwd: dirName });
npmInstall.stdout.on('data', (data) => {
console.log(`stdout: ${data}`);
});
npmInstall.stderr.on('data', (data) => {
console.error(`stderr: ${data}`);
installing.stop();
});
npmInstall.on('close', (code) => {
installing.stop();
});
}
const repoToPull = "https://github.com/facebook/create-react-app.git"; // can be any repo
const dirToPullTo = path.join(__dirname,'gitpull'); // directory you want to pull it to.
pullRepo(
repoToPull,
dirToPullTo
).then(res => console.log(res));
This code needs better error handling.
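For example, a minimal sketch (assuming the same npm i invocation as above) that wraps the spawn in a promise, so a failed install rejects instead of only being logged:

// Hedged sketch: promise-wrapped npm install with basic error handling.
const { spawn } = require('child_process');

function npmInstall(dirName) {
  return new Promise((resolve, reject) => {
    // on Windows you may need { shell: true } so 'npm' resolves to npm.cmd
    const child = spawn('npm', ['i'], { cwd: dirName });
    child.on('error', reject); // e.g. the npm binary was not found
    child.on('close', (code) => {
      if (code === 0) resolve();
      else reject(new Error(`npm install exited with code ${code}`));
    });
  });
}

With this, pullRepo can simply await npmInstall(dirName) inside a try/catch.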
Also, you don't need to add .then() when you are awaiting a promise.
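For instance, inside an async function these two are equivalent, and mixing them (as in the original await git()...then(...)) only obscures the control flow:

// mixing styles:
await git().silent(true).clone(repo, dirName).then(() => console.log('cloned'));

// equivalent, and clearer:
await git().silent(true).clone(repo, dirName);
console.log('cloned');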
I am trying to copy a folder and all of its content using Node.js's fs.cp feature, as follows:
fs.cp('D:\\Developer\\insomniac-beta\\template', dir_path, {recursive: true});
However, it's throwing this error:
node:internal/validators:232
throw new ERR_INVALID_ARG_TYPE(name, 'Function', value);
^
TypeError [ERR_INVALID_ARG_TYPE]: The "cb" argument must be of type function. Received undefined
at makeCallback (node:fs:191:3)
at Object.cp (node:fs:2848:14)
at D:\Developer\igbot\generate_config.js:30:13
at FSReqCallback.oncomplete (node:fs:193:23) {
code: 'ERR_INVALID_ARG_TYPE'
}
How is this possible? I do not have any calls to cb.
If you don't want to use the asynchronous copy with a callback, you can use the synchronous version:
fs.cpSync(sourceDir, destDir, {recursive: true});
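Since the synchronous version throws on failure, you may want to wrap it; a minimal sketch:

try {
  fs.cpSync(sourceDir, destDir, {recursive: true});
} catch (err) {
  console.error('copy failed:', err);
}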
You are missing one argument. As mentioned in the documentation, fs.cp is an asynchronous function that takes a callback: the final argument needs to be a callback function.
fs.cp('D:\\Developer\\insomniac-beta\\template', dir_path, {recursive: true}, (err) => {
  if (err) {
    // handle error
    console.error(err);
  }
});
It seems like you're using the promises API, but you didn't show how you import the module. Here's an example with the current Node LTS (v16.x):
Ref: fsPromises.cp(src, dest[, options])
import {promises as fs} from 'fs';
// ...
await fs.cp(sourceDir, destDir, {recursive: true});
Here's a full, self-contained example which creates a sample dir structure, copies it, verifies the copy, and cleans up the sample data:
example.mjs:
import * as path from 'path';
import {constants as fsConstants, promises as fs} from 'fs';
import {fileURLToPath} from 'url';
import {ok as assert} from 'assert/strict';
// Create sample folder structure, return relative file paths
async function createSampleFiles (rootDir) {
const writeFileOpts = {encoding: 'utf8'};
const filePaths = [];
await fs.mkdir(rootDir, {recursive: true});
let fPath = 'hello.txt';
filePaths.push(fPath);
fPath = path.join(rootDir, fPath);
let text = 'hello world\n';
await fs.writeFile(fPath, text, writeFileOpts);
let dir = 'more';
await fs.mkdir(path.join(rootDir, dir), {recursive: true});
fPath = path.join(dir, 'wow.txt');
filePaths.push(fPath);
fPath = path.join(rootDir, fPath);
text = 'wow\n';
await fs.writeFile(fPath, text, writeFileOpts);
return filePaths;
}
async function fsEntryExists (filePath) {
try {
await fs.access(filePath, fsConstants.F_OK);
return true;
}
catch (ex) {
if (ex instanceof Error && ex.code === 'ENOENT') return false;
throw ex;
}
}
async function assertFSEntryExists (filePath) {
assert(await fsEntryExists(filePath), `FS entry not found for "${filePath}"`);
}
async function main () {
const moduleDir = path.dirname(fileURLToPath(import.meta.url));
const sourceDir = path.join(moduleDir, 'data');
const destDir = path.join(moduleDir, 'data-copy');
const relativePaths = await createSampleFiles(sourceDir);
await fs.cp(sourceDir, destDir, {recursive: true});
let exitCode = 0;
try {
const filePaths = relativePaths.map(fPath => path.join(destDir, fPath));
for (const fPath of filePaths) await assertFSEntryExists(fPath);
console.log('Copy successful');
}
catch {
console.error('Copy failed');
exitCode = 1;
}
finally {
// Cleanup
for (const dir of [sourceDir, destDir]) {
if (await fsEntryExists(dir)) await fs.rm(dir, {recursive: true});
}
process.exit(exitCode);
}
}
main();
$ node --version
v16.15.0
$ node example.mjs
Copy successful
I have written a test case that successfully loads files into the virtual FS and at the same time mounts a virtual volume, as below:
describe("should work", () => {
const { vol } = require("memfs");
afterEach(() => vol.reset());
beforeEach(() => {
vol.mkdirSync(process.cwd(), { recursive: true });
jest.resetModules();
jest.resetAllMocks();
});
it("should be able to mock fs that being called in actual code", async () => {
jest.mock("fs", () => {
return ufs //
.use(jest.requireActual("fs"))
.use(createFsFromVolume(vol) as any);
});
jest.mock("fs/promises", () => {
return ufs //
.use(jest.requireActual("fs/promises"))
.use(createFsFromVolume(vol) as any);
});
const { createFsFromVolume } = require("memfs");
const { ufs } = require("unionfs");
const { countFile } = require("../src/ops/fs");
vol.fromJSON(
{
"./some/README.md": "1",
"./some/index.js": "2",
"./destination": null,
},
"/app"
);
const result = ufs.readdirSync(process.cwd());
const result2 = ufs.readdirSync("/app");
const result3 = await countFile("/app");
console.log({ result, result2, result3 });
});
});
By using ufs.readdirSync, I can access the virtual FS: result indeed gives me the files loaded from disk into the virtual FS, and result2 represents /app, a new volume created from vol.fromJSON.
Now my problem is that I am unable to get a result for result3, which calls the countFile method below:
import fsPromises from "fs/promises";
export const countFile = async (path: string) => {
const result = await fsPromises.readdir(path);
return result.length;
};
I'm getting this error:
Error: ENOENT: no such file or directory, scandir '/app'
which I think is because countFile is accessing the actual FS instead of the virtual one, despite my jest.mock('fs/promises'). Can anyone provide a lead?
This is the function you want to unit test.
//CommonJS version
const fsPromises = require('fs/promises');
const countFile = async (path) => {
const result = await fsPromises.readdir(path);
return result.length;
};
module.exports = {
countFile
}
Now, how you would normally go about this is to mock fsPromises; in this example, specifically readdir(), since that is the function used in countFile.
This is what we call a stub:
A skeletal or special-purpose implementation of a software component, used to develop or test a component that calls or is otherwise dependent on it. It replaces a called component.
const {countFile} = require('./index');
const {readdir} = require("fs/promises");
jest.mock('fs/promises');
beforeEach(() => {
readdir.mockReset();
});
it("When testing countFile, given string, then return files", async () => {
const path = "/path/to/dir";
// vvvvvvv STUB HERE
readdir.mockResolvedValueOnce(["src", "node_modules", "package-lock.json" ,"package.json"]);
const res = await countFile(path);
expect(res).toBe(4);
})
You do this because you're unit testing. You don't want to depend on other functions, because then it's no longer a unit test but more of an integration test. Secondly, it's a third-party library, which is maintained and tested by someone else.
Here is where your scenario applies. From my perspective, your objective isn't to test countFile(); rather, it is to test fsPromises and maybe the functionality of reading virtual file systems (unionfs). If so, then fsPromises doesn't really need to be mocked.
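If the goal really is to exercise countFile against the virtual volume, a minimal sketch (assuming memfs is installed) would back fs/promises with memfs instead of stubbing readdir:

// Hedged sketch: serve memfs's promises API whenever fs/promises is required.
jest.mock('fs/promises', () => require('memfs').fs.promises);

const { vol } = require('memfs');
const { countFile } = require('../src/ops/fs');

afterEach(() => vol.reset());

it('counts files on the virtual volume', async () => {
  vol.fromJSON({ './README.md': '1', './index.js': '2' }, '/app');
  expect(await countFile('/app')).toBe(2);
});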
Hello, I am new to testing with mocha/chai/sinon and sequelize-test-helpers.
I'm trying to use proxyquire to override the require, but I'm having issues.
I'm getting the following error about the path:
Error: ENOENT: no such file or directory, scandir 'C:<local-directories-path>\ecommerce-pern-app\server\src\models'
I don't get why there is a src folder when I don't have a src folder at all. I am using proxyquire in the test file, whose path from the server directory is:
server/specs/services/user-service.spec.js
"use strict";
const chai = require('chai');
const {match, stub, resetHistory, spy} = require('sinon');
const proxyquire = require('proxyquire');
const path = require('path');
const service = path.resolve('./services/userService.js')
var sinonChai = require("sinon-chai");
chai.should();
chai.use(sinonChai);
console.log(service)
const {makeMockModels, sequelize, dataTypes,} = require('sequelize-test-helpers');
describe('Idea Controller', function () {
const uid = '6a88e9b5-33a2-403f-ac3d-e86413ac101d'
const data = {
email: 'testface@test.com',
password: '123456',
is_admin: false,
first_name: 'Testy',
last_name: 'McTestface',
google_id: null,
facebook_id: null
}
describe('findAll()', function () {
it('Success case ', function () {
const mockResponse = () => {
const res = {};
res.json = stub().returns(res);
return res;
};
let res = mockResponse();
const User = {findAll: stub()};
const mockModels = makeMockModels({User});
Idea.findAll.resolves(data);
const UserService = proxyquire(service, {
"save": {}
});
UserService.findAll({}, res);
Idea.findAll.should.have.been.called; // passes
res.json.should.have.been.called; //fails
});
})
});
In the above code I am using the proxyquire like this:
const proxyquire = require('proxyquire');
const path = require('path');
const service = path.resolve('./services/userService.js')
const {makeMockModel} = require('sequelize-test-helpers');
const mockModels = makeMockModels({User});
const UserService = proxyquire(service, {
"../models": mockModels
});
I am trying to use the path to find the server/services/userService.js file, which is located relative to the test file at ../../services/userService.js. I have got this bug where there is a src folder in the path, when I do not have a src directory at all!
As the bug is saying:
Error: ENOENT: no such file or directory, scandir 'C:<local-directories-path>\ecommerce-pern-app\server\src\models'
Whatever I try for the file path does not work; I tried path.resolve, path.join, and typing the path directly, like ../../services/userService.js.
Here is server/services/userService.js:
const Models = require('../models');
const { User } = Models;
const save = async ({ id, ...data }) => {
const user = await User.findOne({ where: { uid: id } })
if (user) return await user.update(data)
return null
}
module.exports = save;
I just want the path to work with proxyquire.
What is this \src\models path from the error? I don't have a src/models route at all!
This is a quote from the sequelize-test-helpers readme:
As a convenience, makeMockModels will automatically populate your mockModels with mocks of all of the models defined in your src/models folder (or if you have a .sequelizerc file it will look for the models-path in that). Simply override any of the specific models you need to do stuff with.
So you need to provide a .sequelizerc file and define the models-path in it.
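A minimal sketch of such a .sequelizerc file, assuming your models live in server/models (adjust the path to your layout):

// .sequelizerc -- plain CommonJS, placed in the project root (here: server/)
const path = require('path');

module.exports = {
  'models-path': path.resolve('models'),
};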
I want to download videos one after the other in a series.
That is, the first one should be completely downloaded before the second one starts, the second before the third, and so on.
I have the following directory structure:
video-downloader
├── index.js
├── videos.js
├── package.json
package.json
{
"name": "video-downloader",
"version": "1.0.0",
"main": "index.js",
"license": "MIT",
"dependencies": {
"download": "^7.1.0"
},
"scripts": {
"start": "node index"
}
}
videos.js
const videos = [
{
url: 'https://video.com/lesson1.mp4',
name: 'Lesson 1',
},
{
url: 'https://video.com/lesson2.mp4',
name: 'Lesson 2',
},
.
.
.
{
url: 'https://video.com/lesson100.mp4',
name: 'Lesson 100',
}
]
index.js
const fs = require('fs')
const download = require('download')
const videos = require('./videos')
const OUTPUT_DIR = 'Downloads'
fs.mkdir(OUTPUT_DIR, () => {
main()
})
const main = () => {
videos.map((video, i) => {
console.log(`Downloaded file ${i + 1} of ${videos.length} (${video.name})`)
download(video.url).pipe(
fs.createWriteStream(`${OUTPUT_DIR}/${video.name}.mp4`),
)
})
}
This downloads the videos in parallel, chunk by chunk. All of the videos download at once, but none of them finishes before another one starts.
How do I download it serially?
I know I should use something like http://caolan.github.io/async/ but it needs a function signature, and I have videos as an array, so I'm not sure how to go about it.
You can use the await keyword in a standard for loop; things will then process in order, waiting on each download before proceeding.
const fs = require('fs')
const download = require('download')
const videos = require('./videos')
const util = require('util')
const mkdirAsync = util.promisify(fs.mkdir)
const OUTPUT_DIR = 'Downloads'
const main = async () => {
await mkdirAsync(OUTPUT_DIR)
for (let i = 0; i < videos.length; i++) {
const video = videos[i]
const data = await download(video.url)
fs.writeFileSync(`${OUTPUT_DIR}/${video.name}.mp4`, data)
console.log(`Downloaded file ${i + 1} of ${videos.length} (${video.name})`)
}
}
main()
You can use .reduce with promises to resolve sequentially, as follows:
const fs = require('fs')
const sh = require('shelljs')
const download = require('download')
const videos = require('./videos')
const OUTPUT_DIR = 'Downloads'
sh.mkdir('-p', OUTPUT_DIR)
const queue = videos.reduce((acc, item) => {
  return acc.then(() => {
    return new Promise((resolve) => {
      // Here you are using download() as a Duplex Stream, not a promise,
      // so you must wait for the write stream to emit its 'finish' event
      // before you can proceed further
      let stream = download(item.url)
        .pipe(fs.createWriteStream(`${OUTPUT_DIR}/${item.name}.mp4`));
      stream.on('finish', () => {
        console.log(`stream done ${item.name}`);
        resolve(item);
      });
    });
  });
}, Promise.resolve());
// 'queue' is now a promise
queue.then(() => {
  // reduce chains the promises, so reaching this point
  // means every download before it has resolved
  console.log('all files were downloaded');
});
Try async/await for this: first download, then write synchronously. Note that forEach with an async callback fires all iterations at once, so a plain for...of loop is used here to keep the downloads serial.
const fs = require('fs');
const sh = require('shelljs');
const download = require('download');
const videos = require('./videos');
const OUTPUT_DIR = 'Downloads';
sh.mkdir('-p', OUTPUT_DIR);
(async () => {
  for (const [i, video] of videos.entries()) {
    console.log(`Downloading ${video.name}. File ${i + 1}/${videos.length}`);
    const data = await download(video.url);
    fs.writeFileSync(`${OUTPUT_DIR}/${video.name}.mp4`, data);
  }
})();
I need to call a module from the main index.js file.
Here is my module:
const request = require('./rq.js');
const callback = require('./callback.js')
const url = `https://localhost:3000/${id}`;
request(url, callback)
.then(res => {
console.log(res);
})
.catch(err => {
console.log(err);
})
module.exports = page; //Tell me how to export all code from module
Here is my index.js file:
const Methods = {
page: require('./page.js'),
}
module.exports = // What do I need to code here?
The file from which I call the module:
const main = require('./index.js');
main.page({id: 'id'})
.then(console.log);
So what should I change to be able to call page.js like that?
Make the following changes to page.js, since in your main file you expect a promise to be returned:
const request = require('./rq.js');
const callback = require('./callback.js')
function page({id}) {
const url = `https://localhost:3000/${id}`;
return request(url, callback)
}
module.exports = { page }; // export the page function
Make the following changes to index.js:
const Methods = {
page: require('./page.js').page,
}
module.exports = Methods;
Check if this works.
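One caveat: for page() to return a promise, request itself must return one. The real rq.js isn't shown in the question; here is a hypothetical sketch, assuming it wraps an HTTPS GET and resolves with the (optionally callback-transformed) response body:

// rq.js -- hypothetical implementation, for illustration only
const https = require('https');

module.exports = function request(url, callback) {
  return new Promise((resolve, reject) => {
    https.get(url, (res) => {
      let body = '';
      res.on('data', (chunk) => (body += chunk));
      res.on('end', () => resolve(callback ? callback(body) : body));
    }).on('error', reject);
  });
};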