mocking fs module causes Fs.statSync is not a function error - javascript

node version: 14.20.0
jest version: 28.1.1
ts-jest version: 28.0.5
I have following function which is to be tested
uploadFile.ts
import * as fs from 'fs';
export async function uploadFile(containerName, customerId, fileName, filePath, logCtx){
fs.stat(filePath, (err, stats) => {
if (err) {
logger.error({err, ...logCtx}, `File doesn't exist. File path: ${filePath}`);
} else {
logger.info(logCtx, `File size: ${stats.size}`);
if(stats.size < 1){
logger.error({err, ...logCtx}, `Byte size is ${stats.size} - File is empty. File path: ${filePath}`);
throw new Error("File is empty.")
}
}
});
// do some other things
}
The uploadFile function uses fs module and I decided to mock it since we don't need to do anything with files for testing
uploadFile.test.js
// mock fs package
const fs = { stat: jest.fn(), createReadStream: jest.fn() };
jest.mock("fs", () => {
return fs;
});
it("should call fs.stat once", async () => {
// Arrange
const { uploadFile } = require("../../../lib/global/uploadFile");
const containerName = "qz";
const customerId = "w3";
const fileName = "none.pdf";
const filePath = "src/services/none.pdf";
const logCtx = {};
// Act
await uploadFile(containerName, customerId, fileName, filePath, logCtx);
// Assert
expect(fs.stat).toBeCalledWith(filePath, expect.any(Function));
expect(fs.stat).toBeCalledTimes(1);
});
when running the above test file, test case fails and shows the following error
● should call fs.stat once
require-at: stat 'C:\guardian group\c360-accounts' failed: Fs.statSync is not a function
at makeIt (node_modules/require-at/require-at.js:19:15)
at requireAt (node_modules/require-at/require-at.js:35:10)
at Object.<anonymous> (node_modules/mongoose/node_modules/mongodb/node_modules/optional-require/src/index.ts:318:64)
at Object.<anonymous> (node_modules/mongoose/node_modules/mongodb/node_modules/optional-require/index.js:8:13)
I have changed the mock of "fs" to the following hopeing it would resolve the error, and it is showing a different error instead.
const fs = { stat: jest.fn(), createReadStream: jest.fn(), statSync: jest.fn() };
The error
● should call fs.stat once
require-at: not a directory: 'C:\guardian group\c360-accounts'
at makeIt (node_modules/require-at/require-at.js:23:28)
at makeIt (node_modules/require-at/require-at.js:24:16)
at requireAt (node_modules/require-at/require-at.js:35:10)
at Object.<anonymous> (node_modules/mongoose/node_modules/mongodb/node_modules/optional-require/src/index.ts:318:64)
at Object.<anonymous> (node_modules/mongoose/node_modules/mongodb/node_modules/optional-require/index.js:8:13)
What's happening here, am I missing something?
Thanks in advance

Related

copy folder in node using cp feature

I am trying to copy a folder and all of it's content using node.js cp feature as follows
fs.cp('D:\\Developer\\insomniac-beta\\template', dir_path, {recursive: true});
however its throwing me this error
node:internal/validators:232
throw new ERR_INVALID_ARG_TYPE(name, 'Function', value);
^
TypeError [ERR_INVALID_ARG_TYPE]: The "cb" argument must be of type function. Received undefined
at makeCallback (node:fs:191:3)
at Object.cp (node:fs:2848:14)
at D:\Developer\igbot\generate_config.js:30:13
at FSReqCallback.oncomplete (node:fs:193:23) {
code: 'ERR_INVALID_ARG_TYPE'
}
how is this possible ? i do not have any calls to cb ?
If you dont want to use asynchronous copy with callback you can use synchronous version.
fs.cpSync(sourceDir, destDir, {recursive: true});
You are missing one argument. As mentioned in the documentation, fs.cp is an asynchronous function that takes in a callback function
the final arguement needs to be a callback function
fs.cp('D:\\Developer\\insomniac-beta\\template', dir_path, (err)=>{
// handle error
})
It seems like you're using the promises API, but you didn't show how you import the module. Here's an example with the current Node LTS (v16.x):
Ref: fsPromises.cp(src, dest[, options])
import {promises as fs} from 'fs';
// ...
await fs.cp(sourceDir, destDir, {recursive: true});
Here's a full, self-contained example which creates a sample dir structure, copies it, verifies the copy, and cleans up the sample data:
example.mjs:
import * as path from 'path';
import {constants as fsConstants, promises as fs} from 'fs';
import {fileURLToPath} from 'url';
import {ok as assert} from 'assert/strict';
// Create sample folder structure, return relative file paths
async function createSampleFiles (rootDir) {
const writeFileOpts = {encoding: 'utf8'};
const filePaths = [];
await fs.mkdir(rootDir, {recursive: true});
let fPath = 'hello.txt';
filePaths.push(fPath);
fPath = path.join(rootDir, fPath);
let text = 'hello world\n';
await fs.writeFile(fPath, text, writeFileOpts);
let dir = 'more';
await fs.mkdir(path.join(rootDir, dir), {recursive: true});
fPath = path.join(dir, 'wow.txt');
filePaths.push(fPath);
fPath = path.join(rootDir, fPath);
text = 'wow\n';
await fs.writeFile(fPath, text, writeFileOpts);
return filePaths;
}
async function fsEntryExists (filePath) {
try {
await fs.access(filePath, fsConstants.F_OK);
return true;
}
catch (ex) {
if (ex instanceof Error && ex.code === 'ENOENT') return false;
throw ex;
}
}
async function assertFSEntryExists (filePath) {
assert(await fsEntryExists(filePath), `FS entry not found for "${filePath}"`);
}
async function main () {
const moduleDir = path.dirname(fileURLToPath(import.meta.url));
const sourceDir = path.join(moduleDir, 'data');
const destDir = path.join(moduleDir, 'data-copy');
const relativePaths = await createSampleFiles(sourceDir);
await fs.cp(sourceDir, destDir, {recursive: true});
let exitCode = 0;
try {
const filePaths = relativePaths.map(fPath => path.join(destDir, fPath));
for (const fPath of filePaths) await assertFSEntryExists(fPath);
console.log('Copy successful');
}
catch {
console.error('Copy failed');
exitCode = 1;
}
finally {
// Cleanup
for (const dir of [sourceDir, destDir]) {
if (await fsEntryExists(dir)) await fs.rm(dir, {recursive: true});
}
process.exit(exitCode);
}
}
main();
$ node --version
v16.15.0
$ node example.mjs
Copy successful

How to mock fs module together with unionfs?

I have written a test case that successfully load files into virtual FS, and at the same time mounted a virtual volume as below
describe("should work", () => {
const { vol } = require("memfs");
afterEach(() => vol.reset());
beforeEach(() => {
vol.mkdirSync(process.cwd(), { recursive: true });
jest.resetModules();
jest.resetAllMocks();
});
it("should be able to mock fs that being called in actual code", async () => {
jest.mock("fs", () => {
return ufs //
.use(jest.requireActual("fs"))
.use(createFsFromVolume(vol) as any);
});
jest.mock("fs/promises", () => {
return ufs //
.use(jest.requireActual("fs/promises"))
.use(createFsFromVolume(vol) as any);
});
const { createFsFromVolume } = require("memfs");
const { ufs } = require("unionfs");
const { countFile } = require("../src/ops/fs");
vol.fromJSON(
{
"./some/README.md": "1",
"./some/index.js": "2",
"./destination": null,
},
"/app"
);
const result = ufs.readdirSync(process.cwd());
const result2 = ufs.readdirSync("/app");
const result3 = await countFile("/app");
console.log({ result, result2, result3 });
});
});
By using ufs.readdirSync, I can access to virtual FS and indeed result giving me files that loaded from disc into virtual FS, result2 representing /app which is a new volume created from vol.fromJSON.
Now my problem is I am unable to get the result for result3, which is calling countFile method as below
import fsPromises from "fs/promises";
export const countFile = async (path: string) => {
const result = await fsPromises.readdir(path);
return result.length;
};
I'm getting error
Error: ENOENT: no such file or directory, scandir '/app'
which I think it's because countFile is accessing the actual FS instead of the virtual despite I've had jest.mock('fs/promises')?
Please if anyone can provide some lead?
This is the function you want to unit test.
//CommonJS version
const fsPromises = require('fs/promises');
const countFile = async (path) => {
const result = await fsPromises.readdir(path);
return result.length;
};
module.exports = {
countFile
}
Now, how you would normally go about this, is to mock fsPromises. In this example specifically readdir() since that is the function being used in countFile.
This is what we call: a stub.
A skeletal or special-purpose implementation of a software component, used to develop or test a component that calls or is otherwise dependent on it. It replaces a called component.
const {countFile} = require('./index');
const {readdir} = require("fs/promises");
jest.mock('fs/promises');
beforeEach(() => {
readdir.mockReset();
});
it("When testing countFile, given string, then return files", async () => {
const path = "/path/to/dir";
// vvvvvvv STUB HERE
readdir.mockResolvedValueOnce(["src", "node_modules", "package-lock.json" ,"package.json"]);
const res = await countFile(path);
expect(res).toBe(4);
})
You do this because you're unit testing. You don't want to be dependent on other functions because that fails to be a unit test and more integration test. Secondly, it's a third-party library, which is maintained/tested by someone else.
Here is where your scenario applies. From my perspective, your objective isn't to test countFile() rather, to test fsPromises and maybe test functionality to read virtual file-systems: unionfs. If so then, fsPromises doesn't need to really be mocked.

Firebase cloud function: Error: EISDIR: illegal operation on a directory

I'm trying to download an image from an url and then uploading it to my firebase cloud storage.
This is the code i'm using.
import * as functions from 'firebase-functions';
import * as admin from 'firebase-admin';
const download = require('image-downloader');
const tmp = require('tmp');
export const downloadFunction = functions.https.onCall(async (data, context) => {
var bucket = admin.storage().bucket();
await tmp.dir(async function _tempDirCreated(err: any, path: any) {
if (err) throw err;
const options = {
url: 'theUrlIWantToPutInTheStorage',
dest: path,
}
console.log('Dir: ', path);
await download.image(options)
.then(async () => {
console.log('Saved');
await bucket.upload(path, {
destination: "testfolder/test.jpg",
metadata: "metadata",
});
})
.catch((err2: any) => console.error(err2))
});
});
But from the firebase console (logs) I get this error:
{ Error: EISDIR: illegal operation on a directory, read errno: -21, code: 'EISDIR', syscall: 'read' }
What am I doing wrong?
Thanks in advance!
The path that you provide to the method upload should be a file and not a directory.
upload(pathString, optionsopt, callbackopt) → {Promise.<UploadResponse>}
Upload a file to the bucket. This is a convenience method that wraps File#createWriteStream.
Example :
const options = {
destination: 'new-image.png',
resumable: true,
validation: 'crc32c',
metadata: {
metadata: {
event: 'Fall trip to the zoo'
}
}
};
bucket.upload('local-image.png', options, function(err, file) {
// Your bucket now contains:
// - "new-image.png" (with the contents of `local-image.png')
// `file` is an instance of a File object that refers to your new file.
});
https://googleapis.dev/nodejs/storage/latest/Bucket.html

Config param are undefined in error handler

I have a config file that just sets the process.env data to params.
It works fine and I can use it correctly everywhere but on my unexpected exceptions handler I can't use it... all the params from the config file are undefined.
my config file:
module.exports = {
env: process.env.NODE_ENV,
};
here is my uncaught exception catcher:
process.on('uncaughtException', (error) => {
errorManagement.handler.handleError(error);
if (!errorManagement.handler.isTrustedError(error)) process.exit(1);
});
and here is error handler, env is undefined, everywhere else env is defined
const {
env,
} = require('../../config');
const logger = require('../../libraries/logger');
const mailer = require('../../libraries/mailer');
function ErrorHandler() {
this.handleError = async (err) => {
console.log(env);
};
}
module.exports.handler = new ErrorHandler();
tree of my project folder:
EDIT:
I found the problem but I'm still not sure why it happened...
in my config.js file I did:
const errorManager = require('./components/errorManagement');
[
'DB_USER',
].forEach((name) => {
if (typeof process.env[name] === 'undefined') {
throw new errorManager.AppError('Environment var missing', 500, `Environment variable ${name} is missing`, true);
}
});
when I deleted the error manager and used express Error everything worked as expected

Hot reloading breaks the app. Final loader didn't return a Buffer or String

I have a custom loader that load the following file data.js
const data = {
a: ()=> 8885555,
b: ()=> 55555
}
module.exports = name => {
return data[name] && data[name]()
}
when I change the above file like changing b value the app breaks
ERROR in ../data.js?name=a
Module build failed: Error: Final loader (../data-loader.js) didn't return a Buffer or String
at runLoaders (C:\Users\010\Saber.js\webpack-hmr-3-ways\middleware\node_modules\webpack\lib\NormalModule.js:319:18)
at C:\Users\010\Saber.js\webpack-hmr-3-ways\middleware\node_modules\loader-runner\lib\LoaderRunner.js:373:3
at iterateNormalLoaders (C:\Users\010\Saber.js\webpack-hmr-3-ways\middleware\node_modules\loader-runner\lib\LoaderRunner.js:214:10)
at iterateNormalLoaders (C:\Users\010\Saber.js\webpack-hmr-3-ways\middleware\node_modules\loader-runner\lib\LoaderRunner.js:221:10)
at C:\Users\010\Saber.js\webpack-hmr-3-ways\middleware\node_modules\loader-runner\lib\LoaderRunner.js:236:3
at runSyncOrAsync (C:\Users\010\Saber.js\webpack-hmr-3-ways\middleware\node_modules\loader-runner\lib\LoaderRunner.js:124:12)
at iterateNormalLoaders (C:\Users\010\Saber.js\webpack-hmr-3-ways\middleware\node_modules\loader-runner\lib\LoaderRunner.js:232:2)
at Array.<anonymous> (C:\Users\010\Saber.js\webpack-hmr-3-ways\middleware\node_modules\loader-runner\lib\LoaderRunner.js:205:4)
at Storage.finished (C:\Users\010\Saber.js\webpack-hmr-3-ways\middleware\node_modules\enhanced-resolve\lib\CachedInputFileSystem.js:43:16)
at provider (C:\Users\010\Saber.js\webpack-hmr-3-ways\middleware\node_modules\enhanced-resolve\lib\CachedInputFileSystem.js:79:9)
# ./index.js 1:10-40
# multi webpack-hot-middleware/client?path=/__webpack_hmr&timeout=20000 ./index.js
this is the custom webpack loader
const devalue = require('devalue')
const vm = require('vm');
module.exports = async function(source, map) {
const callback = this.async()
this.addDependency(this.resourcePath);
const sandbox = {
require,
module
}
vm.createContext(sandbox);
const mod = vm.runInContext(source, sandbox)
const result = await mod(this.resourceQuery.replace('?name=', ''))
return callback(null, `export default ${devalue(result)}`, map);
}
Here is a reproduction repo
So this is what should be done
const devalue = require('devalue')
const vm = require('vm');
module.exports = async function(source, map) {
const callback = this.async()
this.addDependency(this.resourcePath);
const sandbox = {
require,
module: {exports: {}}
}
vm.createContext(sandbox);
const mod = vm.runInContext(source, sandbox)
const result = await mod(this.resourceQuery.replace('?name=', ''))
return callback(null, `export default ${devalue(result)}`, map);
}
The problem is that I was overwriting the loader module.exports with the data.js file module.exports because I was passing the same module to the sandbox so when hot reloading the exported function in the loader would have been replaced with the data.js function which doesn't return a string (in this case because data[name] would be undefined) and that's the cause of the error

Categories

Resources