Cypress: identify the downloaded file using regex - javascript

I have one scenario where I have to verify the downloaded text file's data against an API response.
Below is the code that I have tried.
Test:
const path = require('path')
const downloadsFolder = Cypress.config('downloadsFolder')

cy.task('deleteFolder', downloadsFolder)
const downloadedFilename = path.join(downloadsFolder, 'ABCDEF.txt') // '*.txt'
....
cy.get('#portmemo').its('response.body')
  .then((response) => {
    const json = JSON.parse(response)
    const resCon = json[0].content.replaceAll(/[\n\r]/g, '')
    cy.readFile(downloadedFilename).then((fc) => {
      const formatedfc = fc.replaceAll(/[\n\r]/g, '')
      cy.wrap(formatedfc).should('contains', resCon)
    })
  })
Task in /cypress/plugins/index.js
const { rmdir } = require('fs')

module.exports = (on, config) => {
  console.log("cucumber started")
  on('task', {
    deleteFolder(folderName) {
      return new Promise((resolve, reject) => {
        rmdir(folderName, { maxRetries: 5, recursive: true }, (err) => {
          if (err) {
            console.error(err)
            return reject(err)
          }
          resolve(null)
        })
      })
    },
  })
}
When downloadedFilename is hard-coded as 'ABCDEF.txt', it works fine. But I need some help getting the (dynamic) file name, as it changes every time (e.g. AUADLFA.txt, CIABJPT.txt, SVACJTM.txt, PKPQ1TM.txt, etc.).
I tried to use '.text' but I get a 'Timed out retrying after 4000ms: cy.readFile("C:\Repositories\xyz-testautomation\cypress\downloads/.txt") failed because the file does not exist' error.
I referred to this doc as well but no luck yet.
What is the right way to use a regex to achieve this? I am also wondering: is there a way to get the most recently downloaded file name?

You can make use of the task shown in the question "How can I verify if the downloaded file contains name that is dynamic".
/cypress/plugins/index.js
const fs = require('fs');

module.exports = (on, config) => {
  on('task', {
    downloads: (downloadspath) => {
      return fs.readdirSync(downloadspath)
    }
  })
}
This returns a list of the files in the downloads folder.
Ideally you'd make it easy on yourself, and set the trashAssetsBeforeRuns configuration. That way, the array will only contain the one file and there's no need to compare arrays before and after the download.
(Just noticed you have a task for it).
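With that task in place, a minimal sketch of the test side, reusing downloadsFolder, path, and resCon from your code (the name pattern here is an assumption based on your sample names AUADLFA.txt, PKPQ1TM.txt, etc.):

cy.task('downloads', downloadsFolder).then((files) => {
  // pick the freshly downloaded .txt file; adjust the pattern to your naming scheme
  const fileName = files.find((f) => /^[A-Z0-9]+\.txt$/.test(f))
  cy.readFile(path.join(downloadsFolder, fileName)).then((fc) => {
    const formatedfc = fc.replaceAll(/[\n\r]/g, '')
    cy.wrap(formatedfc).should('contains', resCon)
  })
})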

Related

Async function immediately returns undefined yet output variable returns a value immediately before the return statement

I am writing a function that downloads and converts a PDF into individual JPG files by page. I am using the imagemagick library to do the conversion. I am having trouble with my processPDF() function, as it immediately returns undefined. I put a console.log statement immediately before the return and it logs the exact value I expect, yet that value doesn't seem to make it outside of the function for some reason.
import im from 'imagemagick'
import { promises as fs } from 'fs'
import path from 'path'
import _ from 'lodash'
import axios from 'axios'
import { v4 as uuid } from 'uuid'

async function processPDF(pdfPath) {
  let basename = path.basename(pdfPath, '.pdf')
  let outputPath = "./img/" + basename + ".jpg";
  console.log(`Converting ${pdfPath}`)
  // Take PDF file and generate individual JPG files
  await im.convert(["-density", 300, pdfPath, outputPath], async (err) => {
    if (err) {
      console.log(err)
      throw `Couldn't Process ${pdfPath}`
    } else {
      // Get every file in Temporary Image Directory
      let files = await fs.readdir(`./img/`)
      // Append directory into filenames
      files = files.map(file => {
        return "./img/" + file
      })
      // We only want the files that match the source pdf's name
      files = files.filter((file) => {
        return file.includes(basename)
      })
      console.log(`Getting ${basename} Buffer Data`)
      // For each file, read and return the buffer data along with the path
      let images = await Promise.all(files.map(async file => {
        const contents = await fs.readFile(file)
        return { path: file, buffer: contents }
      }))
      // Since we read the files asynchronously, reorder the files
      images = _.orderBy(images, (image) => {
        let regex = /\d*.jpg/
        let res = image.path.match(regex)[0]
        res = path.basename(res, '.jpg')
        return res
      })
      let output = { pdf: pdfPath, images }
      // Returns a value
      console.log(output)
      // Returns undefined???
      return output
    }
  })
}

export async function downloadAndProcessPDF(url) {
  // Fetch PDF from server
  let { data } = await axios.get(url, {
    responseType: 'arraybuffer',
    headers: {
      'Content-Type': 'application/json',
      'Accept': 'application/pdf'
    }
  }).catch(e => {
    console.log(e);
    throw `Can't retrieve ${url}`
  })
  // Generate a unique ID for the pdf; since this is called asynchronously,
  // it may be called many times simultaneously
  let id = "./pdf/" + uuid() + ".pdf"
  await fs.writeFile(id, data);
  // tell processPDF to process the pdf in the ./pdf directory with the given filename
  let pdfData = await processPDF(id);
  // Returns undefined???
  console.log(pdfData)
  return pdfData
}
If I had to take a wild guess, I'd think that im.convert is the function giving me trouble. Throughout my source code I'm using promises to handle asynchronous tasks, yet im.convert() uses a callback function. I'm not super familiar with how concurrency works between promises and callback functions, so I think that's probably the issue.
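That guess is right: im.convert does not return a promise, so the await on it is a no-op and processPDF resolves before the callback ever runs; the return output inside the callback returns from the callback, not from processPDF. A minimal sketch of the usual fix, wrapping the callback API in a Promise (convertAsync is an illustrative name, not part of the imagemagick library):

import im from 'imagemagick'

// wrap the callback-based API so it can genuinely be awaited
function convertAsync(args) {
  return new Promise((resolve, reject) => {
    im.convert(args, (err, stdout) => {
      if (err) return reject(err)
      resolve(stdout)
    })
  })
}

// inside processPDF: wait for the conversion to finish, then do the
// file reading/ordering as before, and return output from processPDF itself
// await convertAsync(['-density', 300, pdfPath, outputPath])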

How do I use the pipeline from stream-json to write to file, in Node.js?

I'm trying to use stream-json to read a zip, unzip it, and then write it to file. I don't think I understand how to use the library.
Based on the link above, they have this example:
const {chain} = require('stream-chain');
const {parser} = require('stream-json');
const {pick} = require('stream-json/filters/Pick');
const {ignore} = require('stream-json/filters/Ignore');
const {streamValues} = require('stream-json/streamers/StreamValues');
const fs = require('fs');
const zlib = require('zlib');

const pipeline = chain([
  fs.createReadStream('sample.json.gz'),
  zlib.createGunzip(),
  parser(),
  pick({filter: 'data'}),
  ignore({filter: /\b_meta\b/i}),
  streamValues(),
  data => {
    const value = data.value;
    // keep data only for the accounting department
    return value && value.department === 'accounting' ? data : null;
  }
]);

let counter = 0;
pipeline.on('data', () => ++counter);
pipeline.on('end', () =>
  console.log(`The accounting department has ${counter} employees.`));
However I don't want to count anything, I just want to write to file. Here is what I have that works:
function unzipJson() {
  const zipPath = Path.resolve(__dirname, 'resources', 'AllPrintings.json.zip');
  const jsonPath = Path.resolve(__dirname, 'resources', 'AllPrintings.json');
  console.info('Attempting to read zip');
  return new Promise((resolve, reject) => {
    let error = null;
    Fs.readFile(zipPath, (err, data) => {
      error = err;
      if (!err) {
        const zip = new JSZip();
        zip.loadAsync(data).then((contents) => {
          Object.keys(contents.files).forEach((filename) => {
            console.info(`Writing ${filename} to disk...`);
            zip.file(filename).async('nodebuffer').then((content) => {
              Fs.writeFileSync(jsonPath, content);
            }).catch((writeErr) => { error = writeErr; });
          });
        }).catch((zipErr) => { error = zipErr; });
        resolve();
      } else if (error) {
        console.log(error);
        reject(error);
      }
    });
  });
}
However I can't easily add any processing to this, so I wanted to replace it with stream-json. This is my partial attempt, as I don't know how to finish:
function unzipJson() {
  const zipPath = Path.resolve(__dirname, 'resources', 'myfile.json.zip');
  const jsonPath = Path.resolve(__dirname, 'resources', 'myfile.json');
  console.info('Attempting to read zip');

  const pipeline = chain([
    Fs.createReadStream(zipPath),
    zlib.createGunzip(),
    parser(),
    Fs.createWriteStream(jsonPath),
  ]);
  // use the chain, and save the result to a file
  pipeline.on(/*what goes here?*/)
}
Later on I intend to add extra processing of the json file(s), but I want to learn the basics before I start throwing in extra functionality.
I can't produce a minimal example unfortunately, as I don't know what goes into the pipeline.on function. I'm trying to understand what I should do, not what I've done wrong.
I also looked at the related stream-chain, which has an example that ends like so:
// use the chain, and save the result to a file
dataSource.pipe(chain).pipe(fs.createWriteStream('output.txt.gz'));
But at no point does the documentation explain where dataSource comes from, and I think my chain creates its own by reading the zip from file?
How am I supposed to use these streaming libraries to write to file?
I don't want to count anything, I just want to write to file
In that case, you'll need to convert the token/JSON data stream back into a text stream that you can write to a file. You can use the library's Stringer for that. Its documentation also contains an example that seems to be more in line with what you want to do:
chain([
  fs.createReadStream('data.json.gz'),
  zlib.createGunzip(),
  parser(),
  pick({filter: 'data'}), // omit this if you don't want to do any processing
  stringer(),
  zlib.createGzip(), // omit this if you want to write an unzipped result
  fs.createWriteStream('edited.json.gz')
]);
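Applied to your unzipJson, here is a minimal sketch of the whole round trip. One assumption to flag: zlib.createGunzip() handles gzip-compressed data; a real multi-file .zip archive would need an unzip library (such as the JSZip you already use) instead. As for "what goes into pipeline.on", you listen for the write stream's events:

const {chain} = require('stream-chain');
const {parser} = require('stream-json');
const {stringer} = require('stream-json/Stringer');
const Fs = require('fs');
const Path = require('path');
const zlib = require('zlib');

function unzipJson() {
  const zipPath = Path.resolve(__dirname, 'resources', 'myfile.json.gz');
  const jsonPath = Path.resolve(__dirname, 'resources', 'myfile.json');
  return new Promise((resolve, reject) => {
    const output = Fs.createWriteStream(jsonPath);
    chain([
      Fs.createReadStream(zipPath),
      zlib.createGunzip(),
      parser(),
      // extra processing (pick, ignore, filters) can be inserted here later
      stringer(),
    ])
      .on('error', reject)   // errors from any stage of the chain
      .pipe(output)
      .on('finish', resolve) // the file has been fully written
      .on('error', reject);
  });
}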

Read a file having a dynamic name every time it is generated in Cypress?

As I'm new to Cypress, I have to write a test case that checks whether a file is downloaded. I've been through multiple posts on similar test cases, but all of them read a file with a static name. In my case, I have to read a file that is downloaded after clicking the download button, and every time the file is downloaded it has a dynamic name, though the filename follows some patterns (e.g. it starts with a fixed set of characters).
I wanted to achieve something like this in cypress,
cy.readFile('C:\Users\UserName\Downloads\${Regular expression to match the filename pattern}')
Below is a snippet from the cy.task() documentation, in which the task checks whether a file exists. But if the filename is not static, how can this be achieved?
// in plugins/index.js
const fs = require('fs')

module.exports = (on, config) => {
  on('task', {
    readFileMaybe (filename) {
      if (fs.existsSync(filename)) {
        return fs.readFileSync(filename, 'utf8')
      }
      return null
    }
  })
}
I'm not sure there are any file-read or file-exists commands that can take a regular expression.
You can read the C:\Users\UserName\Downloads folder before and after the download, and take the diff.
cypress.json
{
  "downloadsFolder": "C:\\Users\\UserName\\Downloads"
}
plugins
const fs = require('fs')

module.exports = (on, config) => {
  on('task', {
    filesInDownload (folderName) {
      return fs.readdirSync(folderName)
    }
  })
  return config
}
Test
const downloadsFolder = Cypress.config('downloadsFolder')

cy.task('filesInDownload', downloadsFolder).then(files1 => {
  // download
  cy.task('filesInDownload', downloadsFolder).then(files2 => {
    let difference = files2.filter(x => !files1.includes(x));
    expect(difference.length).to.be.gt(0)
  })
})
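If you'd rather match the fixed filename prefix with a regex than diff the folder, the matching can be done inside a task. A sketch (readFileMatching and its arguments are illustrative, not built-ins; note that a RegExp object won't survive cy.task's serialization, so the pattern travels as a string):

plugins

const fs = require('fs')
const path = require('path')

module.exports = (on, config) => {
  on('task', {
    // return the contents of the first file whose name matches, or null
    readFileMatching({ dir, pattern }) {
      const re = new RegExp(pattern)
      const match = fs.readdirSync(dir).find((f) => re.test(f))
      return match ? fs.readFileSync(path.join(dir, match), 'utf8') : null
    }
  })
}

Test

cy.task('readFileMatching', {
  dir: Cypress.config('downloadsFolder'),
  pattern: '^FIXED.*\\.txt$', // adjust to your fixed set of characters
}).should('not.be.null')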
I wanted to achieve something like this in cypress,
cy.readFile('C:\Users\UserName\Downloads\${Regular expression to match the filename pattern}')

I think you need to work around this case as follows:
delete everything in the downloads folder first
use cy.readFile with a filename taken from readdirSync
Ex:
const downloadsFolder = Cypress.config('downloadsFolder')
cy.task('filesInDownload', downloadsFolder).then(files => {
  cy.readFile(downloadsFolder + '/' + files[0]).should('contain', content) // content = expected text
})

Reading Multiple Files in a directory one by one in Node.js

I'm trying to point this at a bunch of files in a folder; it should read each file and return the specified information for each unique path. (That is, the /resources directory has separate files like f1.txt and f2.txt, e.g. /resources/f1.txt.)
The output for a single file is:
{ url: 'account/43',
  status: '200',
  headers: [ 'content-type = application/text' ],
  body: [ '{ name: XYZ }' ] }
We store the url of each file as one primary variable to check. The code below works fine for a single file in a folder; the motive here is to read all files one by one and save the output in a variable.
const fs = require('fs')
const path = require('path')
const _ = require('lodash')

function parseFile(filePath) {
  let content
  let output = new Promise((resolve, reject) => {
    fs.readFile(filePath, function (err, data) {
      if (err) return reject(err)
      content = data.toString().split(/(?:\r\n|\r|\n)/g).map(function (line) {
        return line.trim()
      }).filter(Boolean)
      // processFile(content) is defined elsewhere in the project
      resolve(processFile(content))
    })
  })
  return output
}
if (require.main === module) {
  let p = parseFile(path.join(__dirname, '../resources/FileData1.txt'))
  p.then((results) => {
    console.log(results.status)
  })
}
Any suggestion would be appreciated, thanks!
The problem is that you are working on just one file. First, you will need to retrieve all the files, and then apply your function to each.
Step 1 - Get all the files for the given dir.
Step 2 - Create promise for each file.
Step 3 - Use Promise.all and then use the callback which will have all the results.
if (require.main === module) {
  let dir = path.join(__dirname, '../resources')
  let files = fs.readdirSync(dir) // gives all the files
  let promises = files.map(file => parseFile(path.join(dir, file))) // an array of promises, one per file
  Promise.all(promises).then(console.log) // resolves with all the results once every promise has resolved
  // WIN
}
This will give the results in the same order as the files.
motive here to read all file one by one and save the output in a variable.
With the approach above, you are not reading the files one by one, but rather exploiting the power of promises and allowing the reads to run concurrently.
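If you genuinely need strict one-by-one reads instead (for example, to bound memory use), a for...of loop with await processes each file before starting the next. A small sketch reusing parseFile from the question:

async function parseAllSequentially(dir) {
  const results = []
  for (const file of fs.readdirSync(dir)) {
    // each await settles before the next file is opened
    results.push(await parseFile(path.join(dir, file)))
  }
  return results
}

parseAllSequentially(path.join(__dirname, '../resources')).then(console.log)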

Saving JSON in Electron

I am building an app using Electron. In this app, I am building a data structure using JSON. My data structure looks like this:
{
  items: [
    { id: 1, name: 'football' },
    { id: 2, name: 'soccer ball' },
    { id: 3, name: 'basketball' }
  ]
}
I want to save this JSON to a file called "data.json". I want to save it to a file because I want to load it the next time the application starts. My challenge is, I do not know how to save the data. In fact, I'm not sure where I should even save the file. Do I save it in the same directory as the app? Or is there some cross-platform approach I should use?
Currently, I have the following:
saveClick: function() {
  var json = JSON.stringify(this.data);
  // assume json matches the JSON provided above.
  // Now, I'm not sure how to actually save the file.
}
So, how / where do I save JSON to the local file system for use at a later time?
Electron lacks an easy way to persist and read user settings for your application. electron-json-storage implements an API somewhat similar to localStorage to write and read JSON objects to/from the operating system's application data directory, as defined by app.getPath('userData').
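A minimal sketch of its API (the 'items' key is illustrative; each key is persisted as a JSON file under the userData directory):

const storage = require('electron-json-storage');

// persist the data structure from the question
storage.set('items', { items: [{ id: 1, name: 'football' }] }, (error) => {
  if (error) throw error;
});

// read it back, e.g. on the next application start
storage.get('items', (error, data) => {
  if (error) throw error;
  console.log(data.items);
});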
Electron uses Node.js at its core. You can use the following:
const fs = require("fs");

const read_file = function (path) {
  return fs.readFileSync(path, 'utf8');
};

const write_file = function (path, output) {
  fs.writeFileSync(path, output);
};
For write_file(), you can pass "document.txt" as the path and the file will be written relative to the process's current working directory, or you can pass a full path like "C:/Users/usern/document.txt" and it will write to that specific location.
Also, you can choose any file extension you want (e.g. ".txt", ".js", ".json", etc.). You can even make up your own!
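Tying this back to the question's saveClick, a minimal sketch that targets Electron's per-user data directory (cross-platform; this assumes the code runs in the main process, or that the path was obtained there and handed to the renderer):

const { app } = require('electron');
const path = require('path');

// app.getPath('userData') resolves to the per-user application data
// directory on every platform (e.g. %APPDATA%/<your app> on Windows)
const dataPath = path.join(app.getPath('userData'), 'data.json');

function saveClick(data) {
  write_file(dataPath, JSON.stringify(data)); // reuses write_file from above
}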
I wrote a simple library that you can use, with a simple interface; it also creates subdirectories and works with both promises and callbacks.
It will save the data under app.getPath("appData") as the root folder.
https://github.com/ran-y/electron-storage
Installation
$ npm install --save electron-storage
Usage
const storage = require('electron-storage');
API
storage.get(filePath, (err, data) => {
  if (err) {
    console.error(err)
  } else {
    console.log(data);
  }
});

storage.get(filePath)
  .then(data => {
    console.log(data);
  })
  .catch(err => {
    console.error(err);
  });

storage.set(filePath, data, (err) => {
  if (err) {
    console.error(err)
  }
});

storage.set(filePath, data)
  .then(data => {
    console.log(data);
  })
  .catch(err => {
    console.error(err);
  });
const fs = require('fs');

let student = {
  name: 'Mike',
  age: 23,
  gender: 'Male',
  department: 'English',
  car: 'Honda'
};

let data = JSON.stringify(student, null, 2);

fs.writeFile('student-3.json', data, (err) => {
  if (err) throw err;
  console.log('Data written to file');
});

console.log('This is after the write call');
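To load it the next time the application starts, a matching read (a small sketch using the same student-3.json file):

fs.readFile('student-3.json', 'utf8', (err, raw) => {
  if (err) throw err;
  const student = JSON.parse(raw);
  console.log(student.name); // 'Mike'
});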
There are multiple steps:
Step 1: As of version 5, the default for nodeIntegration changed from true to false. You can enable it when creating the BrowserWindow:
const createWindow = () => {
  const win = new BrowserWindow({
    width: 1000,
    height: 800,
    webPreferences: {
      nodeIntegration: true,
      contextIsolation: false,
    }
  })
}
Step 2:
function writetofile() {
  let configsettings = {
    break: output.innerHTML,
    alwaysonoff: toggleoutput.innerHTML,
  };
  let settings_data = JSON.stringify(configsettings, null, 2);

  const fs = require("fs");
  fs.writeFileSync("assets/configs/settings.json", settings_data);
}
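One caveat: a relative path like "assets/configs/settings.json" resolves against the current working directory, which typically changes once the app is packaged. A sketch of a more robust variant, assuming it runs in the main process (or that the userData path is sent to the renderer over IPC first):

const { app } = require('electron');
const path = require('path');
const fs = require('fs');

// a stable, writable per-user location even in a packaged app
const settingsPath = path.join(app.getPath('userData'), 'settings.json');
fs.writeFileSync(settingsPath, settings_data); // settings_data as built in writetofile() above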
