How to make an array of objects from dynamic values? - javascript

I have the following function
const extractTestCases = () => {
  const filterFile = files.filter(test => test.includes('.test.ts'))
  filterFile.forEach(testsuite => {
    const testSuites = fs.readFileSync(testsuite, { encoding: "utf8" });
    testCases.push(regexMatcher(testSuites, TestRegex, 1))
  })
  return testCases;
}
filterFile is an array of file paths, and I loop over it with forEach to extract some information from each file. What I want to do is return an array of objects like this:
[{"name of the file (testSuite)":["extracted value from this file regexMatcher(testSuites, TestRegex, 1)"]},{"testSuite2":["","",""]},...]

Try something like this:
const extractTestCases = () => {
  const filesArray = []
  const filterFile = files.filter(test => test.includes('.test.ts'))
  filterFile.forEach(testsuite => {
    const testSuites = fs.readFileSync(testsuite, { encoding: "utf8" });
    testCases.push(regexMatcher(testSuites, TestRegex, 1))
    filesArray.push({[testsuite]: regexMatcher(testSuites, TestRegex, 1)})
  })
  return filesArray;
}
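If you don't need the separate testCases array, the same structure can be built a bit more directly with .map. A minimal sketch, assuming files, regexMatcher and TestRegex from your snippet are in scope:
const fs = require("fs");

// Build [{ "<file path>": [matches...] }, ...] in one pass,
// assuming files, regexMatcher and TestRegex come from the surrounding module.
const extractTestCases = () =>
  files
    .filter((file) => file.includes(".test.ts"))
    .map((testSuite) => {
      const contents = fs.readFileSync(testSuite, { encoding: "utf8" });
      // computed property name: the file path becomes the object key
      return { [testSuite]: regexMatcher(contents, TestRegex, 1) };
    });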

Run async/await function inside a reduce Javascript [duplicate]

This question already has answers here: JavaScript array .reduce with async/await (11 answers).
I need to fetch values from another API using the guid inside this particular array, and then group the results together (hence the JavaScript reduce here).
However, I cannot get the values sumEstimatedHours and sumWorkedHours as expected. Can someone suggest an approach, please?
export const groupProjectsByPM = (listOfProjects) => {
  const dir = "./json";
  const estimatedHours = fs.existsSync(dir)
    ? JSON.parse(fs.readFileSync("./json/phases.json", "utf-8"))
    : null;
  let sumWorkedHours, sumEstimatedHours;
  const groupedProjects = listOfProjects?.reduce(
    (
      group,
      {
        guid,
        projectOwner: { name: POName },
        name,
        customer: { name: customerName },
        deadline,
        calculatedCompletionPercentage,
      }
    ) => {
      listOfProjects.map(async (element, index) => {
        // const element = listOfProjects[index];
        sumWorkedHours = await getWorkhoursByProject(element?.guid).then(
          (res) => {
            return res.reduce((acc, cur) => {
              return acc + cur.quantity;
            }, 0);
          }
        );
        const filteredEstimatedHours = estimatedHours.filter(
          (item) => item.project.guid === element.guid
        );
        sumEstimatedHours = filteredEstimatedHours.reduce((acc, cur) => {
          return acc + cur.workHoursEstimate;
        }, 0);
        group[POName] = group[POName] || [];
        group[POName].push({
          guid,
          name,
          POName,
          customerName,
          deadline,
          calculatedCompletionPercentage,
          sumEstimatedHours,
          sumWorkedHours,
        });
        return group;
      });
      return group;
    },
    []
  );
  return groupedProjects;
};
Here is an example of async/await inside reduce.
Let's assume that we have an array of numbers:
const arrayOfNumbers = [2,4,5,7,6,1];
We are going to sum them using the reduce function:
const sumReducer = async () => {
  const sum = await arrayOfNumbers.reduce(async (promisedSum, num) => {
    const sumAcc = await promisedSum
    // any promised function can be called here..
    return sumAcc + num
  }, 0)
  console.log(sum)
}
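With the array above, calling sumReducer() logs 25.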
So the trick is to remember to await the accumulator inside the reduce function. Applied to your code:
export const groupProjectsByPM = async (listOfProjects) => {
  const dir = "./json";
  const estimatedHours = fs.existsSync(dir)
    ? JSON.parse(fs.readFileSync("./json/phases.json", "utf-8"))
    : null;
  const groupedProjects = await listOfProjects?.reduce(
    async (
      promisedGroup,
      {
        guid,
        projectOwner: { name: POName },
        name,
        customer: { name: customerName },
        deadline,
        calculatedCompletionPercentage,
      }
    ) => {
      // the accumulator (group in your case) is now a promise, so await it first
      const group = await promisedGroup;
      // the destructured fields already describe the current project,
      // so the inner listOfProjects.map(...) loop is not needed
      const workedHours = await getWorkhoursByProject(guid);
      const sumWorkedHours = workedHours.reduce((acc, cur) => {
        return acc + cur.quantity;
      }, 0);
      const filteredEstimatedHours = estimatedHours.filter(
        (item) => item.project.guid === guid
      );
      const sumEstimatedHours = filteredEstimatedHours.reduce((acc, cur) => {
        return acc + cur.workHoursEstimate;
      }, 0);
      group[POName] = group[POName] || [];
      group[POName].push({
        guid,
        name,
        POName,
        customerName,
        deadline,
        calculatedCompletionPercentage,
        sumEstimatedHours,
        sumWorkedHours,
      });
      return group;
    },
    []
  );
  return groupedProjects;
};
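If the projects don't have to be processed one after another, a parallel variant is also an option: resolve everything with Promise.all first, then group synchronously. A sketch under the same assumptions about getWorkhoursByProject and the project shape as above (groupProjectsByPMParallel is just an illustrative name):
export const groupProjectsByPMParallel = async (listOfProjects = []) => {
  // resolve the worked hours for every project in parallel
  const withHours = await Promise.all(
    listOfProjects.map(async (project) => {
      const workedHours = await getWorkhoursByProject(project?.guid);
      const sumWorkedHours = workedHours.reduce((acc, cur) => acc + cur.quantity, 0);
      return { project, sumWorkedHours };
    })
  );
  // group synchronously once everything has resolved
  // (sumEstimatedHours can be computed here the same way as above)
  return withHours.reduce((group, { project, sumWorkedHours }) => {
    const POName = project.projectOwner.name;
    group[POName] = group[POName] || [];
    group[POName].push({ ...project, sumWorkedHours });
    return group;
  }, {});
};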
Best of luck ...

Reactjs problem rendering binary image retrieved from mongodb

Hello, I have images in a MongoDB database and I'm trying to render them on the client side, but it's not working. I convert the Uint8 buffer data into base64 so I can render it; that seems to work, and the result is stored in the images state, but the images are not accessible through the associative array.
useEffect( () => {
  const getAllDoctors = async () => {
    const result = await api.get('doctor/all')
    const myImages = []
    setDoctors(result.data)
    await result.data.forEach(async doctor => {
      myImages[doctor._id] = await base64_arraybuffer(doctor.photo.data.data)
    })
    setImages(myImages)
    setLoading(false)
  }
  getAllDoctors()
}, [])
As for the render:
return (
  <div>
    {
      images.map((image, key) => {
        console.log(doctors)
        return (
          <div key={key}>
            <img alt={'image'} src={`data:image/png; base64, ${image}`}/>
            <div>{`Doctor + ${images}`}</div>
          </div>
        )
      })
    }
  </div>
);
converter (not mine):
const base64_arraybuffer = async (data) => {
  const base64url = await new Promise((r) => {
    const reader = new FileReader()
    reader.onload = () => r(reader.result)
    reader.readAsDataURL(new Blob([data]))
  })
  return base64url.split(",", 2)[1]
}
Two things:
The way your array assignment works doesn't make sense. If _id is an ID string, then you are using a string to key an array, and data assigned at that key won't be included in loops or array methods like map. Use Array.prototype.push to add each item at the next available index.
Array.prototype.forEach and async/await don't play nicely together, because forEach ignores the promises returned by an async callback. Try a traditional loop instead, as below.
useEffect(() => {
  const getAllDoctors = async () => {
    const result = await api.get("doctor/all");
    const myImages = [];
    setDoctors(result.data);
    for (const doctor of result.data) {
      const image = await base64_arraybuffer(doctor.photo.data.data);
      myImages.push(image)
    }
    setImages(myImages);
    setLoading(false);
  };
  getAllDoctors();
}, []);
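With the images pushed in the same order as result.data, the render can pair each doctor with its image by index. A sketch, assuming the doctors, images and loading state variables that pair with the setters in your effect:
// images[index] lines up with doctors[index] because both come from result.data in order
return (
  <div>
    {!loading &&
      doctors.map((doctor, index) => (
        <div key={doctor._id}>
          <img alt="doctor" src={`data:image/png;base64,${images[index]}`} />
        </div>
      ))}
  </div>
);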

Building an Object from fetch statement

I have some code that, when you console.log it, looks like the image below:
The code I am running is as follows:
onClick={() => {
  const stream = fetch(
    'https://lichess.org/api/games/user/neio',
    { headers: { Accept: 'application/x-ndjson' } }
  );
  const onMessage = obj => {
    console.log('test', obj);
  };
  const onComplete = () =>
    console.log('The stream has completed');
  stream.then(readStream(onMessage)).then(onComplete);
}}
export const readStream = processLine => response => {
  const stream = response.body.getReader();
  const matcher = /\r?\n/;
  const decoder = new TextDecoder();
  let buf = '';
  const loop = () =>
    stream.read().then(({ done, value }) => {
      if (done) {
        if (buf.length > 0) processLine(JSON.parse(buf));
      } else {
        const chunk = decoder.decode(value, {
          stream: true,
        });
        buf += chunk;
        const parts = buf.split(matcher);
        buf = parts.pop();
        for (const i of parts) processLine(JSON.parse(i));
        return loop();
      }
    });
  return loop();
};
export default readStream;
What I am trying to do is build a parent object that contains all these individual rows of data.
I'm new to promises, fetch, etc., so currently I have no idea how to build this parent object that holds each individual row.
Any suggestions?
Can't you have a global array and add items to it like:
var arrCollection = [];
...
const onMessage = obj => {
arrCollection.push(obj);
};
You can then wrap those items in an object like this:
var objCollection = { items: arrCollection };
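Wired into the handlers from your onClick, that could look like this (a sketch; readStream and the endpoint are unchanged from the question):
const arrCollection = [];

const onMessage = obj => {
  arrCollection.push(obj); // each NDJSON line becomes one entry
};

const onComplete = () => {
  // build the parent object once the stream has finished
  const objCollection = { items: arrCollection };
  console.log('The stream has completed', objCollection);
};

fetch('https://lichess.org/api/games/user/neio', {
  headers: { Accept: 'application/x-ndjson' },
})
  .then(readStream(onMessage))
  .then(onComplete);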

Create unique array while pushing objects to array in React useState hook

I am declaring React state as below:
const [selectedFiles, setselectedFiles] = useState([]);
I use it in a function as below:
function handleAcceptedFiles(files) {
  files.map((file) =>
    Object.assign(file, {
      preview: URL.createObjectURL(file),
      formattedSize: file.size,
    })
  );
  selectedFiles.length === 0 ? setselectedFiles(files) : setselectedFiles(oldFiles => [...oldFiles,...files])
}
At this point, how can I add only new files and avoid duplicates when doing this:
setselectedFiles(oldFiles => [...oldFiles,...files])
You could create a lookup object with file names as keys and files as values, then grab the values from it:
function handleAcceptedFiles(files) {
  const pendingFiles = files.map(file =>
    Object.assign(file, {
      preview: URL.createObjectURL(file),
      formattedSize: file.size
    })
  )
  const dedupFiles = Object.values(
    [...selectedFiles, ...pendingFiles].reduce((lookup, file) => {
      if (lookup[file.name] === undefined) {
        lookup[file.name] = file
      }
      return lookup
    }, {})
  )
  setselectedFiles(dedupFiles)
}
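Note that this deduplicates by file.name, so two different files that happen to share a name will collapse into one entry; if that is too aggressive, the lookup key could combine name and size instead. Whether name alone is the right notion of "duplicate" is an assumption that depends on your use case.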
Maybe this is what you need?
function handleAcceptedFiles(files) {
  // Map over the current selection of files, generate required fields
  const newFiles = files.map(file => {
    return Object.assign(file, {
      preview: URL.createObjectURL(file),
      formattedSize: file.size,
    });
  });
  // Find non-duplicate entries in previously selected files
  // (compare by name: each URL.createObjectURL call returns a fresh URL,
  // so comparing previews would never match)
  const nonDupFiles = selectedFiles.filter(oldFile => {
    const index = newFiles.findIndex(newFile => newFile.name === oldFile.name);
    return index === -1; // index -1 => file wasn't found => non duplicate
  });
  // Concat of new files and non-dup files is what we need in state
  setselectedFiles(newFiles.concat(nonDupFiles));
}

Why does my code using insertMany() skip some of the records and insert the same records multiple times?

I have 9577 unique records in a CSV file.
This code inserts about 9800 records: not all of the records, and duplicates of some of them. Any idea why it doesn't insert exactly the 9577 unique records, and why it duplicates some of them? Below I also include the remaining part of the code so you get the whole picture.
function bulkImportToMongo(arrayToImport, mongooseModel) {
  const Model = require(`../../../models/${mongooseModel}`);
  let batchCount = Math.ceil(arrayToImport.length / 100);
  console.log(arrayToImport.length);
  let ops = [];
  for (let i = 0; i < batchCount; i++) {
    // console.log(i);
    let batch = arrayToImport.slice(i, i + 100);
    console.log(batch.length);
    ops.push(Model.insertMany(batch));
  }
  return ops;
  return Promise.all(ops).then(results => {
    // results is an array of results for each batch
    console.log("results: ", results);
  });
}
I parse the CSV file:
const Promise = require("bluebird");
const csv = require("fast-csv");
const path = require("path");
const fs = Promise.promisifyAll(require("fs"));
const promiseCSV = Promise.method((filePath, options) => {
  return new Promise((resolve, reject) => {
    var records = [];
    csv
      .fromPath(filePath, options)
      .on("data", record => {
        records.push(record);
      })
      .on("end", () => {
        // console.log(records);
        resolve(records);
      });
  });
});
And here is the script that connects it all together:
const path = require("path");
const promiseCSV = require("./helpers/ImportCSVFiles");
const {
  connectToMongo,
  bulkImportToMongo
} = require("./helpers/mongoOperations");
const filePath = path.join(__dirname, "../../data/parts.csv");
const options = {
  delimiter: ";",
  noheader: true,
  headers: [
    "facility",
    "partNumber",
    "partName",
    "partDescription",
    "netWeight",
    "customsTariff"
  ]
};
connectToMongo("autoMDM");
promiseCSV(filePath, options).then(records => {
  bulkImportToMongo(records, "parts.js");
});
It looks like your issue is simply the i++. With i++, slice(i, i + 100) produces overlapping batches (slice(0, 100), slice(1, 101), slice(2, 102), ...), so the same records get inserted over and over while records further into the file are never reached at all. Step by the batch size instead, and loop over the array length rather than the batch count:
for (let i = 0; i < arrayToImport.length; i += 100 /* NOT i++ */) {
  let batch = arrayToImport.slice(i, i + 100);
  //...
}
I solved it.
I hope this helps others... :-)
I had two errors: one in the function promiseCSV (changed to parseCSV), and bad logic in bulkImportToMongo.
Complete solution:
I parsed and imported 602,198 objects, and here is how long it took using node --max_old_space_size=8000 on a MacBook Pro with 8 GB of RAM.
console
➜ database git:(master) ✗ node --max_old_space_size=8000 partImport.js
Connected to db!
Time to parse file: : 5209.325ms
Disconnected from db!
Time to import parsed objects to db: : 153606.545ms
➜ database git:(master) ✗
parseCSV.js
const csv = require("fast-csv");
function parseCSV(filePath, options) {
  return new Promise((resolve, reject) => {
    console.time("Time to parse file");
    var records = [];
    csv
      .fromPath(filePath, options)
      .on("data", record => {
        records.push(record);
      })
      .on("end", () => {
        console.timeEnd("Time to parse file");
        resolve(records);
      });
  });
}
module.exports = parseCSV;
mongodb.js
const mongoose = require("mongoose");
mongoose.Promise = global.Promise;
function connectToMongo(databaseName) {
  mongoose.connect(`mongodb://localhost:27017/${databaseName}`, {
    keepAlive: true,
    reconnectTries: Number.MAX_VALUE,
    useMongoClient: true
  });
  console.log("Connected to db!");
}
function disconnectFromMongo() {
  mongoose.disconnect();
  console.log("Disconnected from db!");
}
function bulkImportToMongo(arrayToImport, mongooseModel) {
  const Model = require(`../../../models/${mongooseModel}`);
  const batchSize = 100;
  let batchCount = Math.ceil(arrayToImport.length / batchSize);
  let recordsLeft = arrayToImport.length;
  let ops = [];
  let counter = 0;
  for (let i = 0; i < batchCount; i++) {
    let batch = arrayToImport.slice(counter, counter + batchSize);
    counter += batchSize;
    ops.push(Model.insertMany(batch));
  }
  return Promise.all(ops);
}
module.exports.bulkImportToMongo = bulkImportToMongo;
module.exports.connectToMongo = connectToMongo;
module.exports.disconnectFromMongo = disconnectFromMongo;
partImport.js
const path = require("path");
const parseCSV = require("./helpers/parseCSV");
const {
  connectToMongo,
  disconnectFromMongo,
  bulkImportToMongo
} = require("./helpers/mongodb");
const filePath = path.join(__dirname, "../../data/parts.csv");
const options = {
  delimiter: ";",
  noheader: true,
  headers: [
    "facility",
    "partNumber",
    "partName",
    "partDescription",
    "netWeight",
    "customsTariff"
  ]
};
connectToMongo("autoMDM");
parseCSV(filePath, options)
  .then(records => {
    console.time("Time to import parsed objects to db");
    return bulkImportToMongo(records, "parts.js");
  })
  /* .then(result =>
    console.log("Total batches inserted: ", result, result.length)
  ) */
  .then(() => {
    disconnectFromMongo();
    console.timeEnd("Time to import parsed objects to db");
  })
  .catch(error => console.log(error));
