Convert differently aligned Excel to JSON - JavaScript

There will be multiple Excel files which have the same fields, but the columns might be arranged differently. For example, in the first file the "price" column might come first, while in the second file it might be third.
So if I were to ask the user to enter the column number for each field (so that I know the order of the fields), could I convert only those fields, in the order I want, to JSON with JavaScript or Node.js?
If so, how?
Example:
This is client no. 1's data, stored in this orientation:
https://imgur.com/yIgOF8w
This is client no. 2's data, stored in a different orientation, with one extra column that I won't use:
https://imgur.com/lY96c7J
I want to parse it exactly as client no. 1 stored it, but there are many variations I'll receive. So, as explained above, if I get the column numbers of certain fields, can I normalize the data to the first client's format and transform it to JSON that way?

You can use the exceljs module for this purpose; it has a lot of nice features.
I've updated the example to allow passing the column order to the readValues function.
For example:
var excel = require('exceljs');
var path = require('path');

var wb = new excel.Workbook();
var client1Path = path.resolve(__dirname, 'client1_data.xlsx');
var client2Path = path.resolve(__dirname, 'client2_data.xlsx');

function readValues(filePath, columnNumbers) {
  // Invert { fieldName: columnNumber } into a sparse array indexed by column number,
  // e.g. { price: 1, name: 2 } -> [ , 'price', 'name' ]
  let columnNames = Object.entries(columnNumbers).reduce((a, [key, value]) => {
    a[value] = key;
    return a;
  }, []);
  return wb.xlsx.readFile(filePath).then(() => {
    var worksheet = wb.getWorksheet("Sheet1");
    var values = [];
    worksheet.eachRow((row) => {
      // row.values is 1-based, so its indexes line up with the column numbers above
      let obj = row.values.reduce((o, v, index) => {
        if (columnNames[index]) o[columnNames[index]] = v;
        return o;
      }, {});
      values.push(obj);
    });
    return values;
  });
}

async function testReadData() {
  try {
    let client1Data = await readValues(client1Path, { price: 1, name: 2, country: 3 });
    console.log('testReadData: Client 1 data: ', client1Data);
    let client2Data = await readValues(client2Path, { price: 2, name: 1, country: 4 });
    console.log('testReadData: Client 2 data: ', client2Data);
  } catch (error) {
    console.error('testReadData: Error occurred: ', error);
  }
}

testReadData();
I'm using the same data as in your examples (now corrected).
e.g.
Client 1 data:
$50 Jack USA
$30 Harold USA
Client 2 data:
Jack $50 Florida USA
Harold $30 California USA
The output will be like:
testReadData: Client 1 data:
[ { price: '$50', name: 'Jack', country: 'USA' },
{ price: '$30', name: 'Harold', country: 'USA' } ]
testReadData: Client 2 data:
[ { name: 'Jack', price: '$50', country: 'USA' },
{ name: 'Harold', price: '$30', country: 'USA' } ]
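To cover the "ask the user to enter the sequences" part of the question, one option (a sketch, not part of the original answer; the field list and prompts are made up for illustration) is to ask for each field's column number with Node's readline module and feed the resulting mapping straight into readValues:

const readline = require('readline');

// Build a { fieldName: columnNumber } mapping by asking the user for each field.
async function askColumnNumbers(fields) {
  const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
  const ask = (question) => new Promise((resolve) => rl.question(question, resolve));
  const mapping = {};
  for (const field of fields) {
    // e.g. 'Column number for "price": ' -> user types 2
    mapping[field] = parseInt(await ask(`Column number for "${field}": `), 10);
  }
  rl.close();
  return mapping;
}

// Hypothetical usage with the readValues function above:
// askColumnNumbers(['price', 'name', 'country'])
//   .then((columnNumbers) => readValues(client2Path, columnNumbers))
//   .then((data) => console.log(data));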

Related

Sequelize magic method returning null

Just a bit confused as to why this magic method is returning null. It's probably something very simple, but I'm using methods I wouldn't normally use (bulkCreate) and can't currently see it.
Association: Country.hasOne(Capital, { foreignKey: 'countryId' });
Populating dummy data:
const countries = await Country.bulkCreate([
  { name: 'England' },
  { name: 'Spain' },
  { name: 'France' },
  { name: 'Canada' }
]);
const capitals = await Capital.bulkCreate([
  { name: 'London' },
  { name: 'Madrid' },
  { name: 'Paris' },
  { name: 'Ottawa' }
]);
countries.forEach((country, index) => {
  country.setCapital(capitals[index]);
});
const country = await Country.findOne({ where: { name: 'Spain' } });
console.log(country.name, Object.keys(country.__proto__)); // Spain / magic methods
const capital = await country.getCapital();
console.log(capital); // null
Am I wrong in thinking country.getCapital() should return the relevant entry?
As you might guess, setCapital is an async function because it makes changes in the DB, so you need to await it in a for loop instead of using forEach, which does not support async callbacks:
let index = 0;
for (const country of countries) {
  await country.setCapital(capitals[index]);
  index += 1;
}
It would be better to create the countries one by one and create a capital for each of them, rather than relying on both collections sharing the same index order (the DB might return the created records in a different order).
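A minimal sketch of that "one by one" approach, assuming the same Country/Capital models and an async context (the countryData array here is made up for illustration):

const countryData = [
  { name: 'England', capital: 'London' },
  { name: 'Spain', capital: 'Madrid' },
  { name: 'France', capital: 'Paris' },
  { name: 'Canada', capital: 'Ottawa' }
];

for (const { name, capital } of countryData) {
  // Create the country first, then its capital via the hasOne magic method,
  // so there is no reliance on matching array indexes.
  const country = await Country.create({ name });
  await country.createCapital({ name: capital });
}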
If you are using Sequelize 5.14+, you can do this in a single bulkCreate using the include option.
const countries = await Country.bulkCreate([
  {
    name: 'England',
    Capital: { // This key name should match the model name.
      name: 'London'
    }
  },
  {
    name: 'Spain',
    Capital: {
      name: 'Madrid'
    }
  },
  ...
], {
  include: Capital,
  returning: true // For Postgres: add this if you want the created objects to be returned.
});

Node: How can I filter on a JSON value that is nested within multiple arrays?

I am pulling data from an API that looks like this:
[{
  id: "62ff4289163f2d1ec1d54ff16bd8d731",
  sport_key: "americanfootball_ncaaf",
  commence_time: "2021-08-28T17:00:00Z",
  home_team: "Illinois Fighting Illini",
  away_team: "Nebraska Cornhuskers",
  bookmakers: [{
    key: "unibet",
    title: "Unibet",
    last_update: "2021-07-16T23:33:36Z",
    markets: [{
      key: "spreads",
      outcomes: [{
        name: "Illinois Fighting Illini",
        price: 1.89,
        point: 8
      }, {
        name: "Nebraska Cornhuskers",
        price: 1.89,
        point: -8
      }]
    }]
  }, {
    key: "barstool",
    title: "Barstool Sportsbook",
    last_update: "2021-07-16T23:28:36Z",
    markets: [{
      key: "spreads",
      outcomes: [{
        name: "Illinois Fighting Illini",
        price: 1.91,
        point: 8
      }, {
        name: "Nebraska Cornhuskers",
        price: 1.91,
        point: -8
      }]
    }]
  }]
}]
The relevant section of my code looks like this (data is the JSON posted above):
response.on("end", () => {
  const oddsData = JSON.parse(data);
  let games = oddsData.length;
  for (let i = 0; i < games; i++) {
    let bookies = oddsData[i].bookmakers.length;
    for (let b = 0; b < bookies; b++) {
      console.log(oddsData[i].bookmakers[b]);
    }
  }
});
How can I filter to only show results for the bookmaker with the key "barstool"? I have been googling different array filters and reduce functions all week and I cannot get this figured out. Any help is appreciated.
As I can see, you have an array containing first-level objects, and inside each first-level object you have a bookmakers array, which is itself an array of objects.
If you are looking to filter the bookmakers array, which I think is what you want:
dataArr.forEach(item => {
  item.bookmakers = item.bookmakers.filter(bookmaker => bookmaker.key == "barstool");
});
If you are looking to filter the array containing the first-level objects, you can do something like this:
dataArr = dataArr.filter(item => {
  let keep = false;
  item.bookmakers.forEach(bookmaker => {
    if (bookmaker.key == "barstool") {
      keep = true;
    }
  });
  return keep;
});
My guess would be something like this:
const gamesWithBarstool = oddsData.filter(game => game.bookmakers.some(bookmaker => bookmaker.key === 'barstool'));
With that you should only get the list of games that have odds for your named bookmaker.
filter:
Will return the subset of the array that matches the test.
some:
Will return true if any item in the array matches the test.
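If you also want to drop the other bookmakers from each kept game, the two ideas can be combined. A sketch, assuming oddsData is the parsed array from the question:

const barstoolOdds = oddsData
  // keep only games that have a "barstool" entry at all
  .filter((game) => game.bookmakers.some((bookmaker) => bookmaker.key === 'barstool'))
  // and within each kept game, keep only that bookmaker
  .map((game) => ({
    ...game,
    bookmakers: game.bookmakers.filter((bookmaker) => bookmaker.key === 'barstool')
  }));

console.log(barstoolOdds[0].bookmakers); // only the Barstool Sportsbook entry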

ES6 reduce function affecting array outside of scope

I've rewritten this into a simplified form to demonstrate. I have an array of pickers, each with an array of time entries. I'm using reduce to summarise time entries by type per picker, and then a second reduce to show global entries across both pickers.
The first reduce per picker works as expected.
The second reduce on global time entries works as expected but somehow changes the entries for the first picker (Sam).
Sam & John pick the same amount.
Apples 2h, Peaches 2h, Lemons 1h
Is there a better way to write this? Is there a concept I've failed to understand?
function testBug() {
  // Reducer Function
  function entryReducer(summary, entry) {
    // find an index if the types of fruit are the same
    let index = summary.findIndex((item) => {
      return item.type.id === entry.type.id;
    });
    if (index === -1) {
      summary.push(entry);
    } else {
      summary[index].hours = summary[index].hours + entry.hours;
    }
    return summary;
  }
  let pickers = [
    {
      id: 1,
      identifier: "Sam Smith",
      timeEntries: [
        { type: { id: 1, name: "Apples" }, hours: 1 },
        { type: { id: 2, name: "Peaches" }, hours: 1 },
        { type: { id: 3, name: "Lemons" }, hours: 1 },
        { type: { id: 1, name: "Apples" }, hours: 1 },
        { type: { id: 2, name: "Peaches" }, hours: 1 },
      ],
    },
    {
      id: 2,
      identifier: "John Snow",
      timeEntries: [
        { type: { id: 1, name: "Apples" }, hours: 1 },
        { type: { id: 2, name: "Peaches" }, hours: 1 },
        { type: { id: 3, name: "Lemons" }, hours: 1 },
        { type: { id: 1, name: "Apples" }, hours: 1 },
        { type: { id: 2, name: "Peaches" }, hours: 1 },
      ],
    },
  ];
  let pickersSummary = [];
  let timeEntriesSummary = [];
  for (const picker of pickers) {
    if (picker.timeEntries.length > 0) {
      // reduce time entries into an array of similar types
      picker.timeEntries = picker.timeEntries.reduce(entryReducer, []);
      // push to pickers summary arr
      pickersSummary.push(picker);
      // push time entries to a summary array for later reduce
      picker.timeEntries.map((entry) => timeEntriesSummary.push(entry));
    }
  }
  // Reduce time entries for all pickers
  // Sam & John pick the same amount
  // Apples 2h
  // Peaches 2h
  // Lemons 1h
  // **** If I run this Sam's entries are overwritten with the global time entries ***
  timeEntriesSummary = timeEntriesSummary.reduce(entryReducer, []);
  const results = { pickersSummary, timeEntriesSummary };
  console.log(results);
}
testBug();
module.exports = testBug;
Even though you pass a new array [] to each reduce call, the actual objects contained by these arrays can be shared. This means that when you edit one of the objects in array "A", the objects can also change in array "B".
You know how some languages let you pass variables by value or by reference and how this fundamentally changes how values are handled? JavaScript technically uses call-by-sharing. I suggest reading this other answer: Is JavaScript a pass-by-reference or pass-by-value language?
once an element in an array is pushed into a different array it is separate in memory?
No, it isn't. In JavaScript you will always remember when you made an individual copy of an object (or at least wanted to), because that needs some effort, see What is the most efficient way to deep clone an object in JavaScript? or How do I correctly clone a JavaScript object?
So, just as with a = b, pushing a into an array refers to the original object. See this example, where there is a single object accessible via two variables (x and y) and via both elements of array z, so modifying it as z[1] affects all the others:
let x={a:5};
let y=x;
let z=[x];
z.push(y);
z[1].a=4;
console.log(x);
console.log(y);
console.log(z[0]);
console.log(z[1]);
As your objects are value-like and do not contain anything JSON would not support (like member functions), JSON-based cloning can work on them:
function testBug() {
  // Reducer Function
  function entryReducer(summary, entry) {
    // find an index if the types of fruit are the same
    let index = summary.findIndex((item) => {
      return item.type.id === entry.type.id;
    });
    if (index === -1) {
      //summary.push(entry);
      summary.push(JSON.parse(JSON.stringify(entry))); // <--- the only change
    } else {
      summary[index].hours = summary[index].hours + entry.hours;
    }
    return summary;
  }
  let pickers = [
    { id: 1, identifier: "Sam Smith", timeEntries: [
      { type: { id: 1, name: "Apples" }, hours: 1 },
      { type: { id: 2, name: "Peaches" }, hours: 1 },
      { type: { id: 3, name: "Lemons" }, hours: 1 },
      { type: { id: 1, name: "Apples" }, hours: 1 },
      { type: { id: 2, name: "Peaches" }, hours: 1 },
    ] },
    { id: 2, identifier: "John Snow", timeEntries: [
      { type: { id: 1, name: "Apples" }, hours: 1 },
      { type: { id: 2, name: "Peaches" }, hours: 1 },
      { type: { id: 3, name: "Lemons" }, hours: 1 },
      { type: { id: 1, name: "Apples" }, hours: 1 },
      { type: { id: 2, name: "Peaches" }, hours: 1 },
    ] },
  ];
  let pickersSummary = [];
  let timeEntriesSummary = [];
  for (const picker of pickers) {
    if (picker.timeEntries.length > 0) {
      // reduce time entries into an array of similar types
      picker.timeEntries = picker.timeEntries.reduce(entryReducer, []);
      // push to pickers summary arr
      pickersSummary.push(picker);
      // push time entries to a summary array for later reduce
      picker.timeEntries.map((entry) => timeEntriesSummary.push(entry));
    }
  }
  // Reduce time entries for all pickers
  // Sam & John pick the same amount
  // Apples 2h
  // Peaches 2h
  // Lemons 1h
  timeEntriesSummary = timeEntriesSummary.reduce(entryReducer, []);
  const results = { pickersSummary, timeEntriesSummary };
  console.log(results);
}
testBug();
Now it probably displays what you expected, but in the background it still alters the pickers themselves; you have that picker.timeEntries = ... line running after all. It may be worth mentioning that const something = xy; means you cannot write something = yz; later, so something will stick with a given entity. But if that entity is an object, its internals can still be changed, and that is exactly what happens with picker.timeEntries above (while writing picker = 123; would fail).
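If you would rather not touch the pickers at all, one option (a sketch, not part of the original answer; it assumes the cloning entryReducer above) is to build both summaries into new variables and leave picker.timeEntries alone:

const pickersSummary = pickers
  .filter((picker) => picker.timeEntries.length > 0)
  .map((picker) => ({
    ...picker, // shallow copy of the picker
    timeEntries: picker.timeEntries.reduce(entryReducer, []) // summarised copy, originals untouched
  }));

const timeEntriesSummary = pickers
  .flatMap((picker) => picker.timeEntries) // all raw entries across pickers
  .reduce(entryReducer, []); // summarised globally, again without mutating the originals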

How to get JSON keys and add extra fields?

I'm trying to get the keys of these JSON objects in order to create new objects with an extra field, to create table headers in a React app. JSON data:
let example = [
  {
    id: 1,
    city: 'New York',
  },
  {
    id: 2,
    city: 'Paris',
  },
]
The function:
getKeys() {
  return example.map((key) => {
    return {
      cityName: key, // gets the whole array
      capital: false,
    };
  });
}
I tried Object.keys(example); it returns integers: 0, 1.
How can I get the keys in this case? Thanks.
You are trying to map the keys of an array, since example is an array. If the data is consistent throughout the array, get the first element, example[0], and call Object.keys() on it.
So Object.keys(example[0])
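For example, a sketch of the question's getKeys() using that idea (the field names follow the question's own example):

function getKeys() {
  // Object.keys(example[0]) -> ['id', 'city']
  return Object.keys(example[0]).map((key) => {
    return {
      cityName: key,   // 'id', then 'city'
      capital: false,
    };
  });
}

// getKeys() -> [{ cityName: 'id', capital: false }, { cityName: 'city', capital: false }]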
There's no need to get the keys if you just want to add a property to the items in the array. I think there's a misunderstanding about .map, which gives you each item/object in the array, not the keys.
Something like this perhaps?
let example = [{
  id: 1,
  city: 'New York',
}, {
  id: 2,
  city: 'Paris',
}];

const modifiedArray = function(arr) {
  return arr.map(item => {
    return {
      id: item.id,
      cityName: item.city,
      capital: false,
    };
  });
};

const newArray = modifiedArray(example);
console.log(newArray);

Javascript object as a type of mini database?

Is it possible to use a JavaScript object as a type of mini database? I often find myself needing a kind of database structure when I'm coding in JS but it feels like overkill to use an actual database like MySQL (or similar).
As an example, let's say I need to structure this data as a JS object:
Object idea: Stuff to sell
Items to sell: The junk in the garage
Object structure: List all items including item name, item condition, and item value
In order to make this into a JS object I would maybe write:
var stuffToSell = {};
Then my first item would maybe look like:
var stuffToSell = {
  item: "Coffee Maker",
  condition: "Good",
  price: 5
};
Now to me this seems like I'm on the right track, until I come to add another item and I end up having to use the properties item, condition, and price again in the same JS object — which feels wrong? — or is it?? At this point my brain keeps shouting the word "ARRAY!" at me but I just can't see how I can use an array inside the object, or an object inside an array to achieve what I want.
My end goal (in this simplified example) is to be able to then use object-oriented syntax to be able to access certain items and find out specific information about the item such as price, condition etc. For example if I want to know the price of the "coffee maker" I would like to write something like:
stuffToSell["coffee maker"].price
...and then the result above should be 5.
I feel like I'm on the right track but I think I'm missing the array part? Could someone please tell me what I'm missing or maybe what I'm doing completely wrong! And also if it is wrong to have duplicate property names in the same JS object? For example, is it okay to have:
var stuffToSell = {
  item: "Coffee Maker",
  price: 5,
  item: "Mountain Bike",
  price: 10,
  item: "26 inch TV",
  price: 15
};
...it seems wrong because then how does JS know which price goes with which item??
Thanks in advance :)
You're definitely on the right track!
A lot of people will refer to what you're talking about as a hash.
Here's my suggested structure for you:
var store = {
  coffee_maker: {
    id: 'coffee_maker',
    description: "The last coffee maker you'll ever need!",
    price: 5,
  },
  mountain_bike: {
    id: 'mountain_bike',
    description: 'The fastest mountain bike around!',
    price: 10,
  },
  tv: {
    id: 'tv',
    description: 'A big 26 inch TV',
    price: 15,
  },
}
Having a structure like that will let you do this:
store.mountain_bike.price // gives me 10
Need an array instead, say for filtering or looping over?
Object.keys gives you an array of all the keys in the store: ['coffee_maker', 'mountain_bike', 'tv']
// Now we just have an array of objects
// [{id: 'coffee_maker', price: 5}, {id: 'mountain_bike', price: 10} ...etc]
var arr = Object.keys(store).map(el => store[el])
Need to just filter for items that are less than 10?
This will give us an array of products less than 10:
// gives us [{id: 'coffee_maker', price: 5}]
var productsUnder10 = arr.filter(el => el.price < 10)
These techniques can be chained:
var productsOver10 = Object.keys(store)
.map(el => store[el])
.filter(el => el.price > 10)
Need to add a product?
store['new_product'] = {
  id: 'new_product',
  description: 'The Newest Product',
  price: 9000,
}
Here's another way, which would be good to start getting used to.
This is a 'safe' way to update the store; read up on immutability in JavaScript to learn about it:
store = Object.assign({}, store, {
  'new_product': {
    id: 'new_product',
    description: 'The Newest Product',
    price: 9000,
  }
})
...and another way, that you should also read up on and start using:
This is the object spread operator, basically just an easier way to work with immutable structures
store = {
  ...store,
  'new_product': {
    id: 'new_product',
    description: 'The Newest Product',
    price: 9000,
  }
}
Resources
JavaScript Arrow Functions
Object and Array Spread Syntax
Immutable Javascript using ES6 and beyond
You can actually use JSON or create an array of objects. If you use a separate file to store the objects, first load the file. Then use the array filter method to get a new array matching the filter condition, e.g. when you want the item with id 1. filter returns an array of the matching objects.
var dict = [{
    'id': 1,
    'name': 'coffee-mug',
    'price': 60
  },
  {
    'id': 2,
    'name': 'pen',
    'price': 2
  }
];

function getItemPrice(itemId) {
  var getItem = dict.filter(function(item) {
    return item.id === itemId;
  });
  return getItem[0].price;
}

console.log(getItemPrice(1));
JavaScript objects (and JSON) don't support repeated keys, so you need to set unique keys.
Put an id as your key to group your items:
var stuffToSell = {
  '1': {
    item: "Coffee Maker",
    price: 5
  },
  '2': {
    item: "Mountain Bike",
    price: 10
  }
  .
  .
  .
}
Now you can access the item's price very fast.
Look at this code snippet (Known Ids)
var stuffToSell = {
  '1': {
    item: "Coffee Maker",
    price: 5
  },
  '2': {
    item: "Mountain Bike",
    price: 10
  },
  '3': {
    item: "26 inch TV",
    price: 15
  }
};

let getPrice = (id) => stuffToSell[id].price;
console.log(getPrice('1'));
See? Access to your items is fast and your code follows a readable structure.
Look at this code snippet (Item's name as key)
var stuffToSell = {
  'Coffee Maker': {
    price: 5
  },
  'Mountain Bike': {
    price: 10
  },
  '26 inch TV': {
    price: 15
  }
};

let getPrice = (id) => stuffToSell[id].price;
console.log(getPrice('Coffee Maker'));
Look at this code snippet (Item's name: price)
var stuffToSell = {
  'Coffee Maker': 5,
  'Mountain Bike': 10,
  '26 inch TV': 15
};

let getPrice = (id) => stuffToSell[id];
console.log(getPrice('Coffee Maker'));
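Tying this back to the question's own example, here is a sketch (the condition values other than "Good" are made up) that keys the object by the lower-cased item name and keeps condition alongside price, so stuffToSell["coffee maker"].price works exactly as the question wanted:

var stuffToSell = {
  'coffee maker': { condition: 'Good', price: 5 },
  'mountain bike': { condition: 'Used', price: 10 }, // hypothetical condition
  '26 inch tv': { condition: 'Fair', price: 15 }     // hypothetical condition
};

console.log(stuffToSell['coffee maker'].price);     // 5
console.log(stuffToSell['coffee maker'].condition); // 'Good'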
