Export a local file to DynamoDB with Node.js - JavaScript

I need to read an Excel spreadsheet, generate a unique field from it, and create an id to save to the database.
I can export data from an xlsx file to DynamoDB with this script, but it fails when the file is too large. Is there a way to limit the read so the file is processed in parts, without modifying or splitting the file itself?
var xlsx = require('xlsx');
var AWS = require("aws-sdk");

const controller = {};
var docClient = new AWS.DynamoDB.DocumentClient();
var table = "Table";

controller.export = (req, res) => {
  var wb = xlsx.readFileSync("data/sample.xlsx", { cellDates: true });
  var ws = wb.Sheets["FIT"];
  var data = xlsx.utils.sheet_to_json(ws);

  // Build the unique id from three columns
  var NewData = data.map(function (record) {
    record.NEW_ID = record.CD_EMPRESA + "_" + record.LOC_INI + "_" + record.LOC_FIM;
    return record;
  });

  for (let index = 0; index < NewData.length; index++) {
    var params = {
      TableName: table,
      Item: {
        "NEW_ID": NewData[index].NEW_ID,
        "CD_EMPRESA": NewData[index].CD_EMPRESA,
        "LOC_INI": NewData[index].LOC_INI,
        "LOC_FIM": NewData[index].LOC_FIM,
        "ID_ATIVO": NewData[index].ID_ATIVO,
      }
    };
    docClient.put(params, function (err, data) {
      if (err) {
        console.log("Error", params, err);
      } else {
        console.log("Success", params);
      }
    });
  }
  res.status(200).json();
};
module.exports = controller;
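One likely reason the import breaks on large files is that the loop above fires one unbatched put per row, all at once. Below is only a minimal sketch of a chunked alternative, not the original script: it assumes the same docClient, table and NewData as above and uses DocumentClient.batchWrite, which accepts at most 25 put requests per call.

// Sketch: write NewData in chunks of 25 (the batchWrite limit), one chunk at a time.
async function writeInChunks(newData) {
  const chunkSize = 25;
  for (let i = 0; i < newData.length; i += chunkSize) {
    const chunk = newData.slice(i, i + chunkSize);
    const params = {
      RequestItems: {
        [table]: chunk.map(item => ({
          PutRequest: {
            Item: {
              NEW_ID: item.NEW_ID,
              CD_EMPRESA: item.CD_EMPRESA,
              LOC_INI: item.LOC_INI,
              LOC_FIM: item.LOC_FIM,
              ID_ATIVO: item.ID_ATIVO
            }
          }
        }))
      }
    };
    // Waiting on each chunk keeps only one batch in flight at a time.
    await docClient.batchWrite(params).promise();
  }
}

A production version would also need to retry any UnprocessedItems returned by batchWrite and, for files that do not fit in memory at all, a streaming reader instead of readFileSync.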

Related

Updating a Firestore Document with Google Sheets Apps Script

I'm finding it hard to update the fields in a Firestore collection. The document IDs of the persons are auto-generated. The inline code below shows what I've been able to derive and make work from the tutorials I have followed. Except for the updateFirestoreDocument function, everything else works without error. How do I rewrite the code so the modified cells on the Google Sheet are exported to the right persons' fields in the Firestore collection? Thanks
function onOpen() {
  SpreadsheetApp.getUi().createMenu('🔥 Firebase')
    .addItem('⏪ Export to Firestore', 'main')
    .addItem('⏩ Import from Firestore', 'menuImport')
    .addToUi();
}

function main() {
  var email = "fireb...ccount.com";
  var key = "-----BEGI...rxEp...RIVATE KEY-----\n";
  var projectId = "co...t";
  var sheet = SpreadsheetApp.getActiveSheet();
  var sheetName = sheet.getName();
  var properties = getProperties(sheet);
  var records = getRecords(sheet);
  var firestore = FirestoreApp.getFirestore(email, key, projectId);
  // documentId and data are never defined in this function – this call is the part that fails
  updateFirestoreDocument(firestore, sheetName, documentId, properties, data);
  exportToFirestore(firestore, sheetName, properties, records);
}

function updateFirestoreDocument(firestore, collectionName, documentId, properties, data) {
  var documentRef = firestore.getDocument(collectionName, documentId);
  if (documentRef.exists()) {
    properties.forEach(function(prop) {
      documentRef.updateData(prop, data[prop]);
    });
  } else {
    firestore.createDocument(collectionName, documentId, data);
  }
}

function exportToFirestore(firestore, collectionName, properties, records) {
  records.map(function(record) {
    var data = {};
    properties.forEach(function(prop, i) { data[prop] = record[i]; });
    if (data[properties[0]] === undefined || data[properties[0]] === null) {
      return;
    }
    // var documentId = data[properties[1]]; // first column
    firestore.createDocument(collectionName, data);
    // firestore.createDocument(collectionName, documentId, data, { id: documentId });
  });
}

function getProperties(sheet) {
  return sheet.getRange(2, 1, 1, sheet.getLastColumn()).getValues()[0];
}

function getRecords(sheet) {
  var data = sheet.getDataRange().getValues();
  var dataToImport = [];
  for (var i = 2; i < data.length; i++) {
    dataToImport.push(data[i]);
  }
  return dataToImport;
}
I tried defining documentId.
I was expecting the documents to be renamed.
Instead, the documents' fields got modified wrongly.
function main() {
  const { email, key, projectId } = getSecrets_();
  const sheet = SpreadsheetApp.getActiveSheet();
  const firestore = FirestoreApp.getFirestore(email, key, projectId);
  exportToFirestore_(firestore, sheet.getName(), getProperties_(sheet), getRecords_(sheet));
}

function getSecrets_() {
  return {
    email: 'fireb..#..ccount.com',
    key: '-----BEGI...rxEp...RIVATE KEY-----\n',
    projectId: 'co...t',
  };
}

function exportToFirestore_(firestore, collectionName, properties, records) {
  records.map(record => {
    const data = {};
    properties.forEach((prop, i) => data[prop] = record[i]);
    return data;
  }).forEach(object => {
    firestore.createDocument(collectionName, object);
  });
}

function getProperties_(sheet) {
  return sheet.getRange(2, 1, 1, sheet.getLastColumn()).getValues()[0];
}

function getRecords_(sheet) {
  return sheet.getDataRange().getValues().slice(1);
}

How to convert Excel data to JSON format in Node.js?

I am a beginner in Node.js. I was tasked with reading an Excel file and converting it into JSON format so that it can be stored in a MongoDB database.
excel2.js file:
const XlsxStreamReader = require("xlsx-stream-reader");
var fs = require('fs');
const config = require('./excelpush.json');
const db = require('./dbmanager.js');

var finalList = [];
var singlePerson = {};

class ExcelReader {
  readFile() {
    var workBookReader = new XlsxStreamReader();
    workBookReader.on('error', function (error) {
      throw (error);
    });

    workBookReader.on('worksheet', function (workSheetReader) {
      if (workSheetReader.id > 1) {
        workSheetReader.skip();
        return;
      }
      var isFirstLine = true;
      var headerIndex = [];
      workSheetReader.on('row', function (row) {
        if (isFirstLine) {
          headerIndex = row.values.slice(1);
        }
        else if (!isFirstLine) {
          let rowValues = row.values.slice(1);
          let valueIndex = 0;
          headerIndex.forEach(currentval => {
            singlePerson[currentval] = rowValues[valueIndex];
            valueIndex++;
          });
          finalList.push(singlePerson);
        }
        isFirstLine = false;
      });
      workSheetReader.on('end', function () {
      });
      workSheetReader.process();
    });

    workBookReader.on('end', function () {
      //console.log(finalList);
      console.log('finished!');
    });

    fs.createReadStream(config.filePath).pipe(workBookReader);
  }
}

excelReader = new ExcelReader();
excelReader.readFile();
db.connectDb();
db.insertDb(finalList);
dbmanager.js file:
var mongoose = require('mongoose');
const config = require('./db.json');

function connectDb() {
  mongoose.connect(`mongodb://${config.dbConfig.host}:${config.dbConfig.port}/${config.dbConfig.dbName}`);
  mongoose.connection.once('open', function () {
    console.log('Connection has been made');
  }).on('error', function (error) {
    console.log('error is:' + error);
  });
}

function insertDb(list) {
  var myschema = new mongoose.Schema({}, { strict: false });
  var obj = mongoose.model('myschema', myschema, `${config.dbConfig.collectionName}`);
  var obj1 = new obj(list);
  obj1.save();
}

module.exports = {
  connectDb, insertDb
};
db.json file:
{
  "dbConfig": {
    "host": "localhost",
    "port": "27017",
    "dbName": "PRACTICE",
    "collectionName": "Excel"
  }
}
excelpush.json file:
{
  "filePath": "D:\\grv_tracx_updt (1).xlsx"
}
Here excel2.js takes the Excel file path from excelpush.json, reads the file, and stores the rows as an array of objects in the finalList variable.
The dbmanager.js file contains the DB connection and insertion code.
I am not able to store the data in the database: the code executes without errors, but no data ends up in the MongoDB database.
Note: the Excel file is large.
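No answer is recorded here, so the following is only a sketch of the most likely fix: the workbook reader is asynchronous, so db.insertDb(finalList) runs before any rows have been parsed; singlePerson is a single shared object that gets pushed over and over; and new obj(list).save() expects one document rather than an array. Building a fresh object per row, waiting for the workbook's end event, and inserting with Model.insertMany would address all three. db.insertMany below is a hypothetical variant of insertDb that calls Model.insertMany(list).

// Sketch of the 'row' and 'end' handlers, assuming the rest of excel2.js stays the same.
workSheetReader.on('row', function (row) {
  if (isFirstLine) {
    headerIndex = row.values.slice(1);
  } else {
    var person = {}; // fresh object per row instead of the shared singlePerson
    var rowValues = row.values.slice(1);
    headerIndex.forEach(function (currentval, i) {
      person[currentval] = rowValues[i];
    });
    finalList.push(person);
  }
  isFirstLine = false;
});

workBookReader.on('end', function () {
  console.log('finished!');
  // Only now is finalList populated, so connect and insert here.
  db.connectDb();
  db.insertMany(finalList); // hypothetical helper that calls Model.insertMany(list)
});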

How to read User ID and Password from a CSV file into an array using JavaScript

I have a CSV file as below - accounts.csv
User_Name,Password
TestAccount1,Passw0rd
TestAccount2,Passw0rd
TestAccount3,Passw0rd
TestAccount4,Passw0rd
I'm trying to write JavaScript code to read them into an array variable and use them further in the script, e.g. as UserName[1] and Password[1], or as separate UserName and Password arrays.
I tried the below code:
const fs = require('fs');
var parse = require("csv-parse"); // installed csv-parse
var csvFile = "accounts.csv";

class User {
  constructor(user_id, user_password) {
    this.user_id = user_id;
    this.user_password = user_password;
  }
}

const processData = (err, data) => {
  if (err) {
    console.log(`An error was encountered: ${err}`);
    return;
  }
  // skip the heading row
  data.shift();
  const userList = data.map(row => new User(...row));
};

fs.createReadStream(csvFile)
  .pipe(parse({ delimiter: ',' }, processData));
Try the below snippet:
const fs = require('fs');
var parse = require("csv-parse"); // installed csv-parse
var csvFile = "accounts.csv";

const userList = [];

const processData = (data) => {
  if (data != undefined) {
    //console.log(data);
    userList.push({ "username": data[0], "password": data[1] });
  }
};

fs.createReadStream(csvFile)
  .pipe(parse({
    delimiter: ',',
    from_line: 2 // to skip the header row
  }))
  .on('data', processData)
  .on('end', () => {
    console.log('CSV file successfully processed');
    console.log(userList);
  });
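For reference, a minimal variant, assuming a csv-parse version where require("csv-parse") returns the parser and the columns option is supported: letting the parser read the header row produces one object per account, so no manual index mapping is needed.

// Sketch: let csv-parse build one object per row from the header line.
const fs = require('fs');
const parse = require('csv-parse');

const users = [];
fs.createReadStream('accounts.csv')
  .pipe(parse({ delimiter: ',', columns: true })) // header row becomes the object keys
  .on('data', row => users.push(row))             // e.g. { User_Name: 'TestAccount1', Password: 'Passw0rd' }
  .on('end', () => {
    console.log(users[0].User_Name, users[0].Password);
  });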

How to return a Boolean properly in different Node.js files?

So I have files inside the following folder:
app/controller/token.js
app/controller/news.js
token.js:
"use strict";
var connection = require("../con");
exports.isTokenExists = function(token) {
var checkToken = "SELECT COUNT(`id`) AS 'total' FROM `user` WHERE `token` = '" + token + "'";
var isExists = false;
var count;
var checkResult;
connection.query(checkToken, function(error, rows) {
if (!error) {
checkResult = JSON.parse(JSON.stringify(rows));
for (var i = 0; i < checkResult.length; i++) {
var row = rows[i];
count = row.total;
}
if (count > 0) {
isExists = true;
}
}
});
return isExists;
};
news.js:
"use strict";
var response = require("../response/responses");
var connection = require("../con");
var getToken = require("./token");
exports.news = function(req, res) {
response.send(false, "News API", null, res);
};
exports.allNews = function(req, res) {
var checkTokenExists = getToken.isTokenExists("75d12cc4dc07608d5b87a6cba33cac056df1239c");
if (checkTokenExists) {
var allNewsQuery = "SELECT a.`id`, b.`title` AS `category`, a.`title`, a.`description`, a.`content`, a.`image`, a.`created_date` FROM `news` AS a LEFT JOIN `news_category` AS b ON a.`id_news_category` = b.`id` ORDER BY `created_date` DESC LIMIT 20";
connection.query(allNewsQuery, function(error, rows) {
if (error) {
response.send(true, "" + error, null, res);
} else {
var data = [];
var newsData = JSON.parse(JSON.stringify(rows));
for (var i = 0; i < newsData.length; i++) {
var row = rows[i];
data[i] = {
id: row.id,
idCategory: row.idCategory,
category: row.category,
title: row.title,
description: row.description,
image: row.image,
createdDate: row.created_date
};
}
response.send(false, "News is not empty", data, res);
}
});
} else {
response.send(true, "Error: Token not found", checkTokenExists, res);
}
};
I always get a false value from isTokenExists even though the token exists in the table.
How do I get a true response if the token exists, and a false response if the token does not exist in the table?
Any help will be much appreciated.
Regards.
The issue here is that connection.query accepts a callback, but the rest of your code moves past it without waiting for the result, which is why isExists is always false when it is returned. You can fix this by wrapping the query in a Promise like this:
"use strict";
const connection = require("../con");
exports.isTokenExists = async function(token) {
const checkToken = "SELECT COUNT(`id`) AS 'total' FROM `user` WHERE `token` = ?";
return new Promise((resolve, reject) => {
connection.query(checkToken, token, function (error, results) {
if (error) return reject(error);
return resolve(results.length > 0);
});
});
};
I also simplified the logic in the callback a bit.
Then, in news.js, wait for the result like this:
exports.allNews = async function(req, res) {
  getToken.isTokenExists("75d12cc4dc07608d5b87a6cba33cac056df1239c")
    .then(result => {
      if (result === true) {
        // place your code for handling if the token exists here
      } else {
        // place your code for handling if the token does not exist
      }
    })
    .catch(err => {
      // handle error
    });
};
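For reference, since allNews is already declared async, the same check can also be written with await instead of .then; this is just a sketch of that variant, not part of the original answer.

// Sketch: the same token check using await inside the async handler.
exports.allNews = async function(req, res) {
  try {
    const tokenExists = await getToken.isTokenExists("75d12cc4dc07608d5b87a6cba33cac056df1239c");
    if (tokenExists) {
      // run the news query and send the response as before
    } else {
      response.send(true, "Error: Token not found", tokenExists, res);
    }
  } catch (err) {
    response.send(true, "" + err, null, res);
  }
};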
You are missing the async/await concept. You need to wait until your query executes.
1) Write a promise function
exports.getCount = function(query) {
  return new Promise((res, rej) => {
    let count = 0;
    connection.query(query, function(error, rows) {
      if (!error) {
        var checkResult = JSON.parse(JSON.stringify(rows));
        for (var i = 0; i < checkResult.length; i++) {
          var row = rows[i];
          count = row.total;
        }
      }
      return res(count);
    });
  });
};
2) Write an async function that awaits it
exports.isTokenExists = async function(token) {
  var query = "SELECT COUNT(`id`) AS 'total' FROM `user` WHERE `token` = '" + token + "'";
  let count = await exports.getCount(query);
  return count > 0; // returns true if count is > 0
};

Why does my code using insertMany() skip some of the records and insert the same records multiple times?

I have 9577 unique records in a CSV file.
This code inserts about 9800 records: not all of the records, and duplicates of some of them. Any idea why it does not insert the 9577 unique records, and why it duplicates some of them? Below I also include the remaining part of the code so you get the whole picture.
function bulkImportToMongo(arrayToImport, mongooseModel) {
  const Model = require(`../../../models/${mongooseModel}`);
  let batchCount = Math.ceil(arrayToImport.length / 100);
  console.log(arrayToImport.length);
  let ops = [];
  for (let i = 0; i < batchCount; i++) {
    // console.log(i);
    let batch = arrayToImport.slice(i, i + 100);
    console.log(batch.length);
    ops.push(Model.insertMany(batch));
  }
  return ops;
  return Promise.all(ops).then(results => {
    // results is an array of results for each batch
    console.log("results: ", results);
  });
}
I parse the CSV file:
const Promise = require("bluebird");
const csv = require("fast-csv");
const path = require("path");
const fs = Promise.promisifyAll(require("fs"));

const promiseCSV = Promise.method((filePath, options) => {
  return new Promise((resolve, reject) => {
    var records = [];
    csv
      .fromPath(filePath, options)
      .on("data", record => {
        records.push(record);
      })
      .on("end", () => {
        // console.log(records);
        resolve(records);
      });
  });
});
And here is the script that connects it all together:
const path = require("path");
const promiseCSV = require("./helpers/ImportCSVFiles");
const {
  connectToMongo,
  bulkImportToMongo
} = require("./helpers/mongoOperations");

const filePath = path.join(__dirname, "../../data/parts.csv");
const options = {
  delimiter: ";",
  noheader: true,
  headers: [
    "facility",
    "partNumber",
    "partName",
    "partDescription",
    "netWeight",
    "customsTariff"
  ]
};

connectToMongo("autoMDM");
promiseCSV(filePath, options).then(records => {
  bulkImportToMongo(records, "parts.js");
});
It looks like your issue is simply i++. Perhaps you meant i += 100?
for (let i = 0; i < batchCount; i += 100 /* NOT i++ */) {
  //...
}
I solved it.
I hope this helps others... :-)
I had two errors: one in the function promiseCSV (renamed to parseCSV), and second, bad logic in bulkImportToMongo.
Complete solution:
I parsed and imported 602,198 objects; here is how long it took using node --max_old_space_size=8000 on a MacBook Pro with 8 GB of RAM.
console
➜ database git:(master) ✗ node --max_old_space_size=8000 partImport.js
Connected to db!
Time to parse file: : 5209.325ms
Disconnected from db!
Time to import parsed objects to db: : 153606.545ms
➜ database git:(master) ✗
parseCSV.js
const csv = require("fast-csv");

function promiseCSV(filePath, options) {
  return new Promise((resolve, reject) => {
    console.time("Time to parse file");
    var records = [];
    csv
      .fromPath(filePath, options)
      .on("data", record => {
        records.push(record);
      })
      .on("end", () => {
        console.timeEnd("Time to parse file");
        resolve(records);
      });
  });
}

module.exports = promiseCSV;
mongodb.js
const mongoose = require("mongoose");
mongoose.Promise = global.Promise;

function connectToMongo(databaseName) {
  mongoose.connect(`mongodb://localhost:27017/${databaseName}`, {
    keepAlive: true,
    reconnectTries: Number.MAX_VALUE,
    useMongoClient: true
  });
  console.log("Connected to db!");
}

function disconnectFromMongo() {
  mongoose.disconnect();
  console.log("Disconnected from db!");
}

function bulkImportToMongo(arrayToImport, mongooseModel) {
  const Model = require(`../../../models/${mongooseModel}`);
  const batchSize = 100;
  let batchCount = Math.ceil(arrayToImport.length / batchSize);
  let recordsLeft = arrayToImport.length;
  let ops = [];
  let counter = 0;
  for (let i = 0; i < batchCount; i++) {
    let batch = arrayToImport.slice(counter, counter + batchSize);
    counter += batchSize;
    ops.push(Model.insertMany(batch));
  }
  return Promise.all(ops);
}

module.exports.bulkImportToMongo = bulkImportToMongo;
module.exports.connectToMongo = connectToMongo;
module.exports.disconnectFromMongo = disconnectFromMongo;
partImport.js
const path = require("path");
const parseCSV = require("./helpers/parseCSV");
const {
  connectToMongo,
  disconnectFromMongo,
  bulkImportToMongo
} = require("./helpers/mongodb");

const filePath = path.join(__dirname, "../../data/parts.csv");
const options = {
  delimiter: ";",
  noheader: true,
  headers: [
    "facility",
    "partNumber",
    "partName",
    "partDescription",
    "netWeight",
    "customsTariff"
  ]
};

connectToMongo("autoMDM");
parseCSV(filePath, options)
  .then(records => {
    console.time("Time to import parsed objects to db");
    return bulkImportToMongo(records, "parts.js");
  })
  /* .then(result =>
    console.log("Total batches inserted: ", result, result.length)
  ) */
  .then(() => {
    disconnectFromMongo();
    console.timeEnd("Time to import parsed objects to db");
  })
  .catch(error => console.log(error));
