Trying to import csv file data to Firestore - javascript

I am using the code below to upload data to Firestore but am getting the error message below:
SyntaxError: Cannot use import statement outside a module
on this line of code:
"import promises from 'fs';"
import { promises } from 'fs';
const { readFile } = promises;
import { promisify } from 'util';
// Wrap csv-parse's callback API in a promise-returning function.
// Fix: promisify() must be given a function; the original passed it the
// Promise returned by a dynamic import(), which produces an unusable parser.
import { parse as parseCallback } from 'csv-parse';
const parse = promisify(parseCallback);
// Fix: scoped package names start with '@' ('#google-cloud' was a typo).
import { Firestore } from '@google-cloud/firestore';

// Usage: node <script> <path-to-csv>; fail fast when the path is missing.
if (process.argv.length < 3) {
console.error('Please include a path to a csv file');
process.exit(1);
}

// Firestore client; credentials are picked up from the environment.
const db = new Firestore();
/**
 * Write all records to the 'firebasetest' collection, keyed by each
 * record's acctid, committing in chunks of 500 (Firestore's per-batch
 * write limit).
 * @param {Object[]} records - parsed CSV rows
 * @returns {Promise} resolves once every batch commit has settled
 */
function writeToFirestore(records) {
  const pendingCommits = [];
  let currentBatch = db.batch();
  let written = 0;
  for (const record of records) {
    const docRef = db.collection('firebasetest').doc(record.acctid);
    currentBatch.set(docRef, record);
    written += 1;
    // Flush a full batch and start a fresh one.
    if (written % 500 === 0) {
      console.log(`Writing record ${written}`);
      pendingCommits.push(currentBatch.commit());
      currentBatch = db.batch();
    }
  }
  // Commit whatever remains in the final (possibly partial) batch.
  pendingCommits.push(currentBatch.commit());
  return Promise.all(pendingCommits);
}
/**
 * Import a CSV file into Firestore: read it, parse it (first row becomes
 * the column names), and batch-write the rows.
 * Any failure — read, parse, or write — is logged and the process exits
 * with status 1 so shell callers can detect it.
 * @param {string} csvFileName - path to the CSV file
 */
async function importCsv(csvFileName) {
  let records;
  try {
    const fileContents = await readFile(csvFileName, 'utf8');
    records = await parse(fileContents, { columns: true });
    await writeToFirestore(records);
  }
  catch (e) {
    // Fix: the original try only covered the write; a read or parse error
    // escaped to the trailing .catch, was merely logged, and the process
    // exited with status 0. Now every failure path exits non-zero.
    console.error(e);
    process.exit(1);
  }
  console.log(`Wrote ${records.length} records`);
}
importCsv(process.argv[2]).catch(e => console.error(e));

Download and install Firefoo.
Sign in with Google. Right-click your project in the left sidebar and choose Import Collections.
Click on the Data File field and select your JSON or CSV file.
That’s it! Click the Import button.
Your CSV rows are imported into Firestore and a progress popup opens.

Related

How correctly use ConnectionPool for mssql?

Which version of ConnectionPool for mssql is better, and why? I am trying to understand;
both work. I want a correct connection and don't want to create redundant connections.
All the examples I found use .then and version A — maybe there is another, better option?
Should I close connection?
version A
// db.js
// Version A: connect() runs once at module load; the exported value is a
// Promise that resolves to the connected pool, shared by every importer.
import sql from "mssql"
const config = {...}
export const mssqlPool = new sql.ConnectionPool(config).connect()
// in endpoint.js
import { mssqlPool } from "../lib/mssql/db"
try {
// Awaiting the shared promise yields the same connected pool each time.
const pool = await mssqlPool
const result = await pool.request().query(`SELECT * FROM table`)
console.log(result)
} catch (err) {
console.log('error')
}
version B
// db.js
import sql from "mssql"
const config = {...}
export const mssqlPool = new sql.ConnectionPool(config)
// n endpoint.js
import { mssqlPool } from "../lib/mssql/db"
try {
const pool = await mssqlPool.connect();
const result = await pool.request().query(`SELECT * FROM table`)
console.log(result)
} catch (err) {
console.log('error')
}

Get failed rows when using Mongoose Model.insertMany

We are trying to import a csv with around 30.000 lines of data into a MongoDB database using Mongoose. We have created a model with some validators so only correct rows will be added to the database.
First we read the csv with papaparse and create an array out of it. Next we insert the data into the database with the mongoose Model.insertMany method. To continue inserting data when a row fails we use the option ordered: false.
This all works, but we are looking for a way to collect the failed rows so we can check why a row didn't pass validation.
Is there a way to get the failed rows with the insertMany method?
...
/**
 * Read a CSV file from disk and parse it with PapaParse.
 * @param {string} filePath - path to the CSV file
 * @returns {Promise<Object[]>} parsed rows; the header row supplies the keys
 */
const readCSV = async (filePath) => {
  const rawContents = fs.readFileSync(filePath).toString();
  // Papa.parse is callback-based, so adapt it with an explicit Promise.
  return new Promise((resolve) => {
    Papa.parse(rawContents, {
      header: true,
      // Header cells may carry stray whitespace; normalise them.
      transformHeader: (header) => header.trim(),
      complete: (results) => {
        console.log('Complete', results.data.length, 'records.');
        resolve(results.data);
      },
    });
  });
};
/**
 * Connect to MongoDB, parse the CSV, and bulk-insert the rows.
 * ordered: false lets insertMany keep going past individual row failures.
 */
const start = async () => {
  try {
    await connectDB(process.env.DATABASE_URL);
    const rows = await readCSV(csvFilePath);
    const insertResult = await Company.insertMany(rows, { ordered: false });
    console.log(insertResult);
  } catch (error) {
    console.log(error);
  }
};
start();

copy folder in node using cp feature

I am trying to copy a folder and all of its contents using the Node.js cp feature as follows
fs.cp('D:\\Developer\\insomniac-beta\\template', dir_path, {recursive: true});
however its throwing me this error
node:internal/validators:232
throw new ERR_INVALID_ARG_TYPE(name, 'Function', value);
^
TypeError [ERR_INVALID_ARG_TYPE]: The "cb" argument must be of type function. Received undefined
at makeCallback (node:fs:191:3)
at Object.cp (node:fs:2848:14)
at D:\Developer\igbot\generate_config.js:30:13
at FSReqCallback.oncomplete (node:fs:193:23) {
code: 'ERR_INVALID_ARG_TYPE'
}
How is this possible? I do not have any calls to cb.
If you don't want to use the asynchronous copy with a callback, you can use the synchronous version.
fs.cpSync(sourceDir, destDir, {recursive: true});
You are missing one argument. As mentioned in the documentation, fs.cp is an asynchronous function that takes in a callback function
the final argument needs to be a callback function
fs.cp('D:\\Developer\\insomniac-beta\\template', dir_path, (err)=>{
// handle error
})
It seems like you're using the promises API, but you didn't show how you import the module. Here's an example with the current Node LTS (v16.x):
Ref: fsPromises.cp(src, dest[, options])
import {promises as fs} from 'fs';
// ...
await fs.cp(sourceDir, destDir, {recursive: true});
Here's a full, self-contained example which creates a sample dir structure, copies it, verifies the copy, and cleans up the sample data:
example.mjs:
import * as path from 'path';
import {constants as fsConstants, promises as fs} from 'fs';
import {fileURLToPath} from 'url';
import {ok as assert} from 'assert/strict';
// Create sample folder structure, return relative file paths
// Create sample folder structure, return relative file paths
async function createSampleFiles (rootDir) {
  // (relative path, contents) pairs to materialise under rootDir.
  const samples = [
    ['hello.txt', 'hello world\n'],
    [path.join('more', 'wow.txt'), 'wow\n'],
  ];
  const filePaths = [];
  for (const [relPath, contents] of samples) {
    const absPath = path.join(rootDir, relPath);
    // Ensure the parent directory exists before writing each file.
    await fs.mkdir(path.dirname(absPath), {recursive: true});
    await fs.writeFile(absPath, contents, {encoding: 'utf8'});
    filePaths.push(relPath);
  }
  return filePaths;
}
// Report whether a filesystem entry exists at filePath.
// Returns false only on ENOENT; any other access error propagates.
async function fsEntryExists (filePath) {
  try {
    await fs.access(filePath, fsConstants.F_OK);
    return true;
  }
  catch (err) {
    const isMissing = err instanceof Error && err.code === 'ENOENT';
    if (isMissing) return false;
    throw err;
  }
}
// Fail (via strict assert) when no filesystem entry exists at filePath.
async function assertFSEntryExists (filePath) {
  const exists = await fsEntryExists(filePath);
  assert(exists, `FS entry not found for "${filePath}"`);
}
// Demo driver: build a sample tree, copy it recursively with fs.cp, verify
// every file arrived, then clean up both trees and exit 0/1 accordingly.
async function main () {
  const moduleDir = path.dirname(fileURLToPath(import.meta.url));
  const sourceDir = path.join(moduleDir, 'data');
  const destDir = path.join(moduleDir, 'data-copy');
  const relativePaths = await createSampleFiles(sourceDir);
  await fs.cp(sourceDir, destDir, {recursive: true});
  let exitCode = 0;
  try {
    // Every file created under the source must now exist in the copy.
    for (const relPath of relativePaths) {
      await assertFSEntryExists(path.join(destDir, relPath));
    }
    console.log('Copy successful');
  }
  catch {
    console.error('Copy failed');
    exitCode = 1;
  }
  finally {
    // Cleanup runs whether or not verification passed.
    for (const dir of [sourceDir, destDir]) {
      if (await fsEntryExists(dir)) await fs.rm(dir, {recursive: true});
    }
    process.exit(exitCode);
  }
}
main();
$ node --version
v16.15.0
$ node example.mjs
Copy successful

Getting some problem to read the CSV file inside the firebase functions

I am trying to read the CSV file inside a Firebase function so that I can send mail to all of the records. I am planning to follow this procedure:
upload the csv
fire a on finalize function
read the file and send emails
Below is the function
import * as functions from "firebase-functions";
import * as mkdirp from "mkdirp-promise";
import * as os from "os";
import * as path from "path";
import csv = require('csvtojson');
const gcs = require('#google-cloud/storage')({ keyFilename: 'service-account-credentials.json' });
const csvDirectory = "csv";
// Storage trigger: when a CSV is uploaded under the 'csv/' prefix, download
// it to a temp directory and parse it to JSON (rows are logged here).
export = functions.storage.object().onFinalize(async (object) => {
const filePath = object.name;
const contentType = object.contentType;
const fileDir = path.dirname(filePath);
// Only react to CSV files inside the csv/ directory.
if(fileDir.startsWith(csvDirectory) && contentType.startsWith("text/csv")) {
const bucket = gcs.bucket(object.bucket);
const file = bucket.file(filePath);
const fileName = path.basename(filePath);
const tempLocalFile = path.join(os.tmpdir(), filePath);
const tempLocalDir = path.dirname(tempLocalFile);
console.log("values", bucket, file, fileName, tempLocalDir, tempLocalFile);
console.log("csv file uploadedeeeed");
await mkdirp(tempLocalDir);
await bucket.file(filePath).download({
destination: tempLocalFile
});
console.log('The file has been downloaded to', tempLocalFile);
// NOTE(review): this promise is neither awaited nor returned, so the
// function can be torn down before parsing finishes — per the answer
// below, this is the likely cause of the reported timeout.
csv()
.fromFile(tempLocalFile)
.then((jsonObj) => {
console.log(jsonObj);
})
}
});
While running the code I only see the "csv file uploadedeeeed" message that I wrote inside console.log, and then I get a timeout after 1 minute. I am also not getting the "The file has been downloaded to" log. Can anybody look at the code and help me out?
You are mixing up the use of async/await together with a call to then() method. You should also use await for the fromFile() method.
The following should do the trick (untested):
// Corrected trigger sketch: every async step is awaited, so the function
// only resolves after parsing completes and Cloud Functions will not
// terminate it early. Elided setup (//.....) matches the question's code.
export = functions.storage.object().onFinalize(async (object) => {
const filePath = object.name;
const contentType = object.contentType;
const fileDir = path.dirname(filePath);
try {
if (fileDir.startsWith(csvDirectory) && contentType.startsWith("text/csv")) {
//.....
await mkdirp(tempLocalDir);
await bucket.file(filePath).download({
destination: tempLocalFile
});
console.log('The file has been downloaded to', tempLocalFile);
// Awaiting (instead of .then) keeps the returned promise chained.
const jsonObj = await csv().fromFile(tempLocalFile);
console.log(jsonObj);
return null;
} else {
//E.g. throw an error
}
} catch (error) {
//.....
}
});
Also note that (independently of the mixed use of async/await and then()), with the following line in your code
csv().fromFile(tempLocalFile).then(...)
you were not returning the Promise returned by the fromFile() method. This is a key point in Cloud Functions.
I would suggest you watch the official Video Series on Cloud Functions (https://firebase.google.com/docs/functions/video-series/) and in particular the videos on Promises titled "Learn JavaScript Promises".

firestore upload a file : Unsupported field value: a custom File object

I am trying to upload a file in the firestore and the following code is working:
// Redux thunk: detach the File object from the form values BEFORE any
// Firestore work, upload it separately, then notify the user.
export const ticketEdit = values => {
return async (dispatch, getState, { getFirebase, getFirestore }) => {
const firestore = getFirestore();
// Pull the File out of values and remove the key so later document
// writes never see it. NOTE(review): `values.file && values.file` is
// equivalent to plain `values.file`; also `ticketId` is not defined in
// this snippet — presumably it comes from the surrounding module.
const file = values.file && values.file;
delete values.file;
try {
// uploadFile
if (file) await dispatch(ticketUploadFile(file, ticketId));
// notification
toastr.success(i18n.t("Success", i18n.t("The ticket is updated")));
} catch (e) {
console.log(e);
}
};
};
However, if I put the lines where I delete the file
// delete file
const file = values.file && values.file;
delete values.file;
In the try catch statement like this :
// Variant that moves the file-detach inside try/catch. Per the question,
// this version failed with "Unsupported field value: a custom File object";
// the accepted explanation is that a document update ran before the File
// was removed from values — so ordering, not try/catch, was the issue.
export const ticketEdit = values => {
return async (dispatch, getState, { getFirebase, getFirestore }) => {
const firestore = getFirestore();
try {
// delete file
const file = values.file && values.file;
delete values.file;
if (file) await dispatch(ticketUploadFile(file, ticketId));
// notification
toastr.success(i18n.t("Success", i18n.t("The ticket is updated")));
} catch (e) {
console.log(e);
}
};
};
I get the error
Unsupported field value: a custom File object.
and I would like to know why. Thank you!
The problem was not the try catch statement but that I was trying to update the values before deleting the file.
Lesson learned: delete the files before updating the document.

Categories

Resources