I have some code that, when I console.log the output, looks like the image below:
The code I am running is as follows:
onClick={() => {
  const stream = fetch(
    'https://lichess.org/api/games/user/neio',
    { headers: { Accept: 'application/x-ndjson' } }
  );
  const onMessage = obj => {
    console.log('test', obj);
  };
  const onComplete = () =>
    console.log('The stream has completed');
  stream.then(readStream(onMessage)).then(onComplete);
}}
export const readStream = processLine => response => {
  const stream = response.body.getReader();
  const matcher = /\r?\n/;
  const decoder = new TextDecoder();
  let buf = '';

  const loop = () =>
    stream.read().then(({ done, value }) => {
      if (done) {
        // Flush whatever is left in the buffer when the stream ends.
        if (buf.length > 0) processLine(JSON.parse(buf));
      } else {
        const chunk = decoder.decode(value, { stream: true });
        buf += chunk;
        // Split on newlines; the last (possibly partial) line stays in the buffer.
        const parts = buf.split(matcher);
        buf = parts.pop();
        for (const i of parts) processLine(JSON.parse(i));
        return loop();
      }
    });

  return loop();
};

export default readStream;
What I am trying to do is build a parent object that contains all of these individual rows of data.
I'm new to promises, fetch, and so on, so I currently have no idea how to build this parent object that contains each individual row.
Any suggestions?
Can't you have a global array and add items to it like:
var arrCollection = [];
...
const onMessage = obj => {
  arrCollection.push(obj);
};
If you need an object that wraps those items, you can do something like:
var objCollection = { items: arrCollection };
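To tie that back to the original snippet: a minimal sketch (untested, reusing the readStream helper and the Lichess endpoint from the question) that collects every parsed line into an array and only builds the parent object once the stream completes:

const games = [];
const onMessage = obj => games.push(obj);
const onComplete = () => {
  // Build the parent object only after the last NDJSON line has arrived.
  const parent = { games };
  console.log('The stream has completed', parent);
};

fetch('https://lichess.org/api/games/user/neio', {
  headers: { Accept: 'application/x-ndjson' }
})
  .then(readStream(onMessage))
  .then(onComplete);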
I have the following function:
const extractTestCases = () => {
  const filterFile = files.filter(test => test.includes('.test.ts'))
  filterFile.forEach(testsuite => {
    const testSuites = fs.readFileSync(testsuite, { encoding: "utf8" });
    testCases.push(regexMatcher(testSuites, TestRegex, 1))
  })
  return testCases;
}
filterFile is an array of files, and I'm using a forEach loop to extract some information from each file. What I want is to return an array of objects like this:
[{"name of the file (testSuite)":["extracted value from this file regexMatcher(testSuites, TestRegex, 1)"]},{"testSuite2":["","",""]},...]
Try something like this:
const extractTestCases = () => {
  const filesArray = []
  const filterFile = files.filter(test => test.includes('.test.ts'))
  filterFile.forEach(testsuite => {
    const testSuites = fs.readFileSync(testsuite, { encoding: "utf8" });
    testCases.push(regexMatcher(testSuites, TestRegex, 1))
    // Build one { filename: matches } object per file
    filesArray.push({ [testsuite]: regexMatcher(testSuites, TestRegex, 1) })
  })
  return filesArray;
}
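If you ever need a single object keyed by file name instead of an array of one-key objects, you can fold the result together; a small sketch, assuming the extractTestCases above:

// { "a.test.ts": [...], "b.test.ts": [...], ... }
const testCasesByFile = Object.assign({}, ...extractTestCases());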
I download data from an API in chunks, decrypt it, and then pass it to a ReadableStream.
But after the last chunk, the file is not downloaded.
I'm working with axios and StreamSaver.js.
Code:
Earlier in the code I declare:
this.filestream = streamSaver.createWriteStream('sample.jpg');
this.writer = await this.filestream.getWriter();

let readableStream;
readableStream = new ReadableStream({
  start(ctrl) {
    const nextChunk = async () => {
      let fileDataResponse = await that.$api.post(
        'endpoint',
        {
          file_id: UUID,
          chunk_index: index
        },
        {
          headers: {
            ...
          }
        }
      );
      done =
        fileDataResponse.data.length <=
        fileDataResponse.data.current_index;
      if (fileDataResponse.data.data) {
        let data = await that.decryptData(fileDataResponse.data.data);
        ctrl.enqueue(data);
      }
      if (!done) {
        index += 1;
        nextChunk();
      } else {
        ctrl.close();
      }
    };
    nextChunk();
  }
});

const reader = readableStream.getReader();
const close = () => {
  that.writer.close();
};
const pump = () =>
  reader.read().then((res) => {
    if (!res.done) {
      that.writer.write(res.value).then(pump);
    } else {
      close();
    }
  });
pump();
Where could my error be here?
Thanks a lot!
The issue was that res.value is not an Int8Array.
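In case it helps someone else hitting the same thing, here is a minimal sketch of the kind of fix that implies (assuming decryptData returns an ArrayBuffer or a plain array of bytes, which is my assumption, not something stated above): wrap the decrypted chunk in a typed array before enqueueing it, so the writer receives raw binary data rather than an arbitrary object.

if (fileDataResponse.data.data) {
  let data = await that.decryptData(fileDataResponse.data.data);
  // Assumption: decryptData returns an ArrayBuffer or byte array;
  // wrapping it in a typed array gives the writer raw bytes to save.
  ctrl.enqueue(new Uint8Array(data));
}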
Using the IndexedDB API we have these two methods, getAll() and getAllKeys(), with a usage example below:
let transaction = this.db.transaction(["table"]);
let object_store = transaction.objectStore("table");
let request = object_store.getAll(); /* or getAllKeys() */

request.onerror = (event) => {
  console.error("error fetching data");
};
request.onsuccess = (event) => {
  console.log(request.result);
};
The problem is that getAll() seems to retrieve only the values, as an array, and getAllKeys() gets all the keys without the data. I could not find a method to get both keys and values.
Isn't there a better way of getting the data and the keys with one call, the way it is stored?
If not, is there a nicer way I could do this without making the code too confusing with multiple asynchronous calls happening?
I was able to retrieve all values with their keys with one callback function using an IDBCursor like this:
transaction = this.db.transaction(["table"]);
object_store = transaction.objectStore("table");
request = object_store.openCursor();

request.onerror = function(event) {
  console.error("error fetching data");
};
request.onsuccess = function(event) {
  let cursor = event.target.result;
  if (cursor) {
    let key = cursor.primaryKey;
    let value = cursor.value;
    console.log(key, value);
    cursor.continue();
  } else {
    // no more results
  }
};
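If you prefer a promise you can await, the same cursor walk can be wrapped so it resolves once with every { key, value } pair; a small sketch, assuming a db handle and store name like in the snippets above:

const getAllEntries = (db, storeName) =>
  new Promise((resolve, reject) => {
    const entries = [];
    const request = db.transaction([storeName]).objectStore(storeName).openCursor();
    request.onerror = (event) => reject(event);
    request.onsuccess = (event) => {
      const cursor = event.target.result;
      if (cursor) {
        entries.push({ key: cursor.primaryKey, value: cursor.value });
        cursor.continue();
      } else {
        resolve(entries); // no more results
      }
    };
  });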
Alternatively, you can use getAllKeys, followed by a transaction that fetches the value for each key.
const getAll = (db, store) => new Promise((res, rej) => {
  // Fetch keys
  const keysTr = db.transaction(store).objectStore(store).getAllKeys()
  keysTr.onsuccess = (event) => {
    const keys = event.target.result
    if (keys?.length) {
      // Start a new transaction for final result
      const valuesTr = db.transaction(store)
      const objStore = valuesTr.objectStore(store)
      const result = [] // { key, value }[]
      // Iterate over keys
      keys.forEach(key => {
        const tr = objStore.get(key)
        tr.onsuccess = e => {
          result.push({
            key,
            value: e.target.result
          })
        }
      })
      // Resolve `getAll` with final { key, value }[] result
      valuesTr.oncomplete = (event) => {
        res(result)
      }
      valuesTr.onerror = (event) => {
        rej(event)
      }
    } else {
      res([])
    }
  }
  keysTr.onerror = (event) => {
    rej(event)
  }
})
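For completeness, a usage sketch (the db handle and "table" store name are just placeholders carried over from the earlier snippets):

getAll(this.db, "table")
  .then(entries => {
    // entries is an array of { key, value } objects
    entries.forEach(({ key, value }) => console.log(key, value));
  })
  .catch(err => console.error("error fetching data", err));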
I have 9577 unique records in a CSV file.
This code inserts 9800 records: not all of the unique records make it in, and some of them are duplicated. Any idea why it does not insert exactly the 9577 unique records, and why it duplicates some of them? Below I also include the rest of the code so you get the whole picture.
function bulkImportToMongo(arrayToImport, mongooseModel) {
  const Model = require(`../../../models/${mongooseModel}`);
  let batchCount = Math.ceil(arrayToImport.length / 100);
  console.log(arrayToImport.length);
  let ops = [];

  for (let i = 0; i < batchCount; i++) {
    // console.log(i);
    let batch = arrayToImport.slice(i, i + 100);
    console.log(batch.length);
    ops.push(Model.insertMany(batch));
  }

  return ops;
  return Promise.all(ops).then(results => {
    // results is an array of results for each batch
    console.log("results: ", results);
  });
}
I parse the CSV file like this:
const Promise = require("bluebird");
const csv = require("fast-csv");
const path = require("path");
const fs = Promise.promisifyAll(require("fs"));

const promiseCSV = Promise.method((filePath, options) => {
  return new Promise((resolve, reject) => {
    var records = [];
    csv
      .fromPath(filePath, options)
      .on("data", record => {
        records.push(record);
      })
      .on("end", () => {
        // console.log(records);
        resolve(records);
      });
  });
});
And here is the script that connects it all together:
const path = require("path");
const promiseCSV = require("./helpers/ImportCSVFiles");
const {
  connectToMongo,
  bulkImportToMongo
} = require("./helpers/mongoOperations");

const filePath = path.join(__dirname, "../../data/parts.csv");
const options = {
  delimiter: ";",
  noheader: true,
  headers: [
    "facility",
    "partNumber",
    "partName",
    "partDescription",
    "netWeight",
    "customsTariff"
  ]
};

connectToMongo("autoMDM");
promiseCSV(filePath, options).then(records => {
  bulkImportToMongo(records, "parts.js");
});
It looks like your issue is simply i++. Perhaps you meant i += 100?
for (let i = 0; i < batchCount; i += 100 /* NOT i++ */) {
  //...
}
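For what it's worth, since batchCount is the number of batches rather than the number of records, another way to express the same fix is to keep i++ and derive the slice bounds from the batch index instead; a sketch of that loop, using the variables from the question:

for (let i = 0; i < batchCount; i++) {
  // Batch i covers records [i * 100, (i + 1) * 100)
  let batch = arrayToImport.slice(i * 100, (i + 1) * 100);
  ops.push(Model.insertMany(batch));
}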
I solved it.
I hope this helps others... :-)
I had two errors: one in the function promiseCSV (changed to parseCSV), and second, bad logic in bulkImportToMongo.
Complete solution:
I parsed and imported 602,198 objects, and here is how long it took using node --max_old_space_size=8000 on a MacBook Pro with 8 GB of RAM.
console
➜ database git:(master) ✗ node --max_old_space_size=8000 partImport.js
Connected to db!
Time to parse file: : 5209.325ms
Disconnected from db!
Time to import parsed objects to db: : 153606.545ms
➜ database git:(master) ✗
parseCSV.js
const csv = require("fast-csv");

function promiseCSV(filePath, options) {
  return new Promise((resolve, reject) => {
    console.time("Time to parse file");
    var records = [];
    csv
      .fromPath(filePath, options)
      .on("data", record => {
        records.push(record);
      })
      .on("end", () => {
        console.timeEnd("Time to parse file");
        resolve(records);
      });
  });
}

module.exports = promiseCSV;
mongodb.js
const mongoose = require("mongoose");
mongoose.Promise = global.Promise;

function connectToMongo(databaseName) {
  mongoose.connect(`mongodb://localhost:27017/${databaseName}`, {
    keepAlive: true,
    reconnectTries: Number.MAX_VALUE,
    useMongoClient: true
  });
  console.log("Connected to db!");
}

function disconnectFromMongo() {
  mongoose.disconnect();
  console.log("Disconnected from db!");
}

function bulkImportToMongo(arrayToImport, mongooseModel) {
  const Model = require(`../../../models/${mongooseModel}`);
  const batchSize = 100;
  let batchCount = Math.ceil(arrayToImport.length / batchSize);
  let recordsLeft = arrayToImport.length;
  let ops = [];
  let counter = 0;
  for (let i = 0; i < batchCount; i++) {
    let batch = arrayToImport.slice(counter, counter + batchSize);
    counter += batchSize;
    ops.push(Model.insertMany(batch));
  }
  return Promise.all(ops);
}

module.exports.bulkImportToMongo = bulkImportToMongo;
module.exports.connectToMongo = connectToMongo;
module.exports.disconnectFromMongo = disconnectFromMongo;
partImport.js
const path = require("path");
const parseCSV = require("./helpers/parseCSV");
const {
  connectToMongo,
  disconnectFromMongo,
  bulkImportToMongo
} = require("./helpers/mongodb");

const filePath = path.join(__dirname, "../../data/parts.csv");
const options = {
  delimiter: ";",
  noheader: true,
  headers: [
    "facility",
    "partNumber",
    "partName",
    "partDescription",
    "netWeight",
    "customsTariff"
  ]
};

connectToMongo("autoMDM");
parseCSV(filePath, options)
  .then(records => {
    console.time("Time to import parsed objects to db");
    return bulkImportToMongo(records, "parts.js");
  })
  /* .then(result =>
    console.log("Total batches inserted: ", result, result.length)
  ) */
  .then(() => {
    disconnectFromMongo();
    console.timeEnd("Time to import parsed objects to db");
  })
  .catch(error => console.log(error));
I'm trying to figure out a more efficient way to create the list items in the DOM.
At the moment the list is created as each API request completes.
I'm pushing each object into its own array; I would like to create the list once all the data has loaded.
Additionally, I'm using Webpack and Babel.
let streamApi = 'https://wind-bow.glitch.me/twitch-api/streams/';
let twitchUsers = ['ESL_SC2', 'OgamingSC2', 'freecodecamp', 'noobs2ninjas', 'comster404'];
let streamByUser = [];

window.onload = function() {
  // Make an API request for each user and store the result in an array
  twitchUsers.map((user) => {
    fetch(streamApi + user, {method: 'GET'})
      .then(response => response.json())
      .then(json => {
        streamByUser.push(json);
        let uL = document.getElementById("user-list");
        let listItem = document.createElement("li");
        listItem.className = "list-group-item";
        if (json.stream === null) {
          listItem.innerHTML = "null";
        } else {
          listItem.innerHTML = json.stream.channel.display_name;
        }
        uL.appendChild(listItem);
      });
  });
};
UPDATE:
All is working!
Not tested, but I hope it works as expected.
const streamApi = "https://wind-bow.glitch.me/twitch-api/streams/";
const twitchUsers = [
  "ESL_SC2",
  "OgamingSC2",
  "freecodecamp",
  "noobs2ninjas",
  "comster404"
];
const twitchUsersStreams = twitchUsers.map(user =>
  fetch(streamApi + user, { method: "GET" }).then(res => res.json())
);

let streamByUser = [];
window.onload = function() {
  Promise
    .all(twitchUsersStreams)
    .then(everythingArray => {
      // do something with everythingArray after all the requests have resolved
    })
    .catch(err => {
      // runs as soon as any of the 'fetch' promises rejects
    });
};
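To make that concrete, the body of that .then could reuse the DOM code from the question; a small sketch (untested, assuming the twitchUsersStreams array defined above and the #user-list element from the question):

window.onload = function() {
  Promise.all(twitchUsersStreams)
    .then(streams => {
      const uL = document.getElementById("user-list");
      streams.forEach(json => {
        const listItem = document.createElement("li");
        listItem.className = "list-group-item";
        // Same null check as in the original snippet
        listItem.innerHTML = json.stream === null
          ? "null"
          : json.stream.channel.display_name;
        uL.appendChild(listItem);
      });
    })
    .catch(err => console.error(err));
};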
I would probably do something like this because I really like to decompose a task into small functions that reduce the need for inline comments and keep mutable state to a minimum.
const streamApi = 'https://wind-bow.glitch.me/twitch-api/streams/';
const twitchUsers = ['ESL_SC2', 'OgamingSC2', 'freecodecamp', 'noobs2ninjas', 'comster404'];

window.onload = async function () {
  const list = document.getElementById("user-list");
  const addToList = list.appendChild.bind(list);
  const twitchStreams = await fetchUsers(twitchUsers);
  twitchStreams.map(toListItem).forEach(addToList);
};

async function fetchUser(user) {
  const response = await fetch(`${streamApi}${user}`, {method: 'GET'});
  return response.json();
}

function fetchUsers(users) {
  return Promise.all(users.map(fetchUser));
}

function toListItem(user) {
  const listItem = document.createElement("li");
  listItem.className = "list-group-item";
  listItem.innerHTML = user.stream !== null
    ? user.stream.channel.display_name
    : "null";
  return listItem;
}