Create a time counter to insert in array - javascript

I have a problem creating a time in milliseconds to add to an array.
I have an app that reads from two external devices, so I need to record the time at which the data are read.
I have a function like this:
async setupNotifications1(device) {
  const service = this.serviceGeneral();
  /* accelerometer / gyroscope / magnetometer */
  device.monitorCharacteristicForService(
    service,
    this.AccGyrMg,
    (error, characteristic) => {
      if (error) {
        this.error(error.message);
        return;
      }
      const buf = Buffer.from(characteristic.value, "base64");
      const acc_dx = [2, 4, 6].map(index => buf.readInt16LE(index));
      this.setState(state => ({
        acc_dx,
        array_acc_dx: [
          ...state.array_acc_dx,
          [acc_dx],
        ]
      }));
    }
  );
  /* pressure */
  device.monitorCharacteristicForService(
    service,
    this.Pressure,
    (error, characteristic) => {
      if (error) {
        this.error(error.message);
        return;
      }
      const buf = Buffer.from(characteristic.value, "base64");
      const pressure_dx = [0, 2, 4, 6, 8].map(index => buf.readUInt16LE(index));
      this.setState(state => ({
        pressure_dx,
        array_pressure_dx: [
          ...state.array_pressure_dx,
          [pressure_dx]
        ]
      }));
    }
  );
}
How can I create a time value to concatenate with [...acc_dx] and [pressure_dx]? Thank you.

You could look into using setTimeout() for doing this.
https://facebook.github.io/react-native/docs/timers
So in your case you could have something like:
setTimeout(() => {
  addToArray();
}, 300);
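Note that setTimeout() schedules work rather than timestamping it; if the goal is to record when each reading arrived, a Date.now() call inside the notification callback is one option. A minimal sketch, assuming the same setState shape as in the question:
const buf = Buffer.from(characteristic.value, "base64");
const acc_dx = [2, 4, 6].map(index => buf.readInt16LE(index));
const time = Date.now(); // milliseconds since epoch, taken when the data arrives
this.setState(state => ({
  acc_dx,
  array_acc_dx: [
    ...state.array_acc_dx,
    [time, acc_dx], // timestamp concatenated with the reading
  ]
}));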

Related

Update Google sheet Every 1hr

How can I make this script run and update the Google Sheet every hour? I want new data to append at the bottom rather than overwrite the existing data. Also, is there any way to automatically delete duplicate rows based on column C, removing the old values and keeping the new ones?
function youTubeSearchResults() {
  // 1. Retrieve values from column "A".
  const sheet = SpreadsheetApp.getActiveSpreadsheet().getActiveSheet();
  const values = sheet.getRange("A2:A" + sheet.getLastRow()).getDisplayValues().filter(([a]) => a);

  // 2. Retrieve your current values.
  const modifyResults = values.flatMap(([keywords]) => {
    const searchResults = JSON.parse(UrlFetchApp.fetch(`https://yt.lemnoslife.com/noKey/search?part=snippet&q=${keywords}&maxResults=10&type=video&order=viewCount&videoDuration=short&publishedAfter=2022-09-27T00:00:00Z`).getContentText());
    const fSearchResults = searchResults.items.filter(sr => sr.id.kind === "youtube#video");
    return fSearchResults.map(sr => [keywords, sr.id.videoId, `https://www.youtube.com/watch?v=${sr.id.videoId}`, sr.snippet.title, sr.snippet.publishedAt, sr.snippet.channelTitle, sr.snippet.channelId, `https://www.youtube.com/channel/${sr.snippet.channelId}`, sr.snippet.thumbnails.high.url]);
  });

  // 3. Retrieve viewCounts and subscriberCounts.
  const { videoIds, channelIds } = modifyResults.reduce((o, r) => {
    o.videoIds.push(r[1]);
    o.channelIds.push(r[6]);
    return o;
  }, { videoIds: [], channelIds: [] });
  const limit = 50;
  const { viewCounts, subscriberCounts } = [...Array(Math.ceil(videoIds.length / limit))].reduce((obj, _) => {
    const vIds = videoIds.splice(0, limit);
    const cIds = channelIds.splice(0, limit);
    const res1 = YouTube.Videos.list(["statistics"], { id: vIds, maxResults: limit }).items.map(({ statistics: { viewCount } }) => viewCount);
    const obj2 = YouTube.Channels.list(["statistics"], { id: cIds, maxResults: limit }).items.reduce((o, { id, statistics: { subscriberCount } }) => (o[id] = subscriberCount, o), {});
    const res2 = cIds.map(e => obj2[e] || null);
    obj.viewCounts = [...obj.viewCounts, ...res1];
    obj.subscriberCounts = [...obj.subscriberCounts, ...res2];
    return obj;
  }, { viewCounts: [], subscriberCounts: [] });
  const ar = [viewCounts, subscriberCounts];
  const rr = ar[0].map((_, c) => ar.map(r => r[c]));

  // 4. Merge data.
  const res = modifyResults.map((r, i) => [...r, ...rr[i]]);

  // 5. Put values on Spreadsheet.
  sheet.getRange(2, 2, res.length, res[0].length).setValues(res);
}
To trigger youTubeSearchResults hourly, install a time-based trigger (running this once is enough; it guards against duplicate triggers):
function hourlytrigger() {
  if (ScriptApp.getProjectTriggers().filter(t => t.getHandlerFunction() == "youTubeSearchResults").length == 0) {
    ScriptApp.newTrigger("youTubeSearchResults").timeBased().everyHours(1).create();
  }
}
To append data at the bottom instead of overwriting:
sheet.getRange(sheet.getLastRow() + 1, 2, res.length, res[0].length).setValues(res);
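The question also asks about deleting duplicates based on column C, which isn't covered above. A minimal sketch, assuming column C holds the deduplication key (the video ID in this layout) and that newer rows sit at the bottom because they were appended last:
// Hypothetical helper: keep only the newest row for each value in column C.
function removeDuplicatesByColumnC() {
  const sheet = SpreadsheetApp.getActiveSpreadsheet().getActiveSheet();
  const rows = sheet.getDataRange().getValues();
  const seen = new Set();
  // Walk bottom-up so the newest occurrence of each key is seen first;
  // deleting row i + 1 does not shift the rows still to be visited.
  for (let i = rows.length - 1; i >= 1; i--) { // i >= 1 skips the header row
    const key = rows[i][2]; // column C (0-indexed)
    if (seen.has(key)) {
      sheet.deleteRow(i + 1); // older duplicate: delete it
    } else {
      seen.add(key);
    }
  }
}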

Firestore pagination backwards goes to first record always

I am making a simple Firestore pagination.
Going forward works as expected, but when it should go backwards it always jumps back to the first record.
I have been stuck here for a while; I even crammed both directions (forward and backward) into the same function to ease debugging.
Edit:
Added a simplified version (still the same undesired result):
async init() {
  const result = await (await firestore())
    .collection('documents')
    .limit(this.limit)
    .orderBy('createTimestamp')
    // .orderBy('userName')
    .get()
  console.log(result)
  this.lastVisible = result.docs[result.docs.length - 1]
  ;[this.firstVisible] = result.docs
  result.forEach(doc => {
    console.log('doc', doc.data())
    this.myDocuments.push(doc)
  })
},
async forward() {
  const result = await (await firestore())
    .collection('documents')
    .orderBy('createTimestamp')
    // .orderBy('userName')
    .startAfter(this.lastVisible)
    .limit(this.limit)
    .get()
  this.lastVisible = result.docs[result.docs.length - 1]
  ;[this.firstVisible] = result.docs
  if (result.empty === false)
    result.forEach(doc => {
      console.log('doc', doc.data())
      this.myDocuments.push(doc)
    })
},
async backward() {
  const result = await (await firestore())
    .collection('documents')
    .orderBy('createTimestamp')
    // .orderBy('userName')
    .endBefore(this.lastVisible)
    .limit(this.limit)
    .get()
  this.lastVisible = result.docs[result.docs.length - 1]
  ;[this.firstVisible] = result.docs
  if (result.empty === false)
    result.forEach(doc => {
      console.log('doc', doc.data())
      this.myDocuments.push(doc)
    })
},
Added the calling method:
getAllDocuments: async ({ rootState, commit }, payload) => {
  const documentsDb = new DocumentsDB(`${rootState.authentication.user.id}`)
  console.log('Get all documents(admin)')
  console.log('payload :>> ', payload)
  const { startAt, endBefore, constraints, limit } = payload
  console.log('startAt :>> ', startAt)
  console.log('endBefore :>> ', endBefore)
  console.log('limit :>> ', limit)
  const documents = await documentsDb.readWithPagination(constraints, startAt, endBefore, limit)
  console.log('documents', documents)
  // const documents = await documentsDb.readAllAsAdmin()
  // console.log('documents: ', documents)
  commit('setDocuments', documents)
},
I left the console.log() calls in for debugging. The logged payload is:
payload :>> { constraints: null, endBefore: "2UxB7Z1HWCmvkEcfLZ5H", startAt: null, limit: 5 }
The original code:
async readWithPagination(constraints = null, startAt = null, endBefore = null, limit = null) {
  const collectionRef = (await firestore()).collection(this.collectionPath)
  let query = collectionRef
  if (limit) query = query.limit(limit)
  query.orderBy('createTimestamp')
  if (constraints) {
    constraints.forEach(constraint => {
      query = query.where(...constraint)
    })
  }
  query = query.orderBy(firebase.firestore.FieldPath.documentId())
  if (startAt) {
    query = query.startAfter(startAt)
  }
  if (endBefore) {
    query = query.endBefore(endBefore)
    console.log('query :>> ', query)
  }
  const formatResult = result =>
    result.docs.map(ref =>
      this.convertObjectTimestampPropertiesToDate({
        id: ref.id,
        ...ref.data(),
      })
    )
  return query.get().then(formatResult)
}
Edit as requested in comments:
dispatchPaginatedForwardDocuments(startAt = null) {
  const payload = {}
  payload.orderBy = [['createTimestamp', 'asc']]
  payload.constraints = [['status', '==', 4]]
  payload.limit = this.limit
  payload.startAt = startAt?.id || null
  this.$store.dispatch('admin/getPaginatedForwardDocuments', payload, { root: true })
},
dispatchPaginatedPrevDocuments() {
  const payload = {}
  payload.orderBy = [['createTimestamp', 'asc']]
  payload.constraints = [['status', '==', 4]]
  payload.limit = this.limit
  payload.startAt = this.lastDocument.id
  payload.endBefore = this.firstDocument.id
  this.$store.dispatch('admin/getPaginatedBackwardsDocuments', payload, { root: true })
},
And this:
import UserDocumentsDB from '#/firebase/user-documents-db'
// import UsersDB from '#/firebase/users-db'
import DocumentsDB from '#/firebase/documents-db'
import { storage } from 'firebase'

export default {
  // Fetch documents with pagination forwards and constraints
  // This works as expected
  getPaginatedForwardDocuments: async ({ rootState, commit }, payload) => {
    const documentsDb = new DocumentsDB(`${rootState.authentication.user.id}`)
    const documents = await documentsDb.readPaginatedForward(payload)
    console.log('documents: ', documents)
    commit('setDocuments', documents)
  },
  // Fetch documents with pagination backwards and constraints
  getPaginatedBackwardsDocuments: async ({ rootState, commit }, payload) => {
    console.log('getPaginatedBackwardsDocuments', payload)
    const documentsDb = new DocumentsDB(`${rootState.authentication.user.id}`)
    const documents = await documentsDb.readPaginatedBackwards(payload)
    console.log('documents: ', documents)
    commit('setDocuments', documents)
  },
}
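A hedged observation on the simplified backward() above: with endBefore() plus limit(n), Firestore returns the first n documents of the ordered range that ends before the cursor, i.e. it starts from the very first record, which matches the symptom described. The cursor is also this.lastVisible, the last document of the current page, rather than the first. A common pattern (assuming a Firestore JS SDK recent enough to have limitToLast(), v7.3+) anchors on the first visible document instead:
async backward() {
  const result = await (await firestore())
    .collection('documents')
    .orderBy('createTimestamp')
    .endBefore(this.firstVisible) // anchor on the FIRST doc of the current page
    .limitToLast(this.limit)      // take the docs immediately before the anchor
    .get()
  this.lastVisible = result.docs[result.docs.length - 1]
  ;[this.firstVisible] = result.docs
  // ...push docs as before
}
Separately, in readWithPagination the line query.orderBy('createTimestamp') discards its return value; Firestore queries are immutable, so it should read query = query.orderBy('createTimestamp').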

Transform API data and fetching every 60 seconds [react]

Hi, I'm currently having problems formatting data I'm receiving from an API.
Should I use reduce instead of forEach?
Here is the desired output, an array of objects with just the titles and prices:
[
  {
    title: "Meter",
    price: "9.99"
  },
  {
    title: "Plan One",
    price: "11.99"
  }
]
Here is how I am getting the data, setting it, refetching it every 60 seconds:
const [products, setProducts] = useState([]);

const formatProduct = (products) => {
  let result = [];
  products.forEach((product) => {
    if (product.variants['price'] !== "0.00") {
      result.push({product["title"], product.variants[0]["price"]})
    }
  })
  return result
}

useEffect(() => {
  const fetchData = async () => {
    const axiosResponse = await axios(`https://www.today/products.json`);
    setProducts(formatProduct(axiosResponse.data.products));
  };
  const reFetchData = setInterval(() => setProducts(fetchData()), 60000);
  return () => {
    clearInterval(reFetchData);
  };
}, []);
When adding an object to the result array you have to specify the property names. You also missed the index into variants when comparing the price:
const formatProduct = (products) => {
  let result = [];
  products.forEach((product) => {
    if (product.variants[0]['price'] !== "0.00") { // index 0
      result.push({
        title: product["title"], // specify the prop name
        price: product.variants[0]["price"]
      })
    }
  })
  return result
}
There is another problem in your code: what happens if variants doesn't contain any items? I would check the array length and use the filter and map functions:
const formatProduct = (products) => {
  return products
    .filter(p => p.variants.length > 0 && p.variants[0].price !== "0.00")
    .map(p => ({ title: p.title, price: p.variants[0].price }));
}
Another problem is how you call setProducts(fetchData()) in the interval function. You shouldn't wrap it in setProducts() because fetchData() already calls it. So it has to be:
const reFetchData = setInterval(() => fetchData(), 60000);
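One further detail worth noting: the effect as posted never fetches immediately, so the first data would only arrive after 60 seconds. A sketch of the full corrected effect, calling fetchData() once up front:
useEffect(() => {
  const fetchData = async () => {
    const axiosResponse = await axios(`https://www.today/products.json`);
    setProducts(formatProduct(axiosResponse.data.products));
  };
  fetchData(); // initial fetch; otherwise the first data arrives after 60s
  const reFetchData = setInterval(fetchData, 60000);
  return () => clearInterval(reFetchData); // stop polling on unmount
}, []);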

Unhandled Promise Rejection in React

I have an app that reads data from external devices. The data are acceleration, gyroscope, magnetometer, and pressure readings.
I'm trying to read the data this way:
async setupNotifications2(device) {
  let i = 0
  const service = this.serviceGeneral();
  while (i < 10) {
    i++
    const promises = [
      device.readCharacteristicForService(service, this.AccGyrMg),
      device.readCharacteristicForService(service, this.Pressure)
    ]
    Promise.all(promises)
      .then((values) => {
        // const time = new Date().getMilliseconds()
        const time = bufAccGyrMg.readInt16LE(0);
        const [...acc_sx] = [0, 2, 4, 6].map(index => bufAccGyrMg.readInt16LE(index));
        this.setState(state => ({ acc_sx, array_acc_sx: [...state.array_acc_sx, [time, acc_sx]] }));
        const [...gyr_sx] = [8, 10, 12].map(index => bufAccGyrMg.readInt16LE(index));
        this.setState(state => ({ gyr_sx, array_gyr_sx: [...state.array_gyr_sx, [time, gyr_sx]] }));
        const [...mg_sx] = [14, 16, 18].map(index => bufAccGyrMg.readInt16LE(index));
        this.setState(state => ({ gyr_sx, array_mg_sx: [...state.array_mg_sx, [time, mg_sx]] }));
        const bufPressure = Buffer.from(values[1].value, "base64");
        const [...pressure_sx] = [0, 2, 4, 6, 8].map(index => bufPressure.readUInt16LE(index));
        this.setState(state => ({ pressure_sx, array_pressure_sx: [...state.array_pressure_sx, [time, pressure_sx]] }));
      })
  }
}
For now I have put the condition in the while loop just to test the code.
When I start the app, I receive this error:
YellowBox.js:67 Possible Unhandled Promise Rejection (id: 0):
BleError: Operation was rejected
What can I do, in your opinion? Thank you.
I refactored your code a bit to help get rid of the Unhandled Promise Rejection error and help you pin down the issue:
async setupNotifications2(device) {
  // first of all, put everything inside a big try/catch block
  try {
    let i = 0
    const service = this.serviceGeneral();
    while (i < 10) {
      i++
      const promises = [
        // then make sure every promise passed to Promise.all() catches its own errors
        device.readCharacteristicForService(service, this.AccGyrMg)
          .catch(e => console.log(`err in first readCharacteristicForService `, e)),
        device.readCharacteristicForService(service, this.Pressure)
          .catch(e => console.log(`err in second readCharacteristicForService `, e))
      ]
      // given you're in an async function, you can do this to simplify your code a bit:
      const values = await Promise.all(promises);
      // NOTE: bufAccGyrMg is never defined in the snippet; presumably it should be:
      // const bufAccGyrMg = Buffer.from(values[0].value, "base64");
      // const time = new Date().getMilliseconds()
      const time = bufAccGyrMg.readInt16LE(0);
      // this is an array, no need to overcomplicate with a destructuring assignment...
      // you can do the same below
      const acc_sx = [0, 2, 4, 6].map(index => bufAccGyrMg.readInt16LE(index));
      this.setState(state => ({ acc_sx, array_acc_sx: [...state.array_acc_sx, [time, acc_sx]] }));
      const [...gyr_sx] = [8, 10, 12].map(index => bufAccGyrMg.readInt16LE(index));
      this.setState(state => ({ gyr_sx, array_gyr_sx: [...state.array_gyr_sx, [time, gyr_sx]] }));
      const [...mg_sx] = [14, 16, 18].map(index => bufAccGyrMg.readInt16LE(index));
      // (the original set gyr_sx here; mg_sx was presumably intended)
      this.setState(state => ({ mg_sx, array_mg_sx: [...state.array_mg_sx, [time, mg_sx]] }));
      const bufPressure = Buffer.from(values[1].value, "base64");
      const [...pressure_sx] = [0, 2, 4, 6, 8].map(index => bufPressure.readUInt16LE(index));
      this.setState(state => ({ pressure_sx, array_pressure_sx: [...state.array_pressure_sx, [time, pressure_sx]] }));
    }
  } catch (err) {
    console.error(`general error: `, err)
  }
}
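For context on why the warning appeared in the first place: in the original loop, Promise.all(promises).then(...) was neither awaited nor given a .catch(), so when a BLE read rejected with BleError: Operation was rejected, nothing handled the rejection. Awaiting the chain inside try/catch, or attaching .catch() handlers as above, is what silences the "Possible Unhandled Promise Rejection" warning.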

rxjs subscribing late results to empty stream

I have the following piece of code. As is, with a couple of lines commented out, it works as expected: I subscribe to a stream, do some processing, and stream the data to the client. However, if I uncomment those lines, my stream is always empty, i.e. count in getEntryQueryStream is always 0. I suspect it has to do with the fact that I subscribe to the stream late and thus miss all the values.
// a wrapper of the mongodb driver => returns rxjs streams
import * as imongo from 'imongo';
import * as Rx from 'rx';
import * as _ from 'lodash';
import {elasticClient} from '../helpers/elasticClient';

const {ObjectId} = imongo;

function searchElastic({query, sort}, limit) {
  const body = {
    size: 1,
    query,
    _source: { excludes: ['logbookType', 'editable', 'availabilityTag'] },
    sort
  };
  // keep the search results "scrollable" for 30 secs
  const scroll = '30s';
  let count = 0;
  return Rx.Observable
    .fromPromise(elasticClient.search({ index: 'data', body, scroll }))
    .concatMap(({_scroll_id, hits: {hits}}) => {
      const subject = new Rx.Subject();
      // subject needs to be subscribed to before adding new values
      // and therefore completing the stream => execute in next tick
      setImmediate(() => {
        if(hits.length) {
          // initial data
          subject.onNext(hits[0]._source);
          // code that breaks
          //if(limit && ++count === limit) {
          //  subject.onCompleted();
          //  return;
          //}
          const handleDoc = (err, res) => {
            if(err) {
              subject.onError(err);
              return;
            }
            const {_scroll_id, hits: {hits}} = res;
            if(!hits.length) {
              subject.onCompleted();
            } else {
              subject.onNext(hits[0]._source);
              // code that breaks
              //if(limit && ++count === limit) {
              //  subject.onCompleted();
              //  return;
              //}
              setImmediate(() =>
                elasticClient.scroll({scroll, scrollId: _scroll_id},
                  handleDoc));
            }
          };
          setImmediate(() =>
            elasticClient.scroll({scroll, scrollId: _scroll_id},
              handleDoc));
        } else {
          subject.onCompleted();
        }
      });
      return subject.asObservable();
    });
}

function getElasticQuery(searchString, filter) {
  const query = _.cloneDeep(filter);
  query.query.filtered.filter.bool.must.push({
    query: {
      query_string: {
        query: searchString
      }
    }
  });
  return _.extend({}, query);
}

function fetchAncestors(ancestorIds, ancestors, format) {
  return imongo.find('session', 'sparse_data', {
    query: { _id: { $in: ancestorIds.map(x => ObjectId(x)) } },
    fields: { name: 1, type: 1 }
  })
  .map(entry => {
    entry.id = entry._id.toString();
    delete entry._id;
    return entry;
  })
  // we don't care about the results
  // but have to wait for stream to finish
  .defaultIfEmpty()
  .last();
}

function getEntryQueryStream(entriesQuery, query, limit) {
  const {parentSearchFilter, filter, format} = query;
  return searchElastic(entriesQuery, limit)
    .concatMap(entry => {
      const ancestors = entry.ancestors || [];
      // if no parents => doesn't match
      if(!ancestors.length) {
        return Rx.Observable.empty();
      }
      const parentsQuery = getElasticQuery(parentSearchFilter, filter);
      parentsQuery.query.filtered.filter.bool.must.push({
        terms: {
          id: ancestors
        }
      });
      // fetch parent entries
      return searchElastic(parentsQuery)
        .count()
        .concatMap(count => {
          // no parents match query
          if(!count) {
            return Rx.Observable.empty();
          }
          // fetch all other ancestors that weren't part of the query results
          // and are still a string (id)
          const restAncestorsToFetch = ancestors.filter(x => _.isString(x));
          return fetchAncestors(restAncestorsToFetch, ancestors, format)
            .concatMap(() => Rx.Observable.just(entry));
        });
    });
}

function executeQuery(query, res) {
  try {
    const stream = getEntryQueryStream(query);
    // stream is passed on to another function here where we subscribe to it like:
    // stream
    //   .map(x => whatever(x))
    //   .subscribe(
    //     x => res.write(x),
    //     err => console.error(err),
    //     () => res.end());
  } catch(e) {
    logger.error(e);
    res.status(500).json(e);
  }
}
I don't understand why those few lines of code break everything or how I could fix it.
Your use case is quite complex; you can start off by building up the searchElastic method along the pattern below:
convert elasticClient.scroll to an observable first
set up the initial data for elasticClient.search()
when the search resolves, you get your scroll id
the expand() operator lets you recursively execute the elasticClientScroll observable
use map to select the data you want to return
use takeWhile to decide when to complete the stream
The correct result is that once you call searchElastic().subscribe(), the stream emits continuously until there's no more data to fetch.
Hope this structure is correct and can get you started.
function searchElastic({ query, sort }, limit) {
  // note: if elasticClient.scroll takes a node-style (err, res) callback,
  // Rx.Observable.fromNodeCallback may be a better fit than fromCallback
  const elasticClientScroll = Observable.fromCallback(elasticClient.scroll)
  let obj = {
    body: {
      size: 1,
      query,
      _source: { excludes: ['logbookType', 'editable', 'availabilityTag'] },
      sort
    },
    scroll: '30s'
  }
  return Observable.fromPromise(elasticClient.search({ index: 'data', body: obj.body, scroll: obj.scroll }))
    .expand(({ _scroll_id, hits: { hits } }) => {
      // guess there is more logic here .....
      // to update the scroll id or something
      return elasticClientScroll({ scroll: obj.scroll, scrollId: _scroll_id })
        .map(res => res /* .. select the res you want to return */)
    }).takeWhile(res => res.hits.length)
}
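A usage sketch for the above, with hypothetical query/sort values, just to show the subscription shape (RxJS 4-style subscribe(next, error, complete)):
// hypothetical query and sort, only to illustrate the call shape
searchElastic({ query: { match_all: {} }, sort: ['_doc'] }, 10)
  .subscribe(
    res => console.log('page', res.hits),
    err => console.error(err),
    () => console.log('no more data to fetch')
  );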
