Image Upload in GraphQL - javascript

How can I handle image uploads in GraphQL?
Through multer, using an Express route to handle the upload and a GraphQL query to view the images and other data:
app.use('/graphql', upload);
app.use('/graphql', getData, graphqlHTTP(tokenData => ({
schema,
pretty: true,
tokenData,
graphiql: true,
})));
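For reference, here is a minimal sketch of how the upload middleware used above could be defined with multer; the single-file field name 'file' and the in-memory storage are assumptions, not part of the original answer:
const multer = require('multer');
// keep uploads in memory so the GraphQL resolver can read file.buffer later
const upload = multer({ storage: multer.memoryStorage() }).single('file');
With that in place, multer populates req.file before the graphqlHTTP handler runs.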

This is a duplicate of "How would you do file uploads in a React-Relay app?"
In short, yes, you can do a file upload in GraphQL with React + Relay.
You need to write the Relay store update action, for example:
onDrop: function(files) {
files.forEach((file)=> {
Relay.Store.commitUpdate(
new AddImageMutation({
file,
images: this.props.User,
}),
{onSuccess, onFailure}
);
});
},
Then implement a mutation for the Relay store:
class AddImageMutation extends Relay.Mutation {
static fragments = {
images: () => Relay.QL`
fragment on User {
id,
}`,
};
getMutation() {
return Relay.QL`mutation{ introduceImage }`;
}
getFiles() {
return {
file: this.props.file,
};
}
getVariables() {
return {
imageName: this.props.file.name,
};
}
getFatQuery() {
return Relay.QL`
fragment on IntroduceImagePayload {
User {
images(first: 30) {
edges {
node {
id,
}
}
}
},
newImageEdge,
}
`;
}
getConfigs() {
return [{
type: 'RANGE_ADD',
parentName: 'User',
parentID: this.props.images.id,
connectionName: 'images',
edgeName: 'newImageEdge',
rangeBehaviors: {
'': 'prepend',
},
}];
}
}
In your server-side schema, perform the update:
const imageMutation = Relay.mutationWithClientMutationId({
name: 'IntroduceImage',
inputFields: {
imageName: {
type: new GraphQL.GraphQLNonNull(GraphQL.GraphQLString),
},
},
outputFields: {
newImageEdge: {
type: ImageEdge,
resolve: (payload, args, options) => {
const file = options.rootValue.request.file;
// write the image to your disk
return uploadFile(file.buffer, filePath, filename)
.then(() => {
/* Find the offset for new edge*/
return Promise.all(
[(new myImages()).getAll(),
(new myImages()).getById(payload.insertId)])
.spread((allImages, newImage) => {
const newImageStr = JSON.stringify(newImage);
/* If edge is in list return index */
const offset = allImages.reduce((pre, ele, idx) => {
if (JSON.stringify(ele) === newImageStr) {
return idx;
}
return pre;
}, -1);
return {
cursor: offset !== -1 ? Relay.offsetToCursor(offset) : null,
node: newImage,
};
});
});
},
},
User: {
type: UserType,
resolve: () => (new myImages()).getAll(),
},
},
mutateAndGetPayload: (input) => {
//break the names to array.
let imageName = input.imageName.substring(0, input.imageName.lastIndexOf('.'));
const mimeType = input.imageName.substring(input.imageName.lastIndexOf('.'));
// write the image to the database
return (new myImages())
.add(imageName)
.then(id => {
// prepare to write to disk
return {
insertId: id,
imageName: imageName,
};
});
},
});
You can find all of the code above in this repo: https://github.com/bfwg/relay-gallery
There is also a live demo at http://fanjin.io

Related

How to get Contentful Rich Text HTML renderer to render bullet points?

I am trying to render the bullet points using the Contentful Rich Text HTML renderer; however, as the following screenshot shows, the bullet point list I want renders as [object Object]:
The following is my code:
<script>
import { documentToHtmlString } from '@contentful/rich-text-html-renderer'
import { BLOCKS } from '@contentful/rich-text-types'
const contentful = require('contentful')
const config = {
space: '341l220skrs9',
accessToken: 'a2kEigyr-81a4J8sPyN1-UNU9R9eO3cPQDdKb0GkRWc',
}
const client = contentful.createClient(config)
export default {
async asyncData({ params }) {
const projects = await client.getEntries({
'fields.slug': params.post,
content_type: 'blogPost',
})
const newProjects = projects.items[0].fields
console.log('DDD', projects)
return {
newProjectss: newProjects,
}
},
data() {
return {
options: {
renderNode: {
[BLOCKS.EMBEDDED_ASSET]: ({
data: {
target: { fields },
},
}) =>
`<img src="${fields.file.url}" height="${fields.file.details.image.height}" width="${fields.file.details.image.width}" alt="${fields.description}"/>`,
[BLOCKS.UL_LIST]: (node, children) => <ul>{children}</ul>,
[BLOCKS.OL_LIST]: (node, children) => <ol>{children}</ol>,
[BLOCKS.LIST_ITEM]: (node, children) => <li>{children}</li>,
},
},
}
},
methods: {
documentToHtmlString(text) {
return documentToHtmlString(text.body, this.options)
},
},
}
</script>
And this is what I am trying to render:
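As a side note on the renderer's contract: documentToHtmlString builds a plain HTML string, so its renderNode callbacks receive (node, next) and are expected to return strings; returning JSX elements is what stringifies to [object Object]. A minimal sketch of string-returning list renderers, using a hypothetical stringOptions object rather than the data() options above:
const stringOptions = {
  renderNode: {
    // next(node.content) renders the node's children to an HTML string
    [BLOCKS.UL_LIST]: (node, next) => `<ul>${next(node.content)}</ul>`,
    [BLOCKS.OL_LIST]: (node, next) => `<ol>${next(node.content)}</ol>`,
    [BLOCKS.LIST_ITEM]: (node, next) => `<li>${next(node.content)}</li>`,
  },
}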

How to receive data from preload.js to renderer.js

I'm working on a button click that opens a dialog to get a folder root. I'm currently using invoke/handle. I created a promise function to return my data, but I'm not sure how to move the data from preload.js to renderer.js.
Main.js
ipcMain.handle(HANDLE_FETCH_DATA, (events) => {
dialog.showOpenDialog(win, {
properties: ['openDirectory']
}).then(result => {
if (result.canceled === true) return;
const filePath = result.filePaths[0]
return crawlDirectory(filePath)
})
})
Preload.js
loadDirectory(channel) {
return ipcRenderer.invoke(channel)
}
Renderer.js
async function loadDirectoryFolders() {
// Activate ShowDialog
const data = await app.filesApi.loadDirectory(HANDLE_FETCH_DATA)
console.log(data)
}
crawlDirectory(filepath)
const crawlDirectory = (dir) => {
return new Promise((resolve, reject) => {
const folderName = path.basename(dir);
// create new model object
const model = {
id: short.generate(),
name: folderName,
children: [],
toggled: false,
active: false,
loading: false,
decorators: {},
animations: []
};
//model['name'] = folderName;
const folders = fs.readdirSync(dir).filter(file => file !== '.DS_Store'); //['Dance', 'Disco', 'Hip Hop', 'House', 'R&B', 'Reggae']
for (let folder in folders) {
let next = path.join(dir, folders[folder]);
let isDirectory = fs.lstatSync(next).isDirectory();
if (isDirectory) {
model['children'].push(crawlDirectory(next))
} else {
model['children'].push({ name: path.basename(next) })
}
}
resolve(model)
})
}
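For what it's worth, with invoke/handle the data only reaches the renderer if the handler returns it, and the Main.js handler above never returns the dialog promise, so invoke resolves to undefined. A minimal sketch of the whole round trip, assuming a contextBridge-based preload that exposes filesApi on window (the original reads it from app.filesApi, so adjust the exposed name accordingly):
// main.js: return the promise so ipcRenderer.invoke receives its resolved value
ipcMain.handle(HANDLE_FETCH_DATA, async () => {
  const result = await dialog.showOpenDialog(win, { properties: ['openDirectory'] });
  if (result.canceled) return null;
  return crawlDirectory(result.filePaths[0]);
});

// preload.js: expose a narrow API to the renderer
const { contextBridge, ipcRenderer } = require('electron');
contextBridge.exposeInMainWorld('filesApi', {
  loadDirectory: (channel) => ipcRenderer.invoke(channel),
});

// renderer.js: await the value the handler returned
async function loadDirectoryFolders() {
  const data = await window.filesApi.loadDirectory(HANDLE_FETCH_DATA);
  console.log(data);
}
Note also that crawlDirectory is recursive and returns a promise, so the crawlDirectory(next) calls pushed into model.children are promises; they would need to be awaited (for example with Promise.all) before resolve(model) for nested folders to be populated.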

React Native - {"name":"Invariant Violation","framesToPop":1}

I'm trying to implement the Meilisearch API in React Native. It works fine in my simulator, but after I published the app, some users cannot see the data returned from Meilisearch. The error is:
{"name":"Invariant Violation","framesToPop":1}
This is my code
Meilisearch.js
import axios from 'axios';
import { meilisearchConfig } from '../Config';
const MeilisearchApi = async (payload, success, failed) => {
try {
const response = await axios({
method: 'post',
url: `${meilisearchConfig?.host}indexes/activities/search`,
data: payload,
headers: {
'X-Meili-API-Key': meilisearchConfig?.apiKey,
},
});
success?.(response?.data);
} catch (err) {
failed?.(err);
}
};
export default MeilisearchApi;
This is the normalizer for the returned data:
import moment from 'moment';
import { IActivity, IByDateGroupFilter } from 'reducers/types';
export const activityNormalizer = (state, { hits, offset }) => {
const {
melisearchActivityData: { byDate, dates, all },
} = state;
const isRefreshing = offset === 0;
const newAll = isRefreshing ? hits : [...all, ...hits];
const datesNew: string[] = isRefreshing ? [] : dates;
const byDateNew: any = isRefreshing ? {} : byDate;
const byDateGroup: IByDateGroupFilter[] = [];
hits.forEach((activity: IActivity) => {
const date = getFormattedDate(activity.created_at);
if (byDateNew[date]) byDateNew[date].push({ ...activity });
else {
byDateNew[date] = [{ ...activity }];
datesNew.push(date);
}
});
Object.keys(byDateNew).forEach((key) => {
byDateGroup.push({
title: key,
data: byDateNew[key],
});
});
return {
dates: datesNew,
byDate: byDateNew,
byDateGroup,
all: newAll,
};
};
This is how I call my Meilisearch API method:
MeilisearchApi(
{
q: search,
filters: filters,
offset: newOffset,
limit: PAGE_SIZE,
},
({ hits }: { hits: any[] }) => {
setDataLoaded(true);
setMelisearchActivitiesToRedux({ hits, offset: newOffset });
if (newOffset === 0) {
sectionList?.current?.scrollToLocation({
itemIndex: 1,
});
}
},
(err: any) => {
setDataLoaded(true);
log(err)
},
);
I have no idea how this error happens; when users kill the app and log in again, it works fine.
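One detail worth checking while narrowing this down: SectionList's scrollToLocation expects both sectionIndex and itemIndex, and the call in the success callback above only passes itemIndex. A hedged sketch of a fully specified call (sectionIndex 0 is an assumption about which section to scroll to):
sectionList?.current?.scrollToLocation({
  sectionIndex: 0, // required alongside itemIndex
  itemIndex: 1,
  animated: true,
});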

Gatsby createPages lifecycle method running before onCreateNode method finishes adding needed node fields

I am working on a Gatsby site that uses gatsby-source-wordpress. In my gatsby-node.js file, I use the onCreateNode lifecycle method to determine if the node is a certain WordPress custom post type; then I reach out to a separate API to get related information for the post type, use createNodeField to add it as a field, and sometimes also use createRemoteFileNode to add images sourced from the API to a field on the new node.
Now this works great most of the time, but occasionally the createPages lifecycle method runs while the image/node code is still happening (I believe). This means that the image fields don't exist yet, and the page creation fails. Then, after it fails, I see a console message from a log I set up notifying me that the new field has successfully been added to the node.
How can I make sure that all of those nodes are finished and the data is complete BEFORE the createPages lifecycle runs? It seems that when the client uploads a larger image, this is more likely to fail, which makes sense if I'm understanding this correctly. Here is the code from my gatsby-node.js file:
const path = require(`path`);
const slash = require(`slash`);
const fetch = require('node-fetch');
const { createRemoteFileNode } = require(`gatsby-source-filesystem`)
exports.onCreateNode = ({ node, actions, store, cache,createNodeId, }) => {
const { createNode, createNodeField } = actions;
function getData(url) {
return new Promise((resolve, reject) => {
fetch(url)
.then((response) => response.json())
.then((data) => {
resolve(data);
});
})
}
if( node.internal.type === "wordpress__wp_location"){
const yextID = node.acf.yext_entity_id;
const yextOrthos = node.acf.location_orthodontists;
try {
const getLocation = async () => {
const data = await fetch("https://api.yext.com/v2/accounts/me/entities?api_key=" + process.env.YEXT_API_KEY + "&v=20191114&filter=%7B%22%24anywhere%22%3A%20%22" + yextID + "%22%7D&entityTypes=healthcareFacility")
.then(response => response.json());
// Transform the data into json
if( data && data.response && data.response.count === 1 ){
createNodeField({
node,
name: `yextLocation`,
value: data.response.entities[0]
});
} else {
console.log("NO LOCATIONS FOUND");
}
};
function getOrthos(){
let orthodontists = [];
yextOrthos.forEach( (ortho, i) => {
orthodontists.push(getData("https://api.yext.com/v2/accounts/me/entities?api_key=" + process.env.YEXT_API_KEY + "&v=20191114&filter=%7B%22%24anywhere%22%3A%20%22" + ortho.acf.yext_entity_ortho_id + "%22%7D&entityTypes=healthcareProfessional"));
});
Promise.all(orthodontists).then( (orthoData) => {
if( orthoData.length ){
let finalOrthos = [];
orthoData.forEach( (finalOrtho, x) => {
finalOrthos.push(finalOrtho.response.entities[0]);
});
createNodeField({
node,
name: `yextOrthos`,
value: finalOrthos
});
} else {
console.log("NO DOCTORS FOUND");
}
});
}
getLocation();
getOrthos();
} catch (error) {
console.log(error);
}
}
if( node.internal.type === "wordpress__wp_orthodontist"){
const yextID = node.acf.yext_entity_ortho_id;
const wpID = node.wordpress_id;
try {
const getTextOrtho = async () => {
const data = await fetch("https://api.yext.com/v2/accounts/me/entities?api_key=" + process.env.YEXT_API_KEY + "&v=20191114&filter=%7B%22%24anywhere%22%3A%20%22" + yextID + "%22%7D&entityTypes=healthcareProfessional")
.then(response => response.json());
// Transform the data into json
if( data && data.response && data.response.count === 1 ){
googleProfilePhoto = data.response.entities[0].googleProfilePhoto.url;
createNodeField({
node,
name: `yextOrthodontist`,
value: data.response.entities[0]
});
if( data.response.entities[0].googleProfilePhoto && data.response.entities[0].googleProfilePhoto.url){
createNodeField({
node,
name: `yextProfilePicture`,
value: data.response.entities[0].googleProfilePhoto.url
});
let fileNode = await createRemoteFileNode({
url: data.response.entities[0].googleProfilePhoto.url, // string that points to the URL of the image
parentNodeId: node.id, // id of the parent node of the fileNode you are going to create
createNode, // helper function in gatsby-node to generate the node
createNodeId, // helper function in gatsby-node to generate the node id
cache, // Gatsby's cache
store, // Gatsby's redux store
})
// if the file was created, attach the new node to the parent node
if (fileNode) {
console.log("GOOGLE PROFILE NODE CREATED!")
node.featuredImg___NODE = fileNode.id
} else {
console.log("ERROR! fileNode not Created!");
}
} else {
console.log("NO GOOGLE PROFILE PHOTO FOUND");
}
} else {
console.log("NO ORTHODONTISTS FOUND");
}
}
const getWpLocations = async () => {
const data = await fetch(process.env.GATSBY_WP_BASEURL+ "/wp-json/custom_endpoint/v1/locations_by_orthodontist?orthodontist_id=" + wpID).then(response => response.json());
if( data ){
createNodeField({
node,
name: `wpLocations`,
value: data
});
} else {
console.log("NO ORTHODONTISTS FOUND");
}
}
getTextOrtho();
getWpLocations();
} catch (error) {
console.log(error);
}
}
}
exports.createPages = async ({ graphql, actions }) => {
const { createPage } = actions;
const result = await graphql(`
{
locations: allWordpressWpLocation(filter: {status: {eq: "publish"}}) {
nodes {
id
path
acf {
location_orthodontists {
acf {
yext_entity_ortho_id
}
}
yext_entity_id
}
}
}
pages: allWordpressPage(
filter: {
wordpress_id: {nin: [177, 183, 8, 42, 44, 185, 46]}
status: {eq: "publish"}
}) {
nodes {
id
wordpress_id
path
}
}
orthodontists: allWordpressWpOrthodontist(filter: {status: {eq: "publish"}}) {
nodes {
id
path
}
}
posts: allWordpressPost(filter: {status: {eq: "publish"}}) {
nodes {
slug
id
}
}
}
`);
// Check for any errors
if (result.errors) {
throw new Error(result.errors);
}
const { locations, pages, orthodontists, posts } = result.data;
const locationTemplate = path.resolve(`./src/templates/location.js`);
const pageTemplate = path.resolve(`./src/templates/page.js`);
const orthoTemplate = path.resolve(`./src/templates/orthodontist.js`);
const postTemplate = path.resolve(`./src/templates/post.js`);
const blogTemplate = path.resolve(`./src/templates/blog.js`);
locations.nodes.forEach(node => {
let orthodontists = [];
node.acf.location_orthodontists.forEach(ortho => {
orthodontists.push(ortho.acf.yext_entity_ortho_id);
});
let orthodontistList = orthodontists.join();
createPage({
path: `${node.path}`,
component: slash(locationTemplate),
context: {
id: node.id,
yextId: node.acf.yext_entity_id,
yextOrthoIds: orthodontists
},
});
});
pages.nodes.forEach(node => {
createPage({
path: `${node.path}`,
component: slash(pageTemplate),
context: {
id: node.id,
},
});
});
orthodontists.nodes.forEach(node => {
createPage({
path: `${node.path}`,
component: slash(orthoTemplate),
context: {
id: node.id,
},
});
});
posts.nodes.forEach(node => {
createPage({
path: `${node.slug}`,
component: slash(postTemplate),
context: {
id: node.id,
},
});
});
const postsPerPage = 12;
const numPages = Math.ceil(posts.nodes.length / postsPerPage);
Array.from({ length: numPages }).forEach((_, i) => {
createPage({
path: i === 0 ? `/blog` : `/blog/page/${i + 1}`,
component: slash(blogTemplate),
context: {
limit: postsPerPage,
skip: i * postsPerPage,
numPages,
currentPage: i + 1,
},
})
})
};
Thanks for any information you can provide! I imagine this is probably due to me still learning to use asynchronous behavior in JS, but I just can't seem to find information on how to make this happen.
Please let me know if I can explain the situation any better!
After a rewrite, this seems to have solved the issue I was having. I'll be honest, I'm still working on completely understanding the ins and outs of async/await/promise functionality in JS, but hopefully, if someone encounters a similar problem, viewing this rewrite may help:
const path = require(`path`);
const slash = require(`slash`);
const fetch = require('node-fetch');
const { createRemoteFileNode } = require(`gatsby-source-filesystem`)
exports.onCreateNode = async ({ node, actions, store, cache,createNodeId, }) => {
const { createNode, createNodeField } = actions;
const getData = async (url) => {
return new Promise((resolve, reject) => {
fetch(url)
.then((response) => response.json())
.then((data) => {
resolve(data);
});
})
}
const getLocation = async (yextID) => {
const data = await getData("https://api.yext.com/v2/accounts/me/entities?api_key=" + process.env.YEXT_API_KEY + "&v=20191114&filter=%7B%22%24anywhere%22%3A%20%22" + yextID + "%22%7D&entityTypes=healthcareFacility");
// Transform the data into json
if( data && data.response && data.response.count === 1 ){
createNodeField({
node,
name: `yextLocation`,
value: data.response.entities[0]
});
} else {
console.log("NO LOCATIONS FOUND");
}
};
const getOrthos = async (yextOrthos) => {
let orthodontists = [];
yextOrthos.forEach( (ortho, i) => {
orthodontists.push(getData("https://api.yext.com/v2/accounts/me/entities?api_key=" + process.env.YEXT_API_KEY + "&v=20191114&filter=%7B%22%24anywhere%22%3A%20%22" + ortho.acf.yext_entity_ortho_id + "%22%7D&entityTypes=healthcareProfessional"));
});
// return this promise so the awaited getOrthos call resolves only after the fields are created
return Promise.all(orthodontists).then( (orthoData) => {
if( orthoData.length ){
let finalOrthos = [];
orthoData.forEach( (finalOrtho, x) => {
finalOrthos.push(finalOrtho.response.entities[0]);
});
createNodeField({
node,
name: `yextOrthos`,
value: finalOrthos
});
} else {
console.log("NO DOCTORS FOUND");
}
});
};
const getTextOrtho = async (yextID) => {
const data = await getData("https://api.yext.com/v2/accounts/me/entities?api_key=" + process.env.YEXT_API_KEY + "&v=20191114&filter=%7B%22%24anywhere%22%3A%20%22" + yextID + "%22%7D&entityTypes=healthcareProfessional");
if( data && data.response && data.response.count === 1 ){
createNodeField({
node,
name: `yextOrthodontist`,
value: data.response.entities[0]
});
if( data.response.entities[0].googleProfilePhoto && data.response.entities[0].googleProfilePhoto.url){
createNodeField({
node,
name: `yextProfilePicture`,
value: data.response.entities[0].googleProfilePhoto.url
});
let fileNode = await createRemoteFileNode({
url: data.response.entities[0].googleProfilePhoto.url, // string that points to the URL of the image
parentNodeId: node.id, // id of the parent node of the fileNode you are going to create
createNode, // helper function in gatsby-node to generate the node
createNodeId, // helper function in gatsby-node to generate the node id
cache, // Gatsby's cache
store, // Gatsby's redux store
});
// if the file was created, attach the new node to the parent node
if (fileNode) {
node.featuredImg___NODE = fileNode.id;
console.log("GOOGLE PROFILE NODE CREATED!")
} else {
console.log("ERROR! fileNode not Created!");
}
} else {
console.log("NO GOOGLE PROFILE PHOTO FOUND");
}
} else {
console.log("NO ORTHODONTISTS FOUND");
}
};
const getWpLocations = async (wpID) => {
const data = await getData(process.env.GATSBY_WP_BASEURL+ "/wp-json/perch_endpoint/v1/locations_by_orthodontist?orthodontist_id=" + wpID);
if( data ){
createNodeField({
node,
name: `wpLocations`,
value: data
});
} else {
console.log("NO ORTHODONTISTS FOUND");
}
}
if( node.internal.type === "wordpress__wp_location"){
const yextID = node.acf.yext_entity_id;
const yextOrthos = node.acf.location_orthodontists;
try {
await getLocation(yextID);
await getOrthos(yextOrthos);
} catch (error) {
console.log(error);
}
}
if( node.internal.type === "wordpress__wp_orthodontist"){
const yextID = node.acf.yext_entity_ortho_id;
const wpID = node.wordpress_id;
try {
await getTextOrtho(yextID);
await getWpLocations(wpID);
} catch (error) {
console.log(error);
}
}
}
exports.createPages = async ({ graphql, actions }) => {
const { createPage } = actions;
const result = await graphql(`
{
locations: allWordpressWpLocation(filter: {status: {eq: "publish"}}) {
nodes {
id
path
acf {
location_orthodontists {
acf {
yext_entity_ortho_id
}
}
yext_entity_id
}
}
}
pages: allWordpressPage(
filter: {
wordpress_id: {nin: [177, 183, 8, 42, 44, 185, 46]}
status: {eq: "publish"}
}) {
nodes {
id
wordpress_id
path
}
}
orthodontists: allWordpressWpOrthodontist(filter: {status: {eq: "publish"}}) {
nodes {
id
path
}
}
posts: allWordpressPost(filter: {status: {eq: "publish"}}) {
nodes {
slug
id
}
}
}
`);
// Check for any errors
if (result.errors) {
throw new Error(result.errors);
}
const { locations, pages, orthodontists, posts } = result.data;
const locationTemplate = path.resolve(`./src/templates/location.js`);
const pageTemplate = path.resolve(`./src/templates/page.js`);
const orthoTemplate = path.resolve(`./src/templates/orthodontist.js`);
const postTemplate = path.resolve(`./src/templates/post.js`);
const blogTemplate = path.resolve(`./src/templates/blog.js`);
locations.nodes.forEach(node => {
let orthodontists = [];
node.acf.location_orthodontists.forEach(ortho => {
orthodontists.push(ortho.acf.yext_entity_ortho_id);
});
let orthodontistList = orthodontists.join();
createPage({
path: `${node.path}`,
component: slash(locationTemplate),
context: {
id: node.id,
yextId: node.acf.yext_entity_id,
yextOrthoIds: orthodontists
},
});
});
pages.nodes.forEach(node => {
createPage({
path: `${node.path}`,
component: slash(pageTemplate),
context: {
id: node.id,
},
});
});
orthodontists.nodes.forEach(node => {
createPage({
path: `${node.path}`,
component: slash(orthoTemplate),
context: {
id: node.id,
},
});
});
posts.nodes.forEach(node => {
createPage({
path: `${node.slug}`,
component: slash(postTemplate),
context: {
id: node.id,
},
});
});
const postsPerPage = 12;
const numPages = Math.ceil(posts.nodes.length / postsPerPage);
Array.from({ length: numPages }).forEach((_, i) => {
createPage({
path: i === 0 ? `/blog` : `/blog/page/${i + 1}`,
component: slash(blogTemplate),
context: {
limit: postsPerPage,
skip: i * postsPerPage,
numPages,
currentPage: i + 1,
},
})
})
};
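The key change, distilled: onCreateNode is now an async function and every helper is awaited, so the promise it returns resolves only after createNodeField has run, and Gatsby waits for that promise before moving on to createPages. A stripped-down sketch of the pattern (the node type check is from the code above; the URL and field name here are placeholders):
const fetch = require('node-fetch');

exports.onCreateNode = async ({ node, actions }) => {
  const { createNodeField } = actions;
  if (node.internal.type !== 'wordpress__wp_location') return;
  // awaiting here means the returned promise resolves only after the field exists
  const data = await fetch('https://example.com/some-api').then((res) => res.json());
  createNodeField({ node, name: 'exampleField', value: data });
};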

Mongoose: How to update an existing element in array?

I was wondering if there is a better way to update an existing element in an array instead of hitting the database three times. If you have any ideas, I would appreciate it. Thank you!
const creatStock = async (symbol, webApiData) => {
try {
// reversed array
const webApiDataReversed = webApiData.reverse();
const query = { symbol };
const update = { $addToSet: { data: webApiDataReversed } };
const options = { upsert: true, new: true };
// create/update Stock
const stockResult = await Stock.findOneAndUpdate(query, update, options);
const lastElement = stockResult.data.length - 1;
const updatePull = {
$pull: { data: { date: stockResult.data[lastElement].date } },
};
// removes last date from data array
await Stock.findOneAndUpdate(query, updatePull);
// update Stock
await Stock.findOneAndUpdate(query, update);
} catch (ex) {
console.log(`creatStock error: ${ex}`.red);
}
};
Schema
const ChildSchemaData = new mongoose.Schema({
_id: false,
date: { type: mongoose.Types.Decimal128 },
open: { type: mongoose.Types.Decimal128 },
high: { type: mongoose.Types.Decimal128 },
low: { type: mongoose.Types.Decimal128 },
close: { type: mongoose.Types.Decimal128 },
volume: { type: mongoose.Types.Decimal128 },
});
const ParentSchemaSymbol = new mongoose.Schema({
symbol: {
type: String,
unique: true,
},
// Array of subdocuments
data: [ChildSchemaData],
});
module.exports.Stock = mongoose.model('Stock', ParentSchemaSymbol);
Output
Well, if you don't need the updated document returned, please try this one. It will just return a write result, and with this, things can be achieved in one DB call:
const creatStock = async (symbol, webApiData) => {
try {
// reversed array
const webApiDataReversed = webApiData.reverse();
const query = { symbol };
await Stock.bulkWrite([
{
updateOne:
{
"filter": query,
"update": { $pop: { data: 1 } }
}
}, {
updateOne:
{
"filter": query,
"update": {
$addToSet: {
data: webApiDataReversed
}
}
}
}
])
} catch (ex) {
console.log(`creatStock error: ${ex}`.red);
}
};
Ref: MongoDB bulkWrite
You can do it like this:
const creatStock = async (symbol, webApiData) => {
try {
// reversed array
const webApiDataReversed = webApiData.reverse();
const query = { symbol };
let stock = await Stock.findOne(query);
if(stock){
let stockData = JSON.parse(JSON.stringify(stock.data));
if(stockData.length>0){
stockData.pop();
}
stockData = stockData.concat(webApiDataReversed);
stock.data = stockData;
await stock.save();
}
} catch (ex) {
console.log(`creatStock error: ${ex}`.red);
}
};
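And for the narrower case in the question's title, updating one existing element of the array in place, MongoDB's filtered positional operator can do it in a single call. A minimal sketch against the schema above; updateStockClose, targetDate, and newClose are hypothetical names, not from the original answers:
const updateStockClose = async (symbol, targetDate, newClose) => {
  // update a single existing element of `data` in one call,
  // selecting it with an array filter on its date
  await Stock.updateOne(
    { symbol },
    { $set: { 'data.$[elem].close': newClose } },
    { arrayFilters: [{ 'elem.date': targetDate }] }
  );
};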

Categories

Resources