Async wait for function to be done - javascript

I'm struggling with async/await ordering: I need the code to wait for getAttachments and only proceed afterwards, but that isn't working at the moment. I have looked at some options on Stack Exchange, for example a callback function, but I haven't gotten it to work yet.
processJsonToXML()

async function processJsonToXML() {
  for (let i = 0; i < wordpressOutput.length; i++) {
    imageUrls = []
    let obj = wordpressOutput[i];
    let content = processContent(obj.Content)
    await getAttachments().then(attachments => {
      const filledXml = xmlTemplate(
        {
          title: obj.Title,
          content: content,
          date: obj.Date,
          summary: obj.Excerpt,
          unixTime: moment(obj.Date).unix(),
          attachments: attachments
        }
      )
      fs.writeFile(`xml-files/${obj.Title}.xml`, filledXml, function (err) {
        if (err) throw err;
        console.log('File is created successfully.');
      });
    })
  }
}
The second part is the code that fetches a URL and creates an object from it with the info I need in the first function.
async function getAttachments(){
  const attachments: attachment[] = []
  imageUrls.forEach(async url => {
    let filename = extractUsingRegex(url, /([a-zA-Z0-9\s_\\.\-\(\):])+(.png|.jpg)/gm);
    await fetch(url).then(response => {
      response.buffer().then(resolvedBuffer => {
        let attachment: attachment = {
          type: response.type,
          content: resolvedBuffer.toString('base64'),
          filename: filename,
          size: response.size.toString()
        }
        console.log(attachment);
        attachments.push(attachment)
      })
    });
  })
  return attachments
}

The first part of your code should look like this:
processJsonToXML();

async function processJsonToXML() {
  await Promise.all(
    wordpressOutput.map(async (obj) => {
      imageUrls = [];
      let content = processContent(obj.Content);
      let attachments = await getAttachments();
      const filledXml = xmlTemplate({
        title: obj.Title,
        content: content,
        date: obj.Date,
        summary: obj.Excerpt,
        unixTime: moment(obj.Date).unix(),
        attachments: attachments,
      });
      fs.writeFile(`xml-files/${obj.Title}.xml`, filledXml, function (err) {
        if (err) throw err;
        console.log("File is created successfully.");
      });
    })
  );
}
And the second part of your code should look like this:
async function getAttachments(){
  const attachments: attachment[] = []
  await Promise.all(
    imageUrls.map(async url => {
      let filename = extractUsingRegex(url, /([a-zA-Z0-9\s_\\.\-\(\):])+(.png|.jpg)/gm);
      await fetch(url).then(response => {
        // return the inner promise so the await (and Promise.all) also waits for the buffer to resolve
        return response.buffer().then(resolvedBuffer => {
          let attachment: attachment = {
            type: response.type,
            content: resolvedBuffer.toString('base64'),
            filename: filename,
            size: response.size.toString()
          }
          console.log(attachment);
          attachments.push(attachment)
        })
      });
    })
  );
  return attachments
}
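One thing to be aware of in the version above: fs.writeFile with a callback is fire-and-forget, so processJsonToXML can resolve before the XML files are actually written, and write errors only surface inside the callback. A minimal sketch of awaiting the writes as well, assuming Node's promise-based fs API (Node 10+); writeXmlFile is just an illustrative helper name:

const fsp = require('fs').promises;

// illustrative helper: awaiting the promise-based API means the caller's
// await / Promise.all also waits for the file to be written
async function writeXmlFile(title, filledXml) {
  await fsp.writeFile(`xml-files/${title}.xml`, filledXml);
  console.log('File is created successfully.');
}

Inside the map callback you would then call await writeXmlFile(obj.Title, filledXml) instead of fs.writeFile with a callback.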
I hope this will help

Related

Node function not working, await part is not getting executed in the given function

While hitting the API I'm calling a function from my services/ElasticSearch.js, and for some reason the function there stops working after the axios part.
In the file below I've called elasticService.updateDocument; this function is imported from another file.
'''
class ProductController {
  constructor() { }

  async saveProduct(req, res) {
    console.log('ITs coming here')
    let { _id, Product } = req.body;
    if (_id) delete req.body._id;
    let elasticResult;
    try {
      if (Product && Product.Category) {
        req.body.Category = Product.Category
        delete Product.Category
      }
      if (Product && Product.URL) {
        const exists = await ProductService.checkProductByUrl(Product.URL);
        _id = exists._id
      }
      const result = await ProductService.saveProduct(req.body, _id);
      if (result) {
        if (_id) {
          console.log('Here.... UPDATE')
          const savedProduct = await ProductModel.createPayload(req.body);
          console.log(savedProduct,'saved_product')
          let elaticDoc = await this.createElasticDocData(savedProduct);
          console.log(elaticDoc.id,'elasticResult')
          elaticDoc.id = result._id;
          elaticDoc = new Elastic(elaticDoc);
          console.log(elaticDoc,'<----------elaticdoc-------------->')
          elasticResult = await elasticService.updateDocument(JSON.stringify(elaticDoc), req.body.Category)
          console.log(elasticResult,'elasticResult')
        }
        else {
          console.log('Here.... ADD')
          const savedProduct = await ProductModel.createPayload(result);
          let elaticDoc = await this.createElasticDocData(savedProduct);
          elaticDoc.id = result._id;
          elaticDoc = new Elastic(elaticDoc);
          elasticResult = await elasticService.createDocument(JSON.stringify(elaticDoc), req.body.Category)
        }
        const response = new Response(1, "Product is saved successfully", "", "", { product: result, elasticResult: elasticResult });
        return res.status(200).send(response);
      }
      const response = new Response(0, "Error in saving Product", 0, "Product not saved", {});
      return res.status(200).send(response);
    } catch (error) {
      const response = new Response(0, "Unexpected Error", 0, error, {});
      return res.status(400).send(response);
    }
  }
'''
This is the ElasticAppSearch file where the above-mentioned function comes from, and for some reason it stops working after the axios.patch part.
'''
const private_key = process.env.elastic_private_key
const search_key = process.env.elastic_search_key
const axios = require("axios")

class ElasticAppSearch {
  async updateDocument(body, engine) {
    console.log('Its coming in updateDOCS here')
    const response = await axios.patch(`${process.env.elastic_url}/${engine}/documents`, body, {
      headers: {
        Authorization: `Bearer ${private_key}`,
      },
    });
    console.log(response,'<--===-=-=-=-=-=-=-=-=-=-=-=-response')
    return response.data
  }
'''
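One thing worth checking: if the awaited axios.patch rejects (for example with a 4xx/5xx from the Elastic App Search endpoint), nothing after it in updateDocument runs and the rejection only surfaces in the controller's catch block as "Unexpected Error". A minimal debugging sketch, under the same assumptions as the snippet above, that logs the failure inside updateDocument before rethrowing; the explicit Content-Type header is an assumption, since the body is passed as a JSON string:

async updateDocument(body, engine) {
  try {
    const response = await axios.patch(`${process.env.elastic_url}/${engine}/documents`, body, {
      headers: {
        Authorization: `Bearer ${private_key}`,
        'Content-Type': 'application/json', // assumption: body is already a JSON string
      },
    });
    return response.data;
  } catch (err) {
    // for HTTP errors, err.response.data usually carries the API's error payload
    console.error('updateDocument failed:', err.response ? err.response.data : err.message);
    throw err;
  }
}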

GraphQL mutation wrong result

I am having some trouble mutating data with GraphQL. I am saving the original image into my database and then mutating it with GraphQL (external).
At the following points in the code I get the correct data:
####################1111111111111111111####################
####################222222222222222####################
####################333333333333333####################
But at point
####################444444444444444444####################
after I mutate the data I get the wrong image src. It is the edited image src and not the original src I retrieved from the database in my revertImages() function.
Although I pass the correct variable "newVariable" with the correct data, the mutation result reflects the edited data I had previously passed rather than the newVariable data. Do I maybe need to clear the cache?
The newVariable data is:
{
  productId: 'gid://shopify/Product/6166892019882',
  image: {
    altText: '',
    id: 'gid://shopify/ProductImage/23268973543594',
    src: 'https://cdn.shopify.com/s/files/1/0508/3516/1258/products/180622-05-2.jpg?v=1611416719'
  }
}
After mutation the result is:
{
  productImageUpdate: {
    image: {
      altText: null,
      id: 'gid://shopify/ProductImage/23268973543594',
      src: 'https://cdn.shopify.com/s/files/1/0508/3516/1258/products/180622-05-2.jpg?v=1611416762'
    },
    userErrors: []
  }
}
Here are my functions:
const revertImages = async (ctx) => {
  let dataToRevert = ctx.request.body.data;
  const { accessToken } = ctx.session;
  let productsDoc = await Product.find({ productId: { $in: dataToRevert.productId } });
  if (!productsDoc) {
    ctx.throw('Could not find products');
  }
  console.log('####################1111111111111111111####################');
  console.log(productsDoc);
  console.log('####################1111111111111111111####################');
  const res = await revertProductImages(productsDoc, accessToken);
  if (res) {
    console.log('Products reverted');
    ctx.response.status = 200;
  }
}

async function revertProductImages(products, accessToken) {
  console.log('Revert Product Images')
  return new Promise((resolve, reject) => {
    const map = {};
    let updateProduct = null;
    let variables = null;
    products.forEach(async (item) => {
      map[item.productId] = map[item.productId] + 1 || 1;
      variables = {
        "productId": `gid://shopify/Product/${item.productId}`,
        "image": {
          "altText": "",
          "id": `gid://shopify/ProductImage/${item.imageId}`,
          "src": item.originalSrc
        }
      };
      console.log('####################222222222222222####################');
      console.log(variables);
      console.log('####################222222222222222####################');
      updateProduct = await updateProductImage(UPDATE_PRODUCT_BY_ID, variables, accessToken);
      if (updateProduct) {
        const res = await removeProductFromDb(map);
        if (res) {
          resolve(res);
        }
      }
    });
  })
}

async function updateProductImage(queryName, variables, token) {
  console.log('updateProductImage..');
  const newVariable = variables;
  console.log('####################333333333333333####################');
  console.log(newVariable);
  console.log('####################333333333333333####################');
  return new Promise(async (resolve, reject) => {
    let res = null;
    try {
      res = await axios({
        headers: {
          'X-Shopify-Access-Token': token,
        },
        method: 'post',
        data: {
          query: queryName,
          variables: newVariable,
        },
        url: url,
      });
    } catch (err) {
      console.log(err.message);
    }
    if (res) {
      console.log('Image updated ✔️');
      console.log('####################444444444444444444####################');
      console.log(res.data.data);
      console.log('####################444444444444444444####################');
      resolve(res);
    } else {
      reject('Can not update image');
    }
  }).catch((err) => {
    console.log(err);
  });
}
Maybe I didn't understand correctly, but here's an answer...
I cannot find the src property in the available fields for this mutation (I'm not talking about the ImageInput but the returned image object).
Can you try with the following request:
mutation productImageUpdate($productId: ID!, $image: ImageInput!) {
  productImageUpdate(productId: $productId, image: $image) {
    image {
      id
      originalSrc
      transformedSrc
    }
    userErrors {
      field
      message
    }
  }
}
The docs say:
originalSrc: The location of the original image as a URL.
transformedSrc: The location of the transformed image as a URL.
Maybe what you are expecting is in originalSrc?
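For reference, with the query above the URLs would then be read from the mutation payload roughly like this (a sketch against the axios response shape already used in updateProductImage; only the productImageUpdate field names come from the query above):

const result = res.data.data.productImageUpdate;
if (result.userErrors.length > 0) {
  // surface validation errors reported by the API instead of silently resolving
  console.log(result.userErrors);
} else {
  console.log(result.image.originalSrc);    // location of the original image
  console.log(result.image.transformedSrc); // location of the transformed image
}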

multi file upload with skipper-better-s3 and sailjs returns the same key

As seen in the title, I am currently using Sails.js + skipper-better-s3 for S3 uploads. I started with uploading one file, which works great; then, because of a change request, I needed to upload multiple files at once, so I added a for loop. But by doing this, all the keys are the same and only one file ends up uploaded: the last uploaded file, but with the first upload's filename.
I did read some articles, and people say the problem is that the for loop is synchronous while the file upload is asynchronous, and that the solution is recursion, which I tried too, but no luck; the same thing happens.
My recursive code is below...
s3_upload_multi: async (req, res) => {
  const generatePath = (rootPath, fieldName) => {
    let path;
    // this is just a switch statement here to check which fieldName is provided then value of path will depend on it
    // as for the other two variable is just checking if upload content type is correct
    return { path };
  };

  const processUpload = async ({
    fieldName,
    awsOp,
    fileExtension,
    rootPath,
    fileName,
  }) => {
    return new Promise(function (resolve, reject) {
      req.file(fieldName).upload(awsOp, async (err, filesUploaded) => {
        if (err) reject(err);
        const filesUploadedF = filesUploaded[0]; // F = first file
        const response = {
          status: true,
          errCode: 200,
          msg: 'OK',
          response: {
            url: filesUploadedF.extra.Location,
            size: filesUploadedF.size,
            type: fileExtension,
            filename: filesUploadedF.filename,
            key: filesUploadedF.extra.Key,
            field: fieldName,
          }
        };
        resolve(response);
      });
    });
  }

  const process_recur = async (files, fieldName) => {
    if (files.length <= 0) return;
    const fileUpload = files[0].stream;
    const rootPath = `${sails.config.aws.upload.path.root}`;
    const fileCType = fileUpload.headers['content-type'];
    // console.log(fileCType, 'fileCType');
    const { path } = generatePath(rootPath, fieldName);
    const fileName = fileUpload.filename;
    const fileExtension = fileUpload.filename.split('.').pop();
    const genRan = await UtilsService.genRan(8);
    const fullPath = `${path}${genRan}-${fileName}`;
    const awsOp = {
      adapter: require('skipper-better-s3'),
      key: sails.config.aws.access_key,
      secret: sails.config.aws.secret_key,
      saveAs: fullPath,
      bucket: sails.config.aws.bucket,
      s3params: {
        ACL: 'public-read'
      },
    };
    const config = {
      fieldName,
      awsOp,
      fileExtension,
      rootPath,
      fileName,
    }
    const procceed = await processUpload(config);
    files.shift();
    await process_recur(files, fieldName);
  };

  try {
    const fieldName = req._fileparser.upstreams[0].fieldName;
    const files = req.file(fieldName)._files;
    await process_recur(files, fieldName);
  } catch (e) {
    console.log(e, 'inside UploadService');
    return false;
  }
}
Below is my code using a for loop, which is quite similar to the above.
s3_upload_multi: async (req, res) => {
  const generatePath = (rootPath, fieldName) => {
    let path;
    // this is just a switch statement here to check which fieldName is provided then value of path will depend on it
    // as for the other two variable is just checking if upload content type is correct
    return { path };
  };

  const processUpload = async ({
    fieldName,
    awsOp,
    fileExtension,
    rootPath,
    fileName,
  }) => {
    return new Promise(function (resolve, reject) {
      req.file(fieldName).upload(awsOp, async (err, filesUploaded) => {
        if (err) reject(err);
        const filesUploadedF = filesUploaded[0]; // F = first file
        const response = {
          status: true,
          errCode: 200,
          msg: 'OK',
          response: {
            url: filesUploadedF.extra.Location,
            size: filesUploadedF.size,
            type: fileExtension,
            filename: filesUploadedF.filename,
            key: filesUploadedF.extra.Key,
            field: fieldName,
          }
        };
        resolve(response);
      });
    });
  }

  try {
    const fieldName = req._fileparser.upstreams[0].fieldName;
    const files = req.file(fieldName)._files;
    for (const file of files) {
      const fileUpload = file.stream;
      const rootPath = `${sails.config.aws.upload.path.root}`;
      const fileCType = fileUpload.headers['content-type'];
      // console.log(fileCType, 'fileCType');
      const fileName = fileUpload.filename;
      const { path } = generatePath(rootPath, fieldName);
      const fileExtension = fileUpload.filename.split('.').pop();
      // using a variable here because if this is an image, a thumbnail will be created with the same name as the original one
      const genRan = await UtilsService.genRan(8);
      const fullPath = await `${path}${genRan}-${fileName}`;
      const awsOp = {
        adapter: require('skipper-better-s3'),
        key: sails.config.aws.access_key,
        secret: sails.config.aws.secret_key,
        saveAs: fullPath,
        bucket: sails.config.aws.bucket,
        s3params: {
          ACL: 'public-read'
        },
      };
      const config = {
        fieldName,
        awsOp,
        fileExtension,
        rootPath,
        fileName,
      }
      const procceed = await processUpload(config);
      console.log(procceed, 'procceed');
    }
  } catch (e) {
    console.log(e, 'inside UploadService');
    return false;
  }
}
Which part is my mistake that's causing this behavior? I checked my path: it's totally correct, with the correct filename too, when I console.log it.
Thanks in advance for any suggestions and help.
It took me quite a lot of time to figure this out ages ago.
Especially since you are using skipper-better-s3, which doesn't have as detailed documentation as skipper. Going back to the skipper documentation, the saveAs field doesn't only take a string but also a function, which you can use to get each file's filename and return the key as needed, so you actually don't need either recursion or a for loop at all.
For example, with some of your code:
const awsOp = {
  adapter: require('skipper-better-s3'),
  key: sails.config.aws.access_key,
  secret: sails.config.aws.secret_key,
  saveAs: (__newFileStream, next) => {
    // generatePath is what you wrote
    // __newFileStream.filename is the filename of each file before uploading
    // the path is pretty much the s3 key, which includes your filename too
    const { path } = generatePath(rootPath, __newFileStream.filename, fieldName);
    return next(undefined, path);
  },
  bucket: sails.config.aws.bucket,
  s3params: {
    ACL: 'public-read'
  },
};
Skipper documentation: https://www.npmjs.com/package/skipper#customizing-at-rest-filenames-for-uploads
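Put together, the handler then no longer needs process_recur or the for loop: one upload() call handles every file in the field, and skipper invokes the saveAs function once per file so each one gets its own key. A rough sketch under the same assumptions as the question's code (awsOp is the object above; generatePath, rootPath and fieldName come from the question; building and sending the HTTP response is omitted):

s3_upload_multi: async (req, res) => {
  const fieldName = req._fileparser.upstreams[0].fieldName;
  const filesUploaded = await new Promise((resolve, reject) => {
    // a single upload() call for all files in this field
    req.file(fieldName).upload(awsOp, (err, files) => {
      if (err) return reject(err);
      resolve(files);
    });
  });
  // same shape as the question's response objects, one entry per uploaded file
  return filesUploaded.map(f => ({
    url: f.extra.Location,
    key: f.extra.Key,
    filename: f.filename,
    size: f.size,
  }));
}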

Render method from class with node express

I have a class with methods which should give me the domains from an API.
That works so far. But if I want to render it with Node Express, I get an array with 1.2.3. without the domain names.
I think my problem is related to async/await?!
Here is a snippet from my class method:
class ISPConfig {
  constructor(base_url, options) {
    this.base_url = base_url;
    this.options = options;
  }

  async _call() {
    ... // gives me the sessionId
  }

  async getDataByPrimaryId(ispFunction, param) {
    try {
      const results = await axios.post(this.base_url + ispFunction, {
        session_id: await this._call(),
        primary_id: param
      });
      return await results.data.response;
      //console.log(results.data.response);
    } catch (err){
      console.log(err);
    }
  }
And here is a snippet from my app.js:
const renderHome = (req, res) => {
  let domains = [],
    message = '';
  let a = new ispwrapper.ISPConfig(BASE_URL, OPTIONS)
  a.getDataByPrimaryId('sites_web_domain_get', { active: 'y' })
    .then(response => {
      for (let i = 0; i < response.length; i++){
        domains = response[i]['domain'].domains;
      }
    })
    .catch(err => {
      message = 'Error when retriving domains from ISPApi';
    })
    .then(() => {
      res.render('home', { // 'home' template file for output render
        title: 'ISPConfig',
        heading: 'Welcome to my ISPConfig Dashboard',
        homeActive: true,
        domains,
        message
      });
    });
};
With push(domains) I get only 1.2.3. on the HTML page, which corresponds exactly to the three active domains of my API, but without the domain names. :(
But if I console.log(response[i]['domain'].domains) inside the for loop, I get all the domains with their names in the console.
Does anyone see my mistake?
Here is my solution:
const renderHome = async (req, res) => {
  let domain = [],
    message = '';
  try {
    let a = new ispwrapper.ISPConfig(BASE_URL, OPTIONS);
    const response = await a.getDataByPrimaryId('sites_web_domain_get', { active: 'y' });
    for (let i = 0; i < response.length; i++){
      domain.push(response[i].domain);
    }
  } catch(err) {
    message = 'Error when retriving domains from ISPApi';
  } finally {
    res.render('home', { // 'home' template file for output render
      title: 'ISPConfig',
      heading: 'Welcome to my ISPConfig Dashboard',
      homeActive: true,
      domain,
      message
    });
  }
};
Try making your route async too like:
const renderHome = async (req, res) => {
  let domains = [],
    message = '';
  try {
    let a = new ispwrapper.ISPConfig(BASE_URL, OPTIONS)
    const response = await a.getDataByPrimaryId('sites_web_domain_get', { active: 'y' })
    for (let i = 0; i < response.length; i++){
      domains.push(response[i].domain);
    }
  } catch(err) {
    message = 'Error when retriving domains from ISPApi';
  } finally {
    res.render('home', { // 'home' template file for output render
      title: 'ISPConfig',
      heading: 'Welcome to my ISPConfig Dashboard',
      homeActive: true,
      domains,
      message
    });
  }
};
Yes, OK, the undefined is no problem anymore. But how do I get the domains displayed on the HTML page? The call is grey.
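For the follow-up about displaying them: once domains is passed to res.render as above, the view only needs to loop over the array. A sketch assuming a Handlebars template (the question doesn't show which view engine is used, and home.hbs is a hypothetical file name):

<ul>
  {{#each domains}}
    <li>{{this}}</li>
  {{/each}}
</ul>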

Angular 2 Synchronous File Upload

I am trying to upload a file from an Angular 2 application to a Web API that takes the file as a byte array.
I am not able to pass the byte array from the Angular 2 page to the Web API. It looks like the FileReader read method is asynchronous. How do I make this a synchronous call, or wait for the file content to be loaded before executing the next line of code?
Below is my code.
//attachment on browse - when the browse button is clicked
//It only assigns the file to a local variable (attachment)
fileChange = (event) => {
  var files = event.target.files;
  if (files.length > 0) {
    this.attachment = files[0];
  }
}

//when the submit button is clicked
onSubmit = () => {
  //Read the content of the file and store it in local variable (fileData)
  let fr = new FileReader();
  let data = new Blob([this.attachment]);
  fr.readAsArrayBuffer(data);
  fr.onloadend = () => {
    this.fileData = fr.result; //Note : This always "undefined"
  };
  //build the attachment object which will be sent to Web API
  let attachment: Attachment = {
    AttachmentId: '0',
    FileName: this.form.controls["attachmentName"].value,
    FileData: this.fileData
  }
  //build the purchase order object
  let order: UpdatePurchaseOrder = {
    SendEmail: true,
    PurchaseOrderNumber: this.form.controls["purchaseOrderNumber"].value,
    Attachment: attachment
  }
  //call the web api and pass the purchaseorder object
  this.updatePoService
    .updatePurchaseOrder(this.form.controls["purchaseOrderRequestId"].value, order)
    .subscribe(data => {
      if (data) {
        this.saveSuccess = true;
      }
      else {
        this.saveSuccess = false;
      }
    },
    error => this.errors = error,
    () => this.res = 'Completed'
    );
}
Any hint would be useful.
regards,
-Alan-
You cannot make this async call synchronous. But you can take advantage of observables to wait for the file to be read:
//when the submit button is clicked
onSubmit = () => {
  let file = Observable.create((observer) => {
    let fr = new FileReader();
    let data = new Blob([this.attachment]);
    fr.readAsArrayBuffer(data);
    fr.onloadend = () => {
      observer.next(fr.result);
      observer.complete()
    };
    fr.onerror = (err) => {
      observer.error(err)
    }
    fr.onabort = () => {
      observer.error("aborted")
    }
  });
  file.map((fileData) => {
    //build the attachment object which will be sent to Web API
    let attachment: Attachment = {
      AttachmentId: '0',
      FileName: this.form.controls["attachmentName"].value,
      FileData: fileData
    }
    //build the purchase order object
    let order: UpdatePurchaseOrder = {
      SendEmail: true,
      PurchaseOrderNumber: this.form.controls["purchaseOrderNumber"].value,
      Attachment: attachment
    }
    return order;
  })
  .switchMap(order => this.updatePoService.updatePurchaseOrder(this.form.controls["purchaseOrderRequestId"].value, order))
  .subscribe(data => {
    if (data) {
      this.saveSuccess = true;
    } else {
      this.saveSuccess = false;
    }
  },
  error => this.errors = error,
  () => this.res = 'Completed'
  );
}
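One caveat: with the RxJS 5 style used above (the version Angular 2 shipped with), the map and switchMap operators are patch operators and Observable has to be imported explicitly, roughly like this; in later RxJS versions you would use pipe(map(...), switchMap(...)) instead:

import { Observable } from 'rxjs/Observable';
import 'rxjs/add/operator/map';
import 'rxjs/add/operator/switchMap';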
I arrived here looking for a solution to a similar issue. I'm performing requests to an endpoint which can respond with a binary blob if everything goes well, or a JSON file in the event of an error.
this.httpClient.post(urlService, bodyRequest,
  {responseType: 'blob', headers: headers})
  .pipe(map((response: Response) => response),
    catchError((err: Error | HttpErrorResponse) => {
      if (err instanceof HttpErrorResponse) {
        // here, err.error is a BLOB containing a JSON String with the error message
      } else {
        return throwError(ErrorDataService.overLoadError(err, message));
      }
    }));
As FileReaderSync apparently doesn't work in Angular 6, I took n00dl3's solution (above) to throw the error after parsing the Blob content:
return this.httpClient.post(urlService, bodyRequest,
  {responseType: 'blob', headers: headers})
  .pipe(map((response: Response) => response),
    catchError((err: Error | HttpErrorResponse) => {
      const message = `In TtsService.getTts(${locale},${outputFormat}). ${err.message}`;
      if (err instanceof HttpErrorResponse) {
        const $errBlobReader: Observable<HttpErrorResponse> = Observable.create((observer) => {
          const fr = new FileReader();
          const errorBlob = err.error;
          fr.readAsText(errorBlob, 'utf8');
          fr.onloadend = () => {
            const errMsg = JSON.parse(fr.result).message;
            const msg = `In TtsService.getTts(${locale},${outputFormat}). ${errMsg}`;
            observer.error(ErrorDataService.overLoadError(err, msg));
          };
          fr.onerror = (blobReadError) => {
            observer.error(blobReadError);
          };
          fr.onabort = () => {
            observer.error('aborted');
          };
        });
        return $errBlobReader;
      } else {
        return throwError(ErrorDataService.overLoadError(err, message));
      }
    }));
Thanks! You really saved my day!
