Unable to upload file to S3 using Lambda written in NodeJS

I'm trying to upload a file (PDF/JPG) using a Lambda function written in Node.js, triggering the request from Postman, but the code is not working at all. How should I send the file? Should it be base64-encoded, or sent with the type 'File' in the request body?
Here is my defective code:
'use-strict'
const AWS = require("aws-sdk");
const logger = require('./logger').logger;
const moment = require('moment');
const fileType = ('file-type');
const { Buffer } = require('buffer');
//const { fileTypeFromFile } = 'file-type';
const ddbTable = process.env.RUNTIME_DDB_TABLE_FREE_USER_DOCUMENT;
const s3TempBucket = process.env.RUNTIME_S3_TEMP_BUCKET;
const s3 = new AWS.S3();

const getFile = (fileMime, buffer, userId) => {
  let fileExt = fileMime.ext;
  let hash = sha1(new Buffer(new Date().toString()));
  let now = moment().format('YYYY-MM-DD HH:mm:ss');
  let filePath = hash + '/';
  let fileName = unixTime(now) + '.' + fileExt;
  let fileFullName = filePath + fileName;
  let fileFullPath = s3TempBucket + userId + fileFullName;
  const params = {
    Body: buffer,
    Bucket: s3TempBucket,
    Key: fileName
  };
  let uploadFile = {
    size: buffer.toString('ascii').length,
    type: fileMime.mime,
    name: fileName,
    fullPath: fileFullPath
  }
  return {
    'params': params,
    'uploadFile': uploadFile
  }
}

exports.lambdaHandler = (event, context) => {
  logger.info("Event::", event);
  logger.info('Uploading file to bucket::', s3TempBucket);
  let body, data;
  let statusCode = 200;
  const headers = {
    'Content-Type': 'application/json',
    'Access-Control-Allow-Headers': 'Content-Type',
    'Access-Control-Allow-Origin': '*',
    'Access-Control-Allow-Methods': '*'
  };
  //const user = event.body.user;
  let request = event.body;
  let base64String = request.base64String;
  //let buffer = Buffer.from(JSON.stringify(base64String), 'base64');
  let buffer = new Buffer(base64String, 'base64');
  let fileMime = fileType(buffer);
  logger.info(fileMime);
  if (fileMime === null) {
    return context.fail('String supplied is not file type');
  }
  //let file = getFile(fileMime, buffer, user.id);
  let file = getFile(fileMime, buffer, 'b06eb6f4-0ff0-5cb5-a41c-e000af66c8e9');
  let params = file.params;
  try {
    //await new Promise((resolve, reject) => {
    s3.putObject(params, (err, results) => {
      if (err) reject(err);
      else {
        console.log(results);
        body = results;
        resolve(results)
      }
    });
    // });
  } catch (err) {
    logger.info(err);
    statusCode = 400;
    body = err.message;
    return err;
  } finally {
    body = JSON.stringify(data);
  }
  return {
    statusCode,
    body,
    headers
  };
}
The following error appears in CloudWatch at the line const buffer = Buffer.from(selectedFile, 'base64'):
2022-01-30T07:20:33.019Z 4c916aca-93e2-4846-84a2-d7f048f1de52 ERROR Invoke Error
{
"errorType": "TypeError",
"errorMessage": "The first argument must be of type string or an instance of Buffer, ArrayBuffer, or Array or an Array-like Object. Received undefined",
"code": "ERR_INVALID_ARG_TYPE",
"stack": [
"TypeError [ERR_INVALID_ARG_TYPE]: The first argument must be of type string or an instance of Buffer, ArrayBuffer, or Array or an Array-like Object. Received undefined",
" at new NodeError (internal/errors.js:322:7)",
" at Function.from (buffer.js:334:9)",
" at Runtime.exports.lambdaHandler [as handler] (/var/task/app.js:62:18)",
" at Runtime.handleOnce (/var/runtime/Runtime.js:66:25)"
]
}
From Postman, I'm sending the document (in base64 format) in the file attribute and the document's name in the name attribute. The event logged by the Lambda looks like this:
info: Event::
{
"body": "{\n \"base64String\": \"/9j/4AAQSkZJRgABAQAAAQABAAD/2wCEAAcHBwcIBwgJCQgMDAsMDBEQDg4QERoSFBIUEhonGB0YGB0YJyMqIiAiKiM+MSsrMT5IPDk8SFdOTldtaG2Pj8ABBwcHBwgHCAkJCAwMCwwMERAODhARGhIUEhQSGicYHRgYHRgnIyoiICIqIz4xKysxPkg8OTxIV05OV21obY+PwP/CABEICHAPAAMBIgACEQEDEQH/xAAcAAEAAgMBAQEAAAAAAAAAAAAABwgBBQYEAwL/2gAI

Related

How to upload files using Axios when we know the full path?

I'm trying to upload a file using Axios, but I want to use just a string with the file path. With the code below it is working:
<input
  id="select-files"
  style="visibility: hidden"
  type="file"
  multiple
  @change="handleFilesUpload($event)"
/>
But when I tried to use createReadStream it did not work. I wonder how I could convert these file paths to event.target.files.
I already tried the code below, but it does not work:
let data = {
THE_FILE: "",
BRANCH_ID: this.$store.state.starv.localUser.DOCTOR_INFO["BRANCH_ID"],
ACC_NO: this.locationItem["ACC_NO"],
CHART_NO: this.locationItem["CHART_NO"],
EMP_ID: this.$store.state.starv.localUser.DOCTOR_INFO["EMP_ID"],
CO_EMP_ID: this.doctorList.toString(),
ST: "telehealthclient",
NEW_NAME: "",
MAID: LocalData.getComputerId(),
}
/*
Iterate over any file sent over appending the files to the form data.
*/
data["THE_FILE"] = window.fs.createReadStream(filePath)
let bodyFormData = new FormData()
// if (THE_FILE) {
// bodyFormData.append("THE_FILE", THE_FILE)
// }
for (let key in data) {
bodyFormData.append(key, data[key])
}
I already found the solution to this problem; what we need to do is below:
1. Encode our file to base64
base64_encode(file) {
// read binary data
let bitmap = window.fs.readFileSync(file);
// convert binary data to base64 encoded string
return new Buffer(bitmap).toString("base64");
},
2. Create a File object from our base64 (data URL)
dataURLtoFile(dataurl, filename) {
const arr = dataurl.split(",");
const mime = arr[0].match(/:(.*?);/)[1];
const bstr = atob(arr[1]);
let n = bstr.length;
const u8arr = new Uint8Array(n);
while (n) {
u8arr[n - 1] = bstr.charCodeAt(n - 1);
n -= 1; // to make eslint happy
}
return new File([u8arr], filename, { type: mime });
},
3. Create form data using the form-data library
4. Upload it with Axios as multipart/form-data
Full code is below
base64_encode(file) {
// read binary data
let bitmap = window.fs.readFileSync(file);
// convert binary data to base64 encoded string
return new Buffer(bitmap).toString("base64");
},
dataURLtoFile(dataurl, filename) {
const arr = dataurl.split(",");
const mime = arr[0].match(/:(.*?);/)[1];
const bstr = atob(arr[1]);
let n = bstr.length;
const u8arr = new Uint8Array(n);
while (n) {
u8arr[n - 1] = bstr.charCodeAt(n - 1);
n -= 1; // to make eslint happy
}
return new File([u8arr], filename, { type: mime });
},
uploadScreenRecord(data) {
return new Promise((resolve, reject) => {
// #1 Convert to base64 first
let base64_video = this.base64_encode(data.file);
// #2 Create file url object from base64
let filename = path.parse(data.file).base;
let fileURLobject = this.dataURLtoFile(
"data:video/mp4;base64," + base64_video,
filename
);
// #3 Create form data from form-data library
const formData = new formData_();
formData.append("THE_FILE", fileURLobject, filename);
for (let key in data) {
if (key != "file") {
formData.append(key, data[key]);
}
}
// #4 Send to server
let url;
if (SETTING.imedtacDomain.hostname == undefined) {
url = SETTING.webservice.imedtacProtocol + "//" + defaultDomain;
} else {
url =
SETTING.webservice.imedtacProtocol + "//" + SETTING.imedtacDomain.hostname;
}
axios
.post(url + SETTING.imedtacAPI.uploadFile.URL, formData, {
headers: {
"Access-Control-Allow-Origin": "*",
"Content-Type": "multipart/form-data",
},
timeout: 30000,
})
.then(function (response) {
//Return Patient
console.log("[File debug] This is upload file response %o", response);
if (response.data.success) {
resolve(response.data.FILE_URL);
} else {
reject(response.data.message + " screen record upload error");
}
})
.catch(function (error) {
reject(error);
});
});
},
submitFilesTest() {
return new Promise((resolve, reject) => {
//Data
let data = {
file: "/Users/ivanhutomo/Downloads/B0120221214151850_A.mp4",
BRANCH_ID: "xxx",
ACC_NO: "xx",
CHART_NO: "xx",
EMP_ID: "xx",
CO_EMP_ID: "xx",
ST: "xx",
NEW_NAME: "",
MAID: "xx",
};
this.uploadScreenRecord(data)
.then((response) => {
logger.debug("[File debug] File upload URL %o", response);
resolve(response);
})
.catch((error) => {
logger.debug("[File debug] File %o", error);
reject(error);
});
});
},
submitFilesTest()

AWS problems: copy a large file in the same bucket but in different folder

I have a problem with a Lambda function; here is the problem:
My Lambda function needs to handle a PutObject event:
When the PutObject event is handled, I have to copy the uploaded large file to the same bucket but into a different folder.
I tried s3.copyObject(), s3.putObject() and createMultipartUpload() [implementing the whole loop to handle this: uploadPart, etc.], but nothing worked!
The event is captured, but afterwards the function does not print anything in the console, neither failure nor success.
Here the lambda:
// dependencies
const AWS = require('aws-sdk');
const util = require('util');
const fs = require('fs');
// get reference to S3 client
const s3 = new AWS.S3();
const uploadPart = (params, chunk, partno, final, cb) => {
console.log("##### Upload part: ", partno);
s3.uploadPart({
Body: chunk,
Bucket: params.Bucket,
Key: params.Key,
UploadId: params.UploadId,
PartNumber: partno
}, (err, res) => {
if (err) { console.log('## Errore: failed part uploaded: ', err); return; }
if (cb) cb(null, { size: chunk.length, ETag: res.ETag });
});
};
const completeMultipartUpload = (params, PartMap) => {
console.log("##### 4. INIT COMPLETE MULTIPART UPLOAD");
s3.completeMultipartUpload({
Bucket: params.Bucket,
Key: params.Key,
UploadId: params.UploadId,
MultipartUpload: PartMap
}, (err, data) => {
if (err) { console.log('## Errore: failed complete multipart upload: ', err); return; }
console.log('###### 5. Upload completed: ', JSON.stringify(data));
});
};
exports.handler = async (event, context, callback) => {
// Read options from the event parameter.
console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
const srcBucket = event.Records[0].s3.bucket.name;
const srcKey = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
const maxchunksize = event.Records[0].s3.object.size;
const dstBucket = "bucketsrctest";
console.log("SRC KEY: ", srcKey, ", File Size: ", ((maxchunksize / 1024) / 1024), " MB");
// Infer the file type from the file suffix.
const typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
console.log("Could not determine the file type.");
return;
}
// Check that the file type is supported
const fileType = typeMatch[1].toLowerCase();
if (fileType != "csv") {
console.log(`Unsupported file type: ${fileType}`);
return;
}
const URI_PARTS = srcKey.split('/');
const TOTAL_PARTS = URI_PARTS.length;
const pre_file_folder = URI_PARTS[TOTAL_PARTS - 2];
const hour = URI_PARTS[TOTAL_PARTS - 3];
const day = URI_PARTS[TOTAL_PARTS - 4];
const month = URI_PARTS[TOTAL_PARTS - 5];
const year = URI_PARTS[TOTAL_PARTS - 6];
const sub_folder = URI_PARTS[TOTAL_PARTS - 7];
const main_folder = URI_PARTS[TOTAL_PARTS - 8];
console.log("PATHS: ", URI_PARTS);
const dst = prepareData(main_folder);
try {
const finalDestinationPath = dst.folder + '/' + (dst.subfolder ? dst.subfolder + '/' + dst.renamedFile : dst.renamedFile);
const params = {
Bucket: srcBucket,
CopySource: srcKey,
Key: finalDestinationPath
};
console.log("####1. INITIALIZE UPLOAD: ", finalDestinationPath);
s3.createMultipartUpload({
Bucket: dstBucket,
Key: srcKey,
ContentType: 'text/csv'
}, (err, data) => {
console.log("##### 2. INIT MULTIPART UPLOAD");
if (err) { console.log('## Errore: failed create multipart upload: ', err); return; }
const file = fs.createReadStream(finalDestinationPath);
let pi = 1;
let partMap = [];
let streamedLength = 0;
let uploadedSize = 0;
let curchunk = Buffer(0);
const cmuParams = {
Key: srcKey,
Bucket: dstBucket,
UploadId: data.UploadId
};
const Writable = require('stream').Writable;
const ws = Writable();
ws.oend = ws.end;
ws.end = (chunk, encoding, callback) => {
ws.oend(chunk, encoding, callback);
uploadPart(cmuParams, curchunk, pi, true, (err, data) => {
partMap.push({ ETag: data.ETag, PartNumber: pi });
completeMultipartUpload(cmuParams, { Parts: partMap });
});
};
ws._write = (chunk, enc, next) => {
curchunk = Buffer.concat([curchunk, chunk]);
streamedLength += chunk.length;
if (curchunk.length > maxchunksize) {
uploadPart(cmuParams, curchunk, pi, false, (err, data) => {
uploadedSize += data.length;
partMap.push({ ETag: data.ETag, PartNumber: pi });
pi+=1;
curchunk = Buffer(0);
next();
});
} else {
next();
}
};
file.pipe(ws);
});
} catch(err) {
console.log("Result error: ", err);
return { statusCode: 500, body: err };
}
};
This probably happens because you are using an async Lambda handler, so your function completes before the callback-based S3 calls inside it ever finish.
To fix that, either modify your code to use a non-async (callback) handler, or use the promise pattern shown in the AWS docs for async handlers, as sketched below.
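A minimal sketch of that promise pattern, assuming the goal is simply to copy the uploaded object into another prefix of the same bucket (s3.copyObject works for objects up to 5 GB; above that a multipart copy with uploadPartCopy is needed):
// Sketch only: an async handler that awaits the S3 call, so the Lambda
// does not finish before the copy completes.
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

exports.handler = async (event) => {
  const srcBucket = event.Records[0].s3.bucket.name;
  const srcKey = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
  const dstKey = 'copied/' + srcKey;                   // hypothetical destination folder

  await s3.copyObject({
    Bucket: srcBucket,
    CopySource: srcBucket + '/' + srcKey,              // "source-bucket/source-key"; URL-encode keys with special characters
    Key: dstKey
  }).promise();

  console.log('Copied', srcKey, 'to', dstKey);
  return { statusCode: 200 };
};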

Upload byte array from axios to Node server

Background
The JavaScript library for Microsoft Office add-ins allows you to get the raw content of a DOCX file through the getFileAsync() API, which returns a slice of up to 4 MB in one go. You keep calling the function using a sliding-window approach until you have read the entire content. I need to upload these slices to the server and then join them back to recreate the original DOCX file.
My attempt
I'm using axios on the client-side and busboy-based express-chunked-file-upload middleware on my node server. As I call getFileAsync recursively, I get a raw array of bytes that I then convert to a Blob and append to FormData before posting it to the node server. The entire thing works and I get the slice on the server. However, the chunk that gets written to the disk on the server is much larger than the blob I uploaded, normally of the order of 3 times, so it is obviously not getting what I sent.
My suspicion is that this may have to do with stream encoding, but the node middleware does not expose any options to set encoding.
Here is the current state of code:
Client-side
public sendActiveDocument(uploadAs: string, sliceSize: number): Promise<boolean> {
return new Promise<boolean>((resolve) => {
Office.context.document.getFileAsync(Office.FileType.Compressed,
{ sliceSize: sliceSize },
async (result) => {
if (result.status == Office.AsyncResultStatus.Succeeded) {
// Get the File object from the result.
const myFile = result.value;
const state = {
file: myFile,
filename: uploadAs,
counter: 0,
sliceCount: myFile.sliceCount,
chunkSize: sliceSize
} as getFileState;
console.log("Getting file of " + myFile.size + " bytes");
const hash = makeId(12)
this.getSlice(state, hash).then(resolve(true))
} else {
resolve(false)
}
})
})
}
private async getSlice(state: getFileState, fileHash: string): Promise<boolean> {
const result = await this.getSliceAsyncPromise(state.file, state.counter)
if (result.status == Office.AsyncResultStatus.Succeeded) {
const data = result.value.data;
if (data) {
const formData = new FormData();
formData.append("file", new Blob([data]), state.filename);
const boundary = makeId(12);
const start = state.counter * state.chunkSize
const end = (state.counter + 1) * state.chunkSize
const total = state.file.size
return await Axios.post('/upload', formData, {
headers: {
"Content-Type": `multipart/form-data; boundary=${boundary}`,
"file-chunk-id": fileHash,
"file-chunk-size": state.chunkSize,
"Content-Range": 'bytes ' + start + '-' + end + '/' + total,
},
}).then(async res => {
if (res.status === 200) {
state.counter++;
if (state.counter < state.sliceCount) {
return await this.getSlice(state, fileHash);
}
else {
this.closeFile(state);
return true
}
}
else {
return false
}
}).catch(err => {
console.log(err)
this.closeFile(state)
return false
})
} else {
return false
}
}
else {
console.log(result.status);
return false
}
}
private getSliceAsyncPromise(file: Office.File, sliceNumber: number): Promise<Office.AsyncResult<Office.Slice>> {
return new Promise(function (resolve) {
file.getSliceAsync(sliceNumber, result => resolve(result))
})
}
Server-side
This code is totally from the npm package mentioned above, so I'm not supposed to change anything in here, but I include it for reference:
makeMiddleware = () => {
return (req, res, next) => {
const busboy = new Busboy({ headers: req.headers });
busboy.on('file', (fieldName, file, filename, _0, _1) => {
if (this.fileField !== fieldName) { // Current field is not handled.
return next();
}
const chunkSize = req.headers[this.chunkSizeHeader] || 500000; // Default: 500Kb.
const chunkId = req.headers[this.chunkIdHeader] || 'unique-file-id'; // If not specified, will reuse same chunk id.
// NOTE: Using the same chunk id for multiple file uploads in parallel will corrupt the result.
const contentRangeHeader = req.headers['content-range'];
let contentRange;
const errorMessage = util.format(
'Invalid Content-Range header: %s', contentRangeHeader
);
try {
contentRange = parse(contentRangeHeader);
} catch (err) {
return next(new Error(errorMessage));
}
if (!contentRange) {
return next(new Error(errorMessage));
}
const part = contentRange.start / chunkSize;
const partFilename = util.format('%i.part', part);
const tmpDir = util.format('/tmp/%s', chunkId);
this._makeSureDirExists(tmpDir);
const partPath = path.join(tmpDir, partFilename);
const writableStream = fs.createWriteStream(partPath);
file.pipe(writableStream);
file.on('end', () => {
req.filePart = part;
if (this._isLastPart(contentRange)) {
req.isLastPart = true;
this._buildOriginalFile(chunkId, chunkSize, contentRange, filename).then(() => {
next();
}).catch(_ => {
const errorMessage = 'Failed merging parts.';
next(new Error(errorMessage));
});
} else {
req.isLastPart = false;
next();
}
});
});
req.pipe(busboy);
};
}
Update
So it looks like I have found the problem, at least. busboy appears to be writing my array of bytes as text in the output file: I get 80,75,3,4,20,0,6,0,8,0,0,0,33,0,44,25 (as text) when I upload the array of bytes [80,75,3,4,20,0,6,0,8,0,0,0,33,0,44,25], which also explains why the written chunk is roughly three times larger. Now I need to figure out how to force it to write a binary stream.
Figured it out. Just in case it helps anyone: there was no problem with busboy, office.js or axios. I just had to convert the incoming chunk of data to a Uint8Array before creating a Blob from it. So instead of:
formData.append("file", new Blob([data]), state.filename);
do this:
const blob = new Blob([ new Uint8Array(data) ])
formData.append("file", blob, state.filename);
And it worked like a charm.
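For anyone wondering where the size difference came from: the Blob constructor stringifies anything that is not a string, ArrayBuffer, or typed array, so a plain array of byte values ends up as comma-separated text. A tiny illustration with hypothetical values, runnable in a browser console:
// Illustration only: how the same bytes end up as text vs. binary.
const bytes = [80, 75, 3, 4];                        // start of a ZIP/DOCX header

const asText = new Blob([bytes]);                    // array coerced to the string "80,75,3,4"
const asBinary = new Blob([new Uint8Array(bytes)]);  // raw bytes

console.log(asText.size);    // 9  (characters of "80,75,3,4")
console.log(asBinary.size);  // 4  (actual byte count)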

Upload Image to azure blob from a URL using azure functions in Nodejs

I have a requirement where the user wants to upload an image from a source URL, let's say "https://homepages.cae.wisc.edu/~ece533/images/airplane.png", using Azure Functions. Right now I am calling the fetch method, passing the image URL, and converting the response into a blob, but that doesn't seem to be working. Below is the code. Is there a better way to achieve this?
const { BlobServiceClient } = require("@azure/storage-blob");
const fetch = require("node-fetch");
const multipart = require("parse-multipart");
const AZURE_STORAGE_CONNECTION_STRING = process.env["AZURE_STORAGE_CONNECTION_STRING"];
module.exports = async function (context, req) {
context.log("JavaScript HTTP trigger function processed a request.");
const name =
req.query.name ||
(req.body &&
req.body.secure_url);
const responseMessage = name
? "Hello, " + name + ". This HTTP triggered function executed successfully."
: "This HTTP triggered function executed successfully. Pass a name in the query string or in the request body for a personalized response.";
// context.log("requested body: ", req.body);
var images = "\""+ req.body.secure_url.toString() +"\"";
context.log("Image URL : ", images);
var bodyBuffer = Buffer.from(JSON.stringify(req.body));
let header = req.headers["content-type"]
let boundary = header.split(" ")[1]
boundary = header.split("=")[1]
// var boundary = multipart.getBoundary(req.headers['content-type']);
var parts = multipart.Parse(req.body, header);
var requestOptions = {
method: 'GET'
};
fetch(images, requestOptions)
.then((response) => {
context.log("Response Blob : ",response.blob())
response.blob()
}) // Gets the response and returns it as a blob
.then((blob) => { main(blob)
}).catch(error => console.log('error', error));
async function main(blob) {
const blobServiceClient = await BlobServiceClient.fromConnectionString(AZURE_STORAGE_CONNECTION_STRING);
const container = "sepik01-rdp-media-assets-migration";
const containerClient = await blobServiceClient.getContainerClient(container);
const blobName = images.toString().replace(/^.*[\\\/]/, "");
context.log("File Name: ", blobName);
const blockBlobClient = containerClient.getBlockBlobClient(blobName);
// const uploadBlobResponse = await blockBlobClient.upload(parts[0].secure_url, parts[0].length);
const uploadBlobResponse = await blockBlobClient.upload(blob, blob.length);
context.res = { body : responseMessage };
context.done();
}
};
When we use the node-fetch package to send the HTTP request, response.body is returned as a Readable stream. We can then use that stream to upload the data to the Azure blob.
For example:
const fetch = require("node-fetch");
const { BlobServiceClient } = require("@azure/storage-blob");
const mime = require("mime");
const AZURE_STORAGE_CONNECTION_STRING =""
module.exports = async function (context, req) {
const images = "https://homepages.cae.wisc.edu/~ece533/images/airplane.png";
const requestOptions = {
method: "GET",
};
const response = await fetch(images, requestOptions);
if (!response.ok)
throw new Error(`unexpected response ${response.statusText}`);
const blobName = images.toString().replace(/^.*[\\\/]/, "");
const blobServiceClient = await BlobServiceClient.fromConnectionString(
AZURE_STORAGE_CONNECTION_STRING
);
const containerClient = await blobServiceClient.getContainerClient("image");
const blockBlobClient = containerClient.getBlockBlobClient(blobName);
const uploadBlobResponse = await blockBlobClient.uploadStream(
response.body,
4 * 1024 * 1024,
20,
{
blobHTTPHeaders: {
blobContentType: mime.getType(blobName),
},
}
);
context.res = { body: uploadBlobResponse._response.status };
};

TypeError: Cannot destructure property value to 'null' or 'undefined'

I have the following error message in the server console.
SOAP FAIL: Error: timeout of 10000ms exceeded
ERROR Error: timeout of 10000ms exceeded
[2020-08-03T17:22:51.723-0000][ERROR] Crash report created but cannot be sent: unhandledRejection Cannot destructure property `queryResult` of 'undefined' or 'null'.
TypeError: Cannot destructure property `queryResult` of 'undefined' or 'null'.
My code is below
invoke:async (conversation, done) => {
// Get query from incoming message
const text = conversation.text();
var query = conversation.properties().query;
conversation.logger().info('Query '+query );
// Set modules
const soapRequest = require('easy-soap-request');
const path = require('path');
const fs = require('fs');
const xml2js = require('xml2js');
const { match } = require('assert');
//SOAP URL
const url = 'https://cap.zudo.com/ws/apf/ticketing/MOS?WSDL';
//Set headers
const sampleHeaders = {
'user-agent': 'sampleTest',
'Content-Type': 'application/xml;charset=UTF-8',
};
const filenameIn = path.join(__dirname, '/request.txt');
const filenameOut = filenameIn
//Replace Query variable inside request.txt fileContent
var REGEX = /<TKT:ProjectNum>(.+)<\/TKT:ProjectNum>/;
var fileContent = fs.readFileSync(filenameIn, 'utf8');
fileContent = fileContent.replace(
REGEX,
'<TKT:ProjectNum>' + query + '</TKT:ProjectNum>'
);
//Write the query
fs.writeFileSync(filenameOut, fileContent, 'utf8');
const xml = fs.readFileSync(path.join(__dirname, '/request.txt'), 'utf-8');
//Callback
let { queryResult } = await soapRequest({ url: url, headers: sampleHeaders, xml: xml, timeout: 10000}).then(results => {
return results;
}).catch(e => {
conversation.logger().info("ERROR "+e);
conversation.reply(e).transition('NOK').keepTurn(true);
done();
});
let { headers, body, statusCode } = await queryResult ;
xml2js.parseString(body,{ mergeAttrs: true }, (err, result) => {
        if(err) {
            conversation.logger().info("ERROR "+err);
            conversation.reply(e).transition('NOK').keepTurn(true);
                  done();
        }
conversation.logger().info("res: "+result);
conversation.reply(result).transition('OK').keepTurn(true);
done();
})
}
Can anyone help me resolve this issue?
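For what it's worth, the log already points at the cause: the SOAP call times out, the .catch block replies and calls done() but does not return a value, so execution continues and the awaited result is undefined, which is exactly what the destructuring error complains about. A minimal sketch of the call site, assuming the { response } result shape shown in the easy-soap-request README, handles the failure with try/catch and returns early:
// Sketch only: stop the handler on a SOAP failure instead of
// falling through to the destructuring with undefined.
try {
  const { response } = await soapRequest({ url: url, headers: sampleHeaders, xml: xml, timeout: 10000 });
  const { body } = response;
  xml2js.parseString(body, { mergeAttrs: true }, (err, result) => {
    if (err) {
      conversation.logger().info('ERROR ' + err);
      conversation.reply(err).transition('NOK').keepTurn(true);
      return done();
    }
    conversation.logger().info('res: ' + result);
    conversation.reply(result).transition('OK').keepTurn(true);
    done();
  });
} catch (e) {
  conversation.logger().info('ERROR ' + e);
  conversation.reply(e).transition('NOK').keepTurn(true);
  done();
}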
