I hope everyone is well.
I have the following code that brings the value of a json:
const https = require('https');
var http = require("https");
var AWS = require('aws-sdk');
exports.handler = async (event, context, callback) => {
console.log('Event: ', JSON.stringify(event, null, 2));
const request = event.Records[0].cf.request;
const https = require('https');
const http_get = require('https');
var options = {
host: 'www.local.dev',
method: 'GET',
timeout: 2000,
path: '/list.json',
headers: {
'Accept': 'application/json'
}
};
const res = await new Promise(resolve => {
http_get.get(options, resolve);
});
let data = await new Promise((resolve, reject) => {
let data = '';
res.on('data', chunk => data += chunk);
res.on('error', err => reject(err));
res.on('end', () => resolve(data));
});
data = JSON.parse(data)
// CONTINUE FUNCTION
return request;
};
I would like to set a timeout: if the request does not return in 2 seconds, I want to continue the function. The function has other parts, so even if nothing comes back from the JSON endpoint it can continue.
It has no dependency on the json return.
What is happening is that the timeout is not being applied, and the function runs indefinitely.
I need to use this http module from node.js, I can't use axios here.
To simulate the timeout I am using this app: httpstat.us/501?sleep=60000
Change this code
const res = await new Promise(resolve => {
http_get.get(options, resolve);
});
to this:
const res = http_get.get(options);
Note that https.get returns the ClientRequest object synchronously — it does not return the response. Also, the promise wrapper shown above only ever resolves and never rejects, which is why the timeout was never honored: to make the timeout effective you must listen for the request's 'timeout' event, destroy the request, and reject the promise.
Related
I am seeing the following error when trying to extract an image's metadata information with the Sharp module: "Input file contains unsupported image format".
This is only happening for certain signed image urls, particularly ones that contain xmp information in the metadata.
I am hoping someone can help me spot where the issue might be occurring in this code snippet.
Here is the exact code snippet I am using (insert the signed image URL where specified in the doStuff function to test):
const sharp = require("sharp");
const fs = require('fs');
const fetch = require('node-fetch');
/**
 * Downloads sourceUrl to ./test.png and resolves with the local path.
 *
 * Key fix: resolve on the WRITE stream's 'finish' event, not the response
 * body's 'end'. 'end' fires when the last chunk has been READ from the
 * network; at that moment data may still be buffered inside the write
 * stream, so a reader (e.g. sharp) can observe a truncated file and fail
 * with "Input file contains unsupported image format". 'finish' fires only
 * after everything has been flushed to the file.
 *
 * Also: `response` was an implicit global (no const/let), and the trailing
 * .catch() swallowed failures so callers received undefined — errors now
 * propagate to the caller.
 */
async function storeUrlToLocal(sourceUrl) {
  const destPath = './';
  const request = {
    method: 'GET',
    encoding: null,
  };
  const response = await fetch(sourceUrl, request);
  if (response.status >= 400)
    throw new Error(`Failed to fetch data from ${sourceUrl}, status returned = ${response.status}`);
  const localPath = `${destPath}test.png`;
  const fileStream = fs.createWriteStream(localPath);
  return new Promise((resolve, reject) => {
    response.body.pipe(fileStream);
    response.body.on("error", reject); // network/read failure
    fileStream.on("error", reject);    // disk/write failure
    fileStream.on("finish", () => {
      // The file is fully flushed and safe to open now.
      console.log('All writes are now complete.');
      resolve(localPath);
    });
  });
}
// Download the image, then read its metadata with sharp.
async function doStuff() {
  const localFilePath = await storeUrlToLocal('<INSERT_IMAGE_URL_HERE>');
  // Read image file and extract metadata
  try {
    const manipulator = sharp(localFilePath, { limitInputPixels: 5000000000 });
    console.log('Manipulator = ', manipulator);
    const imageMetadata = await manipulator.metadata();
    console.log("ImageMetadata = ", imageMetadata);
  } catch (error) {
    console.log(`Image Metadata Extraction Error: ${error.message}`);
    throw error;
  }
}
doStuff();
This code snippet above fails with the "Input file contains unsupported image format" on the line that extracts metadata (imageMetadata = await manipulator.metadata();)
So the strange thing is, I am able to properly extract the metadata (with no errors) with this same code if I add a short Sleep after this line: const fileStream = fs.createWriteStream(localPath);
So this code snippet (all I'm doing here is adding a short sleep after fs.createWriteStream) allows the image metadata to be extracted without issue:
const sharp = require("sharp");
const fs = require('fs');
const fetch = require('node-fetch');
// Variant of storeUrlToLocal with an arbitrary 1 s sleep inserted.
// The sleep does not fix anything deterministically: it merely gives the
// write stream time to flush before the promise below resolves on the
// response body's 'end' event. 'end' means the data was fully READ from
// the network, not that it was fully WRITTEN to disk, so without the delay
// a reader such as sharp can open a truncated file. The reliable fix is to
// resolve on the write stream's 'finish' event instead.
async function storeUrlToLocal(sourceUrl) {
const destPath = './';
const request = {
method: 'GET',
encoding: null,
};
// NOTE(review): `response` is assigned without const/let — an implicit global.
response = await fetch(sourceUrl, request);
if (response.status >= 400)
throw new Error(`Failed to fetch data from ${sourceUrl}, status returned = ${response.status}`);
const localPath = `${destPath}test.png`;
const fileStream = fs.createWriteStream(localPath);
// Small promise-based delay helper.
function sleep(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}
// The 1 s pause that happens to mask the read/write race described above.
await sleep(1000);
return new Promise((resolve, reject) => {
response.body.pipe(fileStream);
response.body.on("error", reject);
// Resolves when the SOURCE is drained — the file may not be flushed yet.
response.body.on("end", async () => {
const fileExists = fs.existsSync(localPath);
console.log(`All the data in the file has been read ${localPath} = ${fileExists}`);
resolve(localPath);
});
// NOTE(review): 'finish' is a writable-stream event; registered on the
// readable body it never fires — it belongs on fileStream.
response.body.on("finish",() => {
console.log('All writes are now complete.');
});
}).catch(error => {
// Swallows rejections: callers receive undefined instead of an error.
console.log(error);
});
}
// Downloads the image, then extracts its metadata with sharp.
async function doStuff() {
const localFilePath = await storeUrlToLocal('<INSERT_IMAGE_URL_HERE>');
// Read image file and extract metadata
let manipulator;
let imageMetadata;
try {
// limitInputPixels raised so very large images are not rejected by sharp.
manipulator = sharp(localFilePath, { limitInputPixels: 5000000000 });
console.log('Manipulator = ', manipulator);
imageMetadata = await manipulator.metadata();
console.log("ImageMetadata = ", imageMetadata);
} catch (error) {
console.log(`Image Metadata Extraction Error: ${error.message}`);
throw error;
}
}
// NOTE(review): top-level call with no .catch — any rejection surfaces as
// an unhandled promise rejection.
doStuff();
Why would this Sleep resolve my issues? I don't see any asynchronous calls being run that I would need to be waiting for to complete. Perhaps fs.createWriteStream didn't have enough time to complete its operation? But I do not have the option to await the call to fs.createWriteStream, as it is not async.
I am new to node js. I have used the python call where data comes from. Since the process is a little complex so it taking time to execute and node js to receive a response. The code I mentioned here tried to wait for the function to complete, but the function is repeating the call from the start and goes repeating until the python function finished and to get a response.
So after completion of function
console.log("Image Conversation::")
await bot.say(dumy_msg);
await bot.beginDialog('classfication_done_');
These statements are repeating for two times.
How to resolve the issue?
// Handles an uploaded image: download it, run the python classifier on it,
// and reply with the classifier's output.
convo1.ask('Post Your message', async (answer, convo1, bot, message) => {
  if (message.type == 'file_share') {
    console.log("Image Conversation::");
    const botToken = bot.api.token;
    const destination_path = '/_classification/upload/' + message.files[0].name;
    console.log("URl :::", message.files[0].url_private);
    console.log('Testing file share' + message.files[0].name);
    const url = message.files[0].url_private;
    const opts = {
      method: 'GET',
      url: url,
      headers: {
        Authorization: 'Bearer ' + botToken,
      }
    };

    // Fix 1: wait until the download is fully flushed to disk before the
    // classifier reads it. Previously spawn() ran while the pipe was still
    // writing, so the python script could read a truncated image.
    await new Promise((resolve, reject) => {
      request(opts, function (err, res, body) {
        if (err) return reject(err);
        console.log('FILE RETRIEVE STATUS', res.statusCode);
      })
        .pipe(fs.createWriteStream(destination_path))
        .on('finish', resolve)
        .on('error', reject);
    });

    const img = destination_path;
    // Fix 2: `spawn` was assigned without const/let (implicit global).
    const spawn = require("child_process").spawn;
    const getResponse = () => {
      return new Promise((resolve, reject) => {
        // Fix 3: don't shadow the global `process`; collect ALL stdout and
        // settle once the child exits — resolving on the first 'data' chunk
        // returned partial output while the child was still running.
        const child = spawn('python', ["/_classification/tensorflow-image-detection-master/classify.py", img]);
        let output = '';
        child.stdout.on('data', data => { output += data; });
        child.on('error', reject);
        child.on('close', () => resolve(output));
      });
    };
    const response = await getResponse();
    const dumy_msg = `${response}`;
    await bot.say(dumy_msg);
    await bot.beginDialog('classfication_done_');
  }
  else {
    await bot.say(message, "Please provide jpeg file format, other files are not allowed");
    await bot.beginDialog('classfication_done_');
  }
});
I think the callback function (async () => {}) itself has been called twice.
I don't know exactly what convo1.ask does internally, but make sure it is not in a situation where it invokes the callback twice.
I am new to Hapi and I want to consume an API when the server starts up. I on start-up I am getting "Error: handler method did not return a value, a promise, or throw an error."
I came across this post and was trying to model my code after the answer supplied here, but it is not working, even though I am returning the promise from the fetch.
Here is my server.js.
"use strict";
// NOTE: "#hapi/hapi" is not a valid package specifier — the scoped Hapi
// packages live under "@hapi/..." ("#..." is reserved for package-internal
// import maps and will not resolve here).
var Hapi = require("@hapi/hapi");
var Request = require("request");
const Inert = require("@hapi/inert");
const Vision = require("@hapi/vision");

// Init server: register plugins, mount routes, then start listening.
const init = async () => {
  const server = Hapi.server({
    port: 3000,
    host: "localhost",
    routes: {
      cors: true,
    },
  });
  await server.register(Vision);
  await server.register(Inert);
  // The route module exports `getApis` (capital A); requiring `.getapis`
  // yields undefined and server.route(undefined) fails.
  server.route(require("./routes/getapis").getApis);
  await server.start();
  console.log(`Server is running on ${server.info.uri}`);
};

// Fail fast on any unhandled rejection so startup errors are visible.
process.on("unhandledRejection", (err) => {
  console.log(err);
  process.exit(1);
});

init();
This is my getApis.js
const fetch = require("node-fetch");
const spendingApiUrl = `https://opendata.maryland.gov/resource/gja3-vy5r.json?$select=vendor_zip,sum(amount)&$where=vendor_zip between '20601' and '21930'&$group=vendor_zip&$order=sum_amount DESC`;
const numberOfTopZips = 3;
let results = [];
let marylandData = [];
exports.getApis = {
method: "GET",
path: "/getapis",
handler: (request, h) => {
return fetch(spendingApiUrl)
.then((response) => response.json())
.then((data) => {
marylandData = data;
console.log(marylandData);
//rateLimitedRequests(topZips(marylandData, numberOfTopZips));
})
.catch((err) => console.log(err));
},
};
I am trying out a demo with AWS lambda function using Axios and Cheerio, I am getting back response after calling the endpoint as {message: Internal Server Error}
exports.lambdaHandler = async (event, context) => {
try {
const axios = require('axios');
const cheerio = require('cheerio');
axios.get('https://www.kitco.com').then((response) => {
const html = response.data;
const $ = cheerio.load(html);
const ask = $('#AU-ask').text();
const bid = $('#AU-bid').text();
const resbid = bid.slice(0,7);
const resask = ask.slice(0,7);
const result = {
"ask": resask,
"bid": resbid
}
return result;
});
response = {
'statusCode': 200,
'body': result
}
} catch (err) {
console.log(err);
return err;
}
return response
};
result is clearly not in response scope, therefore this will result in a typical undefined error.
The solution would be to handle the logic inside axios.get callback, try this:
const axios = require('axios');
const cheerio = require('cheerio');
exports.lambdaHandler = (event, context) => {
axios.get('https://www.kitco.com')
.then((response) => {
const html = response.data;
const $ = cheerio.load(html);
const ask = $('#AU-ask').text();
const bid = $('#AU-bid').text();
const resbid = bid.slice(0, 7);
const resask = ask.slice(0, 7);
const result = {
statusCode: 200,
body: {
ask: resask,
bid: resbid
}
};
console.log(result);
})
.catch(err => {
console.log(err);
});
};
You can see the error details in the monitoring tab of the Lambda console. I guess you got back an error like "response is not defined" at the `return response` line.
With your code, return response line will be execute immediately when you call the function, but response did not defined in the lambdaHandler scope.
I recommended that, don’t mix async/await syntax with Promise syntax (.then .catch), just use one of them, I suggest use async/await syntax.
The function will like:
exports.lambdaHandler = async (event, context) => {
try {
const axios = require('axios');
const cheerio = require('cheerio');
const response = await axios.get('https://www.kitco.com'); // wait until we get the response
const html = response.data;
const $ = cheerio.load(html);
const ask = $('#AU-ask').text();
const bid = $('#AU-bid').text();
const resbid = bid.slice(0, 7);
const resask = ask.slice(0, 7);
const result = {
"ask": resask,
"bid": resbid
}
return {
statusCode: 200,
body: JSON.stringify(result), // If you working with lambda-proxy-integrations, the `body` must be a string
}; // return to response the request
} catch (err) {
console.log(err);
return {
statusCode: 500, // Example, http status will be 500 when you got an exception
body: JSON.stringify({error: err}),
}
}
};
Below I have two http requests, one made with request and the other being made with isomorphic-fetch (node-fetch). For some reason the request with request works, however node-fetch is responding back with an error code 503. Is there anything I am missing from the fetch version?
const URL = require('url')
const fetch = require('isomorphic-fetch')
const HttpsProxyAgent = require('https-proxy-agent')
const request = require('request');
const url = process.env.URL
const proxy = process.env.PROXY
// Promise adapter around request(). Fix: a `throw` inside a node-style
// callback does NOT reject the surrounding promise — it escapes as an
// uncaught exception while the promise stays pending forever. Use reject().
const requestPromise = function (url, options) {
  if (/^\/\//.test(url)) {
    url = 'https:' + url; // protocol-relative URL: default to https
  }
  return new Promise(function (resolve, reject) {
    request(url, options, function (err, res, body) {
      if (err) {
        return reject(err);
      }
      // Minimal fetch-like shims so callers can treat both clients alike.
      res.ok = true;
      res.json = function () {
        return JSON.parse(res.body);
      };
      resolve(res);
    });
  });
};
// Parse a proxy URL and tag it with `secureProxy` so HttpsProxyAgent knows
// whether the proxy itself should be reached over TLS.
function getProxy(url) {
  const parsed = URL.parse(url);
  const isHttps = parsed.protocol === 'https:';
  parsed.secureProxy = isHttps;
  return parsed;
}
// Fetch the same URL through the same proxy with both clients to compare
// behaviour: request() succeeds while fetch() returns a 503.
requestPromise(url, {
agent:new HttpsProxyAgent(getProxy(proxy))
})
.then(console.log)
.catch(console.log)
// fetch with the identical proxy agent — the variant that gets the 503.
fetch(url, {
agent:new HttpsProxyAgent(getProxy(proxy))
})
.then(console.log)
.catch(console.log)
The request module seems to be setting one header automatically, the host that fetch module is not. by making this change it works.
const URL = require('url')
const fetch = require('isomorphic-fetch')
const HttpsProxyAgent = require('https-proxy-agent')
const request = require('request')
// Build the HttpsProxyAgent config from a proxy URL: the legacy-parsed URL
// object plus a `secureProxy` flag derived from its scheme.
function getProxy (url) {
  const info = URL.parse(url)
  Object.assign(info, { secureProxy: info.protocol === 'https:' })
  return info
}
// Target URL and proxy come from the environment.
const url = process.env.URL
const proxy = process.env.PROXY
// The fix: send an explicit Host header. request() adds one automatically;
// fetch() did not, and the upstream answered 503 without it.
fetch(url, {
headers: {host: URL.parse(url).host},
agent: new HttpsProxyAgent(getProxy(proxy))
})
.then(res => res.json()).then(res => {
console.log(res)
})
.catch(console.log)