Node does not await async function and array.length condition - javascript

I am trying to use the Twitter API to post tweets with multiple images. I post a request from the admin dashboard with an ad ID (not the Twitter ad ID), fetch the image URLs from our database, and use those URLs to write image files to the upload directory. Then I use the twitter-api-v2 package to send a request to the Twitter API to get the media IDs and post the tweet.
Problem: when I write the files to the local uploads folder, the async function that follows also executes before the writes finish, so it cannot find the media files in the local folder, leading to an error.
const router = require('express').Router()
const { parse } = require('dotenv');
const { link } = require('joi');
const { TwitterApi } = require('twitter-api-v2')
const { FetchSingleAdBasics } = require('../helpers/fetch-single-ad-basics');
const request = require('request');
const fs = require('fs');
const path = require('path');
const https = require('https')
function saveImagesToUploads(url, path) {
    const fullUrl = url
    const localPath = fs.createWriteStream(path)
    const request = https.get(fullUrl, function (response) {
        console.log(response)
        response.pipe(localPath)
    })
}
var jsonPath1 = path.join(__dirname, '../..', 'uploads/0.png');
var jsonPath2 = path.join(__dirname, '../..', 'uploads/1.png');
var jsonPath3 = path.join(__dirname, '../..', 'uploads/2.png');
var jsonPath4 = path.join(__dirname, '../..', 'uploads/3.png');
router.post('/twitter-post', async (req, res) => {
    const { adId } = req.body
    const imagesArr = []
    const imageIdsArr = []
    const { text } = req.body
    const AD = adId && await FetchSingleAdBasics(adId);
    const PostMessage = `${AD?.year} ${AD?.make} ${AD?.model} ${AD?.trim}\r\n${AD?.engineSize}L Engine\r\nPrice: AED${AD?.addetails[0].price}\r\nMileage: ${AD?.mileage} - ${AD?.mileageUnit}\r\nMechanical Condition: ${AD?.addetails[0].mechanicalCondition}\r\nAvailable in: ${AD?.adcontacts[0]?.location}\r\nCheckout full details at: https://ottobay.com/cars/uc/${AD?.id}`
    if (!AD)
        return res
            .status(422)
            .json({ message: "failed", error: "Ad Not Found" })
    try {
        imagesArr.push(await AD?.adimages[0]?.LeftSideView)
        imagesArr.push(await AD?.adimages[0]?.LeftFront)
        imagesArr.push(await AD?.adimages[0]?.Front)
        imagesArr.push(await AD?.adimages[0]?.FrontRight)
        // the following code must wait for these writes to finish
        imagesArr?.map((item, index) => {
            saveImagesToUploads(item, "./uploads/" + `${index}` + '.png')
        })
        const filesArr = [jsonPath1, jsonPath3, jsonPath4, jsonPath2]
        console.log(filesArr)
        console.log(filesArr?.length)
        const idsArray = []
        // this code runs without waiting for the previous step, leading to an error
        // it also does not apply the filesArr?.length === 4 condition
        filesArr?.length === 4 && await Promise.all(filesArr?.length === 4 && filesArr?.map(async (item) => {
            try {
                const mediaId = await client.v1.uploadMedia(item, { mimeType: 'png' })
                idsArray.push(mediaId)
                return imageIdsArr;
            } catch (err) {
                console.log(err)
                throw err;
            }
        }));
        const response = idsArray?.length === 4 && await client.v1.tweetThread([{ status: PostMessage, media_ids: idsArray }]);
        // remove files after a successful tweet
        await fs.promises.readdir(jsonUploadPath).then((f) => Promise.all(f.map(e => fs.promises.unlink(`${jsonUploadPath}${e}`))))
        res.json({ status: 'success', response })
    } catch (error) {
        res.json({ status: 'failed', error })
        // console.log("tweets error", error.data.errors);
    }
})
module.exports = router
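For reference, one common way to address this kind of problem (a sketch under assumptions, not the original code) is to have the download helper return a Promise that resolves only when the write stream has finished, and then await all downloads with Promise.all before uploading. The saveImageToUploads name below is hypothetical:
// Hypothetical rework: resolve only after the file has been fully written to disk
const fs = require('fs');
const https = require('https');
function saveImageToUploads(url, filePath) {
    return new Promise((resolve, reject) => {
        const file = fs.createWriteStream(filePath);
        https.get(url, (response) => {
            response.pipe(file);
            file.on('finish', () => file.close(resolve)); // resolve once the stream is flushed
            file.on('error', reject);
        }).on('error', reject);
    });
}
// Inside the route handler, wait for every download before calling uploadMedia:
// await Promise.all(imagesArr.map((item, index) => saveImageToUploads(item, `./uploads/${index}.png`)));
Note that Array.prototype.map with a callback that merely starts asynchronous work does not wait for that work; collecting the returned promises and awaiting them with Promise.all is what introduces the wait.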

Related

NodeJS API Proxy Server - Post Requests Error 404 - Routing

I am new to NodeJS and am trying to implement a proxy server for my GET requests. GET requests work fine and also update my UI as they should via performAction and chained promises; however, something is wrong with my POST request. I always get a 404 even though I defined the route, and it pops up after the UI update. Can anybody help me? Thanks!
SERVER
const express = require('express')
const cors = require('cors')
const rateLimit = require('express-rate-limit')
require('dotenv').config()
const errorHandler = require('./middleware/error')
const bodyParser = require('body-parser')
// support parsing of application/json type post data
//support parsing of application/x-www-form-urlencoded post data
const PORT = process.env.PORT || 5000
const app = express()
app.use(bodyParser.urlencoded({ extended: true }));
// Rate limiting
const limiter = rateLimit({
windowMs: 10 * 60 * 1000, // 10 Mins
max: 100,
})
app.use(limiter)
app.set('trust proxy', 1)
// Enable cors
app.use(cors())
// Set static folder
app.use(express.static('public'))
// Routes
app.use('/api', require('./routes'))
// Error handler middleware
app.use(errorHandler)
app.listen(PORT, () => console.log(`Server running on port ${PORT}`))
API PROXY SERVER
const url = require('url')
const express = require('express')
const router = express.Router()
const needle = require('needle')
const apicache = require('apicache')
// Env vars
const API_BASE_URL = process.env.API_BASE_URL
const API_KEY_NAME = process.env.API_KEY_NAME
const API_KEY_VALUE = process.env.API_KEY_VALUE
// Init cache
let cache = apicache.middleware
let projectData = {}
router.get('/', cache('2 minutes'), async (req, res, next) => {
try {
const params = new URLSearchParams({
[API_KEY_NAME]: API_KEY_VALUE,
...url.parse(req.url, true).query,
})
console.log(`${API_BASE_URL}?${params}`)
const apiRes = await needle('get', `${API_BASE_URL}?${params}`)
const data = apiRes.body
// Log the request to the public API
if (process.env.NODE_ENV !== 'production') {
console.log(`REQUEST: ${API_BASE_URL}?${params}`)
}
res.status(200).json(data)
} catch (error) {
next(error)
}
})
function sendForecastData(req, res) {
const { date, temp, content } = req.body;
let journal_entry_new = new Object();
journal_entry_new.date = date;
journal_entry_new.temp = temp + "°C";
journal_entry_new.content = content;
idx_entry = String("entry_" + idx)
idx = idx + 1
projectData[idx_entry] = JSON.stringify(journal_entry_new);
console.log(projectData)
res.send(projectData)
console.log("Post sucessful.")
}
router.post('/', cache('2 minutes'), async (req, res, next) => {
const postObject = needle.post('/addData', req.body, sendForecastData)
return postObject;
})
function readData(req, res) {
res.send(projectData)
console.log(projectData)
console.log("Read sucessful.")
}
router.get('/readData', readData)
module.exports = router
This is my app.js
//Event-Listener
document.getElementById('generate').addEventListener('click', performAction);
//Declare Fetch Function
//User Input
const the_date = document.getElementById('date');
const temp = document.getElementById('temp');
const content = document.getElementById('content');
// Create a new date instance dynamically with JS
let d = new Date();
let newDate = (d.getMonth() + 1) + "." + (d.getDate()) + '.' + (d.getFullYear());
//Proxy
const fetchWeather = async (zipcode) => {
const url = `/api?q=${zipcode}`
const res = await fetch(url)
const data = await res.json()
console.log(data)
return data;
}
const postData = async (url, data) => {
const response = await fetch(url, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(data),
});
try {
const data = await response.json();
return data
}
catch (error) {
console.log("error", error);
}
}
const readData = async () => {
const request = await fetch('/readData');
try {
// Transform into JSON
const readData = await request.json()
return readData
} catch (error) {
console.log("error", error);
}
}
const UpdateUI = async (data) => {
console.log("Data received:")
console.log(data)
console.log("Date: " + data[Object.keys(data)[0]])
console.log("Temp: " + data[Object.keys(data)[1]])
console.log("Content: " + data[Object.keys(data)[2]])
document.getElementById('temp').innerText = "Temperature is: " + data[Object.keys(data)[1]]
document.getElementById('date').innerText = "Date is: " + data[Object.keys(data)[0]]
document.getElementById('content').innerText = "Feeling is: " + data[Object.keys(data)[2]]
console.log("Updated UI")
}
function performAction(e) {
//Check if user input is available
if (document.getElementById('zip').value == "") {
alert("Please type in a zipcode, then I will know where to look up the weather for
you!");
return
}
//Feeling now
let zipcode = (document.getElementById('zip').value).toString()
let feeling_now = (document.getElementById('feelings').value).toString()
fetchWeather(zipcode)
.then(data => {
/*let temp_k = parseFloat(data.list[0].main.temp)*/
/*let temp_c = String((temp_k - 273.15).toFixed(2)) + " °C"*/
let temp_c = parseFloat(data.main.temp) + " °C"
let feeling_now = (document.getElementById('feelings').value).toString()
console.log(temp_c)
console.log(newDate)
console.log(feeling_now)
console.log({ date: newDate, temp: temp_c, content: feelings.value })
return { date: newDate, temp: temp_c, content: feelings.value }
}).then(data => {
postData('/addData', data);
return data
})
.then(data => {
readData('/readData');
return data
})
.then(data => UpdateUI(data))
console.log(feeling_now)
return;
}
My UI is updated, however I get the following error and cannot access localhost:5000/addData. Can you help me understand why this is the case?
I defined the routes in the backend, but cannot call them from the frontend.
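For reference, a minimal sketch of one way the POST endpoint could be handled directly on the Express router instead of re-posting to a relative URL with needle. The route path, the express.json() body parsing, and the assumption that the frontend posts to /api/addData (since the router is mounted at /api) are not part of the original code:
// Hypothetical: handle POST /api/addData directly on the router
// (router, express and projectData are the ones declared earlier in this file)
let idx = 0
router.post('/addData', express.json(), (req, res) => {
    const { date, temp, content } = req.body
    projectData[`entry_${idx}`] = JSON.stringify({ date, temp: temp + '°C', content })
    idx = idx + 1
    res.send(projectData)
    console.log('Post successful.')
})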

Firebase function api call working on emulator but not in production

I'm trying to load data into Firestore using Firebase Functions from the function storeCategories(); it currently just runs on start, but I plan to schedule it once I have it working.
This function works fine on the emulators, but in production I'm getting either 'socket hang up' or 'Client network socket disconnected before secure TLS connection was established'.
storeTrending() works fine, so I'm not sure what is causing it.
/** @format */
const functions = require("firebase-functions");
const cors = require("cors");
const express = require("express");
const fetch = require("node-fetch");
const apiKey = functions.config().tmdbapi.key;
const admin = require("firebase-admin");
admin.initializeApp();
const db = admin.firestore();
var app = express();
app.use(cors());
const storeCategories = async () => {
try {
// GET CATEGORY IDS
const categoriesIDRes = await fetch(
`https://api.themoviedb.org/3/genre/movie/list?api_key=${apiKey}`
);
const categoriesInfo = await categoriesIDRes.json();
// CREATE CATEGORY API LINKS
const categoriesURLs = categoriesInfo.genres.map(
(el) =>
`https://api.themoviedb.org/3/discover/movie?api_key=${apiKey}&language=en-US&sort_by=popularity.desc&include_adult=true&include_video=true&page=1&with_genres=${el.id}&with_watch_monetization_types=flatrate`
);
// RETRIEVE POPULAR FILMS FROM EACH CATEGORY
const categoriesRes = await Promise.all(
categoriesURLs.map((el) => fetch(el))
);
const data = await Promise.all(
categoriesRes.map((el) => el.json())
);
const batch = db.batch();
// WRITE TO FIRESTORE FOR FASTER INITIAL LOAD
data.forEach((category, idx) => {
const ref = db
.collection("categories")
.doc(categoriesInfo.genres[idx].name);
batch.set(ref, category);
});
const categoriesInfoRef = db.collection("categoryIDs").doc("IDs");
batch.set(categoriesInfoRef, categoriesInfo);
return batch.commit();
} catch (err) {
console.log(err);
return;
}
};
storeCategories();
const storeTrending = async () => {
try {
const res = await fetch(
`https://api.themoviedb.org/3/trending/all/week?api_key=${apiKey}`
);
const data = await res.json();
db.collection("categories").doc("trending").set(data);
} catch (err) {
console.log(err);
return;
}
};
storeTrending();
Any help with this would be greatly appreciated!
Edit: screenshot of the error
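One point worth noting (not stated in the original post): in deployed Cloud Functions, CPU and network are only guaranteed while an invocation is being processed, so work kicked off at module load, such as the top-level storeCategories() and storeTrending() calls here, can be starved or cut off, which often surfaces as 'socket hang up' or TLS errors even though the emulator runs it fine. A hedged sketch of moving the call into a scheduled function, since the post mentions planning to schedule it (the export name and schedule string are assumptions):
// Hypothetical sketch: run storeCategories from a scheduled function instead of at module load
exports.scheduledStoreCategories = functions.pubsub
    .schedule("every 24 hours") // the schedule string is an assumption
    .onRun(async () => {
        await storeCategories();
        return null;
    });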

Upload Image to azure blob from a URL using azure functions in Nodejs

I have a requirement where the user wants to upload an image from a source URL, let's say "https://homepages.cae.wisc.edu/~ece533/images/airplane.png", using Azure Functions. Right now I am calling the fetch method, passing the image URL, and converting the response into a blob, but that doesn't seem to be working. Below is the code. Is there a better way to achieve this?
const { BlobServiceClient } = require("#azure/storage-blob");
const fetch = require("node-fetch");
const multipart = require("parse-multipart");
const AZURE_STORAGE_CONNECTION_STRING = process.env["AZURE_STORAGE_CONNECTION_STRING"];
module.exports = async function (context, req) {
context.log("JavaScript HTTP trigger function processed a request.");
const name =
req.query.name ||
(req.body &&
req.body.secure_url);
const responseMessage = name
? "Hello, " + name + ". This HTTP triggered function executed successfully."
: "This HTTP triggered function executed successfully. Pass a name in the query string or in the request body for a personalized response.";
// context.log("requested body: ", req.body);
var images = "\""+ req.body.secure_url.toString() +"\"";
context.log("Image URL : ", images);
var bodyBuffer = Buffer.from(JSON.stringify(req.body));
let header = req.headers["content-type"]
let boundary = header.split(" ")[1]
boundary = header.split("=")[1]
// var boundary = multipart.getBoundary(req.headers['content-type']);
var parts = multipart.Parse(req.body, header);
var requestOptions = {
method: 'GET'
};
fetch(images, requestOptions)
.then((response) => {
context.log("Response Blob : ",response.blob())
response.blob()
}) // Gets the response and returns it as a blob
.then((blob) => { main(blob)
}).catch(error => console.log('error', error));
async function main(blob) {
const blobServiceClient = await BlobServiceClient.fromConnectionString(AZURE_STORAGE_CONNECTION_STRING);
const container = "sepik01-rdp-media-assets-migration";
const containerClient = await blobServiceClient.getContainerClient(container);
const blobName = images.toString().replace(/^.*[\\\/]/, "");
context.log("File Name: ", blobName);
const blockBlobClient = containerClient.getBlockBlobClient(blobName);
// const uploadBlobResponse = await blockBlobClient.upload(parts[0].secure_url, parts[0].length);
const uploadBlobResponse = await blockBlobClient.upload(blob, blob.length);
context.res = { body : responseMessage };
context.done();
}
};
When we use the node-fetch package to send an HTTP request, response.body is returned as a Readable stream. We can then use that stream to upload the data to an Azure blob.
For example:
const fetch = require("node-fetch");
const { BlobServiceClient } = require("#azure/storage-blob");
const mime = require("mime");
const AZURE_STORAGE_CONNECTION_STRING =""
module.exports = async function (context, req) {
const images = "https://homepages.cae.wisc.edu/~ece533/images/airplane.png";
const requestOptions = {
method: "GET",
};
const response = await fetch(images, requestOptions);
if (!response.ok)
throw new Error(`unexpected response ${response.statusText}`);
const blobName = images.toString().replace(/^.*[\\\/]/, "");
const blobServiceClient = await BlobServiceClient.fromConnectionString(
AZURE_STORAGE_CONNECTION_STRING
);
const containerClient = await blobServiceClient.getContainerClient("image");
const blockBlobClient = containerClient.getBlockBlobClient(blobName);
const uploadBlobResponse = await blockBlobClient.uploadStream(
response.body,
4 * 1024 * 1024,
20,
{
blobHTTPHeaders: {
blobContentType: mime.getType(blobName),
},
}
);
context.res = { body: uploadBlobResponse._response.status };
};

Requests to Express app come with an empty body even though data is being sent, but only in one route

It's nice to join the group of people brave enough to ask questions on Stack, so that everyone can take advantage :)
My problem is pretty strange. I'm writing an app in Express; I have two routes so far and everything is going pretty smoothly, yet I've encountered one problem which I cannot seem to solve. In one route, with the PATCH method, the incoming requests have an empty body. The rest of the app is running smoothly, everything is working fine, and this one route seems to be broken; I cannot figure out why. Strangely enough, I found out that the requests DO have a body in one case - when I'm sending requests from my tests (supertest) using the .send({ ... }) method. When I'm sending requests with .attach or .field, they come in empty. Same with requests sent from Postman (empty). What is causing such strange behavior?
Here are my tests:
const request = require('supertest');
const Image = require('../models/image');
const app = require('../app');
const crypto = require('crypto');
const fs = require('fs')
const { setupImages } = require('./fixtures/db')
beforeEach(setupImages);
describe('[IMAGE] - ', () => {
test('Should get images', async () => {
const main_img = await Image.findOne({ main: true });
const image = await request(app)
.get(`/image/${main_img._id}`)
.expect(200);
expect(image.header['content-type']).toBe('image/png');
});
test('Should delete images', async () => {
const image = await Image.findOne({ description: 'Lorem ipsum' });
await request(app)
.delete(`/image/${image._id}`);
const imageFound = await Image.findById(image._id);
expect(imageFound).toBeNull();
});
//TEST THAT FAILS
test('Should edit images', async () => {
const image = await Image.findOne({ main: false });
await request(app)
.patch(`/image/${image._id}`)
.field('description', 'new desc')
.attach('image', './src/tests/fixtures/imgtest.png')
.expect(200);
const returnChecksum = file => {
return crypto
.createHash('md5')
.update(file, 'utf8')
.digest('hex')
}
const imageEdited = await Image.findById(image._id);
const newImageChecksum = returnChecksum(fs.readFileSync(__dirname + '/fixtures/imgtest.png'));
expect(returnChecksum(imageEdited.image)).toBe(newImageChecksum);
expect(imageEdited.description).toBe('new desc');
});
})
Here are image routes
const express = require('express');
const router = new express.Router();
const Image = require('../models/image');
const chalk = require('chalk');
router.get('/image/:id', async (req, res) => {
const { id } = req.params;
try {
const image = await Image.findById(id);
if (!image) {
return res.status(404).send()
}
res.set('Content-Type', 'image/png');
res.send(image.image);
} catch (e) {
console.log(chalk.red('Error serving image: ') + e);
res.send(500);
}
});
//THE ROUTE THAT FAILS
router.patch('/image/:id', async (req, res) => {
const { id } = req.params;
const updateFields = Object.entries(req.body);
console.log('image patch req body', req.body)
try {
const imageEdited = await Image.findById(id, function (err, doc) {
if (err) { return err; }
updateFields.forEach(field => doc[field[0]] = field[1])
doc.save(res.status(200).send(doc));
});
if (!imageEdited) {
res.status(400).send();
}
} catch (e) {
res.status(500).send();
console.log(chalk.red('Error editing image: ') + e);
}
});
router.delete('/image/:id', async (req, res) => {
const { id } = req.params;
try {
await Image.findByIdAndDelete(id);
res.status(200).send();
} catch (e) {
res.status(500).send();
console.log(chalk.red('Error deleting image: ') + e);
}
});
module.exports = router;
And my app.js file:
const express = require('express');
require('./db/mongoose');
const productRouter = require('./routers/product');
const imageRouter = require('./routers/image');
const app = express();
app.use(express.json());
app.use(productRouter);
app.use(imageRouter);
module.exports = app;
The result of console.log in image route:
console.log src/routers/image.js:30
image patch req body {}
And this is the behavior of the app with changed sending method in test:
test('Should edit images', async () => {
const image = await Image.findOne({ main: false });
await request(app)
.patch(`/image/${image._id}`)
// .field('description', 'new desc')
// .attach('image', './src/tests/fixtures/imgtest.png')
.send({description: 'new desc'})
.expect(200);
const returnChecksum = file => {
return crypto
.createHash('md5')
.update(file, 'utf8')
.digest('hex')
}
const imageEdited = await Image.findById(image._id);
const newImageChecksum = returnChecksum(fs.readFileSync(__dirname + '/fixtures/imgtest.png'));
expect(returnChecksum(imageEdited.image)).toBe(newImageChecksum);
expect(imageEdited.description).toBe('new desc');
});
console.log src/routers/image.js:30
image patch req body { description: 'new desc' }
Thanks in advance!
I've managed to fix it myself; the problem lay in the fact that I'd forgotten about multer. So this strange behavior can be the result of not using that library, even if you're apparently not sending any files.
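For context, a minimal sketch of what wiring multer into the failing route could look like; the in-memory storage and the reduced handler body are assumptions, and only the 'image' field name is taken from the test's .attach call:
// Hypothetical sketch: parse multipart/form-data on the PATCH route with multer
const multer = require('multer');
const upload = multer(); // in-memory storage is an assumption
// router is the one declared earlier in the post
router.patch('/image/:id', upload.single('image'), async (req, res) => {
    console.log('image patch req body', req.body); // text fields such as description are now populated
    if (req.file) {
        console.log('received file of', req.file.size, 'bytes'); // req.file.buffer holds the image
    }
    res.status(200).send();
});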

Requesting different endpoints with firestore Cloud Function

I am trying to have a flexible Cloud Function that executes on different endpoints.
My original Cloud Function looks like this:
const functions = require('firebase-functions')
const admin = require('firebase-admin')
const _ = require('lodash')
const { getObjectValues } = require('./helper-functions.js')
admin.initializeApp()
const json2csv = require('json2csv').parse
exports.csvJsonReport = functions.https.onRequest((request, response) => {
const db = admin.firestore()
const userAnswers = db.collection('/surveys/CNA/submissions')
return (
userAnswers
.get()
// eslint-disable-next-line promise/always-return
.then(querySnapshot => {
let surveySubmissions = []
querySnapshot.forEach(doc => {
const userSubmission = doc.data()
surveySubmissions.push({
..._.mapValues(userSubmission.answers, getObjectValues), // format answers
...userSubmission.anonUser,
})
})
const csv = json2csv(surveySubmissions)
response.setHeader('Content-disposition', 'attachment; filename=cna.csv')
response.set('Content-Type', 'text/csv')
response.status(200).send(csv)
})
.catch(error => {
console.log(error)
})
)
})
I am trying to extend this function to work on multiple collections. In the above function I am targeting the CNA collection. So instead of db.collection('/surveys/CNA/submissions/') I would like it to be db.collection('/surveys/:surveyId/submissions/')
Below is my attempt at trying to extend my original Cloud Function:
const functions = require('firebase-functions')
const admin = require('firebase-admin')
const express = require('express')
const bodyParser = require('body-parser')
const _ = require('lodash')
const { getObjectValues } = require('./helper-functions.js')
admin.initializeApp(functions.config().firebase)
const db = admin.firestore()
const app = express()
const main = express()
main.use('/api/v1', app)
main.use(bodyParser.json())
exports.webApi = functions.https.onRequest(main)
app.get('surveys/:id', (request, response) => {
const surveyId = request.query
const userAnswers = db.collection(`/survey/${surveyId}/submissions`)
return (
userAnswers
.get()
// eslint-disable-next-line promise/always-return
.then(querySnapshot => {
let surveySubmissions = []
querySnapshot.forEach(doc => {
const userSubmission = doc.data()
surveySubmissions.push({
..._.mapValues(userSubmission.answers, getObjectValues), // format answers
...userSubmission.anonUser,
})
})
const csv = json2csv(surveySubmissions)
response.setHeader('Content-disposition', 'attachment; filename=cna.csv')
response.set('Content-Type', 'text/csv')
response.status(200).send(csv)
})
.catch(error => {
console.log(error)
})
)
})
When I request my endpoint: myapp.firebaseapp.com/api/v1/surveys/CNA
Cannot GET /api/v1/surveys/CNA is shown in my browser.
Could someone please point me in the right direction?
To create a GET /survey/:id endpoint in order to fetch a submission by id, use the following code in your new Cloud Function:
app.get('surveys/:id', (request, response) => {
const surveyId = request.params.id
const userAnswers = db.collection(`/survey/${surveyId}/submissions`)
Let me know if it works for you.
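Worth adding (not part of the original answer): Express matches route paths starting with a leading slash, so with main.use('/api/v1', app) a handler registered as app.get('surveys/:id', ...) will not match /api/v1/surveys/CNA, which would also produce the "Cannot GET" message. A minimal sketch, assuming the collection path should match the original working function:
// Hypothetical sketch: leading slash on the path and the id read from route params
// (app and db are the ones declared earlier in the post)
app.get('/surveys/:id', async (request, response) => {
    const surveyId = request.params.id
    const querySnapshot = await db.collection(`/surveys/${surveyId}/submissions`).get()
    // build and send the CSV exactly as in the original handler; a placeholder response for the sketch:
    response.status(200).send(`found ${querySnapshot.size} submissions for ${surveyId}`)
})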
