JavaScript, fetch, Netlify: Netlify didn't fetch second response - javascript

I've started to learn Sass and gave myself a task: create two elements that load two independent resources. The image below is what I see with Live Server in VS Code.
But when I deploy it to Netlify, the result is different: the second response never loads.
So I started to think the problem is in my JS code:
// Weather
const weatherBlock = document.querySelector('#weather');

async function loadWeather() {
    weatherBlock.innerHTML = `
        <div class="weather__loading"><img src='assets/loading.gif' alt='Loading...' >
        </div>`;
    const server =
        'https://api.openweathermap.org/data/2.5/weather?units=metric&q=Kyiv&appid=ab54429ab157825232cab97e89867df3';
    const response = await fetch(server, {
        method: 'GET',
    });
    const responseResult = await response.json();
    if (response.ok) {
        getWeather(responseResult);
    } else {
        weatherBlock.innerHTML = responseResult.message;
    }
}

function getWeather(data) {
    const location = data.name;
    const temp = Math.round(data.main.temp);
    const feelsLike = Math.round(data.main.feels_like);
    const weatherStatus = data.weather[0].main;
    const weatherIcon = data.weather[0].icon;
    console.log(data);
    const template = `
        <div class="weather__header">
            <div class="weather__main">
                <div class="weather__city">${location}</div>
                <div class="weather__status">${weatherStatus}</div>
            </div>
            <div class="weather__icon">
                <img src="http://openweathermap.org/img/w/${weatherIcon}.png" alt="${weatherStatus}">
            </div>
        </div>
        <div class="weather__temp">${temp}</div>
        <div class="weather__feels-like">Feels like: ${feelsLike}</div>`;
    weatherBlock.innerHTML = template;
}

if (loadWeather) {
    loadWeather();
}

// Cat
const catBlock = document.querySelector('#cats');
const catBtn = document.querySelector('.cat-button');
const catImg = document.querySelector('.cat-image');
const catServer = 'http://aws.random.cat/meow';

async function catFetch() {
    const catResponse = await fetch(catServer);
    const catResponseResult = await catResponse.json();
    if (catResponse.ok) {
        renderCat(catResponseResult);
    } else {
        catBlock.innerHTML = catResponseResult.message;
    }
}

function renderCat(catData) {
    const catImgUrl = catData.file;
    catImg.src = catImgUrl;
}

catBtn.addEventListener('click', catFetch);

if (catFetch) {
    catFetch();
}
full code: https://github.com/antonkornilov-ua/10-09-2022---Weather-and-Cats
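(A note not in the original post: one likely reason a fetch works with Live Server but fails on Netlify is mixed content. Netlify serves the site over HTTPS, and browsers block plain-HTTP requests made from an HTTPS page, which would affect the http:// cat endpoint and the http:// weather icon. A minimal sketch, assuming those hosts are reachable over HTTPS:)
// Sketch only: switch plain-HTTP URLs to HTTPS so the browser doesn't block them
// as mixed content on an HTTPS site. Whether aws.random.cat actually serves HTTPS
// is an assumption -- if it doesn't, another HTTPS cat API would be needed.
const catServer = 'https://aws.random.cat/meow';

// Inside the weather template, the icon can also be requested over HTTPS:
// <img src="https://openweathermap.org/img/w/${weatherIcon}.png" alt="${weatherStatus}">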

Related

Issue loading data from a .js file

I am trying to run a web page with this JS file, but for some reason it doesn't work, and when I check the console it only says "Live reload enabled". Does anyone know how I can solve this issue?
const container = document.querySelector('.datalink')

let listdata = async () => {
    let url = ' http://localhost:3000/indusboards';
    const urlfetch = await fetch(url);
    const urlfound = await urlfetch.json();
    let template = '';
    datafound.array.forEach(element => {
        template += `
        <div class="element">
            <p><small>${element.serialno}</small></p>
            <p><small>${element.boardtest}</small></p>
            <p><small>${element.testresul}</small></p>
            <p><small>${element.serialno}</small></p>
            <p><small>${element.testdate}</small></p>
            <p><small>${element.testtime}</small></p>
            <p><small>${element.boardlocation}</small></p>
            .... more
        </div>
        `
    })
    container.innerHTML = template;
}

window.addEventListener('DOMContentLoarded', () => listdata());
window.addEventListener('DOMContentLoarded', () => listdata());
You made a spelling mistake; change your code to:
window.addEventListener('DOMContentLoaded', () => listdata());
instead of
window.addEventListener('DOMContentLoarded', () => listdata());
Or you could use:
window.onload = () => {
    listdata()
}
How about this?
const container = document.querySelector('.datalink')

let listdata = async () => {
    let url = 'http://localhost:3000/indusboards';
    const urlfetch = await fetch(url);
    const urlfound = await urlfetch.json();
    let template = '';
    // iterate over the parsed JSON array (urlfound), not an undefined variable
    urlfound.forEach(element => {
        template += `
        <div class="element">
            <p><small>${element.serialno}</small></p>
            <p><small>${element.boardtest}</small></p>
            <p><small>${element.testresul}</small></p>
            <p><small>${element.serialno}</small></p>
            <p><small>${element.testdate}</small></p>
            <p><small>${element.testtime}</small></p>
            <p><small>${element.boardlocation}</small></p>
            .... more
        </div>
        `
    })
    container.innerHTML = template;
}

window.addEventListener('DOMContentLoaded', () => listdata());

How to run a function which calls axios every 30 seconds

I'm creating a web scraper using Node, Cheerio, and Axios (async/await) to call the website. I want the function to run every 30 seconds. I tried setTimeout and setInterval but did not get the expected result; instead I got a heap out of memory error. I want to run the mvcAppointmentSearch function, currently inside the while loop, every 30 seconds. The code follows; I'm also attaching the CodePen link for better readability.
CodePen link
const express = require('express');
const request = require('request-promise');
const cheerio = require('cheerio');
const axios = require('axios');
const cssSelect = require('css-select');
const open = require('open');
// const mvcUrl = 'https://telegov.njportal.com/njmvc/AppointmentWizard/17/';
const mvcUrl = 'https://telegov.njportal.com/njmvc/AppointmentWizard/14/';
const mvcLocation = ['Edison', 'Rahway', 'SouthPlainfield'];
// const mvcLocationNumber = ['240', '252', '239'];
const mvcLocationNumber = ['163'];
const requiredMonths = ['September', 'October'];
const callUrl = async (url, locationNumberIndex) => {
    try {
        const response = await axios.get(url);
        //console.log('call url', response.data);
        getData(response.data, locationNumberIndex);
    } catch (err) {
        console.log(err);
    }
};

const mvcAppointmentSearch = () => {
    for (let i = 0; i < mvcLocationNumber.length; i++) {
        const currentUrl = mvcUrl + mvcLocationNumber[i];
        console.log(mvcLocationNumber[i]);
        callUrl(currentUrl, i);
    }
};

const getData = (html, locationNumberIndex) => {
    let data = [];
    let $ = cheerio.load(html);
    console.log('datais ', $);
    $.prototype.exists = function (selector) {
        return this.find(selector).length > 0;
    };
    const checkerLength = $('div').exists('.alert-danger');
    console.log(checkerLength);
    if (checkerLength) {
        console.log(
            `No appointment available in ${mvcLocation[locationNumberIndex]}`
        );
    } else {
        const dateString = $('.control-label').text();
        const availableMonth = dateString.trim().split(' ')[7];
        const exactDateAvailability = dateString.slice(24, -1);
        console.log(availableMonth);
        if (requiredMonths.includes(availableMonth)) {
            console.log('Hurray there is an appointment available');
            const message = `Appointment available for the location ${mvcLocation[locationNumberIndex]} on ${exactDateAvailability}`;
            open(`${mvcUrl + mvcLocationNumber[locationNumberIndex]}`);
            console.log(message);
        } else {
            console.log('required Month is not available still searching');
        }
    }
};

while (true) {
    try {
        // mvcAppointmentSearch();
        // want to run the following function for every 30 seconds.
        mvcAppointmentSearch();
    } catch (err) {
        console.log(`Error has Occured ${err}`);
    }
}
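(Not part of the original question, but for reference: the tight while (true) loop never yields back to the event loop, so it keeps queueing work without ever processing it, which can explain the out-of-memory error. A minimal sketch of running the search every 30 seconds with setInterval, or with a recursive setTimeout if runs must not overlap:)
// Sketch: run the search every 30 seconds instead of in a blocking while (true) loop.
const THIRTY_SECONDS = 30 * 1000;

setInterval(() => {
    try {
        mvcAppointmentSearch();
    } catch (err) {
        console.log(`Error has occurred ${err}`);
    }
}, THIRTY_SECONDS);

// If a new run should only start after the previous one has fully finished,
// a recursive setTimeout works too (mvcAppointmentSearch would then need to
// return a promise that resolves when all its requests are done):
// const tick = async () => {
//     await mvcAppointmentSearch();
//     setTimeout(tick, THIRTY_SECONDS);
// };
// tick();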

Making a web-crawler have a loop

I tried to make my web-crawler loop over the pages from 1 to around 500, but the result does not include anything from the targeted pages; it only returns an empty array.
The code is based on cheerio (with its jQuery-style API) and axios, in JavaScript.
const axios = require("axios");
const cheerio = require("cheerio");
const log = console.log;
const getHtml = async () => {
    var i = 0;
    while (i < 493) {
        try {
            return await axios.get("https://playentry.org/ds#!/qna?sort=created&rows=20&page=" + i);
        } catch (error) {
            console.error(error);
        }
    }
};
getHtml()
    .then(html => {
        let ulList = [];
        const $ = cheerio.load(html.data);
        const $bodyList = $("div.discussContentWrapper div.discussListWrapper table.discussList").children("tr.discussRow");
        $bodyList.each(function (i, elem) {
            ulList[i] = {
                title: $(this).find('td.discussTitle div.discussTitleWrapper'),
                writer: $(this).find('td.discussTitle td.discussViewCount'),
                viewcount: $(this).find('td.discussTitle td.discussViewCount'),
                likecount: $(this).find('td.discussTitle div.discussLikeCount'),
                date: $(this).find('td.discussTitle td.discussDate'),
            };
        });
        const data = ulList.filter(n => n.title);
        return data;
    })
    .then(res => log(res));
The output is [] or [ [] ], with no real data in it.
Thanks for your help in advance.
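(Not part of the original question, but a sketch of what the loop could look like if the goal is to collect all ~493 pages: the return inside the while loop exits after the first request, and i is never incremented, so at most one page is ever fetched. Assuming the same URL pattern:)
// Sketch: fetch the pages one after another and collect the raw HTML responses.
const getAllPages = async () => {
    const pages = [];
    for (let i = 0; i < 493; i++) {
        try {
            const res = await axios.get("https://playentry.org/ds#!/qna?sort=created&rows=20&page=" + i);
            pages.push(res.data);
        } catch (error) {
            console.error(error);
        }
    }
    return pages; // one HTML string per page, to be parsed with cheerio as before
};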

Fetch API Download Progress Indicator?

I am trying to capture the download progress of a Fetch request and use that to change the width of a progress bar. I looked at ProgressEvent.lengthComputable as a potential solution, but I'm unsure whether it can be used with the Fetch API.
Without checking for errors (try/catch etc.):
const elStatus = document.getElementById('status');
function status(text) {
    elStatus.innerHTML = text;
}

const elProgress = document.getElementById('progress');
function progress({ loaded, total }) {
    elProgress.innerHTML = Math.round(loaded / total * 100) + '%';
}

async function main() {
    status('downloading with fetch()...');
    const response = await fetch('https://fetch-progress.anthum.com/30kbps/images/sunrise-baseline.jpg');
    const contentLength = response.headers.get('content-length');
    const total = parseInt(contentLength, 10);
    let loaded = 0;

    // Wrap the original body in a new ReadableStream so each chunk can be counted
    // before being passed along to the new Response.
    const res = new Response(new ReadableStream({
        async start(controller) {
            const reader = response.body.getReader();
            for (;;) {
                const { done, value } = await reader.read();
                if (done) break;
                loaded += value.byteLength;
                progress({ loaded, total });
                controller.enqueue(value);
            }
            controller.close();
        },
    }));

    const blob = await res.blob();
    status('download completed');
    document.getElementById('img').src = URL.createObjectURL(blob);
}
main();
<div id="status"> </div>
<h1 id="progress"> </h1>
<img id="img" />
adapted from here
Using this utility:
async function* streamAsyncIterable(stream) {
    const reader = stream.getReader()
    try {
        while (true) {
            const { done, value } = await reader.read()
            if (done) return
            yield value
        }
    } finally {
        reader.releaseLock()
    }
}
See: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of#iterating_over_async_generators
Then you can use a for await...of loop:
const response = await fetch(url)
let responseSize = 0
for await (const chunk of streamAsyncIterable(response.body)) {
    responseSize += chunk.length
}
But be aware that responseSize is the response size, not necessarily the download size. What is the difference? There is none if there is no content-encoding (gzip, br, ...). But if compression was applied, the final download size will be the size of the compressed data (matching content-length), while the final response size will be the size of the uncompressed data.
See #ecthiender's comment and this thread.
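(Not in the original answer, but as a usage sketch, the iterator above can be combined with the content-length header to report a rough percentage, with the caveat just mentioned that content-length may describe the compressed size:)
const response = await fetch(url)
const total = parseInt(response.headers.get('content-length'), 10)
let loaded = 0
for await (const chunk of streamAsyncIterable(response.body)) {
    loaded += chunk.length
    if (!Number.isNaN(total)) {
        console.log(`~${Math.round((loaded / total) * 100)}% of the reported content-length`)
    }
}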
You can use axios instead; this example tracks upload progress (a sketch for the download direction follows after it):
import axios from 'axios'

export async function uploadFile(file, cb) {
    const url = `//127.0.0.1:4000/profile`
    try {
        let formData = new FormData()
        formData.append("avatar", file)
        const data = await axios.post(url, formData, {
            onUploadProgress: (progressEvent) => {
                console.log(progressEvent)
                if (progressEvent.lengthComputable) {
                    let percentComplete = progressEvent.loaded / progressEvent.total;
                    if (cb) {
                        cb(percentComplete)
                    }
                }
            }
        })
        return data
    } catch (error) {
        console.error(error)
    }
}
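(The example above tracks upload progress. Not in the original answer, but since the question asks about download progress: axios also accepts an onDownloadProgress callback of the same shape, so a sketch for the download direction, with a placeholder URL, could look like this:)
import axios from 'axios'

// Sketch: report download progress for a GET request (the URL and callback are placeholders).
export async function downloadWithProgress(url, cb) {
    const response = await axios.get(url, {
        responseType: 'blob',
        onDownloadProgress: (progressEvent) => {
            if (progressEvent.total && cb) {
                cb(progressEvent.loaded / progressEvent.total)
            }
        }
    })
    return response.data
}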

Creating list items after GET request is complete

I'm trying to figure out a more efficient way to create the list items in the DOM.
At the moment the list is created as each API request completes.
I'm pushing each object into its own array, but I would like to create the list once all the data has loaded.
Additionally, I'm using Webpack and Babel.
let streamApi = 'https://wind-bow.glitch.me/twitch-api/streams/';
let twitchUsers = ['ESL_SC2', 'OgamingSC2', 'freecodecamp', 'noobs2ninjas', 'comster404'];
let streamByUser = [];

window.onload = function () {
    // Make an API request for each user and store the result in an array
    twitchUsers.map((user) => {
        fetch(streamApi + user, { method: 'GET' })
            .then(response => response.json())
            .then(json => {
                streamByUser.push(json);
                let uL = document.getElementById("user-list");
                let listItem = document.createElement("li");
                listItem.className = "list-group-item";
                if (json.stream === null) {
                    listItem.innerHTML = "null";
                } else {
                    listItem.innerHTML = json.stream.channel.display_name;
                }
                uL.appendChild(listItem);
            });
    });
};
UPDATE:
All is working!
Not tested, but I hope it works as expected.
const streamApi = "https://wind-bow.glitch.me/twitch-api/streams/";
const twitchUsers = [
"ESL_SC2",
"OgamingSC2",
"freecodecamp",
"noobs2ninjas",
"comster404"
];
const twitchUsersStreams = twitchUsers.map(user =>
fetch(streamApi + user, { method: "GET" }).then(res => res.json())
);
let streamByUser = [];
window.onload = function () {
    Promise
        .all(twitchUsersStreams)
        .then(everythingArray => {
            //do something with everythingArray after all the requests resolved
        })
        .catch(err => {
            // As soon as any of the 'fetch' results in promise rejection
        });
};
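(Not in the original answer, but as a sketch of what that then callback could do, reusing the list-building logic from the question once every request has resolved:)
Promise.all(twitchUsersStreams)
    .then(everythingArray => {
        const uL = document.getElementById("user-list");
        everythingArray.forEach(json => {
            const listItem = document.createElement("li");
            listItem.className = "list-group-item";
            listItem.innerHTML = json.stream === null ? "null" : json.stream.channel.display_name;
            uL.appendChild(listItem);
        });
    })
    .catch(err => console.error(err));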
I would probably do something like this because I really like to decompose a task into small functions that reduce the need for inline comments and keep mutable state to a minimum.
const streamApi = 'https://wind-bow.glitch.me/twitch-api/streams/';
const twitchUsers = ['ESL_SC2', 'OgamingSC2', 'freecodecamp', 'noobs2ninjas', 'comster404'];

window.onload = async function () {
    const list = document.getElementById("user-list");
    const addToList = list.appendChild.bind(list);
    const twitchStreams = await fetchUsers(twitchUsers);
    twitchStreams.map(toListItem).forEach(addToList);
};

async function fetchUser(user) {
    const response = await fetch(`${streamApi}${user}`, { method: 'GET' });
    return response.json();
}

function fetchUsers(users) {
    return Promise.all(users.map(fetchUser));
}

function toListItem(user) {
    const listItem = document.createElement("li");
    listItem.className = "list-group-item";
    listItem.innerHTML = user.stream !== null
        ? user.stream.channel.display_name
        : "null";
    return listItem;
}
