I get the image src from PHP and then display the images in the HTML, but they appear one after another with a delay. How can I show the images all together, in their original order, once they have all loaded?
HTML
<div id="keys"></div>
JS
function sendGETDataToServer() {
    // Set up our HTTP request
    var xhr = new XMLHttpRequest();
    // Setup our listener to process completed requests
    xhr.onreadystatechange = function () {
        // Only run if the request is complete
        if (xhr.readyState !== 4) return;
        // Process our return data
        if (xhr.status >= 200 && xhr.status < 300) {
            var JsonResponse = xhr.responseText;
            var response = JSON.parse(JsonResponse);
            console.log(response);
            if (response[0] == "200 OK") {
                var i;
                for (i = 0; i < response[1]; i++) {
                    let img = document.createElement("img");
                    img.src = response[2 + i].FilePath;
                    img.setAttribute("style", "width:8%; padding-left:1em; float:left");
                    $("#keys").append(img);
                }
            }
        } else {
            console.log("error", xhr);
        }
    };
    xhr.open("GET", "load.php");
    xhr.send();
}
Your code should look something like this:
if (response[0] == '200 OK') {
    var i;
    var nbLoaded = 0;
    var nbToLoad = 0;
    var pendingImages = [];
    for (i = 0; i < response[1]; i++) {
        nbToLoad++;
        let img = document.createElement('img');
        img.setAttribute('style', 'width:8%; padding-left:1em; float:left');
        // Attach the handlers before setting src so a cached image can't fire the event too early,
        // and count errors too so one broken image doesn't block the rest
        img.onload = img.onerror = () => {
            nbLoaded++;
            // Once every image has either loaded or failed, append them in their original order
            if (nbLoaded === nbToLoad) {
                pendingImages.forEach((image) => {
                    $('#keys').append(image);
                });
            }
        };
        img.src = response[2 + i].FilePath;
        pendingImages.push(img);
    }
}
The short answer to your problem: there isn't really one. There is no way to know what order your images will arrive in, and you certainly cannot control the delay. What you can control is the order you show them in. The simplest way to do this is to wait until you have received all of your images, storing them in an array as they arrive. After you receive the final image, you can sort the array into the order you want and then run through it to actually put the images on the page. If you need any help with the specifics, please feel free to ask!
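To make that concrete, here is a minimal sketch of the idea, assuming the same response shape as in the question (a count in response[1] and objects with a FilePath property from index 2 on). showImagesTogether is just a hypothetical helper you could call from the existing 200 OK branch:
// Sketch only: preload all images off-DOM, then append them in order inside the "#keys" div.
function showImagesTogether(response) {
    var count = response[1];
    var loads = [];
    for (let i = 0; i < count; i++) {
        let img = document.createElement("img");
        img.setAttribute("style", "width:8%; padding-left:1em; float:left");
        loads.push(new Promise(function (resolve) {
            // Resolve whether the image loads or fails, so one bad file cannot block the rest
            img.onload = img.onerror = function () { resolve(img); };
            img.src = response[2 + i].FilePath;
        }));
    }
    Promise.all(loads).then(function (images) {
        // Promise.all preserves input order, so the layout never reshuffles
        images.forEach(function (image) {
            document.getElementById("keys").appendChild(image);
        });
    });
}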
Related
I'm trying to validate a multiple image upload using jQuery. The validation process gets each image's dimensions and checks whether they are greater than 338x450. But I know very little about JavaScript. Can someone help me? Here's the code I'm trying:
HTML:
<input class="form-control" id="ads-photos" name="ads-photos[]" type="file" accept="image/jpeg, image/jpg" multiple />
javascript
var isFilesOk;
$('#ads-photos').change(function (evt) {
    validateDimensions();
});

function validateDimensions() {
    var fi = document.getElementById('ads-photos');
    if (fi.files.length > 0) {
        for (var i = 0; i <= fi.files.length - 1; i++) {
            var fileName, fileExtension, fileSize, fileType, dateModified;
            fileName = fi.files.item(i).name;
            fileExtension = fileName.replace(/^.*\./, '');
            if (fileExtension == 'jpg' || fileExtension == 'jpeg') {
                if (readImageFile(fi.files.item(i))) {
                    alert(isFilesOk);
                } else {
                    alert(isFilesOk);
                }
            }
        }
    }

    // GET THE IMAGE WIDTH AND HEIGHT USING THE FileReader API.
    function readImageFile(file) {
        var reader = new FileReader(); // CREATE A NEW INSTANCE.
        reader.onload = function (e) {
            var img = new Image();
            img.src = e.target.result;
            img.onload = function () {
                var w = this.width;
                var h = this.height;
                if (this.width >= 338 && this.height >= 450) {
                    isFilesOk = true;
                } else {
                    isFilesOk = false;
                }
            }
        };
        reader.readAsDataURL(file);
        return isFilesOk;
    }
}
The above code needs to analyze all the images sent by the user to check whether they all have the minimum required dimensions. But I can't get it to work.
The issue here is that the readImageFile method is asynchronous. The work it does happens later, after the function has already returned, when the onload event is triggered on the FileReader.
The best solution will depend a little on the environment you're targeting (eg: whether you need to support old IE, etc), but if you're able to use modern JS with Promises, this will be easier.
Basically, for each file that's selected, you want to get a Promise indicating whether the file is okay or not. A good way to do this is to map the files to an array of Promises and use Promise.all to get a callback when they're all done.
Here's the method to get that for a single file:
function fileIsValid(file) {
    return new Promise((resolve) => {
        var reader = new FileReader(); // CREATE A NEW INSTANCE.
        reader.onload = function (e) {
            var img = new Image();
            // Attach the handler before setting src so the event is never missed
            img.onload = function () {
                var w = this.width;
                var h = this.height;
                const isValid = w >= 338 && h >= 450;
                resolve(isValid);
            };
            img.src = e.target.result;
        };
        reader.readAsDataURL(file);
    });
}
And then using this method:
const fi = document.getElementById('ads-photos');
const promises = [];
for (let i = 0; i <= fi.files.length - 1; i++) {
    const file = fi.files.item(i);
    if (/\.jpe?g$/.test(file.name)) {
        promises.push(fileIsValid(file));
    }
}
Promise.all(promises).then((results) => {
    // results is an array of booleans
    const allValid = results.every(Boolean);
    if (allValid) {
        alert('Yeah!');
    } else {
        alert('Nope');
    }
});
There are a few things to note here:
- You could make it short-circuit as soon as one invalid file is found by making the promise reject in that case; Promise.all then won't wait for any other pending promises (there's a sketch of this after these notes). It's a bit of a style question whether failed validation should result in a rejected promise, so that's up to you. In this case the files are read from the local file system and presumably only a small number will be selected, so short-circuiting probably won't make any noticeable difference.
- It doesn't handle actual errors (eg: what happens if a file can't be read?).
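A minimal sketch of that short-circuiting variant, assuming the fi element from the snippet above; assertFileIsValid is a hypothetical name, and the extension filter is left out for brevity:
// Sketch only: reject on the first invalid file so Promise.all fails fast.
function assertFileIsValid(file) {
    return new Promise((resolve, reject) => {
        var reader = new FileReader();
        reader.onload = function (e) {
            var img = new Image();
            img.onload = function () {
                // Reject instead of resolving false, so Promise.all settles as soon as one file fails
                if (this.width >= 338 && this.height >= 450) {
                    resolve(file);
                } else {
                    reject(new Error(file.name + " is smaller than 338x450"));
                }
            };
            img.src = e.target.result;
        };
        reader.readAsDataURL(file);
    });
}

Promise.all(Array.from(fi.files).map(assertFileIsValid))
    .then(() => alert('Yeah!'))
    .catch((err) => alert(err.message));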
I'm working on a project to make sure that users finish a video. I would like to have it just add something like "user has finished video" to an already existing text file.
Here is what I have in my JavaScript file.
var video = document.getElementById("video");
var timeStarted = -1;
var timePlayed = 0;
var duration = 0;

// If the video metadata is loaded, get the duration
if (video.readyState > 0)
    getDuration.call(video);
// If the metadata is not loaded yet, use the event to get it
else {
    video.addEventListener('loadedmetadata', getDuration);
}

// remember the time the user started the video
function videoStartedPlaying() {
    timeStarted = new Date().getTime() / 1000;
}

function videoStoppedPlaying(event) {
    // A start time less than zero means the stop event was fired without a start event
    if (timeStarted > 0) {
        var playedFor = new Date().getTime() / 1000 - timeStarted;
        timeStarted = -1;
        // add the new amount of seconds played
        timePlayed += playedFor;
    }
    document.getElementById("played").innerHTML = Math.round(timePlayed) + "";
    // Count as complete only if the end of the video was reached
    if (timePlayed >= duration && event.type == "ended") {
        document.getElementById("status").className = "complete";
    }
}

function getDuration() {
    duration = video.duration;
    document.getElementById("duration").appendChild(new Text(Math.round(duration) + ""));
    console.log("Duration: ", duration);
}

video.addEventListener("play", videoStartedPlaying);
video.addEventListener("playing", videoStartedPlaying);
video.addEventListener("ended", videoStoppedPlaying);
video.addEventListener("pause", videoStoppedPlaying);

var data = "This user has finished the video";
var url = "data.php";
var http = new XMLHttpRequest();
http.open("POST", url, true);
// sends header info along with the request
http.setRequestHeader("content-type", "application/x-www-form-urlencoded");
http.onreadystatechange = function () {
    if (http.readyState == 4 && http.status == 200) {
        alert(http.responseText);
    }
};
http.send(data);
and Data.php has
<?php
$data = $_POST['data'];
$file = fopen('names.txt', 'a');
fwrite($file, $data);
fclose($file);
?>
As of now, there are no errors in the console, but it does not write the data to the text file.
Please let me know what I'm doing wrong.
Since you are using http.setRequestHeader("content-type", "application/x-www-form-urlencoded");, the server expects the request body to be URL-encoded form data, i.e. key=value pairs, and PHP only populates $_POST['data'] when a data key is present. Change the following line to provide the data in the proper format:
var data = "data=This%20user%20has%20finished%20the%20video";
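Equivalently, the body can be built with encodeURIComponent so the message is always encoded correctly; this is just a sketch reusing the pieces from the question (the data key matches $_POST['data'] in Data.php):
// Sketch only: build a URL-encoded body whose "data" key matches $_POST['data'] in data.php.
var message = "This user has finished the video";
var body = "data=" + encodeURIComponent(message);

var http = new XMLHttpRequest();
http.open("POST", "data.php", true);
http.setRequestHeader("content-type", "application/x-www-form-urlencoded");
http.onreadystatechange = function () {
    if (http.readyState == 4 && http.status == 200) {
        alert(http.responseText);
    }
};
http.send(body);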
I have a piece of JavaScript that gets called when a button is clicked.
The function manipulates some DOM (makes the page opaque, reveals an animation) and then loops through AJAX calls to 'script.php' many times. When that's over it reloads the page, fresh.
My issue is that on Chrome and IE, for the life of me, I can't get the DOM modifications to appear before the AJAX runs and completes. It works fine on Firefox.
I've tried calling the two parts sequentially in the code, and nesting each part in its own function and calling both in order from a third.
I've also tried using a promise. Every time, the AJAX runs and completes, and the screen only flickers with the DOM modifications just before the page reloads.
How is this usually managed?
The problem in a nutshell: what are some ways I can force Chrome (and Edge) to update the DOM first and then run the AJAX, rather than the other way around?
My code is here:
function Backup(OrgRowIDs) {
    document.getElementById('Overlay').style.visibility = "visible";
    document.getElementById('Overlay').style.display = "block";
    document.getElementById('ActionWindow').style.visibility = "visible";
    document.getElementById('ActionWindow').style.display = "inline";

    function GenerateBackupID() {
        var Characters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789';
        var ID = '';
        for (var i = 1; i <= 12; i++) {
            var pos = Math.floor((Math.random() * 60) + 1)
            ID = ID + Characters.substring(pos, pos + 1);
        }
        return ID;
    }

    function RecordBackupSessionID() {
        var xhttp = new XMLHttpRequest();
        xhttp.onreadystatechange = function() {
            if (this.readyState == 4 && this.status == 200) {
                return;
            }
        }
        xhttp.open("GET", "../AJAX/RecordBackupID?backupfile=" + OrgRowIDs[i] + "&backupid=" + BID, false);
        xhttp.send();
    }

    for (i = 0; i < OrgRowIDs.length; i++) {
        BID = GenerateBackupID();
        RecordBackupSessionID();
        function BackupFiles(i) {
            for (j = 0; j < 20; j++) {
                function BackupEndPoints(j) {
                    var xhttp = new XMLHttpRequest();
                    xhttp.onreadystatechange = function() {
                        if (this.readyState == 4 && this.status == 200) {
                            //do something if we want.
                        }
                    }
                    xhttp.open("GET", "../AJAX/BackupFiles?backupfile=" + OrgRowIDs[i] + "&endpoint=" + j + "&backupid=" + BID, false);
                    xhttp.send();
                }
                BackupEndPoints(j)
            }
        }
        BackupFiles(i)
    }
    location.reload();
}
Since my main language is C, I am used to pointers and I love them. Now I have some project which I need to finish in Javascript and I've got a problem which I don't know how to solve.
I want to store the value of a variable that I get from a GET request. I have a script that sends a GET to a PHP page, which then sends a GET to my daemon written in C. When I get the string I want, I use length to measure the size of the string, and in the next GET request I want to send that number of bytes as a URL parameter.
window.onload = function() {
    if (bytes === undefined) {
        var bytes = 0;
    }
    var url = "/test/log.php?q=" + bytes;

    function httpGet(url) {
        var xhttp = new XMLHttpRequest();
        xhttp.open("GET", url, true);
        xhttp.onload = function(e) {
            if (xhttp.readyState === 4) {
                if (xhttp.status === 200) {
                    console.log(xhttp.responseText);
                    var option = "";
                    obj = JSON.parse(xhttp.responseText);
                    for (var key in obj) {
                        option += obj[key];
                    }
                    document.getElementById("list").innerHTML = option;
                    bytes = option.length;
                }
            }
        };
        xhttp.onerror = function(e) {
            console.error(xhttp.statusText);
        };
        xhttp.send();
    }

    var updateInterval = 2000;

    function update() {
        httpGet(url);
        setTimeout(update, updateInterval);
    }
    update();
}
So, the focus is on the variable bytes. It should have the value 0 the first time the script is called, and after every loop (it loops every 2 seconds; I didn't show the loop in the code) it should hold the length of the previously received string.
You just need to make sure to add the bytes param onto your url in a way that changes with each call rather than just once at page load when it will always be 0.
window.onload = function() {
    if (bytes === undefined) {
        var bytes = 0;
    }
    var url = "/test/log.php?q=";

    function httpGet(url) {
        var xhttp = new XMLHttpRequest();
        xhttp.open("GET", url, true);
        xhttp.onload = function(e) {
            if (xhttp.readyState === 4) {
                if (xhttp.status === 200) {
                    console.log(xhttp.responseText);
                    var option = "";
                    obj = JSON.parse(xhttp.responseText);
                    for (var key in obj) {
                        option += obj[key];
                    }
                    document.getElementById("list").innerHTML = option;
                    bytes = option.length;
                }
            }
        };
        xhttp.onerror = function(e) {
            console.error(xhttp.statusText);
        };
        xhttp.send();
    }

    var updateInterval = 2000;

    function update() {
        httpGet(url + bytes);
        setTimeout(update, updateInterval);
    }
    update();
}
Instead of a fixed url value, make it a function; it will then always give you the current URL with the latest value of bytes, whenever you modify it.
You only have to change these parts:
var url = ...
// to
function getUrl() {
return "/test/log.php?q=" + bytes;
}
...
// and
xhttp.open("GET", url, true);
// to
xhttp.open("GET", getUrl(), true);
I'd declare the variable in a context that doesn't lose its value when the function is called. So, declare your variable "bytes" before the function, and then loop through that function. In this case, the variable will hold the last value until you overwrite it.
That should work!
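For example, here is a rough sketch of that idea, reusing the endpoint and JSON handling from the question, with setInterval standing in for the recursive setTimeout and the window.onload wrapper omitted for brevity:
// Sketch only: "bytes" lives in the outer scope, so it survives between calls.
var bytes = 0;

function httpGet() {
    var xhttp = new XMLHttpRequest();
    xhttp.open("GET", "/test/log.php?q=" + bytes, true); // always uses the current value of bytes
    xhttp.onload = function () {
        if (xhttp.status === 200) {
            var option = "";
            var obj = JSON.parse(xhttp.responseText);
            for (var key in obj) {
                option += obj[key];
            }
            document.getElementById("list").innerHTML = option;
            bytes = option.length; // remembered for the next request
        }
    };
    xhttp.send();
}

setInterval(httpGet, 2000);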
What's wrong with this code:
var images = [];

function getImages() {
    var st = true;
    var i = 1;
    var url;
    var ob;
    while (st) {
        if (i < 10) {
            url = "http://rachel-b.org/gallery/albums/Events/2012/May%2008%20-%20Rachel%20Bilson%20Celebrates%20Edie%20Rose%20Home%20Collection/thumb_00" + i + ".jpg";
            ob = new Image();
            ob.src = url;
            st = checkIfImageExists(ob);
            images.push(ob);
        }
        if (i >= 10 && i < 100) {
            url = "http://rachel-b.org/gallery/albums/Events/2012/May%2008%20-%20Rachel%20Bilson%20Celebrates%20Edie%20Rose%20Home%20Collection/thumb_0" + i + ".jpg";
            ob = new Image();
            ob.src = url;
            st = checkIfImageExists(ob);
            images.push(ob);
        }
        if (i >= 100) {
            url = "http://rachel-b.org/gallery/albums/Events/2012/May%2008%20-%20Rachel%20Bilson%20Celebrates%20Edie%20Rose%20Home%20Collection/thumb_" + i + ".jpg";
            ob = new Image();
            ob.src = url;
            st = checkIfImageExists(ob);
            images.push(ob);
        }
        i++;
    }
}

function checkIfImageExists(o) {
    var e = document.createElement("img");
    e.style.display = "none";
    document.getElementsByTagName("body")[0].appendChild(e);
    e.src = o.src;
    var res = e.width;
    document.getElementsByTagName("body")[0].removeChild(e);
    console.log(res);
    if (res === 0) {
        return false;
    } else {
        return true;
    }
}

getImages();

function outPut() {
    for (var i = 0; i < images.length; i++) {
        console.log(images[i]);
    }
}

outPut();
Why doesn't it output all array elements at once?
Also, every time I press the run button, it outputs n+1 array elements. How come?
Here is my rewrite. It stops when no more images are found - in this case at 106 images.
var images = [];
var baseUrl = "http://rachel-b.org/gallery/albums/Events/2012/May%2008%20-%20Rachel%20Bilson%20Celebrates%20Edie%20Rose%20Home%20Collection/thumb_";

function pad(num) {
    var str = "00" + num;
    return str.substring(str.length - 3);
}

function output() {
    for (var i = 0; i < images.length; i++) {
        console.log(images[i].src);
    }
}

function getImages() {
    var ob = new Image();
    var url = baseUrl + pad(images.length + 1) + ".jpg";
    ob.onload = function() {
        images.push(ob);
        getImages();
    };
    ob.onerror = function() {
        output();
    };
    ob.src = url;
}

getImages();
getImages();
Your checkIfImageExists() function is the cause. I understand what you are trying to accomplish but you are checking for a width without giving the image time to load. You should bind to onload and onerror for the image object. (See http://lucassmith.name/2008/11/is-my-image-loaded.html for more details).
If you really want these images to load asynchronously, this while loop is very dangerous; you'd be better off already knowing the upper limit rather than trying to guess. If you do want async loading with an unknown maximum, then you should load a limited chunk of images at a time (and possibly add some kind of setInterval). Otherwise, with this setup, by the time your server returns a 404 for an image, your script would already have tried to load several thousand more invalid images.
If you need a code example let me know but this should at least point you in the right direction.
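Not the author's code, but a rough sketch of that chunked approach, borrowing baseUrl and pad() from the rewrite above; the chunk size of 10 is arbitrary:
// Sketch only: probe images in small chunks, binding onload/onerror instead of reading width synchronously.
var found = [];

function loadChunk(start, size) {
    var pending = size;
    var anyLoaded = false;
    for (var n = start; n < start + size; n++) {
        (function (n) {
            var ob = new Image();
            ob.onload = function () {
                found[n - 1] = ob; // keep the original numbering order
                anyLoaded = true;
                if (--pending === 0) next();
            };
            ob.onerror = function () {
                if (--pending === 0) next();
            };
            ob.src = baseUrl + pad(n) + ".jpg";
        })(n);
    }
    function next() {
        // Only keep probing if at least one image in this chunk actually existed
        if (anyLoaded) loadChunk(start + size, size);
    }
}

loadChunk(1, 10);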