Failed cross-domain XML GET request - javascript

I'm trying to help a friend out with a program, but my coding experience is somewhat dated (10 years, give or take). We're trying to pull data from a database via their API. I'm making this request via an XMLHttpRequest, but I'm having issues even reaching the server.
The error that occurs:
Failed to load resource: the server responded with a status of 404 (Not Found)
http://localhost/jasper/api.shiftbase.com/api/rosters?min_date=2020-07-13&max_date=2020-12-31&department_id=24477
It's trying to resolve the URL on my own domain, but I need it to go to the external domain.
The entire function:
function getRequest() {
    var _request = new XMLHttpRequest();
    var key = myKeyHere;
    var url = "api.shiftbase.com/api/rosters?min_date=2020-07-13&max_date=2020-12-31&department_id=24477";
    _request.onreadystatechange = function(event) {
        console.log(_request.readyState + " + " + _request.status);
        if (_request.readyState == 4) {
            if ((_request.status >= 200 && _request.status < 300) || _request.status == 304) {
                alert(_request.responseText);
            } else {
                alert('Request was unsuccessful: ' + _request.status);
            }
        }
    };
    _request.open("get", url, true);
    _request.setRequestHeader("Accept", "application/JSON", false);
    _request.setRequestHeader("Content-Type", "application/JSON", false);
    _request.setRequestHeader("Authorization", key, false);
    _request.send(null);
};
I've also read a lot about CORS and how it affects these kinds of cross-domain requests, but I don't quite understand how it works or how I can work around it.
Any help is appreciated.

First of all, you need to prefix your URL with https:// so that the request goes to the external server instead of localhost. The second thing is that in your headers, application/JSON should be application/json.
Also, don't forget that your key needs to be prefixed with "API", for example: API [some_random_key]
I've tested the following code and it worked. You just have to add your own API key.
function getRequest() {
    var _request = new XMLHttpRequest();
    var key = "API [replace_this_with_your_key]"; // Example: "API a1b2c3d4e5f6g7h8i9"
    var url = "https://api.shiftbase.com/api/rosters?min_date=2020-07-13&max_date=2020-12-31&department_id=24477";
    _request.onreadystatechange = function(event) {
        console.log(_request.readyState + " + " + _request.status);
        if (_request.readyState == 4) {
            if ((_request.status >= 200 && _request.status < 300) || _request.status == 304) {
                alert(_request.responseText);
            } else {
                alert('Request was unsuccessful: ' + _request.status);
            }
        }
    };
    _request.open("get", url, true);
    _request.setRequestHeader("Accept", "application/json");
    _request.setRequestHeader("Content-Type", "application/json");
    _request.setRequestHeader("Authorization", key);
    _request.send(null);
};
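For comparison, here is a minimal sketch of the same request using fetch instead of XMLHttpRequest (the key value is a placeholder you must fill in, exactly as above):

function getRostersWithFetch() {
    // Placeholder key, same format as above: "API [your_key_here]"
    var key = "API [replace_this_with_your_key]";
    var url = "https://api.shiftbase.com/api/rosters?min_date=2020-07-13&max_date=2020-12-31&department_id=24477";

    fetch(url, {
        method: "GET",
        headers: {
            "Accept": "application/json",
            "Authorization": key
        }
    })
        .then(function (response) {
            if (!response.ok) {
                throw new Error("Request was unsuccessful: " + response.status);
            }
            return response.json();
        })
        .then(function (data) {
            console.log(data); // the parsed roster data
        })
        .catch(function (err) {
            alert(err.message);
        });
}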

Try adding // at the start of the URL string in the url variable declaration (line 4), if you want to make the request to the external server.
Your request was made to the local webserver (http://localhost/jasper/...), which is why you received a 404 (Not Found) error.
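For example, a one-line sketch of the suggested change:

// A protocol-relative URL: the browser reuses the current page's scheme
// instead of treating the value as a path on your own domain.
var url = "//api.shiftbase.com/api/rosters?min_date=2020-07-13&max_date=2020-12-31&department_id=24477";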

Related

XMLHttpRequest endpoint blocked while signing S3 request because no HTTPS, even though everything is on HTTPS

I am trying to sign a huge video upload because I want to upload it directly to S3. It works on localhost, but on my live site it fails to sign the request because of:
Mixed Content: The page at 'https://www.example.com/profile' was loaded
over HTTPS, but requested an insecure XMLHttpRequest endpoint
'http://www.example.com/sign_s3/?file_name=17mbvid.mp4&file_type=video/mp4'.
This request has been blocked; the content must be served over HTTPS.
I am hosting everything on Heroku, every page already uses HTTPS, and it's not possible to open the site over plain HTTP because I redirect all traffic to HTTPS. I am using a Let's Encrypt SSL certificate.
So far I have no idea where to look; the only information I found is that I need a valid SSL certificate, which I have.
Here is the JS function:
function getSignedRequest(file) {
    var xhr = new XMLHttpRequest();
    xhr.open("GET", "/sign_s3?file_name=" + file.name + "&file_type=" + file.type);
    xhr.onreadystatechange = function() {
        if (xhr.readyState === 4) {
            if (xhr.status === 200) {
                console.log('got signed request');
                var response = JSON.parse(xhr.responseText);
                console.log(response);
                console.log('uploadFile', response.url);
                uploadFile(file, response.data, response.url);
            } else {
                console.log("Could not get signed URL.");
            }
        }
    };
    //console.log('send');
    xhr.send();
}
Right after the error in the console I see this console log:
Could not get signed URL.
which means it fails here:
if (xhr.status === 200)
On the server:
@app.route('/sign_s3/', methods=["GET", "POST"])
@login_required
@check_confirmed
def sign_s3():
    if "localhost" in request.url_root:
        file_name = str(current_user.id) + "local-profil-video." + request.args.get('file_name').split(".")[-1]
    else:
        file_name = str(current_user.id) + "-profil-video." + request.args.get('file_name').split(".")[-1]
    file_type = request.args.get('file_type')
    session = boto3.Session(
        aws_access_key_id=app.config['MY_AWS_ID'],
        aws_secret_access_key=app.config['MY_AWS_SECRET'],
        region_name='eu-central-1'
    )
    s3 = session.client('s3')
    presigned_post = s3.generate_presigned_post(
        Bucket='adultpatreon',
        Key='videos/' + file_name,
        Fields={"acl": "public-read", "Content-Type": file_type},
        Conditions=[
            {"acl": "public-read"},
            {"Content-Type": file_type}
        ],
        ExpiresIn=3600
    )
    if current_user.profile_video != None:
        delete_file_from_aws("videos/", current_user.profile_video)
    setattr(current_user, "profile_video", file_name)
    db_session.commit()
    return json.dumps({'data': presigned_post, 'url': 'https://s3.eu-central-1.amazonaws.com/mybucket/' + 'videos/' + file_name})
After many hours of research I decided to rebuild this function using an AJAX GET, which I am more familiar with. I also changed the way I pass and receive the query string arguments to the way that is actually idiomatic in Flask/Python.
function getSignedRequest(file) {
    $.ajax({
        url: "/sign_s3/" + file.name + "/" + file.type,
        type: "get",
        success: function(response) {
            console.log("success file up, follow", response);
            var json_response = JSON.parse(response);
            console.log(json_response);
            uploadFile(file, json_response.data, json_response.url);
        },
        error: function(xhr) {
            console.log("file up failed", xhr);
        }
    });
}
And on the server side I changed how file.name and file.type are received:
# Sign request for direct file upload through client for video
@app.route('/sign_s3/<path:file_name_data>/<path:file_type_data>', methods=["GET", "POST"])
@login_required
@check_confirmed
def sign_s3(file_name_data, file_type_data):
    # etc...
Now it works perfectly. I think the way I was receiving the query string arguments on the server was not correct; it would probably also work with the old getSignedRequest function (untested).
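For reference, a minimal sketch of the same rewritten request using fetch instead of jQuery (the route and response shape are taken from the code above; untested, like the note above says):

function getSignedRequest(file) {
    fetch("/sign_s3/" + file.name + "/" + file.type)
        .then(function (response) {
            if (!response.ok) {
                throw new Error("Could not get signed URL: " + response.status);
            }
            return response.json(); // the route returns json.dumps(...), so parse the body as JSON
        })
        .then(function (json_response) {
            uploadFile(file, json_response.data, json_response.url);
        })
        .catch(function (err) {
            console.log("file up failed", err);
        });
}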

Download Google Drive file from URL and send it with POST to backend

I am trying to download a file from a URL (a Google Drive file) in JavaScript, and I want to send it to my backend (PHP - Laravel).
var url = file.downloadUrl !== undefined ? file.webContentLink : file.exportLinks['application/pdf'];
console.log(url); // if I go to this url, it downloads the file
if (url !== undefined) {
    var remote = new XMLHttpRequest();
    remote.open('GET', url);
    remote.setRequestHeader('Authorization', 'Bearer ' + gapi.client.getToken().access_token);
    remote.setRequestHeader('Access-Control-Allow-Origin', '*');
    remote.onload = function(e) {
        vm.handle_download(remote.responseText, file, 200); // do something with the fetched content
    };
    remote.onerror = function(e) {
        vm.handle_download('error response', null, remote.statusText);
    };
    remote.send();
} else {
    vm.handle_download('no downloadable url', {
        file: null,
        status: 'error'
    });
}
and in the handler:
handle_download: function (content, file, status) {
    if (status !== 200) {
        console.log('error occurred on status');
        return;
    }
}
Failed to load https://drive.google.com/uc?id=1D12321ofd4CNG-m9_Mp4aiDcnibNf&export=download: Response to preflight request doesn't pass access control check: No 'Access-Control-Allow-Origin' header is present on the requested resource. Origin 'http://test.dev' is therefore not allowed access.
This is intended behavior due to the same-origin policy on the web. Since you're doing this for testing purposes, try the Allow-Control-Allow-Origin Chrome extension.
You can read more about how to implement this in the Using CORS tutorial.
This SO post may also offer additional insight.
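Note also that Access-Control-Allow-Origin is a response header; setting it on the request, as the code above does, has no effect. If the goal is to fetch the file contents from the browser without an extension, one commonly used alternative (a sketch, assuming Drive API v3, a valid OAuth token from gapi, and a placeholder file id) is to request the file through the googleapis.com endpoint with alt=media, which is generally served with CORS headers, unlike the uc?export=download link:

var fileId = "FILE_ID"; // placeholder: the Drive file id, not the full download link
var remote = new XMLHttpRequest();
remote.open('GET', 'https://www.googleapis.com/drive/v3/files/' + fileId + '?alt=media');
remote.setRequestHeader('Authorization', 'Bearer ' + gapi.client.getToken().access_token);
remote.onload = function (e) {
    vm.handle_download(remote.responseText, file, remote.status);
};
remote.onerror = function (e) {
    vm.handle_download('error response', null, remote.statusText);
};
remote.send();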

Cross Origin Resource Sharing Issue with jQuery on https secure url?

I'm having a slight issue with a cross-origin request. I'm sure it is a simple fix.
Console error:
XMLHttpRequest cannot load https://subdomain.example.com/social/disqus. Response to preflight request doesn't pass access control check: No 'Access-Control-Allow-Origin' header is present on the requested resource. Origin 'https://www.example.com' is therefore not allowed access.
The JS causing the issue:
window.onload = function() {
    // jQuery AJAX GET method on Disqus threads
    $.ajax({
        type: 'GET',
        url: 'https://subdomain.example.com/social/disqus',
        contentType: 'application/json; charset=utf-8',
        success: function(threads) {
            var len = Object.keys(threads.response).length;
            for (var i = 0; i < len; i++) {
                if (threads.response[i].posts == 0 || threads.response[i].posts != 1) {
                    $('#' + threads.response[i].identifiers).html(threads.response[i].posts + " Comments ");
                } else {
                    $('#' + threads.response[i].identifiers).html(threads.response[i].posts + " Comment ");
                }
            }
        },
        error: function() {
            console.log("Aw, snap!");
        }
    });
};
I'm forcing redirects in Apache for both, which may be an issue, but it looks like the CORS request is being fired from one valid https:// site to another valid https:// site... the URL in the AJAX request is definitely https.
I'm wondering if I am missing something from $.ajax ?

Using JavaScript to add a custom HTTP header and trigger a file download

I would like to start a simple file download through the browser, but an access token must be passed with a custom HTTP header:
GET https://my.site.com/some/file
Authorization: access_token
How can I inject the Authorization header when the browser follows the site URL?
I know it's possible to pass the token in the query string, but I want to do it using headers.
I'm familiar with XMLHttpRequest, but as far as I understand it does not trigger a download; it only reads the content, and the file I want to download is a few hundred MB at least.
xhr.setRequestHeader('Authorization', 'access_token');
This looks like a simple task, but I'm an inexperienced coder, so any help would be nice. Thanks.
I think this solves your problem:
function toBinaryString(data) {
    var ret = [];
    var len = data.length;
    var byte;
    for (var i = 0; i < len; i++) {
        byte = (data.charCodeAt(i) & 0xFF) >>> 0;
        ret.push(String.fromCharCode(byte));
    }
    return ret.join('');
}

var xhr = new XMLHttpRequest();
xhr.open("GET", "https://my.site.com/some/file");
xhr.addEventListener("load", function() {
    var data = toBinaryString(this.responseText);
    data = "data:application/text;base64," + btoa(data);
    document.location = data;
}, false);
xhr.setRequestHeader("Authorization", "access_token");
xhr.overrideMimeType("application/octet-stream; charset=x-user-defined;");
xhr.send(null);
I modified the answer at https://stackoverflow.com/a/10518190/2767026 to fit your needs.
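For completeness, here is a more recent sketch of the same idea using fetch and a Blob object URL (same placeholder URL and token as above; like the answer above, it still buffers the whole file in memory before the download starts):

fetch("https://my.site.com/some/file", {
    headers: { "Authorization": "access_token" }
})
    .then(function (response) {
        if (!response.ok) {
            throw new Error("Download failed: " + response.status);
        }
        return response.blob();
    })
    .then(function (blob) {
        // Point a temporary link at the in-memory blob and click it
        // to trigger the browser's normal download behaviour.
        var link = document.createElement("a");
        link.href = URL.createObjectURL(blob);
        link.download = "file"; // suggested file name
        document.body.appendChild(link);
        link.click();
        document.body.removeChild(link);
        URL.revokeObjectURL(link.href);
    })
    .catch(function (err) {
        console.error(err);
    });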
https://stackoverflow.com/a/12372670/1961561 could be a solution for your problem.
$.ajax({
    url: "/test",
    headers: { "X-Test-Header": "test-value" }
});
It is not possible to add a custom HTTP header when you download a file by clicking on a link.
However, in your use case, you could store the token in a cookie, which will be added automatically to all browser requests.

Is it possible to ping a server from Javascript?

I'm making a web app that needs to check whether remote servers are online or not. When I ping them from the command line on the server, my page load goes up to a full 60 seconds (for 8 entries; it scales linearly with more).
I decided to go the route of pinging on the user's end instead. That way, I can load the page and just have them wait for the "server is online" data while browsing my content.
If anyone has the answer to the above question, or knows a solution to keep my page loads fast, I'd definitely appreciate it.
I found someone who accomplishes this with a very clever use of the native Image object.
From their source, this is the main function (it has dependencies on other parts of the source, but you get the idea).
function Pinger_ping(ip, callback) {
    if (!this.inUse) {
        this.inUse = true;
        this.callback = callback;
        this.ip = ip;
        var _that = this;
        this.img = new Image();
        this.img.onload = function() { _that.good(); };
        this.img.onerror = function() { _that.good(); };
        this.start = new Date().getTime();
        this.img.src = "http://" + ip;
        this.timer = setTimeout(function() { _that.bad(); }, 1500);
    }
}
This works on all types of servers that I've tested (web servers, ftp servers, and game servers). It also works with ports. If anyone encounters a use case that fails, please post in the comments and I will update my answer.
Update: Previous link has been removed. If anyone finds or implements the above, please comment and I'll add it into the answer.
Update 2: @trante was nice enough to provide a jsFiddle.
http://jsfiddle.net/GSSCD/203/
Update 3: @Jonathon created a GitHub repo with the implementation.
https://github.com/jdfreder/pingjs
Update 4: It looks as if this implementation is no longer reliable. People are also reporting that Chrome no longer supports it at all, throwing a net::ERR_NAME_NOT_RESOLVED error. If someone can verify an alternate solution, I will put that as the accepted answer.
Ping uses ICMP, but if there is any open TCP port on the remote server, something similar can be achieved like this:
function ping(host, port, pong) {
    var started = new Date().getTime();
    var http = new XMLHttpRequest();
    http.open("GET", "http://" + host + ":" + port, /*async*/ true);
    http.onreadystatechange = function() {
        if (http.readyState == 4) {
            var ended = new Date().getTime();
            var milliseconds = ended - started;
            if (pong != null) {
                pong(milliseconds);
            }
        }
    };
    try {
        http.send(null);
    } catch (exception) {
        // this is expected
    }
}
You can try this: put a ping.html file on the server, with or without any content, and in your JavaScript do the following:
<script>
function ping() {
    $.ajax({
        url: 'ping.html',
        success: function(result) {
            alert('reply');
        },
        error: function(result) {
            alert('timeout/error');
        }
    });
}
</script>
You can't directly "ping" in JavaScript.
There may be a few other ways:
Ajax
Using a Java applet with isReachable
Writing a server-side script which pings and using AJAX to communicate with your server-side script
You might also be able to ping in Flash (ActionScript)
You can't do a regular ping in browser JavaScript, but you can find out whether a remote server is alive by, for example, loading an image from it. If loading fails, the server is likely down.
You can even measure the loading time by using the onload event; a sketch of how to use it follows below.
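A minimal sketch of that image-load timing idea (the host, the favicon path, and the 1500 ms timeout are assumptions for illustration):

function imagePing(host, callback) {
    var start = new Date().getTime();
    var done = false;
    var img = new Image();

    function finish(isUp) {
        if (done) return;
        done = true;
        callback(isUp, new Date().getTime() - start);
    }

    // Any response proves the host answered, so treat onerror as "up" too:
    // a 404 or a non-image body still means the server is reachable.
    img.onload = function () { finish(true); };
    img.onerror = function () { finish(true); };
    img.src = "http://" + host + "/favicon.ico?cachebust=" + start;

    setTimeout(function () { finish(false); }, 1500);
}

// Usage:
imagePing("example.com", function (isUp, ms) {
    console.log(isUp ? "answered in " + ms + " ms" : "no answer");
});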
Pitching in with a websocket solution...
function ping(ip, isUp, isDown) {
    var ws = new WebSocket("ws://" + ip);
    ws.onerror = function(e) {
        isUp();
        ws = null;
    };
    setTimeout(function() {
        if (ws != null) {
            ws.close();
            ws = null;
            isDown();
        }
    }, 2000);
}
Update: this solution no longer works in major browsers, since the onerror callback is executed even if the host is a non-existent IP address.
To keep your requests fast, cache the server-side results of the ping and update the ping file or database every couple of minutes (or however fresh you need it to be). You can use cron to run a shell command with your 8 pings and write the output into a file; the webserver will include this file in your view.
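A minimal client-side sketch of reading such a cached result (assuming, purely for illustration, that the cron job writes a file like ping-status.json next to your pages):

function loadPingStatus(callback) {
    var xhr = new XMLHttpRequest();
    xhr.open("GET", "/ping-status.json", true);
    xhr.onreadystatechange = function () {
        if (xhr.readyState === 4 && xhr.status === 200) {
            // e.g. [{"host": "example.com", "up": true, "ms": 23}, ...]
            callback(JSON.parse(xhr.responseText));
        }
    };
    xhr.send();
}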
The problem with standard pings is that they're ICMP, which a lot of places don't let through for security and traffic reasons. That might explain the failure.
Ruby prior to 1.9 had a TCP-based ping.rb, and it will run with Ruby 1.9+. All you have to do is copy it from the 1.8.7 installation to somewhere else. I just confirmed that it works by pinging my home router.
There are many crazy answers here, especially about CORS.
You could do an HTTP HEAD request (like GET but without a payload).
See https://ochronus.com/http-head-request-good-uses/
It does NOT need a preflight check; the confusion stems from an old version of the specification, see
Why does a cross-origin HEAD request need a preflight check?
So you could use the answer above which uses the jQuery library (it didn't say so) but with
type: 'HEAD'
--->
<script>
function ping() {
    $.ajax({
        url: 'ping.html',
        type: 'HEAD',
        success: function(result) {
            alert('reply');
        },
        error: function(result) {
            alert('timeout/error');
        }
    });
}
</script>
Of course you can also use vanilla JS or Dojo or whatever...
If what you are trying to see is whether the server "exists", you can use the following:
function isValidURL(url) {
    var encodedURL = encodeURIComponent(url);
    var isValid = false;
    $.ajax({
        url: "http://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20html%20where%20url%3D%22" + encodedURL + "%22&format=json",
        type: "get",
        async: false,
        dataType: "json",
        success: function(data) {
            isValid = data.query.results != null;
        },
        error: function() {
            isValid = false;
        }
    });
    return isValid;
}
This will return true or false, indicating whether the server exists.
If you want the response time, a slight modification will do:
function ping(url) {
    var encodedURL = encodeURIComponent(url);
    var startDate = new Date();
    var endDate = null;
    $.ajax({
        url: "http://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20html%20where%20url%3D%22" + encodedURL + "%22&format=json",
        type: "get",
        async: false,
        dataType: "json",
        success: function(data) {
            if (data.query.results != null) {
                endDate = new Date();
            } else {
                endDate = null;
            }
        },
        error: function() {
            endDate = null;
        }
    });
    if (endDate == null) {
        throw "Not responsive...";
    }
    return endDate.getTime() - startDate.getTime();
}
The usage is then trivial:
var isValid = isValidURL("http://example.com");
alert(isValid ? "Valid URL!!!" : "Damn...");
Or:
var responseInMillis = ping("example.com");
alert(responseInMillis);
const ping = (url, timeout = 6000) => {
    return new Promise((resolve, reject) => {
        const urlRule = new RegExp('(https?|ftp|file)://[-A-Za-z0-9+&@#/%?=~_|!:,.;]+[-A-Za-z0-9+&@#/%=~_|]');
        if (!urlRule.test(url)) reject('invalid url');
        try {
            fetch(url)
                .then(() => resolve(true))
                .catch(() => resolve(false));
            setTimeout(() => {
                resolve(false);
            }, timeout);
        } catch (e) {
            reject(e);
        }
    });
};
Use it like this:
ping('https://stackoverflow.com/')
    .then(res => console.log(res))
    .catch(e => console.log(e));
I don't know what version of Ruby you're running, but have you tried implementing the ping in Ruby instead of JavaScript? http://raa.ruby-lang.org/project/net-ping/
const https = require('https'); // Node.js built-in module, needed for https.get below
let webSite = 'https://google.com/';
https.get(webSite, function (res) {
    // If you get here, you have a response.
    // If you want, you can check the status code here to verify that it's 200 or some other 2xx.
    console.log(webSite + ' ' + res.statusCode);
}).on('error', function (e) {
    // Here, an error occurred. Check e for the error.
    console.log(e.code);
});
If you run this with Node, it will log 200 to the console as long as Google is not down.
You can run the DOS ping.exe command from JavaScript (Internet Explorer only, since it relies on ActiveX) using the following:
function ping(ip) {
    var input = "";
    var WshShell = new ActiveXObject("WScript.Shell");
    var oExec = WshShell.Exec("c:/windows/system32/ping.exe " + ip);
    while (!oExec.StdOut.AtEndOfStream) {
        input += oExec.StdOut.ReadLine() + "<br />";
    }
    return input;
}
Is this what was asked for, or am I missing something?
Just replace file_get_contents with:
$ip = $_SERVER['xxx.xxx.xxx.xxx'];
exec("ping -n 4 $ip 2>&1", $output, $retval);
if ($retval != 0) {
    echo "no!";
} else {
    echo "yes!";
}
It might be a lot easier than all that. If you want your page to load and then check the availability or content of some foreign page to trigger other web page activity, you could do it using only JavaScript and PHP, like this:
yourpage.php
<?php
if (isset($_GET['urlget'])) {
    if ($_GET['urlget'] != '') {
        $foreignpage = file_get_contents('http://www.foreignpage.html');
        // you could also use curl for fancier internet queries or if http wrappers aren't active in your php.ini
        // parse $foreignpage for data that indicates your page should proceed
        echo $foreignpage; // or a portion of it, as you parsed
        exit(); // this is very important, otherwise you'll get the contents of your own page returned back to you on each call
    }
}
?>
<html>
mypage html content
...
<script>
var stopmelater = setInterval("getforeignurl('?urlget=doesntmatter')", 2000);

function getforeignurl(url) {
    var handle = browserspec();
    handle.open('GET', url, false);
    handle.send();
    var returnedPageContents = handle.responseText;
    // parse the page contents for what you're looking for and trigger javascript events accordingly.
    // use handle.open('GET', url, true) to let javascript keep executing; you must then provide a callback to accept the page contents via handle.onreadystatechange()
}

function browserspec() {
    if (window.XMLHttpRequest) {
        return new XMLHttpRequest();
    } else {
        return new ActiveXObject("Microsoft.XMLHTTP");
    }
}
</script>
That should do it.
The triggered javascript should include clearInterval(stopmelater)
Let me know if that works for you
Jerry
You could try using PHP in your web page...something like this:
<html><body>
<form method="post" name="pingform" action="<?php echo $_SERVER['PHP_SELF']; ?>">
    <h1>Host to ping:</h1>
    <input type="text" name="tgt_host" value='<?php echo $_POST['tgt_host']; ?>'><br>
    <input type="submit" name="submit" value="Submit">
</form></body>
</html>
<?php
$tgt_host = $_POST['tgt_host'];
$output = shell_exec('ping -c 10 ' . $tgt_host);
echo "<html><body style=\"background-color:#0080c0\">
<script type=\"text/javascript\" language=\"javascript\">alert(\"Ping Results: " . $output . ".\");</script>
</body></html>";
?>
This is not tested, so it may have typos etc., but I am confident it would work. It could be improved, too...
