I'm using a JavaScript file to try to get a token from ArcGIS Online. However, whenever I try it, it comes back with:
init.js:11 Uncaught Error: undefinedModule
The file (GetAToken.js) is below:
dojo.ready(init);
var request = dojo.require('request'); // npm install request
// generate a token with your client id and client secret
function getToken(callback) {
  request.post({
    url: 'https://www.arcgis.com/sharing/rest/oauth2/token/',
    json: true,
    form: {
      'f': 'json',
      'client_id': '<<MY_CLIENT_ID>>',
      'client_secret': '<<MY_CLIENT_SECRET>>',
      'grant_type': 'client_credentials',
      'expiration': '1440'
    }
  }, function (error, response, body) {
    console.log(body.access_token);
    callback(body.access_token);
  });
}
And the bit which calls it (in an HTML file) is:
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.3/jquery.min.js"></script>
<script src="https://esri.github.io/calcite-bootstrap/assets/js/bootstrap.min.js"></script>
<link rel="stylesheet" href="https://js.arcgis.com/4.0/esri/css/main.css">
<script src="https://js.arcgis.com/4.0/"></script>
<script src="GetAToken.js">
var MyToken = callback(getToken);
alert(MyToken);
</script>
It looks like you are trying to use the request module from npm (npm install request). Am I right?
You need to be aware that Node.js's require uses the CommonJS module format, whereas Dojo uses AMD (the RequireJS style). The two module formats are structured differently. More details here.
In the line var request = dojo.require('request');, Dojo is not able to find a module named request, so it throws the error.
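To illustrate the difference (a minimal sketch; 'some/module' is just a placeholder):
// CommonJS (Node.js): synchronous require that returns the module object
var request = require('request');

// AMD (Dojo / RequireJS): asynchronous require; the module is handed to a callback
require(['some/module'], function (someModule) {
  // use someModule here
});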
The way to load Node.js modules in Dojo is to use the dojo/node plugin, as shown below.
require([ "dojo/node!request" ], function(request){
// Utilise the "request" module
});
Go through the Tutorial for Dojo and Node.js
Hope this was helpful.
PS: Esri has its own request object (esri/request), which you can use to get tokens. You may want to use that instead.
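For example, a rough sketch of fetching the token with esri/request, based on the 4.x API (option names vary between 4.x releases, so check the esri/request reference for your version):
require(['esri/request'], function (esriRequest) {
  esriRequest('https://www.arcgis.com/sharing/rest/oauth2/token/', {
    method: 'post',
    responseType: 'json',
    query: {
      f: 'json',
      client_id: '<<MY_CLIENT_ID>>',
      client_secret: '<<MY_CLIENT_SECRET>>',
      grant_type: 'client_credentials',
      expiration: '1440'
    }
  }).then(function (response) {
    // the token comes back in response.data
    console.log(response.data.access_token);
  });
});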
I am using unirest to upload a file like so
unirest.put(fullUri)
  .auth({
    user: self.userName,
    pass: self.password
  })
  .header('X-Checksum-Sha1', sha1Hash)
  .header('X-Checksum-Md5', md5Hash)
  .send(fs.readFileSync(filePath))
  .end(function (response) {
This works fine for smaller files, but for large files I get an ERR_FS_FILE_TOO_LARGE error. I have already tried --max-old-space-size without success. It looks like I could fix this by streaming the file, but I can't find an API to do that in the unirest js library.
Looks like this is an issue with form-data. From the GitHub issues:
It turns out that unirest uses the npm module form-data, and form-data requires that if it receives a stream that is not an fs.ReadStream, the file information has to be provided explicitly as additional options.
Example:
form.append(
  'my_file',
  stream,
  {
    filename: 'bar.jpg',
    contentType: 'image/jpeg',
    knownLength: 19806,
  },
)
See: https://github.com/form-data/form-data#void-append-string-field-mixed-value--mixed-options-
Streaming files with unirest is available via the .attach method:
unirest.put(fullUri)
  .auth({
    user: self.userName,
    pass: self.password
  })
  .header('X-Checksum-Sha1', sha1Hash)
  .header('X-Checksum-Md5', md5Hash)
  // .send(fs.readFileSync(filePath))
  .attach('filename', fs.createReadStream(filePath))
  .end(function (response) {
I can't find an API to do that in unirest js library.
That's because there is none: https://github.com/Kong/unirest-nodejs/issues/49:
You can use the underlying request library to do streaming if you want, I am open to a pull request either on this branch or the 1.0 version to add streaming.
The issue is still open.
But from this issue and from the source code you can find out that end() returns a request object (see https://github.com/request/request):
Unirest.request = require('request')
...
end: function (callback) {
  ...
  Request = Unirest.request($this.options, handleRequestResponse)
  Request.on('response', handleGZIPResponse)
  ...
  return Request
}
and from request's source code you can find out that no actual request has been sent yet (it's deferred). So you can hook into it and use its API instead:
const request = unirest.put(constants.server2)
  .auth({
    user: self.userName,
    pass: self.password
  })
  .header('X-Checksum-Sha1', sha1Hash)
  .header('X-Checksum-Md5', md5Hash)
  // .send(fs.readFileSync(filePath))
  .end(...)

fs.createReadStream(filePath).pipe(request) // just pipe it!
As a side note: unirest is based on request, and request is now deprecated. So... maybe you should steer away from unirest.
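For reference, a minimal sketch of the same streamed PUT using only Node's built-in https module (variable names are taken from the snippets above; no unirest or request involved):
const https = require('https');
const fs = require('fs');

const req = https.request(fullUri, {
  method: 'PUT',
  auth: self.userName + ':' + self.password, // basic auth, as in .auth() above
  headers: {
    'X-Checksum-Sha1': sha1Hash,
    'X-Checksum-Md5': md5Hash
  }
}, function (res) {
  console.log('upload finished with status', res.statusCode);
});

req.on('error', function (err) {
  console.error(err);
});

// Stream the file straight into the request body; nothing is buffered in memory.
fs.createReadStream(filePath).pipe(req);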
I'm quite new to APIs. I've been reading the documentation here for the JavaScript client, but I can't make things work, even the authentication part. I already have the client ID and client secret from Podio itself.
Basically, I want to get all Podio data in a workspace in JSON format from the client side (browser only).
I've downloaded the library here, created an HTML file on my localhost, and linked podio-js with the following code. I'm getting the error "podio-js.js:1 Uncaught ReferenceError: require is not defined at podio-js.js:1". Do I need to install something, such as a module loader, to make this work?
<!doctype html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <title>Document</title>
  <script type="text/javascript" src="lib/podio-js.js"></script>
  <script type="text/javascript">
    var podio = new PodioJS({
      authType: 'server',
      clientId: 'foo',
      clientSecret: 'foo'
    });
    var redirectURL = 'http://localhost/PODIO-JS/podio-js-master/PODIO_CLIENT.html';
    // Your request handler (for example in ExpressJS)
    var action = function(request, response) {
      var authCode = request.query.code;
      var errorCode = request.query.error;
      podio.isAuthenticated().then(function() {
        // Ready to make API calls...
      }).catch(function(err) {
        if (typeof authCode !== 'undefined') {
          podio.getAccessToken(authCode, redirectURL, function(err, response) {
            // make API calls here
            console.log(response);
          });
        } else if (typeof errorCode !== 'undefined') {
          // a problem occurred
          console.log(request.query.error_description);
        } else {
          // start authentication via link or redirect
          console.log(podio.getAuthorizationURL(redirectURL));
        }
      });
    };
  </script>
</head>
<body>
</body>
</html>
You can only use the syntax PodioJS = require('podio-js') in an environment that provides a module loader, such as Node.js (CommonJS) or a bundler/loader like RequireJS; a plain browser page has no require function.
You're using a good ol' HTML page instead, which means you have to follow the second part of the browser usage section found here: https://github.com/podio/podio-js#browser
From within the podio-js folder:
npm install -g browserify
npm run bundle
and then include dist/podio-js.js using a <script> tag in your HTML page.
Note: once you've bundled the source, you can copy paste the compiled file wherever you want.
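For example, after bundling, the page might look roughly like this (a sketch only: it assumes the bundle exposes a PodioJS global, which matches the usage in your snippet, and uses the 'client' auth type for a browser-side flow):
<script src="dist/podio-js.js"></script>
<script>
  // client-side flow: no client secret in the page
  var podio = new PodioJS({
    authType: 'client',
    clientId: 'YOUR_CLIENT_ID'
  });
  // send the user to the authorization URL to start authentication
  console.log(podio.getAuthorizationURL('http://localhost/PODIO-JS/podio-js-master/PODIO_CLIENT.html'));
</script>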
I've been dealing all day with this problem and I just can't figure out what's wrong.
I have an application using the Google API client for JavaScript, which works with no problems. Now I want to do something on the server side, so after researching for a bit, I found that the way to go would be to take the token from the client side and use it with the setAccessToken method in the backend.
So I send my token object as JSON (using JSON.stringify(gapi.auth.getToken())), and once I try an API call on the backend that requires auth, I get the following error:
The OAuth 2.0 access token has expired, and a refresh token is not available. Refresh tokens are not returned for responses that were auto-approved.
So, a little puzzled, I try verifying the token using curl on Google's endpoint, which returns the following:
{
  "issued_to": "client_id",
  "audience": "client_id",
  "user_id": "user_id",
  "scope": "https://www.googleapis.com/auth/youtube.readonly https://www.googleapis.com/auth/youtube.upload https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/plus.me",
  "expires_in": 1465,
  "email": "user_email",
  "verified_email": true,
  "access_type": "online"
}
So I know the token is fine and valid. The way my code is set up is as follows (redacted):
<?php
// The token JSON is not inline, it comes from another source directly from the client side, but this is how it looks
$token_json = '{"state":"","access_token":"TOTALLY_VALID_ACCESS_TOKEN","token_type":"Bearer","expires_in":"3600","scope":"https://www.googleapis.com/auth/youtube.readonly https://www.googleapis.com/auth/youtube.upload https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/plus.me","client_id":"CLIENT_ID","g_user_cookie_policy":"single_host_origin","cookie_policy":"single_host_origin","response_type":"token","issued_at":"1415583001","expires_at":"1415586601","g-oauth-window":{},"status":{"google_logged_in":false,"signed_in":true,"method":"PROMPT"}}';

$OAUTH2_CLIENT_ID = 'CLIENT_ID';
$OAUTH2_CLIENT_SECRET = 'CLIENT_SECRET';

$client = new Google_Client();
$client->setClientId($OAUTH2_CLIENT_ID);
$client->setClientSecret($OAUTH2_CLIENT_SECRET);
$client->setAccessToken($token_json);

$youtube = new Google_Service_YouTube($client);

try {
    /* Random code goes here */
    // Auth Exception here.
    $insertRequest = $youtube->videos->insert("status,snippet", $video);
} catch (Google_Exception $e) {
    var_dump($e->getMessage());
}
Do I need to set up offline access or something? I have the requirement that the login process must be in JavaScript, so there is no chance of recreating the login flow from the backend.
Is there anything I'm missing?
Well, if someone else stumbles into this:
Apparently, sharing tokens directly is not the way to go, since the different API wrappers handle tokens differently. What you have to do is pass the one-time code to PHP and use the following to acquire an access token
$client = new Google_Client();
$client->setClientId($OAUTH2_CLIENT_ID);
$client->setClientSecret($OAUTH2_CLIENT_SECRET);
// Couldn't find this anywhere in the docs, but it's required. If you don't pass this, you will get an Origin Mismatch error.
$client->setRedirectUri('postmessage');
// Get this from javascript
$client->authenticate($ONE_TIME_CODE);
// Optionally, you might want to retrieve the token and save it somewhere else e.g. the session, as the code is only good for a single use.
$_SESSION['token'] = $client->getAccessToken();
Also, from JS you NEED to specify that you want this one-time code in addition to the JS token, which I couldn't find documented anywhere either.
Example settings for gapi.auth.authorize:
{
  client_id: CLIENT_ID,
  scope: APP_SCOPES,
  cookie_policy: 'single_host_origin',
  response_type: 'code token',
  immediate: true
}
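For context, a sketch of how those settings tie together with the backend above (the /auth/google endpoint and the authResult.code property are illustrative assumptions; jQuery is only used for brevity):
gapi.auth.authorize({
  client_id: CLIENT_ID,
  scope: APP_SCOPES,
  cookie_policy: 'single_host_origin',
  response_type: 'code token',   // request the one-time code alongside the token
  immediate: true
}, function (authResult) {
  if (authResult && !authResult.error) {
    // Hand the one-time code to PHP, which calls $client->authenticate($code)
    jQuery.post('/auth/google', { code: authResult.code });
  }
});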
Thanks a lot, Moustached. I've also spent the whole day trying to solve this. However, I found it not that obvious what the one-time code is and how to get it, so I decided to provide my code here in case someone else faces the same problem:
<html>
<head>
  <script src="//ajax.googleapis.com/ajax/libs/jquery/2.2.0/jquery.min.js"></script>
  <script src="https://apis.google.com/js/platform.js?onload=onGoogleAPILoad" async defer></script>
  <script>
    $(document).ready(function() {
      var auth2;
      window.onGoogleAPILoad = function() {
        gapi.load('auth2', function() {
          auth2 = gapi.auth2.init({ client_id: 'CLIENT_ID', scope: 'profile' });
        });
      };
      $('button').on('click', function() {
        auth2.grantOfflineAccess({'redirect_uri': 'postmessage'}).then(function(response) {
          $.ajax({
            method: 'POST',
            url: '/auth/signin',
            success: onAuthSuccess = function(response) {
              // check response from your server and reload page if succeed
            },
            data: { code: response.code }
          });
        });
      });
    });
  </script>
</head>
<body>
  <button>Sign In</button>
</body>
</html>
and on the back end the code is almost the same:
$client = new Google_Client();
$client->setClientId('CLIENT_ID');
$client->setClientSecret('CLIENT_SECRET');
$client->setRedirectUri('postmessage');
$client->authenticate($code); // here is one-time code
$plus = new Google_Service_Plus($client);
$user = $plus->people->get('me');
// $user is an array, now containing id, name, etc.
Here is an article on that topic.
I'm interested in being able to use the Breeze.js EntityManager and query capabilities within a Node.js console service to access a remote Data Service that exposes a Breeze.js/OData compliant RESTful endpoint.
We currently have a Data Service implemented using Node.js, MongoDB and the Breeze.js breeze-mongodb module.
We have web browser hosted clients that access the MongoDB database using the Breeze.js client API (EntityManager) and the Data Service described above.
I need to create another Node.js service that can access the same MongoDB database that the web browser hosted clients do, and for consistency/simplicity I would like to use the same data access API as I am using in the web browser.
Has anyone experimented with this configuration?
I experimented with loading Breeze and its dependencies using the Node.js module infrastructure, but am getting errors when Breeze tries to initialize Angular as an ajax handler. Angular is installed and configured as a node module dependency, but I am getting an error thrown:
Error: [$injector:nomod] http://errors.angularjs.org/1.2.2/$injector/nomod?p0=ngLocale
In theory I shouldn't need angular, but I get additional errors if Angular is not present.
I may be able to debug this particular issue, but it will require stepping through Breeze.js code in detail and possibly modifying it to fix. Was curious if anyone else has gotten this working.
I'm running Breeze in Node at the moment. It used to work just fine without any modification, but a few versions ago they added a check that it's running in the browser... so now I manually remove that check :-)
My use-case is a little bit different: I'm running breeze on the server so that I can use the same business logic as in the client, and just have a really really thin layer between breezejs and the DB.
The only thing I needed to change to get it to run under Node is to add a fake ajax handler that delegates to my skinny DB wrapper - you could equally delegate to anything else, including your existing API.
var ctor = function () {
  this.name = 'node';
  this.defaultSettings = { };
};

ctor.prototype.initialize = function () {
};

var query = require('../../../../server/db/query');

ctor.prototype.ajax = function (config) {
  if (config.url === '/api/all') {
    query.get()
      .then(function (result) {
        var httpResponse = {
          data: result,
          status: '400',
          getHeaders: undefined,
          config: config
        };
        config.success(httpResponse);
      })
      .otherwise(function (error) {
        var httpResponse = {
          data: '',
          status: '500',
          getHeaders: undefined,
          error: error,
          config: config
        };
        config.error(httpResponse);
      });
  } else if (config.url === '/api/SaveChanges') {
    query.save(JSON.parse(config.data))
      .then(function (result) {
        var httpResponse = {
          data: result,
          status: '400',
          getHeaders: undefined,
          config: config
        };
        config.success(httpResponse);
      })
      .otherwise(function (error) {
        var httpResponse = {
          data: '',
          status: '500',
          getHeaders: undefined,
          error: error,
          config: config
        };
        config.error(httpResponse);
      });
  }
};

breezejs.config.registerAdapter('ajax', ctor);
breezejs.config.initializeAdapterInstance('ajax', 'node', true);
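A rough usage sketch for the adapter above (entity and service names are placeholders, metadata handling is omitted, and the exact URLs the adapter sees depend on how your service name and query translate):
var manager = new breezejs.EntityManager('/api');

manager.executeQuery(breezejs.EntityQuery.from('all'))
  .then(function (data) {
    // the fake 'node' ajax adapter answered this query instead of a real HTTP call
    console.log(data.results.length + ' entities loaded');
  })
  .fail(function (err) {
    console.error(err);
  });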
It's a good question. We haven't actually tried running Breeze within Node but your use case is interesting. This sounds like a perfect item for the Breeze User Voice. We take these suggestions seriously.
I tried to enable Cloud Endpoints via this document.
After defining the API and doing some configuration, I can finally view and execute the API via the API Explorer.
However, I cannot get the JavaScript client to run:
<html>
<script type="text/javascript">
  function init() {
    var ROOT = "https://my-app-id.appspot.com/_ah/api";
    gapi.client.load('my-app-id', 'v1', function() {
      gapi.client.bigquery.query({
        'start_date': '2013-05-01',
        'end_date': '2013-05-02',
        'metrics': ['impr']
      }).execute(function(resp) {
        console.log(resp);
      });
    }, ROOT);
  }
</script>
<script src="https://apis.google.com/js/client.js?onload=init"></script>
</html>
I noticed the request fails during the JavaScript execution, but I cannot figure out why.
Request
https://my-app-id.appspot.com/_ah/api/discovery/v1/apis/my-app-id/v1/rpc?
Response
{"error":{"errors":[{"domain":"global","reason":"notFound","message":"Not Found"}],"code":404,"message":"Not Found"}}
Use your API name, not my-app-id. Also, make sure your API is deployed and serving. If not, this would similarly cause a 404.
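For example (a sketch; 'yourApiName' and 'someMethod' are placeholders for whatever your endpoint configuration actually defines):
var ROOT = 'https://my-app-id.appspot.com/_ah/api';
gapi.client.load('yourApiName', 'v1', function () {
  // call a method defined by your endpoint API
  gapi.client.yourApiName.someMethod({ /* params */ }).execute(function (resp) {
    console.log(resp);
  });
}, ROOT);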
See google cloud endpoints discrepancy between documentation, and what works in my app for an identical question.