Node JS - Youtube API Is Blank - javascript

I downloaded and installed the google apis with npm install googleapis and now i'm trying to access the api in my node js file with this code:
var google = require('googleapis')
var youtube = google.youtube({version: 'v3', auth: API_KEY})
However, when I try to access the videos object, I always get back null. Apparently, the youtube object is corrupted because when I stringify it I get this back:
{"_options":{"auth":"*********"},"activities":{},"captions":{},"channelBanners":{},"channelSections":{},"channels":{},"commentThreads":{},"comments":{},"guideCategories":{},"i18nLanguages":{},"i18nRegions":{},"liveBroadcasts":{},"liveStreams":{},"playlistItems":{},"playlists":{},"search":{},"subscriptions":{},"thumbnails":{},"videoAbuseReportReasons":{},"videoCategories":{},"videos":{},"watermarks":{},"google":{"_options":{},"auth":{"_cachedCredential":null}}}
So all of the little "subobjects" are empty. How do I fix this?

Did you check if the dependency is listed in your package.json file ? If not try npm install --save googleapis which directly adds it to your dependency list

There's nothing worrying about the fact that your youtube variable shows empty objects when stringified: the JSON representation of that object only contains properties that are primitive types. The youtube.videos object contains only methods, which are omitted by JSON.stringify.
Try this:
// Load the Google APIs client and build a YouTube Data API v3 client.
// NOTE(review): assumes API_KEY is defined in an enclosing scope.
const google = require('googleapis');
const youtube = google.youtube({ version: 'v3', auth: API_KEY });

// Options for videos.list: which response parts to return, how many
// results, and the comma-separated video IDs to look up.
const queryOptions = {
  part: 'id,snippet',
  maxResults: 5,
  id: 'dQw4w9WgXcQ,HL1UzIK-flA'
};

// videos.list is asynchronous — the response only exists inside the
// callback; the client object itself never holds the data.
youtube.videos.list(queryOptions, function (err, data) {
  if (err) {
    // Bad key, quota exhaustion, network failure, ...
    console.error(err);
    return;
  }
  console.log(data);
});

For youtube api I use youtube-node and it works fine: https://github.com/nodenica/youtube-node

Related

Getting Google Cloud Platform custom metadata " firebaseStorageDownloadToken" programmatically

I am using Firebase Cloud Storage.
I know how to get the URL of a downloaded file (tags.txt) using the Firebase SDK function running in client-side JavaScript:
// Resolve the public download URL for tags.txt and print it.
storage
  .ref('tags.txt')
  .getDownloadURL()
  .then(function (url) {
    console.log(url);
  });
I want to get the downloadURL from Node.JS. I do know this function does not exist for Node.JS.
However, In Google Cloud Platform you can get the following key/value pair :
firebaseStorageDownloadTokens : 0ea9a60a-7719-4c6b-9cb5-7fcf69d7c633, the value being the token you want. From this token I can easily build the downloadURL I need.
The path to get there is:
Cloud Storage / "Bucket name" / Bucket details / tags.txt / EDIT METADATA / Custom metadata.
I try this code to access this metadata :
// Fetch and dump the full metadata record for tags.txt in the bucket.
async function getBucketMetadata() {
  const bucketName = "gs://tags.appspot.com";
  try {
    // Get Bucket Metadata
    const result = await admin.storage().bucket(bucketName).file('tags.txt').getMetadata();
    console.log(result);
  } catch (err) {
    console.log(err.message);
  }
}
I got keys/values (not a real project though) info such as:
bucket:'tags.appspot.com'
contentType:'text/plain'
crc32c:'Y1Sdxw=='
etag:'CI1EETD18Co9vECEAE='
generation:'162694124484794756'
id:'tags-admin.appspot.com/tags.txt/162694431484794756'
kind:'storage#object'
md5Hash:'P1YSFER4xSf5p0/KWrdQWx1z1Lyg=='
mediaLink:'https://storage.googleapis.com/download/storage/v1/b/tags-admin.appspot.com/o/tags.txt?generation=162694443184794756&alt=media'
metageneration:'1'
name:'tags.txt'
selfLink:'https://www.googleapis.com/storage/v1/b/tags-admin.appspot.com/o/tags.txt'
size:'5211247'
storageClass:'STANDARD'
timeCreated:'2021-07-22T09:01:24.862Z'
timeStorageClassUpdated:'2021-07-22T09:01:24.862Z'
updated:'2021-07-22T09:01:24.862Z'
But nothing regarding the key/value pair I want : firebaseStorageDownloadTokens : 0ea9a60a-7719-4c6b-9cb5-7fcf69d7c633
If the key/value can be seen on Google Cloud Platform , I do believe the key/value is also accessible via some code.
Your help is appreciated.
I mixed up two projects. I re-tried it, and it works pretty nicely. Using this method I can retrieve the file token and build the file URL around it on the back-end. The front-end function getDownloadURL() is no longer required.
Thanks for your help anyway.
The code become:
// Print only the firebaseStorageDownloadTokens custom-metadata value
// for tags.txt (the token used to build the download URL).
async function getBucketMetadata() {
  const bucketName = "gs://tags.appspot.com";
  try {
    // getMetadata() resolves to an array whose first element is the
    // metadata object; custom metadata lives under its .metadata key.
    const [meta] = await admin.storage().bucket(bucketName).file('tags.txt').getMetadata();
    console.log(meta.metadata.firebaseStorageDownloadTokens);
  } catch (err) {
    console.log(err.message);
  }
}

Read/Write and store data internally in a Local App (no server) with JavaScript

So I am making a local App using Javascript , React and Electron and I want it to be able to work just fine without internet.
I can't use 'localStorage' because the data might get deleted if the user deletes the cache.
I tried reading/writing using different modules; none of them worked, mostly because of CORS. Using XMLHttpRequests and Ajax doesn't work either, and I am running out of time.
When I use them on the test server, they return the index.html for the mainpage (They can at least access that ... and still they can't read the data) but when I try it on the build I get the CORS error.
My Idea for now is to enable CORS on my webpage since I have no worries about security : The App will run ONLY offline so there is no danger.
But After many hours...I didn't find a solution to do it on the client side.
If anyone has an idea or suggestion I would be grateful.
I tried : fs,FileReader,FileSaver, $.ajax,XMLHTTPrequests
//using $ajax
// Attempt to load the category file over XHR via jQuery.
// NOTE(review): relies on a global `$` (jQuery) being available.
var test = $.ajax({
  type: 'GET',
  url: '../data/DefaultCategorie.txt',
  crossDomain: true,
  contentType: 'text/plain',
  success: function onLoaded(data) {
    console.log(data);
  },
  error: function onFailed() {
    alert('failed');
  }
});
//using fs
// Read the category file asynchronously as UTF-8 text.
fs.readFile('../data/DefaultCategorie.txt', 'utf8', (err, data) => {
  if (err) {
    console.log("Failed");
    throw err;
  }
  console.log(data);
  // BUG FIX: the original called fs.close(data, ...) here. fs.close()
  // expects a numeric file descriptor, not the file's contents (a
  // string), so it threw a TypeError — and fs.readFile() opens and
  // closes its own descriptor internally, so no close is needed at all.
});
This article covers the 3 most common ways to store user data: How to store user data in Electron
The Electron API for appData does what you want. It is very easy to use.
From the above article:
// Resolve Electron's per-app "userData" directory; works from both the
// main process (electron.app) and a renderer process (electron.remote.app).
const userDataPath = (electron.app || electron.remote.app).getPath('userData');
// NOTE(review): these two lines come from a class constructor in the
// linked article — `this` is the store instance and `opts` carries
// { configName, defaults }.
this.path = path.join(userDataPath, opts.configName + '.json')
this.data = parseDataFile(this.path, opts.defaults);
/**
 * Synchronously load and parse a JSON config file.
 *
 * @param {string} filePath - Path of the JSON file to read.
 * @param {*} defaults - Value returned when the file is missing or unparsable.
 * @returns {*} The parsed JSON content, or `defaults` on any error.
 */
function parseDataFile(filePath, defaults) {
  try {
    // Explicit encoding so readFileSync returns a string (JSON.parse
    // also accepts Buffers, but being explicit is clearer).
    return JSON.parse(fs.readFileSync(filePath, 'utf8'));
  } catch (error) {
    // if there was some kind of error, return the passed in defaults instead.
    return defaults;
  }
}
Docs
app.getPath(name)
name String
Returns String - A path to a special directory or file associated with
name. On failure, an Error is thrown.
You can request the following paths by the name:
appData - Per-user application data directory, which by default points to:
%APPDATA% on Windows
$XDG_CONFIG_HOME or ~/.config on Linux
~/Library/Application Support on macOS
userData - The directory for storing your app's configuration files,
which by default it is the appData directory appended with your app's
name.

How do I read the contents of a new cloud storage file of type .json from within a cloud function?

The event passed to my Google cloud function only really tells me the name of the bucket and file, and whether the file was deleted. Yes, there's more there, but it doesn't seem all that useful:
{ timestamp: '2017-03-25T07:13:40.293Z',
eventType: 'providers/cloud.storage/eventTypes/object.change',
resource: 'projects/_/buckets/my-echo-bucket/objects/base.json#1490426020293545',
data: { kind: 'storage#object',
resourceState: 'exists',
id: 'my-echo-bucket/base.json/1490426020293545',
selfLink: 'https://www.googleapis.com/storage/v1/b/my-echo-bucket/o/base.json',
name: 'base.json',
bucket: 'my-echo-bucket',
generation: '1490426020293545',
metageneration: '1',
contentType: 'application/json',
timeCreated: '2017-03-25T07:13:40.185Z',
updated: '2017-03-25T07:13:40.185Z',
storageClass: 'STANDARD',
size: '548',
md5Hash: 'YzE3ZjUyZjlkNDU5YWZiNDg2NWI0YTEyZWZhYzQyZjY=',
mediaLink: 'https://www.googleapis.com/storage/v1/b/my-echo-bucket/o/base.json?generation=1490426020293545&alt=media', contentLanguage: 'en', crc32c: 'BQDL9w==' }
}
How do I get the contents and not merely the meta-data of a new .json file uploaded to a gs bucket?
I tried using npm:request() on event.data.selfLink, which is a URL for the file in the storage bucket, and got back an authorization error:
"code": 401, "message": "Anonymous users does not have storage.objects.get access to object my-echo-bucket/base.json."
There was a similar question on SO about reading storage buckets, but probably on a different platform. Anyway it was unanswered:
How do I read the contents of a file on Google Cloud Storage using javascript
You need to use a client library for google storage instead of accessing via the URL. Using request() against the URL would only work if the file was exposed to public access.
Import the google cloud storage library in the npm-managed directory containing your project.
npm i @google-cloud/storage -S
The npm page for google-cloud/storage has decent examples but I had to read through the API a bit to see an easy way to download to memory.
Within the Google Cloud Functions environment, you do not need to supply any api key, etc. to storage as initialization.
const storage = require('@google-cloud/storage')();
The metadata passed about the file can be used to determine if you really want the file or not.
When you want the file, you can download it with the file.download function, which can take either a callback or, lacking a callback, will return a promise.
The data however, is returned as a Buffer so you will need to call data.toString('utf-8') to convert it to a utf-8 encoded string.
const storage = require('#google-cloud/storage')();
exports.logNewJSONFiles = function logNewJSONFiles(event){
return new Promise(function(resolve, reject){
const file = event.data;
if (!file){
console.log("not a file event");
return resolve();
}
if (file.resourceState === 'not_exists'){
console.log("file deletion event");
return resolve();
}
if (file.contentType !== 'application/json'){
console.log("not a json file");
return resolve();
}
if (!file.bucket){
console.log("bucket not provided");
return resolve();
}
if (!file.name){
console.log("file name not provided");
return resolve();
}
(storage
.bucket(file.bucket)
.file(file.name)
.download()
.then(function(data){
if (data)
return data.toString('utf-8');
})
.then(function(data){
if (data) {
console.log("new file "+file.name);
console.log(data);
resolve(data);
}
})
.catch(function(e){ reject(e); })
);
});
};
Deployment is as expected:
gcloud beta functions deploy logNewJSONFiles --stage-bucket gs://my-stage-bucket --trigger-bucket gs://my-echo-bucket
Remember to look in the Stackdriver:Logging page on Google Cloud Platform for the console.log entries.
UPDATE: (2019) When cloud-functions first released, they had some issues with ECONNRESET. I think that's fixed now. If not, use something like npm:promise-retry
npm install @google-cloud/storage --production
package.json:
{
"main": "app.js",
"dependencies": {
"#google-cloud/storage": "^1.2.1"
}
}
You should make sure that npm ls shows no errors such as npm ERR! missing:.
app.js:
...
const storage = require("#google-cloud/storage")();
storage.
bucket("mybucket").
file("myfile.txt").
download( function(err, contents) {
console.log(contents.toString());
} );

Npm Packages and Meteor

I am really struggling with Meteor and the NPM package node-linkedin.
I understand how NPM integration is supposed to work. I added NPM using Meteorite and node-linkedin to packages.js. I am then calling the NPM package through Meteor.require().
Here is my server/linkedin.js file:
/**
 * Thin wrapper around the node-linkedin npm package, bound to a single
 * user's OAuth access token.
 * @param {string} accessToken - LinkedIn OAuth token for the current user.
 */
function Linkedin(accessToken) {
  this.accessToken = accessToken;
  // 'api', 'secret' and 'callback' are placeholders for the real
  // LinkedIn API key, secret and callback URL.
  this.linkedin = Meteor.require('node-linkedin')('api', 'secret', 'callback');
  this.linkedin.init(this.accessToken);
}
/**
 * Fetch LinkedIn company 162479, synchronously from the caller's point
 * of view (Meteor.sync blocks the fiber until done() is invoked).
 * @returns {*} The company payload returned by the LinkedIn API.
 */
Linkedin.prototype.company = function () {
  var that = this;
  var wrapped = Meteor.sync(function (done) {
    that.linkedin.companies.company('162479', function (err, res) {
      done(null, res);
    });
  });
  return wrapped.result;
};
// Server method callable from the client: returns company info using the
// logged-in user's LinkedIn access token.
Meteor.methods({
  getCompanyInfo: function () {
    var token = Meteor.user().services.linkedin.accessToken;
    return new Linkedin(token).company();
  }
});
Unfortunately, it does not work when I call getCompanyInfo()... I get the following error: "Cannot call method 'company' of undefined".
Linkedin Auth works fine (thanks to accounts-linkedin). But I also need to connect to Linkedin API.
I had previously followed this article to play with FB Graph API in Meteor. Everything was fine.
Maybe the problem comes from Meteor.require(). I am wondering what I am supposed to do with the second set of parameters in Meteor.require(). Should I add these npm packages to packages.json as well? Does Meteor.require() handle this second set of parameters correctly?
Thank you for your help!

Using Breeze.js EntityManager from within Node

I'm interested in being able to use the Breeze.js EntityManager and query capabilities within a node console service to access a remote Data Service that exposes an BreezeJS/OData compliant RESTful endpoint.
We currently have a Data Service implemented using Node.js, MongoDB and the Breeze.js breeze-mongodb module.
We have web browser hosted clients that access the MondgoDB using the Breeze.js client API (EntityManager) and the Data Service described above.
I need to create another Node.js service that can access the same MongoDB database that the web browser hosted clients do, and for consistency/simplicity I would like to use the same data acceess API as I am using in the web browser.
Has anyone experimented with this configuration?
I experimented with loading Breeze and its dependencies using the Node.js module infrastructure, but am getting errors when Breeze tries to initialize Angular as an ajax handler. Angular is installed and configured as a node module dependency, but I am getting an error thrown:
Error: [$injector:nomod] http://errors.angularjs.org/1.2.2/$injector/nomod?p0=ngLocale
In theory I shouldn't need angular, but I get additional errors if Angular is not present.
I may be able to debug this particular issue, but it will require stepping through Breeze.js code in detail and possibly modifying it to fix. Was curious if anyone else has gotten this working.
I'm running Breeze in Node at the moment. It used to work just fine without any modification, but a few versions ago they added a check that it's running in the browser... so now I manually remove that check :-)
My use-case is a little bit different: I'm running breeze on the server so that I can use the same business logic as in the client, and just have a really really thin layer between breezejs and the DB.
The only thing I needed to change to get it to run in the browser is add a fake ajax handler that delegates to my skinny DB wrapper - you could equally delegate to anything else, including your existing API.
// Custom Breeze ajax adapter for Node: instead of issuing real HTTP
// requests, it routes Breeze's two endpoints to a local DB wrapper.
var ctor = function () {
  this.name = 'node';
  this.defaultSettings = { };
};

// Breeze calls initialize() on every adapter instance; nothing to set up.
ctor.prototype.initialize = function () {
};

var query = require('../../../../server/db/query');

// Build the http-like response object Breeze expects from an ajax
// adapter. Deduplicates the four near-identical literals the original
// constructed inline.
// NOTE(review): the original used status '400' for *success* responses —
// preserved for compatibility, but '200' is almost certainly intended.
function makeHttpResponse(config, data, status, error) {
  var httpResponse = {
    data: data,
    status: status,
    getHeaders: undefined,
    config: config
  };
  if (error !== undefined) {
    httpResponse.error = error;
  }
  return httpResponse;
}

// Dispatch on the request URL; unknown URLs are silently ignored, as in
// the original implementation.
ctor.prototype.ajax = function (config) {
  var succeed = function (result) {
    config.success(makeHttpResponse(config, result, '400'));
  };
  var fail = function (error) {
    config.error(makeHttpResponse(config, '', '500', error));
  };
  if (config.url === '/api/all') {
    query.get().then(succeed).otherwise(fail);
  } else if (config.url === '/api/SaveChanges') {
    query.save(JSON.parse(config.data)).then(succeed).otherwise(fail);
  }
};

// Register and activate this adapter as Breeze's ajax implementation.
breezejs.config.registerAdapter('ajax', ctor);
breezejs.config.initializeAdapterInstance('ajax', 'node', true);
It's a good question. We haven't actually tried running Breeze within Node but your use case is interesting. This sounds like a perfect item for the Breeze User Voice. We take these suggestions seriously.

Categories

Resources