file.id undefined when creating a folder in Google Drive with the REST API - javascript

I get undefined when creating a folder through the Google Drive REST API:
Folder Id: undefined
Below is my code:
var fileMetadata = {
  'name': nameProduct,
  'mimeType': 'application/vnd.google-apps.folder',
  parents: XXXXXXXXXXXXXXXX
};
drive.files.create({
  auth: jwToken,
  resource: fileMetadata,
  fields: 'id'
}, function (err, file) {
  if (err) {
    console.error(err);
  } else {
    console.log('Folder Id: ', file.id);
  }
});
console.log("end creation folder");
How can I print the file.id? Thanks

// Make sure the client is loaded and sign-in is complete before calling this.
gapi.client.drive.files.create({
  "resource": {}
})
.then(function(response) {
  // Handle the results here (response.result has the parsed body).
  console.log("Response", response);
},
function(err) {
  // Handle the error here.
  console.error("Execute error", err);
});
First you have to create the request, then wait for the response inside the then callback.
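If you are using the googleapis Node.js client as in the question (rather than the browser gapi client), note that recent versions of that library (roughly v25 and later) pass a full response object to the callback, so the created folder's id is under res.data.id rather than directly on the second argument. A minimal sketch under that version assumption, reusing the same jwToken and fileMetadata from the question:
drive.files.create({
  auth: jwToken,
  resource: fileMetadata,
  fields: 'id'
}, function (err, res) {
  if (err) {
    console.error(err);
    return;
  }
  // With newer googleapis versions the parsed body lives on res.data.
  console.log('Folder Id: ', res.data.id);
});
Also note that console.log("end creation folder") prints before the callback runs, because files.create is asynchronous.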

Related

Authentication Error when trying to retrieve YouTube Channel Information

I am trying to authenticate the user and then retrieve the YouTube channel list.
Below is the function to authenticate:
function authenticate() {
  showNewLoader('show');
  return gapi.auth2.getAuthInstance()
    .signIn({scope: "https://www.googleapis.com/auth/youtube.readonly"})
    .then(function(response) {
      console.log(response);
      youtubeAuthResponse['token_details'] = response.tc;
      youtubeAuthResponse['google_email'] = '';
      youtubeAuthResponse['google_id'] = '';
      showNewLoader('hide');
    },
    function(err) { console.error("Error signing in", err); showNewLoader('hide'); });
}
Loading client:
function loadClient() {
  showNewLoader('show');
  gapi.client.setApiKey("XXXXXX");
  return gapi.client.load("https://www.googleapis.com/discovery/v1/apis/youtube/v3/rest")
    .then(function() { execute(); },
      function(err) { console.error("Error loading GAPI client for API", err); showNewLoader('hide'); });
}
/* Make sure the client is loaded and sign-in is complete before calling this method. */
function execute() {
  return gapi.client.youtube.channels.list({
    "part": [
      "snippet",
      "statistics"
    ],
    "mine": true
  })
    .then(function(response) {
      /* Handle the results here (response.result has the parsed body). */
      youtubeChannelResponse = response.result;
      storeYoutubeData();
    },
    function(err) { console.error("Execute error", err); showNewLoader('hide'); });
}
The user logs in successfully, but I am unable to get the channel info.
Any assistance on this issue is greatly appreciated. @DalmTo
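No answer was posted for this one, but a common cause with this generated gapi code is running execute() before the sign-in promise has resolved, so the channels.list request goes out without an OAuth token. A minimal sketch of running the three steps strictly in sequence, assuming the same authenticate(), loadClient() and execute() functions as above and assuming loadClient() is changed so it no longer calls execute() itself:
// Sign in first, then load the client, then run the channels.list request.
authenticate()
  .then(function() {
    return loadClient();
  })
  .then(function() {
    return execute();
  })
  .catch(function(err) {
    console.error("YouTube flow failed", err);
    showNewLoader('hide');
  });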

201 status node express API always failing

I am building an API with Express. The response from the upstream server I call is 201 Created, but the promise in my handler treats it as a failure and always goes down the error path.
Any idea how I can handle the 201 Created response in the example below?
Example: even when the request succeeds, the output goes through the error status.
router.get('/example', session, function (req, res) {
  example(req.body, req.session.token).then(
    (response) => {
      res.json(response);
    },
    (err) => {
      if (err.status) {
        res.status(err.status).json({
          status: err.status,
          message: err.message
        });
      } else {
        res.status(constants.HTTP_INTERNAL_SERVER_ERROR).json({
          status: constants.HTTP_INTERNAL_SERVER_ERROR,
          message: 'Internal Server Error'
        });
      }
    }
  );
});
module.exports.example = (body, token) => {
  return new Promise((resolve, reject) => {
    const mainToken = createRawToken(token);
    request.post(
      {
        url: `${endpoint}`,
        body: JSON.stringify(body),
        headers: {
          Authorization: mainToken,
          "Content-Type": "application/json"
        }
      },
      (error, response, bdy) => {
        resolve(requestService.handleResponse(error, response, bdy, constants.HTTP_OK));
      }
    );
  });
};
I think your problem is in the example function. The promise itself looks fine; assuming your constants are named the way they read, constants.HTTP_OK should be constants.HTTP_CREATED, i.e. HTTP_OK is 200 while the endpoint you are calling returns 201.
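The requestService.handleResponse helper isn't shown in the question, but the behaviour described matches a helper that rejects whenever the actual status code differs from the expected one passed in. A hypothetical sketch of such a helper, only to illustrate why passing constants.HTTP_CREATED (201) instead of constants.HTTP_OK (200) makes the success branch run:
// Hypothetical requestService.handleResponse, shown only to illustrate the failure mode.
module.exports.handleResponse = (error, response, body, expectedStatus) => {
  if (error) {
    return Promise.reject({ status: 500, message: error.message });
  }
  if (response.statusCode !== expectedStatus) {
    // A 201 Created response fails here whenever expectedStatus is 200.
    return Promise.reject({ status: response.statusCode, message: body });
  }
  return Promise.resolve(body ? JSON.parse(body) : {});
};
Since the caller does resolve(requestService.handleResponse(...)), the outer promise adopts this rejection, which is why the route handler always ends up in its error branch.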

Make dynamic/re-usable method

I need to use sendMessage from another method, like:
sendMessage('abc@example.com', 'abc@example.com', 'subject', 'body')
I am new to Node.js and just need some syntax help.
I have tried
sendMessage.makeBody('', '', '', '');
but it gives me an error:
TypeError: sendMessage.makeBody is not a function
function sendMessage(auth) {
  const raw = makeBody(
    'abc@example.com',
    'abc@example.com',
    'something',
    'something');
  gmail.users.messages.send({
    auth: auth,
    userId: 'me',
    resource: {
      raw: raw
    }
  }, function(err, response) {
    if (err) {
      logger.info('The API returned an error: ' + err);
      return;
    }
    logger.info(response);
  });
}
module.exports = {
  sendMessage
};
function sendMessage(data) {
  gmail.users.messages.send({
    auth: data.auth,
    userId: 'me',
    resource: {
      raw: data.raw
    }
  }, function(err, response) {
    if (err) {
      logger.info('The API returned an error: ' + err);
      return;
    }
    logger.info(response);
  });
}

let raw = makeBody('abc@example.com', 'abc@example.com', 'something', 'something');
let data = {raw: raw, auth: auth};
// Pass raw and auth together in one object and use it in the sendMessage function.
sendMessage(data);

module.exports = {
  sendMessage
};
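makeBody itself is not shown in either snippet; for the Gmail API it is typically a small helper that builds an RFC 2822 message and base64url-encodes it for the raw field. A rough sketch of such a helper, under that assumption (the exact headers and charset are up to you):
// Hypothetical makeBody helper: builds a base64url-encoded RFC 2822 message
// suitable for the Gmail API's `raw` field.
function makeBody(to, from, subject, message) {
  const str = [
    'Content-Type: text/plain; charset="UTF-8"\n',
    'MIME-Version: 1.0\n',
    'to: ', to, '\n',
    'from: ', from, '\n',
    'subject: ', subject, '\n\n',
    message
  ].join('');
  // Gmail expects URL-safe base64, so swap the characters that differ.
  return Buffer.from(str)
    .toString('base64')
    .replace(/\+/g, '-')
    .replace(/\//g, '_');
}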

Triggering Cloud Dataflow pipeline from Cloud Function - function times out

I am trying to trigger a Dataflow pipeline from a Cloud Function, which itself is triggered when a new file is uploaded to a GCS bucket.
When I upload a file, the Cloud Function fires properly but times out after a few seconds without any Dataflow job being started.
Below is my function code:
const google = require('googleapis');
const projectId = "iot-fitness-198120";

exports.moveDataFromGCStoPubSub = function(event, callback) {
  const file = event.data;
  if (file.resourceState === 'exists' && file.name) {
    google.auth.getApplicationDefault(function (err, authClient, projectId) {
      if (err) {
        throw err;
      }
      if (authClient.createScopedRequired && authClient.createScopedRequired()) {
        authClient = authClient.createScoped([
          'https://www.googleapis.com/auth/cloud-platform',
          'https://www.googleapis.com/auth/userinfo.email'
        ]);
      }
      console.log("File exists and client function is authenticated");
      console.log(file);
      const dataflow = google.dataflow({ version: 'v1b3', auth: authClient });
      console.log(`Incoming data: ${file.name}`);
      dataflow.projects.templates.create({
        projectId: projectId,
        resource: {
          parameters: {
            inputFile: `gs://${file.bucket}/${file.name}`,
            outputTopic: `projects/iot-fitness-198120/topics/MemberFitnessData`
          },
          jobName: 'CStoPubSub',
          gcsPath: 'gs://dataflow-templates/latest/GCS_Text_to_Cloud_PubSub',
          staginglocation: 'gs://fitnessanalytics-tmp/tmp'
        }
      }, function(err, response) {
        if (err) {
          console.error("problem running dataflow template, error was: ", err);
        }
        console.log("Dataflow template response: ", response);
        callback();
      });
    });
  }
};
The execution never even logs the console.log("File exists and client function is authenticated"); line, which tells me it is not getting that far.
Here's the log output during execution:
2018-03-20 04:56:43.283 GST
DataflowTriggeringFunction
52957909906492
Function execution took 60097 ms, finished with status: 'timeout'
2018-03-20 04:55:43.188 GST
DataflowTriggeringFunction
52957909906492
Function execution started
Any idea why it's not triggering the Dataflow job and yet not throwing an error message?
I finally modified the code, with some help from GCP support. Below is the syntax that works:
var {google} = require('googleapis');

exports.moveDataFromGCStoPubSub = (event, callback) => {
  const file = event.data;
  const context = event.context;
  console.log(`Event ${context.eventId}`);
  console.log(` Event Type: ${context.eventType}`);
  console.log(` Bucket: ${file.bucket}`);
  console.log(` File: ${file.name}`);
  console.log(` Metageneration: ${file.metageneration}`);
  console.log(` Created: ${file.timeCreated}`);
  console.log(` Updated: ${file.updated}`);

  google.auth.getApplicationDefault(function (err, authClient, projectId) {
    if (err) {
      throw err;
    }
    console.log(projectId);
    const dataflow = google.dataflow({ version: 'v1b3', auth: authClient });
    console.log(`gs://${file.bucket}/${file.name}`);
    dataflow.projects.templates.create({
      gcsPath: 'gs://dataflow-templates/latest/GCS_Text_to_Cloud_PubSub',
      projectId: projectId,
      resource: {
        parameters: {
          inputFilePattern: `gs://${file.bucket}/${file.name}`,
          outputTopic: 'projects/iot-fitness-198120/topics/MemberFitnessData2'
        },
        environment: {
          tempLocation: 'gs://fitnessanalytics-tmp/tmp'
        },
        jobName: 'CStoPubSub'
        //gcsPath: 'gs://dataflow-templates/latest/GCS_Text_to_Cloud_PubSub',
      }
    }, function(err, response) {
      if (err) {
        console.error("problem running dataflow template, error was: ", err);
      }
      console.log("Dataflow template response: ", response);
      callback();
    });
  });
  callback();
};
My guess is that your Cloud Function execution fails because it doesn't satisfy your if statement:
if (file.resourceState === 'exists' && file.name)
I had a similar issue when I started working with Cloud Functions. Modify your index.js file to use var {google} = require('googleapis'); as provided in the solution here.
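For context, the import style is the key difference between the two versions of the function above: recent releases of the googleapis package (roughly v26 and later) export a named google object, so it has to be required with destructuring or google.auth and google.dataflow are not available. A minimal comparison, assuming one of those recent versions:
// Older googleapis versions exported the client directly:
// const google = require('googleapis');

// Newer versions export a named `google` object, so destructuring is required
// for google.auth, google.dataflow, etc. to be defined:
const {google} = require('googleapis');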

How to Use Dropbox Upload Sessions For Files Larger than 150 MB?

I want to upload a file larger than 150 MB.
The Dropbox API v2 docs say you should start an upload session.
The docs say you can't send a POST with more than 150 MB of data, but I'm unsure how to achieve this with the upload_session API.
While any individual request shouldn't be larger than 150 MB (and typically you should use a significantly smaller chunk size), you can upload files larger than that by using multiple requests.
There's an example of using upload sessions below. That example uses the Python SDK, not the JavaScript SDK, but it should serve as a useful reference, as the logic is the same. (They both use the same underlying API.)
This uses the Dropbox Python SDK to upload a file to the Dropbox API from the local file as specified by file_path to the remote path as specified by dest_path. It also chooses whether or not to use an upload session based on the size of the file:
f = open(file_path, 'rb')
file_size = os.path.getsize(file_path)

CHUNK_SIZE = 4 * 1024 * 1024

if file_size <= CHUNK_SIZE:
    print dbx.files_upload(f.read(), dest_path)
else:
    upload_session_start_result = dbx.files_upload_session_start(f.read(CHUNK_SIZE))
    cursor = dropbox.files.UploadSessionCursor(session_id=upload_session_start_result.session_id,
                                               offset=f.tell())
    commit = dropbox.files.CommitInfo(path=dest_path)
    while f.tell() < file_size:
        if ((file_size - f.tell()) <= CHUNK_SIZE):
            print dbx.files_upload_session_finish(f.read(CHUNK_SIZE),
                                                  cursor,
                                                  commit)
        else:
            dbx.files_upload_session_append(f.read(CHUNK_SIZE),
                                            cursor.session_id,
                                            cursor.offset)
            cursor.offset = f.tell()
f.close()
You can upload file chunks using the files/upload_session/start, files/upload_session/append_v2 and files/upload_session/finish API endpoints. Here is an example which uses my tiny Dropbox v2 API wrapper (dropbox-v2-api):
const CHUNK_LENGTH = 100;
// Create read streams which generate a set of 100 (CHUNK_LENGTH) characters with values '1' and '2'.
const firstUploadChunkStream = () => utils.createMockedReadStream('1', CHUNK_LENGTH);
const secondUploadChunkStream = () => utils.createMockedReadStream('2', CHUNK_LENGTH);

sessionStart((sessionId) => {
  sessionAppend(sessionId, () => {
    sessionFinish(sessionId);
  });
});

function sessionStart(cb) {
  dropbox({
    resource: 'files/upload_session/start',
    parameters: {
      close: false
    },
    readStream: firstUploadChunkStream()
  }, (err, response) => {
    if (err) { return console.log('sessionStart error: ', err); }
    console.log('sessionStart response:', response);
    cb(response.session_id);
  });
}

function sessionAppend(sessionId, cb) {
  dropbox({
    resource: 'files/upload_session/append_v2',
    parameters: {
      cursor: {
        session_id: sessionId,
        offset: CHUNK_LENGTH
      },
      close: false
    },
    readStream: secondUploadChunkStream()
  }, (err, response) => {
    if (err) { return console.log('sessionAppend error: ', err); }
    console.log('sessionAppend response:', response);
    cb();
  });
}

function sessionFinish(sessionId) {
  dropbox({
    resource: 'files/upload_session/finish',
    parameters: {
      cursor: {
        session_id: sessionId,
        offset: CHUNK_LENGTH * 2
      },
      commit: {
        path: "/result.txt",
        mode: "add",
        autorename: true,
        mute: false
      }
    }
  }, (err, response) => {
    if (err) { return console.log('sessionFinish error: ', err); }
    console.log('sessionFinish response:', response);
  });
}
I have an example!
testFile1Data = "test file data 1";
dbx.filesUploadSessionStart({
  contents: testFile1Data,
  close: true,
})
  .then(function (response) {
    file1Start = response;
  })
  .catch(function (err) {
    console.log(err);
  });

testFile2Data = "test file data 2";
dbx.filesUploadSessionStart({
  contents: testFile2Data,
  close: true,
})
  .then(function (response) {
    file2Start = response;
  })
  .catch(function (err) {
    console.log(err);
  });

dbx.filesUploadSessionFinishBatch({entries: [
  {cursor: {session_id: file1Start.session_id, offset: testFile1Data.length}, commit: {path: "/testFile1.txt"}},
  {cursor: {session_id: file2Start.session_id, offset: testFile2Data.length}, commit: {path: "/testFile2.txt"}},
]})
  .then(function (response) {
    finishBatch = response;
  })
  .catch(function (err) {
    console.log(err);
  });

dbx.filesUploadSessionFinishBatchCheck({async_job_id: finishBatch.async_job_id})
  .then(function (response) {
    finishBatch = response;
  })
  .catch(function (err) {
    console.log(err);
  });
I got the example from an issue thread on github - https://github.com/dropbox/dropbox-sdk-js/issues/80#issuecomment-283189888
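Note that the snippet above only works when run step by step (for example in a console), because the batch calls would otherwise fire before the start calls resolve; the promises are never awaited. A sketch of the same flow with the calls chained, under the same assumptions about the JavaScript SDK methods used in that example:
// Chain the upload-session calls so each step waits for the previous one.
var testFileData = "test file data 1";

dbx.filesUploadSessionStart({ contents: testFileData, close: true })
  .then(function (start) {
    return dbx.filesUploadSessionFinishBatch({ entries: [
      { cursor: { session_id: start.session_id, offset: testFileData.length },
        commit: { path: "/testFile1.txt" } }
    ]});
  })
  .then(function (job) {
    // Poll the batch result once, using the async job id returned above.
    return dbx.filesUploadSessionFinishBatchCheck({ async_job_id: job.async_job_id });
  })
  .then(function (result) {
    console.log(result);
  })
  .catch(function (err) {
    console.log(err);
  });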
