Google Drive API - JavaScript Insert File - Create native Google Doc

I have been working with the JavaScript Google Drive API to create (insert) a new file. It works, except that the document it creates is not a Google Doc, but some kind of document that can only be converted into a Google Doc.
I would like an actual Google Doc. I tried "convert: true", but it does not seem to convert it to a Doc.
insertFile: function (filename, content, callback) {
    const boundary = '-------314159265358979323846';
    const delimiter = "\r\n--" + boundary + "\r\n";
    const close_delim = "\r\n--" + boundary + "--";

    var contentType = 'application/vnd.google-apps.document';
    var metadata = {
        'title': filename,
        'mimeType': 'text/html',
        'description': 'Created'
    };

    // TODO: replace this with a library - https://github.com/beatgammit/base64-js/blob/master/lib/b64.js
    var base64Data = window.btoa(unescape(encodeURIComponent(content)));

    var multipartRequestBody =
        delimiter +
        'Content-Type: application/json\r\n\r\n' +
        JSON.stringify(metadata) +
        delimiter +
        'Content-Type: ' + contentType + '\r\n' +
        'Content-Transfer-Encoding: base64\r\n' +
        '\r\n' +
        base64Data +
        close_delim;

    var request = gapi.client.request({
        'path': '/upload/drive/v2/files',
        'method': 'POST',
        'params': {'uploadType': 'multipart', 'convert': true},
        'headers': {
            'Content-Type': 'multipart/mixed; boundary="' + boundary + '"'
        },
        'body': multipartRequestBody
    });

    if (!callback) {
        callback = function (file) {
            console.log(file);
        };
    }
    request.execute(callback);
}
After I insert the file, when I go to the link I get: [screenshot omitted]

I think I found your error. Try migrating to v3. According to the v3 migration guide, under "Other changes":
Imports to Google Docs formats are now requested by setting the appropriate target mimeType in the resource body, rather than specifying ?convert=true.
Import operations will return a 400 error if the format is not supported.
And here is the link for the list of supported MIME types.
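Applied to the insertFile function above, that means the metadata names the target Google Docs type while the content part declares the source format being imported. A minimal, untested sketch against the v3 endpoint (note v3 uses 'name' where v2 used 'title', and there is no convert param):

insertFile: function (filename, content, callback) {
    const boundary = '-------314159265358979323846';
    const delimiter = "\r\n--" + boundary + "\r\n";
    const close_delim = "\r\n--" + boundary + "--";

    // v3: the *metadata* mimeType is the import target (a native Google Doc)...
    var metadata = {
        'name': filename,
        'mimeType': 'application/vnd.google-apps.document'
    };
    var base64Data = window.btoa(unescape(encodeURIComponent(content)));

    var multipartRequestBody =
        delimiter +
        'Content-Type: application/json\r\n\r\n' +
        JSON.stringify(metadata) +
        delimiter +
        // ...and the content part declares the source format being converted.
        'Content-Type: text/html\r\n' +
        'Content-Transfer-Encoding: base64\r\n' +
        '\r\n' +
        base64Data +
        close_delim;

    var request = gapi.client.request({
        'path': '/upload/drive/v3/files',
        'method': 'POST',
        'params': {'uploadType': 'multipart'},  // no 'convert' param in v3
        'headers': {
            'Content-Type': 'multipart/related; boundary="' + boundary + '"'
        },
        'body': multipartRequestBody
    });
    request.execute(callback || function (file) { console.log(file); });
}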

Related

Google Cloud Storage upload request sometimes works, sometimes doesn't - make resumable?

A bit of a pickle here. I've got a Google Cloud Storage bucket set up, and a webpage that, probably about 60% of the time, DOES upload a file to the bucket. However, sometimes it doesn't, and I don't really know why.
I'm trying this from a mobile handset, so it could be the signal. As such, I guess one thing I need to think about is making it resumable (the files themselves aren't big - less than 2 MB - but the service we're getting isn't great).
So, how could I make this into a resumable (rather than multipart) upload? And do you think this would solve the issue?
Code below:
function insertObject(event) {
    try {
        var fileData = event.target.files[0];
    } catch (e) {
        return;
    }

    var boundary = '-------314159265358979323846';
    var delimiter = "\r\n--" + boundary + "\r\n";
    var close_delim = "\r\n--" + boundary + "--";

    var reader = new FileReader();
    reader.readAsBinaryString(fileData);
    reader.onload = function (e) {
        var contentType = fileData.type || 'application/octet-stream';
        var metadata = {
            'name': fileData.name,
            'mimeType': contentType
        };
        var base64Data = btoa(reader.result);
        var multipartRequestBody =
            delimiter +
            'Content-Type: application/json\r\n\r\n' +
            JSON.stringify(metadata) +
            delimiter +
            'Content-Type: ' + contentType + '\r\n' +
            'Content-Transfer-Encoding: base64\r\n' +
            '\r\n' +
            base64Data +
            close_delim;

        // Note: gapi.client.storage.objects.insert() can only insert
        // small objects (under 64k), so to support larger file sizes
        // we're using the generic HTTP request method gapi.client.request().
        var request = gapi.client.request({
            'path': '/upload/storage/' + API_VERSION + '/b/' + BUCKET + '/o',
            'method': 'POST',
            'params': {'uploadType': 'multipart'},
            'headers': {
                'Content-Type': 'multipart/mixed; boundary="' + boundary + '"'
            },
            'body': multipartRequestBody
        });

        // Remove the current API result entry in the main-content div
        //listChildren = document.getElementById('main-content').childNodes;
        //if (listChildren.length > 1) {
        //    listChildren[1].parentNode.removeChild(listChildren[1]);
        //}
        // look at http://stackoverflow.com/questions/30317797/uploading-additional-metadata-as-part-of-file-upload-request-to-google-cloud-sto
        try {
            // Execute the insert object request
            //document.getElementById("authorize-button").click();
            executeRequest(request, 'insertObject');
            // Store the name of the inserted object
            document.getElementById("btnUpload").click();
            object = fileData.name;
        } catch (e) {
            alert('An error has occurred: ' + e.message);
        }
    };
}
EDIT - just tried it from my PC and it uploaded 17 files with no problems - normally it starts dropping them straight away from the mobile handset. So I think it is a signal issue, and making it resumable would do the job.
Well, this is embarrassing.
I decided to check that the code was uploading the file correctly, and got rid of the
document.getElementById("btnUpload").click();
object = fileData.name;
at the end of the code (btnUpload does some C# stuff that I do need, but I don't need the object at all). I tried 20 pictures from my mobile handset and, where it would usually only manage about 12, it did all 20 - and a lot more effectively.
So I think the issue is the btnUpload.click() taking control of the process away from the file transfer before it's finished. I've just got to find a way to incorporate that back into the code without interfering with things.
Github link
And I've just moved the btnUpload.click() from within insertObject to executeRequest (just after where it calls the log). There seems to be a small problem now, but not with the file upload: the btnUpload stuff gets called where it should, and the rest is simply sorting out the webpage.
Edit - as an aside, to cope with the request length being long (for files of 4 MB or more), I added this to the web.config (inside system.web):
<httpRuntime maxRequestLength="214748364"/>
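To answer the resumable part of the original question: with the JSON API you first POST with uploadType=resumable to open a session, then PUT the bytes to the session URI returned in the Location header; if the connection drops, the same URI lets you query how many bytes arrived and resume from that offset. A rough, untested sketch (insertObjectResumable is a hypothetical name; it reuses the BUCKET and API_VERSION globals and takes the OAuth token from gapi.auth):

function insertObjectResumable(fileData) {
    var token = gapi.auth.getToken().access_token;
    var initUrl = 'https://www.googleapis.com/upload/storage/' + API_VERSION +
        '/b/' + BUCKET + '/o?uploadType=resumable' +
        '&name=' + encodeURIComponent(fileData.name);

    // Step 1: initiate the session; the response's Location header is the
    // resumable session URI.
    var init = new XMLHttpRequest();
    init.open('POST', initUrl, true);
    init.setRequestHeader('Authorization', 'Bearer ' + token);
    init.setRequestHeader('Content-Type', 'application/json; charset=UTF-8');
    init.setRequestHeader('X-Upload-Content-Type', fileData.type || 'application/octet-stream');
    init.onload = function () {
        var sessionUri = init.getResponseHeader('Location');
        // Step 2: PUT the raw bytes to the session URI. If the connection
        // drops, a follow-up PUT to the same URI with
        // "Content-Range: bytes */<size>" reports how much arrived, so you
        // can resume from that offset instead of starting over.
        var put = new XMLHttpRequest();
        put.open('PUT', sessionUri, true);
        put.onload = function () { console.log('Upload status: ' + put.status); };
        put.send(fileData);
    };
    init.send(JSON.stringify({ contentType: fileData.type }));
}

Note the browser can only read the Location header cross-origin if the endpoint exposes it via CORS.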

Trying to upload files from a webpage to Google Cloud Storage

I've got a website (in ASP.NET) through which I want users to upload photos to a Google Cloud Storage bucket. The users doing the uploading CAN be authenticated with Google if necessary (though I would prefer it if they weren't - the site is locked down with usernames/passwords/captchas).
Slightly unrelated - the photos in the bucket have to be visible to everyone, so long as they have the link (some of our clients have IT depts who refuse to allow them to use Google accounts, and we can't change their minds). Ideally, I would want the link returned when the photo is uploaded.
I have the following JavaScript code, which I think should work (basically it's taken from here):
<script type="text/javascript" src="https://apis.google.com/js/client:plusone.js"></script>
<script type="text/javascript" src="https://ajax.googleapis.com/ajax/libs/jquery/1/jquery.min.js"></script>
<script type="text/javascript" src="https://apis.google.com/js/client.js"></script>
<script type="text/javascript">
    var PROJECT = 'MY_PROJECT';
    var clientId = 'MY_CLIENT_ID_ENDING_IN_apps.googleusercontent.com';
    var apiKey = 'MY_API_KEY';
    var scopes = 'https://www.googleapis.com/auth/devstorage.read_write';
    //quick question - I've got a photo in the bucket already, and its
    //URL points to a v1_internal folder. Would that mean that the API
    //version is v1_internal?
    var API_VERSION = 'v1';
    var BUCKET = 'MY_BUCKET';
    var object = "";
    //question - when using a specific group, should this read group-blahblah
    //or, for instance, owners-blahblah
    //or even group-owners-blahblah?
    var GROUP = 'group-MY_LONG_GROUP_ID';
    //stuck on these next few ones
    var ENTITY = 'group-Owners';
    var ROLE = 'OWNER';
    var ROLE_OBJECT = 'OWNER';

    function insertObject(event) {
        try {
            var fileData = event.target.files[0];
        } catch (e) {
            //'Insert Object' selected from the API Commands select list
            //Display insert object button and then exit function
            //filePicker.style.display = 'block';
            return;
        }

        var boundary = '-------314159265358979323846';
        var delimiter = "\r\n--" + boundary + "\r\n";
        var close_delim = "\r\n--" + boundary + "--";

        var reader = new FileReader();
        reader.readAsBinaryString(fileData);
        reader.onload = function (e) {
            var contentType = fileData.type || 'application/octet-stream';
            var metadata = {
                'name': fileData.name,
                'mimeType': contentType
            };
            var base64Data = btoa(reader.result);
            var multipartRequestBody =
                delimiter +
                'Content-Type: application/json\r\n\r\n' +
                JSON.stringify(metadata) +
                delimiter +
                'Content-Type: ' + contentType + '\r\n' +
                'Content-Transfer-Encoding: base64\r\n' +
                '\r\n' +
                base64Data +
                close_delim;

            // Note: gapi.client.storage.objects.insert() can only insert
            // small objects (under 64k), so to support larger file sizes
            // we're using the generic HTTP request method gapi.client.request().
            var request = gapi.client.request({
                'path': '/upload/storage/' + API_VERSION + '/b/' + BUCKET + '/o',
                'method': 'POST',
                'params': {'uploadType': 'multipart'},
                'headers': {
                    'Content-Type': 'multipart/mixed; boundary="' + boundary + '"'
                },
                'body': multipartRequestBody
            });

            // Remove the current API result entry in the main-content div
            //listChildren = document.getElementById('main-content').childNodes;
            //if (listChildren.length > 1) {
            //    listChildren[1].parentNode.removeChild(listChildren[1]);
            //}
            // look at http://stackoverflow.com/questions/30317797/uploading-additional-metadata-as-part-of-file-upload-request-to-google-cloud-sto
            try {
                // Execute the insert object request
                executeRequest(request, 'insertObject');
                // Store the name of the inserted object
                object = fileData.name;
            } catch (e) {
                alert('An error has occurred: ' + e.message);
            }
        };
    }
</script>
Currently, I can execute the function. The file makes it into the function (I've been able to call an alert on fileData.name). However, it doesn't end up in the bucket, and no error message is raised.
Does this code look okay, or is it a problem with how the storage bucket is set up? And am I using the correct values (or have I formatted them correctly)?
Sorted it. For those who have tried using this code and, like me, didn't bother to read it all: Google's sample uses an "authorize" button. Basically, the example code gives you the ability to more or less "attach" the file, and then a separate button does the actual uploading. I hadn't seen this myself.
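For anyone else who skims the sample like I did, the missing step is the authorization pass before any upload call. A rough sketch of that flow using the clientId and scopes variables from the question (the handleAuthResult name and the 'filePicker'/'authorize-button' element IDs are assumptions, not necessarily what your page uses):

function checkAuth() {
    gapi.auth.authorize(
        {'client_id': clientId, 'scope': scopes, 'immediate': false},
        handleAuthResult);
}

function handleAuthResult(authResult) {
    if (authResult && !authResult.error) {
        // Token acquired - now it's safe to wire up the file input.
        document.getElementById('filePicker').onchange = insertObject;
    } else {
        // No token - show the authorize button so the user can grant access.
        document.getElementById('authorize-button').style.display = 'block';
    }
}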

upload to google cloud storage signed url with javascript

With the following code I'm able to upload to my publicly writable bucket in Google Cloud Storage (allUsers has write permission). However, if the bucket isn't publicly writable, I get a 401 Unauthorized error. (I don't want the bucket to be publicly writable.)
var file = $scope.myFile;
var fileData = file;
var boundary = '-------314159265358979323846';
var delimiter = "\r\n--" + boundary + "\r\n";
var close_delim = "\r\n--" + boundary + "--";

var reader = new FileReader();
reader.readAsBinaryString(fileData);
reader.onload = function (e) {
    var contentType = fileData.type || 'application/octet-stream';
    var metadata = {
        'name': 'objectName', //'lol' + fileData.name,
        'mimeType': contentType
    };
    var base64Data = btoa(reader.result);
    var multipartRequestBody =
        delimiter +
        'Content-Type: application/json\r\n\r\n' +
        JSON.stringify(metadata) +
        delimiter +
        'Content-Type: ' + contentType + '\r\n' +
        'Content-Transfer-Encoding: base64\r\n' +
        '\r\n' +
        base64Data +
        close_delim;

    var stuff = angular.fromJson('{"Expires": "1415344534", "GoogleAccessId": "394062384276-n2jjh17vt975fsi4nc9ikm1nj55466ir@developer.gserviceaccount.com", "Signature": "AMkhO7mt2zg+s1Dzx28yQIMSrZlDC2Xx1SzvMCAgUVyiLXs5890/nA6PKzoc1KYBcRv/ALmkNaEVhvWHxE0EfcE151c0PYSG9x7AeSpQI/3dB1UPcSqpwilS1e2sgwB9piLNvBEXLNRXiLYyTiH22zkFZHAEQonJ3J25a47fwo4="}');
    var Expires = stuff.Expires;
    var GoogleAccessId = stuff.GoogleAccessId;
    var Signature = encodeURIComponent(stuff.Signature);
    var BUCKET = 'mybucket';

    var request = $window.gapi.client.request({
        'path': '/upload/storage/v1/b/' + BUCKET + '/o',
        'method': 'POST',
        'params': {
            'uploadType': 'multipart',
            'Expires': Expires,
            'GoogleAccessId': GoogleAccessId,
            'Signature': Signature
        },
        'headers': {
            'Content-Type': 'multipart/mixed; boundary="' + boundary + '"'
        },
        'body': multipartRequestBody
    });
    request.execute(function (r) {
        console.log(r);
    });
};
Is it possible to use signed URLs with the gapi JavaScript client, or does it not understand the params?
If not - are there any examples of doing CORS with the JSON API from JavaScript for uploads with signed URLs?
(Let's assume that my Expires, GoogleAccessId and Signature are correct and match what I'm doing in the JavaScript and the permissions I've set up on the bucket.)
Basically: are there any examples of uploading to Google Cloud Storage from a JavaScript client on localhost, without requiring the user to have a Google account and without using a publicly writable bucket, but using dispensed signed URLs?
Use https://storage.googleapis.com as the host to compose the URL that points to the desired resource; there are a few valid ways to construct the base URL.
For reference, you can also check out a very simple Python snippet that could be helpful.
Hope it helps.
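To make that concrete: signed URLs are a feature of the storage.googleapis.com (XML API) host, not of the JSON /upload path that gapi.client.request targets, which is why gapi ignores those params. A minimal, untested sketch of a direct PUT of the raw file bytes to a signed URL, reusing the question's variables (assumes the URL was signed for the PUT verb with an empty content type):

var signedUrl = 'https://storage.googleapis.com/' + BUCKET + '/objectName' +  // must match the name that was signed
    '?GoogleAccessId=' + encodeURIComponent(GoogleAccessId) +
    '&Expires=' + Expires +
    '&Signature=' + Signature;  // already URL-encoded in the question's code

var xhr = new XMLHttpRequest();
xhr.open('PUT', signedUrl, true);
// Send the File/Blob directly - no multipart body, no base64 encoding.
xhr.onload = function () { console.log('Upload status: ' + xhr.status); };
xhr.send(fileData);

No OAuth token and no public write ACL are needed, but the bucket still needs a CORS configuration that allows PUT from your origin.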
I was implementing the same thing. The problem was with the signed URL; after correcting it, the upload worked like a charm.
As I was using PHP, below is the code for generating signed URLs.
private function createSignedUrl($objectName, $bucketName, $key, $serviceEmailAddress, $method = 'GET', $duration = 600)
{
    $expires = time() + $duration;
    // Line breaks are important!
    $toSign = (
        $method . "\n" .
        /* Content-MD5 */ "\n" .
        /* Content Type */ "\n" .
        $expires . "\n" .
        $objectName
    );
    $signature = urlencode(base64_encode(JWT::encode($toSign, $key, 'HS256')));
    return array(
        'expires' => $expires,
        'accessid' => $serviceEmailAddress,
        'signature' => $signature,
    );
}

getting a 404 when trying to upload a file to the google drive api, suspecting a data formatting issue

I'm trying to save some dynamically generated data (JSON) to a (new) file on my Google Drive using JS, but I keep getting these unhelpful errors on my POST.
At first I thought I was POSTing to the wrong address, but after changing the data content the error changed from 404 to 400, so now I suspect the error is formatting-related (since I don't understand this multipart stuff so well).
The code is:
function gDriveSaveProject(data, gDriveFolderID, currentProj, callback)
{
    const boundary = '-------314159265358979323846';
    const delimiter = "\r\n--" + boundary + "\r\n";
    const close_delim = "\r\n--" + boundary + "--";

    var metadata = {
        'title': currentProj + ".lo",
        'mimeType': 'application/json',
        'parents': [{'id': gDriveFolderID}]
    };

    var multipartRequestBody =
        delimiter +
        'Content-Type: application/json\r\n\r\n' +
        JSON.stringify(metadata) +
        delimiter +
        'Content-Type: application/json\r\n' +
        'Content-Transfer-Encoding: base64\r\n' +
        '\r\n' +
        btoa(JSON.stringify(data)) +
        close_delim;

    var request = gapi.client.request({
        'path': '/upload/drive/v2/files',
        'method': 'POST',
        'params': {'uploadType': 'multipart'},
        'headers': {
            'Content-Type': 'multipart/mixed; boundary="' + boundary + '"'
        },
        //'body': "" // using this gives me 400 instead of 404
        'body': multipartRequestBody
    });
    request.execute(callback);
}
The code is based on the Google Drive SDK example, but uses dynamically created data instead of an uploaded file, and a target directory instead of the root.
Thanks
Figured it out!
It turned out that I was supplying a faulty ID for the parent folder, and that is why I got the 404; but since it wasn't returned as a proper error object, I assumed the 404 meant that /upload/drive/v2/files itself wasn't found.
Maybe Google could make it a bit clearer for idiots like me ;)
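One cheap way to catch that class of 404 earlier is to validate the folder ID before attempting the upload: a GET on the files endpoint fails fast with a readable error object. A small, untested sketch of such a guard around the same function:

gapi.client.request({
    'path': '/drive/v2/files/' + gDriveFolderID,
    'method': 'GET'
}).execute(function (resp) {
    if (resp.error) {
        // A bad or inaccessible ID shows up here as a proper error object.
        console.error('Parent folder lookup failed: ' + resp.error.message);
    } else {
        gDriveSaveProject(data, gDriveFolderID, currentProj, callback);
    }
});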

Upload a large file using nodejs

I have the following Node.js code which uploads a file by calling a server-side API (written by me) and passing the file content as a multipart request. The problem is that my code works perfectly with small files but fails with large files (1 MB or above). I'm pretty sure it's a problem in my code, but I'm not able to find out what it is.
// assume file content has been read into the post_data array
// Make post
var google = http.createClient(443, host, secure = true);
var filepath = '/v2_0/put_file/';
var GMTdate = (new Date()).toGMTString();
var fileName = encodeURIComponent(destination);
console.log("fileName : " + fileName);
console.log("Path : " + filepath);

var header = {
    'Host': host,
    'Authorization': 'Basic ' + authStr,
    'Content-Type': 'multipart/form-data; boundary=0xLhTaLbOkNdArZ',
    'Last-Modified': GMTdate,
    'Filename': fileName,
    'Last-Access-By': username
};

var request = google.request('POST', filepath, header);
for (var i = 0; i < post_data.length; i++) {
    request.write(post_data[i]);
}
request.end();

request.addListener('response', function (response) {
    var noBytes = 0;
    response.setEncoding('utf8');
    console.log('STATUS: ' + response);
    console.log('STATUS: ' + response.statusCode);
    console.log('HEADERS: ' + JSON.stringify(response.headers));
    console.log('File Size: ' + response.headers['content-length'] + " bytes.");
});
From the logs, I see that control reaches request.end(), but I do not see the last few logs written inside the request.addListener() block.
I've been pulling my hair out for the last couple of days trying to understand why it works for small files but not for larger ones. I don't see any timeouts, and the code just seems to hang until I kill it.
Can anyone tell me what I am doing wrong?
UPDATE:
post_data is an array; here is what I'm doing:
post_data = [];
console.log('ContentType =' + ContentType + "\n\nEncoding Style =" + encodingStyle);
post_data.push(new Buffer(EncodeFilePart(boundary, ContentType, 'theFile', FileNameOnly), 'ascii'));

var file_contents = '';
var file_reader = fs.createReadStream(filename, {
    encoding: encodingStyle
});
file_reader.on('data', function (data) {
    console.log('in data');
    file_contents += data;
});
file_reader.on('end', function () {
    post_data.push(new Buffer(file_contents, encodingStyle));
    post_data.push(new Buffer("\r\n--" + boundary + "--\r\n", 'ascii'));
    ...
    var request = google.request('POST', filepath, header);
    for (var i = 0; i < post_data.length; i++) {
        request.write(post_data[i]);
    }
I look forward to your suggestions.
You should be passing either a string or a Buffer to request.write. Is post_data an array of strings or Buffers, or an array of arrays?
Also, you are posting it as multipart/form-data, so that means you have to format your data accordingly. Have you done so, or is post_data just the raw data from a file?
Check out node-formidable and this post: http://debuggable.com/posts/parsing-file-uploads-at-500-mb-s-with-node-js:4c03862e-351c-4faa-bb67-4365cbdd56cb
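If the multipart framing is right, the usual culprit for large files is building the whole body in memory via string concatenation, which also mangles binary data unless the encoding is exactly right. A sketch of the streaming alternative: pipe the file into the request between hand-written multipart delimiters so nothing is buffered. This reuses the question's host, filepath, header, boundary, ContentType, FileNameOnly, filename and EncodeFilePart names, and swaps the long-removed http.createClient for https.request:

var https = require('https');
var fs = require('fs');

var request = https.request({
    host: host,
    path: filepath,
    method: 'POST',
    headers: header
}, function (response) {
    console.log('STATUS: ' + response.statusCode);
    console.log('HEADERS: ' + JSON.stringify(response.headers));
    response.resume();  // drain the response so 'end' can fire
});

// Multipart preamble (field headers), then the raw file bytes, then the
// closing delimiter - written in order, never all held in memory at once.
request.write(new Buffer(EncodeFilePart(boundary, ContentType, 'theFile', FileNameOnly), 'ascii'));

fs.createReadStream(filename)           // no encoding: stream raw Buffers
    .on('end', function () {
        request.end("\r\n--" + boundary + "--\r\n");
    })
    .pipe(request, { end: false });     // keep the request open for the trailer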
