I have another project where this same code works successfully, so it may be some configuration option I've missed this time around. I'm using the Google Cloud API to access Firebase Storage.
For clarity, the file does exist.
var storage = require('@google-cloud/storage')({
  keyFilename: 'serviceAccountKey.json',
  projectId: 'my-id'
});
var bucket = storage.bucket('my-id.appspot.com');
var file = bucket.file('directory/file.json'); // this exists!
file.exists(function(err, exists){
  console.log("Checking for challenges file. Results:" + exists + ", err:" + err); // logs "Checking for challenges file. Results:true, err:null"
  if (exists) {
    console.log("File exists. Printing."); // logs "File exists. Printing."
    file.download().then(function(currentFileData) {
      console.log("This line is never reached.");
    }).catch(err => {
      console.error('ERROR:', err); // gives a 404 error
    });
  }
});
Instead of printing "This line is never reached.", it prints the following caught error:
ERROR: { ApiError: Not Found at Object.parseHttpRespMessage (/user_code/node_modules/@google-cloud/storage/node_modules/@google-cloud/common/src/util.js:156:33) at Object.handleResp ... ... The full error is colossal, so I won't post it here in its entirety unless required.
It's possible that the user trying to access the file only has access to the bucket but not to the file. Check the ACLs of both the bucket and the file in both projects and compare what you get:
myBucket.acl.get()
.then(acls => console.log("Bucket ACLs:", acls));
myFile.acl.get()
.then(acls => console.log("File ACLs:", acls));
You should see an output like this:
[ [ { entity: 'user-abenavides333@gmail.com', role: 'OWNER' },
    { entity: 'user-dwilches@gmail.com', role: 'OWNER' } ],
  { kind: 'storage#objectAccessControls',
    items: [ [Object], [Object] ] } ]
If there is no difference there, try the following more verbose versions of the same code:
myBucket.acl.get()
.then(acls => console.log("Bucket ACLs:", JSON.stringify(acls, null, '\t')));
myFile.acl.get()
.then(acls => console.log("File ACLs:", JSON.stringify(acls, null, '\t')));
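If the comparison shows the account is missing from the file's ACL, one option is to grant it access on the object itself. A minimal sketch, assuming a hypothetical service-account email (use the client_email from your serviceAccountKey.json) and a role matching whatever the working project shows:
myFile.acl.add({
  entity: 'user-my-service-account@my-id.iam.gserviceaccount.com', // placeholder
  role: 'READER' // or 'WRITER'/'OWNER', to match the working project
}, function(err, aclObject) {
  if (err) {
    console.error('Failed to update the file ACL:', err);
  } else {
    console.log('File ACL updated:', aclObject);
  }
});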
I have uploaded a PDF file successfully to Drive using a Node server. However, I cannot find a way to convert it to the Google Docs format so that it can be downloaded later as a .docx document.
Here is my actual code:
const fileMetadata = {
  'name': 'new file.pdf',
  //'mimeType': 'application/vnd.google-apps.document'
  // trying to create it as a google doc directly does
  // not work either
};
const media = {
  mimeType: 'application/pdf',
  body: fs.createReadStream(pdf)
};
drive.files.create({
  resource: fileMetadata,
  media,
  fields: 'id'
}, (err, createdFile) => {
  if (err) {
    console.error(err);
  } else {
    // ------- COPY AS GOOGLE DOC
    drive.files.copy({
      fileId: createdFile.data.id,
      requestBody: {
        mimeType: 'application/vnd.google-apps.document'
      }
    }, (err, googleDocFiles) => {
      if (err) {
        console.log(err)
        // GaxiosError: Bad Request
        // error: [
        //   { domain: 'global', reason: 'badRequest', message: 'Bad Request' } ]
      } else {
        console.log(googleDocFiles)
      }
    })
  }
});
What I have tried
Using the files.upload method with the same requestBody returns a 200 response; however, the PDF is unchanged.
I know it is possible, as I did it before using the copy method, but now it is not working and I have no clue why. *sigh*
EDIT
The mimeType in the request body is responsible for the 400 error, which is very weird since I am following the steps in the documentation here:
Import to Google Docs types section
That section is explicit about importing a CSV; however, the table under it says that I can convert a PDF to a Doc using the corresponding MIME type.
I re-installed the package and it worked.
I want to send multiple files to Firebase Storage and at the same time save them to the Realtime Database as a single record, using JavaScript.
For example:
file1 : urlOfFirstFile
file2 : urlOfSecondFile
// let (not var) so each iteration keeps its own i inside the async callbacks
for (let i = 0; i < file.length; i++) {
  var task = ref.child(file[i].name).put(file[i], metadata);
  task
    .then(snapshot => snapshot.ref.getDownloadURL())
    .then((url) => {
      console.log(url);
      userDetails.push({
        email: email,
        title1: tit,
        detail1: dit,
        file: file[i].name
      });
    });
}
Your question is somewhat vague about what information you want to store, so I've made some assumptions to come up with the code below:
The files are to be uploaded to an area of Firebase Storage specific to the logged-in user (e.g. "userFiles/CURRENT_USER/...").
The information about the uploaded files is kept under the user's own data (e.g. "users/CURRENT_USER/uploads/...").
The title and detail properties change for each file. Where these properties come from is unclear, so I'm just going to assume they are passed in through the object metadata.
The code below should be enough to get you started on figuring out your own solution.
// the array of File objects to upload
const fileObjArray = [ ... ]
// the metadata to store with each file
const metadata = { ... }
// the current user's ID
const currentUserId = firebase.auth().currentUser.uid;
// Where to save information about the uploads
const databaseRef = firebase.database().ref("users").child(currentUserId).child('uploads');
// Create an ID for this set of uploaded files (push() on a database ref generates a unique key)
const uploadId = databaseRef.push().key;
// Save files to storage in a subfolder of the user's files corresponding to the uploadId
const storageRef = firebase.storage().ref("userFiles").child(currentUserId).child(uploadId);
// Upload each file in fileObjArray, then fetch their download URLs and then return an object containing information about the uploaded file
var uploadPromiseArray = fileObjArray.map((fileObj) => {
  var uploadTask = storageRef.child(fileObj.name).put(fileObj, metadata);
  return uploadTask.then(uploadSnapshot => {
    // file uploaded successfully. Fetch the URL for the file and return it along with the UploadTaskSnapshot
    return uploadSnapshot.ref.getDownloadURL().then((url) => {
      return {
        downloadUrl: url,
        snapshot: uploadSnapshot
      };
    });
  });
});
// uploadPromiseArray is an array of Promises that resolve as objects with the properties "downloadUrl" and "snapshot"
Promise.all(uploadPromiseArray)
  .then((uploadResultArray) => {
    var batchUploadData = {
      timestamp: firebase.database.ServerValue.TIMESTAMP, // use the server's time
      files: [],
      ... // other upload metadata such as reason, expiry, permissions, etc.
    }
    batchUploadData.files = uploadResultArray.map((uploadResult) => {
      // rearrange the file's snapshot data and download URL for storing in the database
      return {
        file: uploadResult.snapshot.ref.name,
        url: uploadResult.downloadUrl,
        title: uploadResult.snapshot.metadata.customMetadata.title,
        detail: uploadResult.snapshot.metadata.customMetadata.detail
      };
    });
    // commit the data about this upload to the database
    return databaseRef.child(uploadId).set(batchUploadData);
  })
  .then(() => {
    // the upload completed and information about the upload was saved to the database successfully
    // TODO: do something
  }, (err) => {
    // some error occurred:
    // - a file upload failed/was cancelled
    // - the database write failed
    // - permission error from Storage or Realtime Database
    // TODO: handle error
  });
// Warning: this line will be reached before the above code has finished executing
This is what it looks like in the database:
"users": {
  "someUserId-78sda9823": {
    "email": "example@example.com",
    "name": "mr-example",
    "username": "mrexemplary",
    "uploads": {
      "niase89f73oui2kqwnas98azsa": {
        "timestamp": 1554890267823,
        "files": {
          "1": {
            "file": "somefile.pdf",
            "url": "https://firebasestorage.googleapis.com/v0/b/bucket/o/userFiles%2FsomeUserId-78sda9823%2Fsomefile.pdf",
            "title": "Some File",
            "detail": "Contains a report about some stuff"
          },
          "2": {
            "file": "screenshot.png",
            "url": "https://firebasestorage.googleapis.com/v0/b/bucket/o/userFiles%2FsomeUserId-78sda9823%2Fscreenshot.png",
            "title": "Screenshot of problem",
            "detail": "Contains an image that shows some stuff"
          },
          ...
        }
      },
      ...
    },
    ...
  },
  ...
}
Note 1: This code is not yet complete. It is missing error handling for things like permission errors and incomplete file uploads. This is a problem for you to solve.
Note 2: Regarding incomplete file uploads: if any file fails to upload or its download URL cannot be fetched, the database will not be written to. One possible way to handle this is to add a catch to uploadTask that returns null on error, and then, in the uploadResultArray.map(...) step, skip any uploadResult entries that are null (or record in the database that that particular file failed); see the sketch after these notes.
Note 3: Because Firebase Storage and the Realtime Database both use snapshots, try to keep them named uploadSnapshot/fileSnapshot and dataSnapshot respectively when using both in your code, to minimise confusion. Similarly, name your references somethingStorageRef/somethingSRef and somethingDatabaseRef/somethingDBRef.
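As a hedged sketch of the Note 2 approach (reusing the fileObjArray, metadata, and storageRef names from above), failed uploads resolve to null and are filtered out before the database write:
var uploadPromiseArray = fileObjArray.map((fileObj) => {
  return storageRef.child(fileObj.name).put(fileObj, metadata)
    .then(uploadSnapshot => uploadSnapshot.ref.getDownloadURL()
      .then((url) => ({ downloadUrl: url, snapshot: uploadSnapshot })))
    .catch((err) => {
      // swallow the failure so Promise.all still resolves;
      // a null entry marks a file that didn't make it
      console.error('Upload failed for ' + fileObj.name + ':', err);
      return null;
    });
});
Promise.all(uploadPromiseArray).then((uploadResultArray) => {
  // keep only the uploads that succeeded
  var successfulUploads = uploadResultArray.filter((result) => result !== null);
  // ...build batchUploadData from successfulUploads as before...
});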
I'm trying to send an audio blob to a Google Drive folder. To do that, I convert the blob into a file before sending it.
From the start, I have been receiving this error:
Error: File not found.
code: 404, errors: [ { domain: 'global',
reason: 'notFound',
message: 'File not found: 1aazd544z3FOCAsdOA5E7XcOaS3297sU.',
locationType: 'parameter',
location: 'fileId' } ] }
Progressive edit: So far I have converted my audio blob to a base64 string in order to ease the processing of the blob.
But I still fail to write a file from my base64 audio blob.
Here is my driveApi.js:
// request data from req.body
var data = req.body.data ; // data variable is presented in form of base64 string
var name = req.body.word ;
(...)
// WRITE FILE AND STORE IT IN BODY HEADER PROPERTY
body: fs.writeFile((name + ".mp3"), data.substr(data.indexOf(',')+1), {encoding: 'base64'}, function(err) {
  console.log('File created')
})
Three steps: create a temporary file with your base64 data outside of the drive.files.create function, and give this file a specific name, e.g. tempFile (you can also customize the name with a time value). After that, pass this file to the fs.createReadStream method to upload it to Google Drive.
Some hints:
Firstly, use path.join(__dirname, name + "-" + Date.now() + ".ext") to build the file name.
Secondly, make this process asynchronous to avoid a data-flow conflict (trying to read the file before it has been created), so call drive.files.create from within the fs.writeFile callback.
Thirdly, destroy the temp file after the operation is done. This allows you to automate the process.
I'll let you dive into the methods you need, but basically fs should do the job.
Again, be careful with the data flow and use callbacks to control it; your code can crash simply because a function ran before its data was ready.
Some links :
https://nodejs.org/api/path.html
https://nodejs.org/api/fs.html#fs_fs_writefile_file_data_options_callback
Here is an example:
// datevalues = some time value, e.g. Date.now()
fs.writeFile(
  path.join(__dirname, name + "-" + datevalues + ".mp3"),
  data.substr(data.indexOf(',')+1),
  {encoding: 'base64'},
  // callback
  function(err) {
    if (err) {
      console.log("error writing file: " + err)
      return
    }
    console.log('File created')
    console.log("WRITING") // control data flow
    fileCreate(name)
  })
function fileCreate(name) {
  // upload the file into a specific folder
  var folderId = "someID";
  var fileMetadata = {
    'name': name + ".mp3",
    parents: [folderId]
  };
  console.log("MEDIA") // control data flow
  var media = {
    mimeType: 'audio/mp3',
    body: fs.createReadStream(path.join(__dirname, name + "-" + datevalues + ".mp3"))
  };
  drive.files.create({
    auth: jwToken,
    resource: fileMetadata,
    media: media,
    fields: 'id'
  }, function (err, file) {
    if (err) {
      // Handle error
      console.error(err);
    } else {
      console.log('File Id: ', file.data.id);
    }
    // make a callback to a deleteFile() function here; a sketch follows below
  });
}
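As a hedged sketch of that cleanup step (the deleteFile name is an assumption, and datevalues is the same time value used above), fs.unlink is enough to destroy the temp file once Drive has its copy:
function deleteFile(name) {
  // remove the temporary file now that Drive has the uploaded copy
  fs.unlink(path.join(__dirname, name + "-" + datevalues + ".mp3"), function(err) {
    if (err) {
      console.error("error deleting temp file: " + err);
    } else {
      console.log("Temp file deleted");
    }
  });
}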
How about this modification? I'm not sure about the condition of the blob from reactApp.js, so could you please try this modification? In it, the file or blob from reactApp.js is used.
Modified script:
var stream = require('stream'); // Added
module.exports.uploadFile = function(req) {
  var file;
  console.log("driveApi upload reached")
  function blobToFile(req) {
    file = req.body.blob
    // A Blob() is almost a File() - it's just missing the two properties below, which we add
    file.lastModifiedDate = new Date();
    file.name = req.body.word;
    return file;
  }
  file = blobToFile(req); // build the File from the incoming blob
  var bufStream = new stream.PassThrough(); // Added
  bufStream.end(file); // Or bufStream.end(### blob from reactApp.js ###) Added
  // upload file into a specific folder
  var folderId = "1aa1DD993FOCADXUDNJKLfzfXcOaS3297sU";
  var fileMetadata = {
    "name": req.body.word,
    parents: [folderId]
  }
  var media = {
    mimeType: "audio/mp3",
    body: bufStream // Modified
  }
  drive.files.create({
    auth: jwToken,
    resource: fileMetadata,
    media: media,
    fields: "id"
  }, function (err, file) {
    if (err) {
      // Handle error
      console.error(err);
    } else {
      console.log("File Id: ", file.data.id);
    }
    console.log("driveApi upload accomplished")
  });
}
If this didn't work, I'm sorry.
I'm writing a JavaScript library, and when some event happens I would like to store a JSON variable that can be accessed from NightwatchJS tests to validate the data that the event generated.
I used to store it as a global variable (e.g.
window.debugLog = { message: "something", timestamp: 1111, eventType: "click", other: "stuff" }; )
and retrieve it in Nightwatch with
browser.execute("return window.debugLog;", [], function(result){ debugLog = result.value;})
Unfortunately, this doesn't seem reliable across every browser/device when I run Nightwatch tests on BrowserStack. It's not rare to get false positives because Appium failed to execute the JS function.
I was wondering if anyone has suggestions for more reliable alternatives. A cookie? A meta tag? A hidden tag?
Write it to a file, require that file for the later tests that use it.
I have, in the custom_commands folder, a file called helperFunctions.js, and within that, this is one of the more useful functions/methods:
saveToFile : function(client, path, data) {
  var fs = require('fs');
  var buffer = Buffer.from(data);
  fs.open(path, 'w', function(err, fd) {
    if (err) {
      throw 'error opening file: ' + err;
    }
    fs.write(fd, buffer, 0, buffer.length, null, function(err) {
      if (err) throw 'error writing file: ' + err;
      return fs.close(fd, function() {
        console.log('File write: ' + path + ' has been updated.');
      })
    });
  })
},
In that way it can be called to write data from within any test:
this.helperFunctions.saveToFile(client, "conf/usefulVariable.js", "module.exports = {\"default\" : {},\"test_env\" : { myGlobal: " + someGlobal + "}};")
In the test file that needs to use it:
var usefulVar = require("../conf/usefulVariable.js");
You may need/want to JSON.stringify or parse as appropriate.
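For instance, a minimal sketch of that pattern (how helperFunctions is exposed in your suite is an assumption based on the call above), serializing the object on save so the generated file is valid JavaScript:
// In a test: grab the debug log from the page and persist it for later tests
var helperFunctions = this.helperFunctions; // as loaded in the suite (assumption)
browser.execute("return window.debugLog;", [], function(result) {
  helperFunctions.saveToFile(client, "conf/usefulVariable.js",
    "module.exports = { debugLog: " + JSON.stringify(result.value) + " };");
});
// In a later test file, require it back in:
var usefulVar = require("../conf/usefulVariable.js");
console.log(usefulVar.debugLog.message); // "something"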
I am trying to get Slingshot to work but I'm having a hard time; I am attaching my code here.
The error I get in the console is:
"Exception in delivering result of invoking 'slingshot/uploadRequest': TypeError: Cannot read property 'response' of undefined"
client
Template.hello.events({
  'change .uploadFile': function(event, template) {
    event.preventDefault();
    var uploader = new Slingshot.Upload("myFileUploads");
    uploader.send(document.getElementById('uploadFile').files[0], function (error, downloadUrl) {
      if (error) {
        // Log service detailed response
        console.error('Error uploading', uploader.xhr.response);
        alert(error);
      }
      else {
        console.log("Worked!");
      }
    });
  }
});
lib
Slingshot.fileRestrictions("myFileUploads", {
  allowedFileTypes: ["image/png", "image/jpeg", "image/gif"],
  maxSize: null // null means unlimited; use e.g. 10 * 1024 * 1024 for 10 MB
});
server
Slingshot.fileRestrictions("myFileUploads", {
  allowedFileTypes: ["image/png", "image/jpeg", "image/gif"],
  maxSize: null,
});
Slingshot.createDirective("myFileUploads", Slingshot.S3Storage, {
  AWSAccessKeyId: "my-AWSAccessKeyId",
  AWSSecretAccessKey: "my-AWSSecretAccessKey",
  bucket: "slingshot-trial-2",
  acl: "public-read",
  authorize: function () {
    // Deny uploads if user is not logged in.
  },
  key: function (file) {
    // Store file into a directory by the user's username.
    return file.name;
  }
});
I saw the same issue, and it was due to xhr being null. Try removing the console.error line that references it, and I'm assuming you'll start seeing the alert with the actual error message:
console.error('Error uploading', uploader.xhr.response);
I ended up putting in a check for xhr before referencing it, and logging it only if it existed.
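A minimal sketch of that guard, reusing the uploader callback from the client code above:
uploader.send(document.getElementById('uploadFile').files[0], function (error, downloadUrl) {
  if (error) {
    // xhr can be undefined/null if the request never started,
    // so only log the service response when it exists
    if (uploader.xhr) {
      console.error('Error uploading', uploader.xhr.response);
    }
    alert(error);
  } else {
    console.log("Worked!");
  }
});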