Ionic Upload Video to S3 - javascript

I'm attempting to use this to upload videos
http://market.ionic.io/plugins/image-upload
I know the file paths of the videos (they're on cameras I can access via HTTP), and I'm trying to work out how to use something like this to get those videos onto S3.
I understand that I can't pass the file path to this function; it needs to be an actual file, as per the directive.
How do I copy a large video file into a JavaScript variable so I can upload it as a file?
var imageUploader = new ImageUploader();
scene.videoFiles.forEach(function (videoFile) {
    imageUploader.push(videoFile, function (data) {
        console.log('File uploaded Successfully', videoFile, data);
        $scope.uploadUri = data.url;
        $scope.$digest();
    });
});

You can use $cordovaFile and the AWS JavaScript SDK to upload a file (video) to an S3 bucket. Here is sample code for uploading a video to an S3 bucket.
var uploadFile = function (file_name, file_path) {
    var deferred = $q.defer();
    // $cordovaFile.readAsArrayBuffer expects (directoryPath, fileName)
    $cordovaFile.readAsArrayBuffer(file_path, file_name)
        .then(function (success) {
            AWS.config.region = 'eu-west-1';
            AWS.config.update({
                accessKeyId: 'ACCESS-KEY',
                secretAccessKey: 'SECRET-KEY'
            });
            var bucket = new AWS.S3({
                params: {
                    Bucket: 'Bucket-NAME'
                }
            });
            var params = {
                Key: "uploads/" + file_name,
                Body: success // the ArrayBuffer read from the device
            };
            bucket.upload(params)
                .on('httpUploadProgress', function (evt) {
                    $scope.uploading = true;
                    $scope.progress = parseInt((evt.loaded * 100) / evt.total);
                    console.log("Uploaded :: " + $scope.progress);
                    $scope.$apply();
                })
                .send(function (err, data) {
                    $scope.uploading = false;
                    $scope.$apply();
                    deferred.resolve(data);
                });
            $scope.i++;
        }, function (error) {
            deferred.reject(error);
        });
    return deferred.promise;
};
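For example, a call could look roughly like this (a sketch; the file name and the cordova.file.dataDirectory location are assumptions about where the video lives):
// Hypothetical usage: upload a video stored in the app's data directory
uploadFile('myVideo.mp4', cordova.file.dataDirectory).then(function (data) {
    console.log('Uploaded to', data.Location); // s3.upload() returns the object URL in data.Location
}, function (error) {
    console.error('Upload failed', error);
});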

Related

Upload file to google drive after http get request

I have two functions in separate files to split up the workflow.
const fs = require("fs");
const https = require("https");

const download = function (url) {
    const file = fs.createWriteStream("./test.png");
    const request = https.get(url, function (response) {
        response.pipe(file);
    });
};
This function in my fileHelper.js is supposed to take a URL with an image in it and then save it locally to test.png
function uploadFile(filePath) {
    fs.readFile('credentials.json', (err, content) => {
        if (err) return console.log('Error loading client secret file:', err);
        // Authorize a client with credentials, then call the Google Drive API.
        authorize(JSON.parse(content), function (auth) {
            const drive = google.drive({version: 'v3', auth});
            const fileMetadata = {
                'name': 'testphoto.png'
            };
            const media = {
                mimeType: 'image/png',
                body: fs.createReadStream(filePath)
            };
            drive.files.create({
                resource: fileMetadata,
                media: media,
                fields: 'id'
            }, (err, file) => {
                if (err) {
                    // Handle error
                    console.error(err);
                } else {
                    console.log('File Id: ', file.id);
                }
            });
        });
    });
}
This function in my googleDriveHelper.js is supposed to take the file path and upload that stream to my Google Drive. The two functions work on their own, but https.get runs asynchronously: if I call googleDriveHelper.uploadFile(filePath) right after the download, the file hasn't finished downloading yet, so a blank file gets uploaded to my Drive.
I want to find a way so that when the fileHelper.download(url) is called, it automatically uploads into my drive.
I also don't know if there is a way to create a readStream directly from the download function to the upload function, so I can avoid having to save the file locally to upload it.
I believe your goal is as follows.
You want to upload a file retrieved from a URL to Google Drive.
When you download the file from the URL, you want to upload it to Google Drive without creating a local file.
You want to achieve this using googleapis with Node.js.
You have already been able to upload a file using the Drive API.
For this, how about this answer?
Modification points:
In the download function, the retrieved buffer is converted to a stream and the stream is returned.
In the uploadFile function, that stream is used for the upload.
When the file ID is retrieved from the response value of the Drive API, please use file.data.id instead of file.id.
With the above modifications, the file downloaded from the URL can be uploaded to Google Drive without creating a local file.
Modified script:
Please modify your script as follows.
download()
// Requires the "request" package (npm install request)
const request = require("request");
const stream = require("stream");

const download = function (url) {
    return new Promise(function (resolve, reject) {
        request(
            {
                method: "GET",
                url: url,
                encoding: null, // return the body as a Buffer
            },
            (err, res, body) => {
                if (err && res.statusCode != 200) {
                    reject(err);
                    return;
                }
                // Wrap the buffer in a PassThrough stream so it can be used as an upload body.
                const bs = new stream.PassThrough();
                bs.end(body);
                resolve(bs);
            }
        );
    });
};
uploadFile()
function uploadFile(data) { // <--- Modified
    fs.readFile("drive_credentials.json", (err, content) => {
        if (err) return console.log("Error loading client secret file:", err);
        authorize(JSON.parse(content), function (auth) {
            const drive = google.drive({ version: "v3", auth });
            const fileMetadata = {
                name: "testphoto.png",
            };
            const media = {
                mimeType: "image/png",
                body: data, // <--- Modified
            };
            drive.files.create(
                {
                    resource: fileMetadata,
                    media: media,
                    fields: "id",
                },
                (err, file) => {
                    if (err) {
                        console.error(err);
                    } else {
                        console.log("File Id: ", file.data.id); // <--- Modified
                    }
                }
            );
        });
    });
}
For testing
For example, to test the above scripts, how about the following script?
async function run() {
    const url = "###";
    const data = await fileHelper.download(url);
    googleDriveHelper.uploadFile(data);
}
References:
Class: stream.PassThrough
google-api-nodejs-client
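As a side note on the asker's last point (streaming without saving or buffering locally), the https.get response is itself a readable stream and can be passed straight to drive.files.create as the media body. A minimal sketch, assuming the same authorize() helper and drive_credentials.json as above:
const https = require("https");
const fs = require("fs");
const { google } = require("googleapis");

function downloadAndUpload(url) {
    https.get(url, (res) => {
        fs.readFile("drive_credentials.json", (err, content) => {
            if (err) return console.log("Error loading client secret file:", err);
            authorize(JSON.parse(content), (auth) => {
                const drive = google.drive({ version: "v3", auth });
                drive.files.create(
                    {
                        resource: { name: "testphoto.png" },
                        media: { mimeType: "image/png", body: res }, // the HTTP response stream is uploaded directly
                        fields: "id",
                    },
                    (err, file) => {
                        if (err) return console.error(err);
                        console.log("File Id: ", file.data.id);
                    }
                );
            });
        });
    });
}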

Read Stream not firing / catching errors

I am trying to create a read stream to use with Cloudinary's upload stream function, and I am also using resumable.js to chunk the initial file. While the stitched file gets written perfectly fine, the read stream / Cloudinary upload function seems to not even be firing, and it fails silently.
router.post("/upload", (req, res, next) => {
console.log("the params are.. ", req.body);
resumable.post(req, function(
status,
filename,
original_filename,
identifier
) {
if (status === "done") {
let timestamp = new Date().getTime().toString();
//stich the chunks
var s = fs.createWriteStream(timestamp + filename);
resumable.write(identifier, s);
var upload_stream = cloudinary.uploader.upload_stream(
{ tags: "basic_sample" },
function(err, image) {
console.log();
console.log("** Stream Upload");
if (err) {
console.warn(err);
}
console.log("* Same image, uploaded via stream");
console.log("* " + image.public_id);
console.log("* " + image.url);
waitForAllUploads(timestamp + filename, err, image);
}
);
fs.createReadStream(timestamp + filename)
.pipe(upload_stream)
.on("error", err => {
console.log(err);
});
s.on("finish", function() {
// Stream upload
console.log("ive finished...");
// delete chunks
setTimeout(() => {
resumable.clean(identifier);
}, 1000);
});
}
res.send(status);
});
});
Here are the resources for what I am using:
https://github.com/cloudinary/cloudinary_npm/blob/master/samples/basic/basic.js
https://github.com/mrawdon/resumable-node
fs.createReadStream(timestamp + filename) accepts a file path, but it looks like you are passing the timestamp as well. Also, waitForAllUploads is not defined. You can try the following code, using just Node and Cloudinary, to test it out.
var upload_stream = cloudinary.uploader.upload_stream({ tags: 'basic_sample' }, function (err, image) {
    console.log("** Stream Upload");
    if (err) { console.warn(err); }
    console.log("* " + image.url);
});
var file_reader = fs.createReadStream('<file path>').pipe(upload_stream);
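If that minimal test works, a likely cause of the silent failure in the original route is timing: the read stream is created before resumable.write has finished flushing the stitched file. A sketch of that part of the route, with the pipe moved inside the write stream's 'finish' handler (upload_stream as defined in the question):
var s = fs.createWriteStream(timestamp + filename);
resumable.write(identifier, s);
s.on("finish", function () {
    // only start reading once the stitched file has been fully written
    fs.createReadStream(timestamp + filename)
        .pipe(upload_stream)
        .on("error", function (err) {
            console.log(err);
        });
    // delete chunks after the stitched file exists
    setTimeout(function () {
        resumable.clean(identifier);
    }, 1000);
});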

AWS Lambda returns 'null' after going through a forEach loop that saves multiple files to an S3 bucket

I have an AWS Lambda function. It goes through an array of URLs, takes a screenshot of each one, and puts it in S3. How do I return the output of this function, the screenshotLinks array that holds all the links to the files saved in S3? I used the callback function at the end, but it just returns null! I want the callback to output all the S3 file links collected in screenshotLinks.
exports.handler = (event, context, callback) => {
    desktopLinks.forEach(function (url, index) {
        https.request(url, function (res) {
            var data = new Stream();
            res.on('data', function (chunk) {
                // Aggregate chunks
                data.push(chunk);
            });
            res.on('end', function () {
                var body = data.read();
                // Once you received all chunks, send to S3
                var currentLink = links[index];
                var linkAddress = encodeURIComponent(currentLink);
                var slashPosition = getPosition(currentLink, '/', 3) + 1;
                var linkName = currentLink.substr(slashPosition, currentLink.length);
                var params = {
                    Bucket: bucket,
                    Key: completeDate + '/screenshots/' + linkName + '.png',
                    Body: body
                };
                s3.putObject(params, function (err, data, callback) {
                    if (err) {
                        console.error(err, err.stack);
                    } else {
                        bunch = params.Bucket + '/' + params.Key;
                        screenshotLinks.push(bunch);
                    }
                });
            });
        }).end();
    });
    callback(null, screenshotLinks);
};
Your code is event driven / asynchronous which means you are calling the callback before screenshotLinks has been populated.
The node http.ClientRequest.end() method finishes sending a request, but that doesn't mean that the response has been received and handled, as that is done by an asynchronous event handler. However, the callback is executed immediately after the call to request.end(), which is just after the request has been fired off, therefore screenshotLinks is empty.
You need to execute your callback from the callback you pass to s3.putObject. I suggest you pass your callback a response/result object that indicates whether the putObject succeeded and contains the url it relates to and either an error message or a screenshotLink, e.g. something like this:
s3.putObject(params, function (err, data) {
    var s3Response = {};
    s3Response.url = url;
    if (err) {
        s3Response.success = false;
        s3Response.error = err;
        console.error(err, err.stack);
    } else {
        var bunch = params.Bucket + '/' + params.Key;
        s3Response.success = true;
        s3Response.screenshotLink = bunch;
    }
    // call the Lambda handler's callback here, once the put has actually completed
    callback(null, s3Response);
});
I would like to suggest you use the Node 8.10 runtime.
ref: https://aws.amazon.com/blogs/compute/node-js-8-10-runtime-now-available-in-aws-lambda/
Then your entry point should be:
exports.<function_name> = async (event) => { ... };
Then:
let s3 = new AWS.S3({ region: process.env.AWS_REGION, apiVersion: '2006-03-01' });
let params = {
    Bucket: /* a path to bucket (string) */,
    Key: name /* string */,
    Body: /* supported types (Buffer, Typed Array, Blob, String, ReadableStream) */,
    ACL: 'public-read',
    ContentType: 'image/png'
};
try {
    let s3Response = await s3.upload(params).promise();
    // if it succeeds
    console.log(`File uploaded to S3 at ${s3Response.Bucket} bucket. File location: ${s3Response.Location}`);
} catch (ex) { // if an error occurred
    console.error(ex);
}
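Putting the two suggestions together, a rough sketch of the whole handler on the Node 8.10 runtime might look like this (desktopLinks, bucket, completeDate and getPosition are assumed to exist as in the question; this is an illustration, not a drop-in fix):
const AWS = require('aws-sdk');
const https = require('https');
const s3 = new AWS.S3({ apiVersion: '2006-03-01' });

exports.handler = async (event) => {
    // one promise per URL; each resolves with the S3 location of its screenshot
    const uploads = desktopLinks.map((url) => new Promise((resolve, reject) => {
        https.get(url, (res) => {
            const chunks = [];
            res.on('data', (chunk) => chunks.push(chunk));
            res.on('end', () => {
                const linkName = url.substr(getPosition(url, '/', 3) + 1);
                const params = {
                    Bucket: bucket,
                    Key: completeDate + '/screenshots/' + linkName + '.png',
                    Body: Buffer.concat(chunks)
                };
                s3.upload(params).promise()
                    .then(() => resolve(params.Bucket + '/' + params.Key))
                    .catch(reject);
            });
        }).on('error', reject);
    }));

    // the handler only returns after every upload has finished,
    // so the caller receives the full list of links instead of null
    return Promise.all(uploads);
};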

Mock code in another module

I want to mock Amazon AWS S3's getObject method.
The code I want to test is the following. It's in helper.js:
var AWS = require('aws-sdk');
var s3 = new AWS.S3();

exports.get_data_and_callback = function (callback, extra) {
    s3.getObject({ Bucket: SRC_BUCKET, Key: SRC_KEY },
        function (err, data) {
            if (err != null) {
                console.log("Couldn't retrieve object: " + err);
            } else {
                console.log("Loaded " + data.ContentLength + " bytes");
                callback(data, extra);
            }
        });
};
In test/helper_test.js I wrote a test that should mock the module AWS
var assert = require('assert');
var mockery = require('mockery');

describe("helper", function () {
    it('loads and returns data from S3 to a callback', function () {
        mockery.enable();
        var fakeaws = {
            S3: function () {
                return {
                    getObject: function (params, callback) {
                        callback(null, "hello");
                    }
                };
            }
        };
        mockery.registerSubstitute('aws-sdk', fakeaws);

        function replace_function(err, data) {
            console.log(data);
        }

        require('../helper.js').get_data_and_callback(replace_function, null);
    });
});
When I require AWS in the test file test/helper_test.js like this:
aws = require('aws-sdk');
s3 = new aws.S3;
s3.getObject(replace_function)
then my code works; it prints out "hello".
BUT the execution of require('../helper.js').get_data_and_callback(replace_function, null);
doesn't work as expected: AWS stays the same, it's not replaced with my fakeaws. What am I doing wrong? Do you maybe have other solutions for replacing S3? Thanks.
We have created an aws-sdk-mock npm module which mocks out all the AWS SDK services and methods. https://github.com/dwyl/aws-sdk-mock
It's really easy to use. Just call AWS.mock with the service, method and a stub function.
AWS.mock('S3', 'getObject', function (params, callback) {
    callback(null, 'success');
});
Then restore the methods after your tests by calling:
AWS.restore('S3', 'getObject');
This works for every service and method in the aws-sdk.
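Applied to the helper in the question, a test could look roughly like this (a sketch; note that AWS.mock has to be registered before helper.js is required, because helper.js creates its S3 client at require time):
var assert = require('assert');
var AWS = require('aws-sdk-mock');

describe('helper', function () {
    afterEach(function () {
        AWS.restore('S3', 'getObject');
    });

    it('loads and returns data from S3 to a callback', function (done) {
        AWS.mock('S3', 'getObject', function (params, callback) {
            // the shape of the fake response is an assumption for illustration
            callback(null, { Body: 'hello', ContentLength: 5 });
        });

        require('../helper.js').get_data_and_callback(function (data, extra) {
            assert.equal(data.Body, 'hello');
            done();
        }, null);
    });
});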

NodeJs - How to convert chunks of data to new Buffer?

In NodeJS, I have chunks of data from a file upload that saved the file in parts. I'd like to combine these with new Buffer() and then upload the result to Amazon S3.
This would work if there were only one chunk, but when there are multiple, I cannot figure out how to use new Buffer().
Currently my solution is to write the chunks of data into a real file on my own server, then send the path of that file to Amazon S3.
How can I skip the file-creation step and send the buffer to Amazon S3 directly?
I guess you need to use streaming-s3. For example:
var streamingS3 = require('streaming-s3');

var uploadFile = function (fileReadStream, awsHeader, cb) {
    // set options for the streaming module
    var options = {
        concurrentParts: 2,
        waitTime: 20000,
        retries: 2,
        maxPartSize: 10 * 1024 * 1024
    };

    // create the streaming uploader to send the file to S3
    var uploader = new streamingS3(fileReadStream, aws.accessKey, aws.secretKey, awsHeader, options);

    // start uploading
    uploader.begin(); // important if a callback is not provided.

    // handle these events
    uploader.on('data', function (bytesRead) {
        console.log(bytesRead, ' bytes read.');
    });

    uploader.on('part', function (number) {
        console.log('Part ', number, ' uploaded.');
    });

    // All parts uploaded, but upload not yet acknowledged.
    uploader.on('uploaded', function (stats) {
        console.log('Upload stats: ', stats);
    });

    uploader.on('finished', function (response, stats) {
        console.log(response);
        cb(null, response);
    });

    uploader.on('error', function (err) {
        console.log('Upload error: ', err);
        cb(err);
    });
};
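For example, the function above might be called like this (a sketch; the file path, bucket name, key, and the aws credentials object referenced inside uploadFile are placeholders/assumptions):
var fs = require('fs');

var awsHeader = {
    Bucket: 'your-bucket-name',   // placeholder
    Key: 'uploads/video.mp4',     // placeholder
    ContentType: 'video/mp4'
};

uploadFile(fs.createReadStream('/path/to/video.mp4'), awsHeader, function (err, response) {
    if (err) return console.error('Upload failed:', err);
    console.log('Upload finished:', response);
});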
