Within my Cordova app, I am downloading arbitrary files such as images or video files. This is done with the Cordova file-transfer plugin and the "Range" header, because I need to download the files in parts.
My problem is that I want to merge the several small "byte" files back together into the original file they once were, so I can use that file. Every time I read the resulting parts as binary strings via the FileReader and write them together into a new file, that file ends up a lot larger than all of the original parts put together, and the resulting file is unusable.
Any help is appreciated.
Here is my code so far (long and ugly):
document.addEventListener('deviceready', deviceready, false);
var App;
var finishedFileUrl = "";
var async = {
sequence: function(items, callback) {
var def = $.Deferred(),
deferrers = [$.Deferred()];
for(var i = 0; i < items.length; i++) {
(function (n) {
deferrers[n + 1] = $.Deferred();
deferrers[n].always(function() {
callback(items[n], deferrers[n + 1]);
});
})(i);
}
deferrers[items.length].always(function() {
def.resolve();
});
deferrers[0].resolve();
return def.promise();
}
}
var aSmallImageArray = [
'' // Put URL to JPG accessible with Range Header Request here
];
var aByteSizeImageArray = [];
function formatDownloadArray(fileSize) {
for(var j = 1000; j <= fileSize; j += 1000) {
aByteSizeImageArray.push(j);
}
aByteSizeImageArray.push(j);
}
function deviceready() {
console.log('dv ready');
function registerHandlers() {
App = new DownloadApp();
formatDownloadArray(XXXXX); // XXXXX should be size of JPG in bytes
document.getElementById("startDl").onclick = function() {
var that = this;
console.log("load button clicked");
var folderName = "testimagefolder";
// sequence call
async.sequence(aByteSizeImageArray, function(currentBytes, iter) {
var filePath = aSmallImageArray[0];
var fileName = aSmallImageArray[0].substr(52,99) + currentBytes;
console.log(filePath);
console.log(fileName);
console.log("Starting with: " + fileName);
var uri = encodeURI(filePath);
var folderName = "testimagefolder";
document.getElementById("statusPlace").innerHTML = "<br/>Loading: " + uri;
App.load(currentBytes, uri, folderName, fileName,
function progress (percentage) {
document.getElementById("statusPlace").innerHTML = "<br/>" + percentage + "%";
},
function success (entry) {
console.log("Entry: " + entry);
document.getElementById("statusPlace").innerHTML = "<br/>Image saved to: " + App.filedir;
console.log("DownloadApp.filedir: " + App.filedir);
iter.resolve();
},
function error () {
document.getElementById("statusPlace").innerHTML = "<br/>Failed load image: " + uri;
iter.resolve();
}
);
}).then(function afterAsync () {
console.log("ASYNC DONE");
var ohNoItFailed = function ohNoItFailed (exeperro) {
console.log(exeperro);
}
// now we merge the fileparts into one file to show it
window.requestFileSystem(LocalFileSystem.PERSISTENT, 0, function (FileSystem) {
FileSystem.root.getDirectory(folderName, {create: true, exclusive: false}, function itSuccessed (Directory) {
Directory.getFile(aSmallImageArray[0].substr(52,99), {create: true, exclusive: false}, function itSuccessedAgain (fileEntry) {
finishedFileUrl = fileEntry.toURL();
var directoryReader = Directory.createReader();
var allFiles = directoryReader.readEntries(function succesReadDir (fileEntries) {
async.sequence(fileEntries, function(currentFile, iterThis) {
currentFile.file(function (theActualFile) {
var myFileReader = new FileReader();
myFileReader.onload = function (content) {
console.log('FileReader onload event fired!');
console.log('File Content should be: ' + content.target.result);
fileEntry.createWriter(
function mergeImage (writer) {
writer.onwrite = function (evnt) {
console.log("Writing successful!");
iterThis.resolve();
}
writer.seek(writer.length);
writer.write(content.target.result);
}, ohNoItFailed);
};
myFileReader.readAsBinaryString(theActualFile);
}, ohNoItFailed);
}).then(function afterAsyncTwo () {
console.log("NOW THE IMAGE SHOULD BE TAKEN FROM THIS PATH: " + finishedFileUrl);
//window.requestFileSystem(LocalFileSystem.PERSISTENT, 0, function (FileSystem) {
//FileSystem.root.getDirectory(folderName, {create: true, exclusive: false}, function itSuccessed (Directory) {
//Directory.getFile(aSmallImageArray[0].substr(52,99), {create: true, exclusive: false}, function itSuccessedAgain (fileEntry) {
//fileEntry.createWriter(
document.getElementById("image_here").src = finishedFileUrl;
});
}, ohNoItFailed);
}, ohNoItFailed);
}, ohNoItFailed);
}, ohNoItFailed);
});
};
}
registerHandlers();
}
var DownloadApp = function() {}
DownloadApp.prototype = {
filedir: "",
load: function(currentBytes, uri, folderName, fileName, progress, success, fail) {
var that = this;
that.progress = progress;
that.success = success;
that.fail = fail;
filePath = "";
that.getFilesystem(
function(fileSystem) {
console.log("GotFS");
that.getFolder(fileSystem, folderName, function(folder) {
filePath = folder.toURL() + fileName;
console.log("FILEPATH: " + filePath);
console.log("URI: " + uri);
that.transferFile(currentBytes, uri, filePath, progress, success, fail);
}, function(error) {
console.log("Failed to get folder: " + error.code);
typeof that.fail === 'function' && that.fail(error);
});
},
function(error) {
console.log("Failed to get filesystem: " + error.code);
typeof that.fail === 'function' && that.fail(error);
}
);
},
getFilesystem: function (success, fail) {
window.requestFileSystem = window.requestFileSystem || window.webkitRequestFileSystem;
window.requestFileSystem(LocalFileSystem.PERSISTENT, 0, success, fail);
},
getFolder: function (fileSystem, folderName, success, fail) {
fileSystem.root.getDirectory(folderName, {create: true, exclusive: false}, success, fail)
},
transferFile: function (currentBytes, uri, filePath, progress, success, fail) {
var that = this;
that.progress = progress;
that.success = success;
that.fail = fail;
console.log("here we go");
console.log("filePath before Request: " + filePath);
var previousBytes = currentBytes - 1000;
var transfer = new FileTransfer();
transfer.onprogress = function(progressEvent) {
if (progressEvent.lengthComputable) {
var perc = Math.floor(progressEvent.loaded / progressEvent.total * 100);
typeof that.progress === 'function' && that.progress(perc); // progression on scale 0..100 (percentage) as number
} else {
}
};
transfer.download(
uri,
filePath,
function success (entry) {
console.log("File saved to: " + entry.toURL());
typeof that.success === 'function' && that.success(entry);
},
function errorProblem(error) {
console.log("An error has occurred: Code = " + error.code);
console.log("download error source " + error.source);
console.log("download error target " + error.target);
console.log("download error code " + error.code);
typeof that.fail === 'function' && that.fail(error);
},
true,
{
headers: {
"Range": "bytes=" + previousBytes + "-" + currentBytes
}
}
);
}
}
The async code is by Stack Overflow user Paul Facklam. Thanks a lot!
You can build a Blob from other Blobs, like the ones you are using FileReader on now. (Files are Blobs.)
// put three blobs into a fourth:
var b=new Blob([new Blob(["hello"]), new Blob([" "]), new Blob(["world"])]);
// verify the blob has the data we expect:
var fr=new FileReader();
fr.onload=function(){alert(this.result);};
fr.readAsBinaryString(b); // shows: "hello world"
The binaryString flavor is used here to show how these low-order strings stack up, but the actual new Blob instance should contain all the original (arbitrary) bytes from the source Blobs, even if they aren't composed of simple strings.
Using readAsArrayBuffer() instead of readAsBinaryString() did the trick!
So instead of:
myFileReader.readAsBinaryString(theActualFile);
I did:
myFileReader.readAsArrayBuffer(theActualFile);
And the resulting image file is usable.
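For reference, here is a minimal sketch of the corrected merge step (appendPart is a hypothetical helper; it assumes the part entries are processed in the right order and that fileEntry points at the merged output file):
// Sketch only: read each part as an ArrayBuffer and append it to the target file.
function appendPart(fileEntry, partEntry, done, fail) {
  partEntry.file(function (file) {
    var reader = new FileReader();
    reader.onload = function (evt) {
      fileEntry.createWriter(function (writer) {
        writer.onwrite = done;
        writer.onerror = fail;
        writer.seek(writer.length);                  // always append at the current end of the file
        writer.write(new Blob([evt.target.result])); // the ArrayBuffer keeps the raw bytes intact
      }, fail);
    };
    reader.readAsArrayBuffer(file);                  // not readAsBinaryString
  }, fail);
}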
Related
I made a YouTube API upload app. It works great with small video files, but with larger files my app crashes. The exception happens when I try to get the video file with fetch().
Question: Is there a way I can fetch a large file in React Native and feed it into the YouTube API in smaller chunks?
Here is my fetch code:
const fetchResponse = await fetch(videoUri);
const blob = await fetchResponse.blob();
var file = new File([blob], "video.mp4", {type: "video/mp4"});
My YouTube upload code is taken from the following Git repos, which supposedly support multipart upload as well:
https://github.com/youtube/api-samples/blob/master/javascript/cors_upload.js and
https://github.com/youtube/api-samples/blob/master/javascript/upload_video.js
Here is my full upload code:
uploadVideo = async function() {
var match = this.state.match.value;
var video = match.mergedVideo;
var players = match.players;
var scoreboard = this.state.match.value.scoreboard;
var points = match.points;
var title = players[0].name + " vs. " + players[1].name + " " + scoreboard;
var description = this.descriptionBuilder(points, match.videos);
/*const fetchResponse = await fetch(video);
const blob = await fetchResponse.blob();
var file = new File([blob], "video.mp4", {type: "video/mp4"});
console.log(file);*/
const file = await DocumentPicker.pick({
type: [DocumentPicker.types.video],
});
var metadata = {
snippet: {
title: title,
description: description,
tags: ['youtube-cors-upload'],
categoryId: 22
},
status: {
privacyStatus: 'unlisted'
}
};
var uploader = new MediaUploader({
baseUrl: 'https://www.googleapis.com/upload/youtube/v3/videos',
file: file,
token: this.state.user.auth.accessToken,
metadata: metadata,
chunkSize: 1024 * 1024,
params: {
part: Object.keys(metadata).join(',')
},
onError: function(data) {
console.log(data);
var message = data;
try {
var errorResponse = JSON.parse(data);
message = errorResponse.error.message;
} finally {
alert(message);
}
}.bind(this),
onProgress: function(data) {
var currentTime = Date.now();
var bytesUploaded = data.loaded;
var totalBytes = data.total;
var bytesPerSecond = bytesUploaded / ((currentTime - window.uploadStartTime) / 1000);
var estimatedSecondsRemaining = (totalBytes - bytesUploaded) / bytesPerSecond;
var percentageComplete = (bytesUploaded * 100) / totalBytes;
this.setState({ youtubeUploadProgress: percentageComplete / 100});
console.log("Uploaded: " + bytesUploaded + " | Total: " + totalBytes + " | Percentage: " + percentageComplete + " | Esitmated seconds remaining: " + estimatedSecondsRemaining);
}.bind(this),
onComplete: function(data) {
console.log("Complete");
alert("Upload successful!");
this.setState({ youtubeUploadProgress: 0});
}.bind(this)
});
window.uploadStartTime = Date.now();
uploader.upload();
}
And this is my cors_upload.js as a React Native class module:
import React, { Component } from 'react';
export default class MediaUploader extends Component {
constructor(props) {
super(props);
const obj = this;
const DRIVE_UPLOAD_URL = 'https://www.googleapis.com/upload/drive/v2/files/';
var options = props;
var noop = function() {};
this.file = options.file;
this.contentType = options.contentType || this.file.type || 'application/octet-stream';
this.metadata = options.metadata || {
'title': this.file.name,
'mimeType': this.contentType
};
this.token = options.token;
this.onComplete = options.onComplete || noop;
this.onProgress = options.onProgress || noop;
this.onError = options.onError || noop;
this.offset = options.offset || 0;
this.chunkSize = options.chunkSize || 0;
//this.retryHandler = new RetryHandler();
//this.retryHandler = new obj.RetryHandler();
this.interval = 1000; // Start at one second
this.maxInterval = 60 * 1000;
this.url = options.url;
if (!this.url) {
var params = options.params || {};
params.uploadType = 'resumable';
//this.url = this.buildUrl_(options.fileId, params, options.baseUrl);
this.url = obj.buildUrl_(options.fileId, params, options.baseUrl);
}
this.httpMethod = options.fileId ? 'PUT' : 'POST';
}
retry = function(fn) {
setTimeout(fn, this.interval);
this.interval = this.nextInterval_();
};
reset = function() {
this.interval = 1000;
};
nextInterval_ = function() {
var interval = this.interval * 2 + this.getRandomInt_(0, 1000);
return Math.min(interval, this.maxInterval);
};
getRandomInt_ = function(min, max) {
return Math.floor(Math.random() * (max - min + 1) + min);
};
buildQuery_ = function(params) {
params = params || {};
return Object.keys(params).map(function(key) {
return encodeURIComponent(key) + '=' + encodeURIComponent(params[key]);
}).join('&');
};
buildUrl_ = function(id, params, baseUrl) {
var url = baseUrl || DRIVE_UPLOAD_URL;
if (id) {
url += id;
}
var query = this.buildQuery_(params);
if (query) {
url += '?' + query;
}
return url;
};
upload = function() {
//var self = this;
console.log("UPLOAD called", this.file.size);
var xhr = new XMLHttpRequest();
xhr.open(this.httpMethod, this.url, true);
xhr.setRequestHeader('Authorization', 'Bearer ' + this.token);
xhr.setRequestHeader('Content-Type', 'application/json');
xhr.setRequestHeader('X-Upload-Content-Length', this.file.size);
xhr.setRequestHeader('X-Upload-Content-Type', this.contentType);
xhr.onload = function(e) {
console.log("ON LOAD CALLED");
if (e.target.status < 400) {
var location = e.target.getResponseHeader('Location');
this.url = location;
this.sendFile_();
} else {
this.onUploadError_(e);
}
}.bind(this);
xhr.onerror = this.onUploadError_.bind(this);
xhr.send(JSON.stringify(this.metadata));
};
sendFile_ = function() {
console.log("SEND FILE CALLED");
var content = this.file;
var end = this.file.size;
if (this.offset || this.chunkSize) {
// Only bother to slice the file if we're either resuming or uploading in chunks
if (this.chunkSize) {
end = Math.min(this.offset + this.chunkSize, this.file.size);
}
content = content.slice(this.offset, end);
}
var xhr = new XMLHttpRequest();
xhr.open('PUT', this.url, true);
xhr.setRequestHeader('Content-Type', this.contentType);
xhr.setRequestHeader('Content-Range', 'bytes ' + this.offset + '-' + (end - 1) + '/' + this.file.size);
xhr.setRequestHeader('X-Upload-Content-Type', this.file.type);
if (xhr.upload) {
xhr.upload.addEventListener('progress', this.onProgress);
}
xhr.onload = this.onContentUploadSuccess_.bind(this);
xhr.onerror = this.onContentUploadError_.bind(this);
xhr.send(content);
};
resume_ = function() {
var xhr = new XMLHttpRequest();
xhr.open('PUT', this.url, true);
xhr.setRequestHeader('Content-Range', 'bytes */' + this.file.size);
xhr.setRequestHeader('X-Upload-Content-Type', this.file.type);
if (xhr.upload) {
xhr.upload.addEventListener('progress', this.onProgress);
}
xhr.onload = this.onContentUploadSuccess_.bind(this);
xhr.onerror = this.onContentUploadError_.bind(this);
xhr.send();
};
extractRange_ = function(xhr) {
var range = xhr.getResponseHeader('Range');
if (range) {
this.offset = parseInt(range.match(/\d+/g).pop(), 10) + 1;
}
};
onContentUploadSuccess_ = function(e) {
if (e.target.status == 200 || e.target.status == 201) {
this.onComplete(e.target.response);
} else if (e.target.status == 308) {
this.extractRange_(e.target);
this.reset();
this.sendFile_();
}
};
onContentUploadError_ = function(e) {
if (e.target.status && e.target.status < 500) {
this.onError(e.target.response);
} else {
this.retry(this.resume_.bind(this));
}
};
onUploadError_ = function(e) {
this.onError(e.target.response); // TODO - Retries for initial upload
};
}
UPDATE 1:
To avoid using fetch() I decided to use React Native Document Picker. Now I can select the video file and pass it to the MediaUploader, following this guide: https://alishavineeth.medium.com/upload-a-video-from-a-mobile-device-to-youtube-using-react-native-eb2fa54a7445
Now, if I set the chunkSize option I get a .slice exception because the object structure doesn't match. If I pass the file without the chunkSize option, the metadata uploads to YouTube but the video status stays stuck on processing without any other errors; the video upload itself never begins.
DocumentPicker responds with the following object after I select my file:
[{"fileCopyUri": "content://com.android.providers.media.documents/document/video%3A7853", "name": "video_1629795128339.mp4", "size": 192660773, "type": "video/mp4", "uri": "content://com.android.providers.media.documents/document/video%3A7853"}]
UPDATE 2:
I managed to fix my DocumentPicker file issue (from Update 1) by switching from React Native Document Picker to Expo Document Picker.
Now I am able to select large files and call the upload function: the metadata uploads and the video file begins to upload as well, but the app crashes during the upload. If I set the chunkSize option on the MediaUploader object I get [TypeError: content.slice is not a function. (In 'content.slice(this.offset, end)', 'content.slice' is undefined)]
Expo Document Picker responds with the following object after I select my video file:
{"name": "video_1629801588164.mp4", "size": 5799179, "type": "video/mp4", "uri": "file:///data/user/0/com.tennis.rec/cache/DocumentPicker/8b350fbf-1b66-4a78-a10f-b61eb2ed3032.mp4"}
UPDATE 3 - RESOLVED!!!
The chunked upload is working now! I modified my cors_upload.js where the chunkSize is evaluated and the content is sliced, using the following code:
if (this.offset || this.chunkSize) {
// Only bother to slice the file if we're either resuming or uploading in chunks
if (this.chunkSize) {
end = Math.min(this.offset + this.chunkSize, this.file.size);
}
console.log("CONTENT SLICE", this.offset, end, this.file.size);
//content = content.slice(this.offset, end);
var base64 = await RNFS.read(this.file.uri, this.chunkSize, this.offset, 'base64');
content = Uint8Array.from(atob(base64), c => c.charCodeAt(0));
}
I added React Native File System (react-native-fs) and I am using its read() function to load each chunk as base64 and convert it back to a byte array.
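For context, this is roughly how the whole modified sendFile_ could look. This is a sketch only, under a few assumptions not shown above: the method is made async, RNFS is react-native-fs, a base64 decoder such as a global atob (or the base-64 package) is available, and chunked mode (this.chunkSize > 0) is in use:
// Sketch under the assumptions above, not the exact code from the repo.
sendFile_ = async function () {
  var end = Math.min(this.offset + this.chunkSize, this.file.size);
  // Read only the current chunk from disk as base64, then decode it to raw bytes.
  var base64 = await RNFS.read(this.file.uri, end - this.offset, this.offset, 'base64');
  var content = Uint8Array.from(atob(base64), function (c) { return c.charCodeAt(0); });
  var xhr = new XMLHttpRequest();
  xhr.open('PUT', this.url, true);
  xhr.setRequestHeader('Content-Type', this.contentType);
  xhr.setRequestHeader('Content-Range', 'bytes ' + this.offset + '-' + (end - 1) + '/' + this.file.size);
  if (xhr.upload) {
    xhr.upload.addEventListener('progress', this.onProgress);
  }
  xhr.onload = this.onContentUploadSuccess_.bind(this);
  xhr.onerror = this.onContentUploadError_.bind(this);
  xhr.send(content);
};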
I am trying to get Strophe.js-based XMPP file transfer to work. I can log in to my Openfire server, and I can send and receive messages fine, but I am having trouble with file transfer.
HTML:
<form name='file_form' class="panel-body">
<input type="file" id="file" name="file[]" />
<input type='button' id='btnSendFile' value='sendFile' />
<output id="list"></output>
</form>
JavaScript file:
// file
var sid = null;
var chunksize;
var data;
var file = null;
var aFileParts, mimeFile, fileName;
function sendFileClick() {
file =$("#file")[0].files[0];
sendFile(file);
readAll(file, function(data) {
log("handleFileSelect:");
log(" >data="+data);
log(" >data.len="+data.length);
});
}
function sendFile(file) {
var to = $('#to').get(0).value;
var filename = file.name;
var filesize = file.size;
var mime = file.type;
chunksize = filesize;
sid = connection._proto.sid;
log('sendFile: to=' + to);
// send a stream initiation
connection.si_filetransfer.send(to, sid, filename, filesize, mime, function(err) {
fileTransferHandler(file, err);
});
}
function fileTransferHandler(file, err) {
log("fileTransferHandler: err=" + err);
if (err) {
return console.log(err);
}
var to = $('#to').get(0).value;
chunksize = file.size;
chunksize = 20 * 1024;
// successfully initiated the transfer, now open the band
connection.ibb.open(to, sid, chunksize, function(err) {
log("ibb.open: err=" + err);
if (err) {
return console.log(err);
}
readChunks(file, function(data, seq) {
sendData(to, seq, data);
});
});
}
function readAll(file, cb) {
var reader = new FileReader();
// If we use onloadend, we need to check the readyState.
reader.onloadend = function(evt) {
if (evt.target.readyState == FileReader.DONE) { // DONE == 2
cb(evt.target.result);
}
};
reader.readAsDataURL(file);
}
function readChunks(file, callback) {
var fileSize = file.size;
var chunkSize = 20 * 1024; // bytes
var offset = 0;
var block = null;
var seq = 0;
var foo = function(evt) {
if (evt.target.error === null) {
offset += chunkSize; //evt.target.result.length;
seq++;
callback(evt.target.result, seq); // callback for handling read chunk
} else {
console.log("Read error: " + evt.target.error);
return;
}
if (offset >= fileSize) {
console.log("Done reading file");
return;
}
block(offset, chunkSize, file);
}
block = function(_offset, length, _file) {
log("_block: length=" + length + ", _offset=" + _offset);
var r = new FileReader();
var blob = _file.slice(_offset, length + _offset);
r.onload = foo;
r.readAsDataURL(blob);
}
block(offset, chunkSize, file);
}
function sendData(to, seq, data) {
// stream is open, start sending chunks of data
connection.ibb.data(to, sid, seq, data, function(err) {
log("ibb.data: err=" + err);
if (err) {
return console.log(err);
}
// ... repeat calling data
// keep sending until you've reached the end of the file
connection.ibb.close(to, sid, function(err) {
log("ibb.close: err=" + err);
if (err) {
return console.log(err);
}
// done
});
});
}
$('#btnSendFile').bind('click', function() {
console.log('File clicked:');
sendFileClick();
});
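As the comments in sendData note, ibb.data has to be called once per chunk, and ibb.close should only be sent after the last chunk has gone out. A sketch of that sequencing (the isLast flag and the bookkeeping are hypothetical additions, not code from the plunker):
// Sketch only: close the in-band bytestream after the final chunk has been acknowledged.
var pendingChunks = 0;
function sendChunk(to, seq, data, isLast) {
  pendingChunks++;
  connection.ibb.data(to, sid, seq, data, function (err) {
    log("ibb.data: err=" + err);
    if (err) { return console.log(err); }
    pendingChunks--;
    if (isLast && pendingChunks === 0) {
      connection.ibb.close(to, sid, function (err) {
        log("ibb.close: err=" + err);
      });
    }
  });
}
readChunks would then need to tell its callback whether the current chunk is the last one.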
Full code is based on:
Complete example of Strophe.js file transfer
http://plnkr.co/edit/fYpXo1mFRWPxrLlgr123 (the source can be downloaded there, but it has errors). I changed the sendFileClick function.
I am getting:
ibb.open: err=Error: feature-not-implemented
Why am I getting this error?
I have managed to set up code to write to and read from a text file.
The problem I've run into is when I try to read the file after writing it.
It returns a null value.
I am assuming this is due to the write not being complete when the read starts.
How do I make the read wait for the write to finish?
MyFile Class
MyFile = function() {
};
MyFile.prototype.readFile = function(fileDir, fileName) {
window.resolveLocalFileSystemURL(fileDir, function(dir) {
dir.getFile(fileName, {create: true}, function(fileEntry) {
fileEntry.file(function(file) {
var reader = new FileReader();
reader.onerror = function(evt) {
console.log("Reading " + file.name + " Failed");
};
reader.onloadstart = function(evt) {
console.log("Read " + file.name + " Starting");
};
reader.onload = function(evt) {
console.log("Read " + file.name + " Successful");
window.localStorage.setItem(file.name + "Read", evt.target.result);
console.log(evt.target.result);
};
reader.onloadend = function(evt) {
console.log("Read " + file.name + " Ending");
};
reader.readAsText(file);
}, fail);
}, fail);
}, fail);
var strText = window.localStorage.getItem(fileName + "Read");
console.log(strText);
window.localStorage.removeItem(fileName + "Read");
return strText;
};
MyFile.prototype.overWriteFile = function(fileDir, fileName, strText) {
window.resolveLocalFileSystemURL(fileDir, function(dir) {
dir.getFile(fileName, {create: true}, function(file) {
if (!file) {
return;
} else {
console.log("Overwrite File Name: " + file.name);
}
file.createWriter(function(fileWriter) {
fileWriter.onwriteend = function(evt) {
if (fileWriter.length === 0) {
//fileWriter has been reset, write file
fileWriter.write(strText);
} else {
//file has been overwritten with blob
//use callback or resolve promise
console.log("Write " + file.name + " Success");
console.log(strText);
}
};
fileWriter.truncate(0);
}, fail);
});
});
};
Calling Write and Read
var myf = new MyFile();
myf.overWriteFile("Directory", "test.txt","Test.txt Contents");
console.log(myf.readFile("Directory", "test.txt"));
If you don't mind jQuery, you can try using a custom event that you fire when the file is done writing.
MyFile.prototype.overWriteFile = function(fileDir, fileName, strText) {
//added self var to be accessed from within createWriter finished section
var self = this
window.resolveLocalFileSystemURL(fileDir, function(dir) {
dir.getFile(fileName, {create: true}, function(file) {
if (!file) {
return;
} else {
console.log("Overwrite File Name: " + file.name);
}
file.createWriter(function(fileWriter) {
fileWriter.onwriteend = function(evt) {
if (fileWriter.length === 0) {
//fileWriter has been reset, write file
fileWriter.write(strText);
} else {
//file has been overwritten with blob
//use callback or resolve promise
//triggers the finishedMyFileWrite on the MyFile instance
jQuery(self).trigger('finishedMyFileWrite');
console.log("Write " + file.name + " Success");
console.log(strText);
}
};
fileWriter.truncate(0);
}, fail);
});
});
};
var myf = new MyFile();
jQuery(myf).on('finishedMyFileWrite', function finishedWrite(){
console.log(this.readFile("Directory", "test.txt"));
});
myf.overWriteFile("Directory", "test.txt","Test.txt Contents");
I added a self variable to allow the custom event to target the specific instance of MyFile, then I trigger an event called finishedMyFileWrite after the file has been written. The myf object is set to listen for this event before writing. I am not able to test this as I don't have a Cordova emulator, but the theory should work.
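If you would rather avoid the jQuery dependency, the same idea can be expressed with a plain Promise, building on the "use callback or resolve promise" comment in the writer code. This is only a sketch; overWriteFileAsync is a hypothetical variant, and fileDir, fileName and fail behave as in the original code:
MyFile.prototype.overWriteFileAsync = function (fileDir, fileName, strText) {
  return new Promise(function (resolve, reject) {
    window.resolveLocalFileSystemURL(fileDir, function (dir) {
      dir.getFile(fileName, {create: true}, function (fileEntry) {
        fileEntry.createWriter(function (fileWriter) {
          fileWriter.onwriteend = function () {
            if (fileWriter.length === 0) {
              fileWriter.write(strText); // writer was just truncated, now write the text
            } else {
              resolve();                 // write finished, it is now safe to read the file
            }
          };
          fileWriter.onerror = reject;
          fileWriter.truncate(0);
        }, reject);
      }, reject);
    }, reject);
  });
};
// Usage: only read once the write has resolved.
var myf = new MyFile();
myf.overWriteFileAsync("Directory", "test.txt", "Test.txt Contents").then(function () {
  myf.readFile("Directory", "test.txt");
});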
I'm trying to upload a file from a Cordova app to a Node server. I am getting this exception on the server side:
Cannot read property 'file' of undefined
I'm a newbie in JavaScript and I need help. Thanks.
This is the client code:
<script type="text/javascript" charset="utf-8">
// Wait for device API libraries to load
//
document.addEventListener("deviceready", onDeviceReady, false);
// device APIs are available
//
function onDeviceReady() {
// Retrieve image file location from specified source
navigator.camera.getPicture(
uploadPhoto,
function(message) { alert('get picture failed'); },
{
quality : 50,
destinationType : navigator.camera.DestinationType.FILE_URI,
sourceType : navigator.camera.PictureSourceType.PHOTOLIBRARY
}
);
}
function uploadPhoto(imageURI) {
var options = new FileUploadOptions();
options.fileKey="file";
options.fileName=imageURI.substr(imageURI.lastIndexOf('/')+1);
options.mimeType="image/jpeg";
var params = {};
params.value1 = "test";
params.value2 = "param";
options.params = params;
var ft = new FileTransfer();
ft.upload(imageURI, encodeURI("http://10.20.160.38:3000/images"), win, fail, options);
}
function win(r) {
console.log("Code = " + r.responseCode);
console.log("Response = " + r.response);
console.log("Sent = " + r.bytesSent);
}
function fail(error) {
alert("An error has occurred: Code = " + error.code);
console.log("upload error source " + error.source);
console.log("upload error target " + error.target);
}
</script>
And this is the server code:
(function () {
var serverURL = "http://192.168.1.4:3000", // IMPORTANT: This URL needs to be accessible from your phone for testing.
$scroller = $('.scroller'),
// Get List of images from server
getFeed = function () {
$scroller.empty();
$.ajax({url: serverURL + "/images", dataType: "json", type: "GET"}).done(function (data) {
var l = data.length;
for (var i = 0; i < l; i++) {
$scroller.append('<img src="' + serverURL + '/' + data[i].fileName + '"/>');
}
});
},
// Upload image to server
upload = function (imageURI) {
var ft = new FileTransfer(),
options = new FileUploadOptions();
options.fileKey = "file";
options.fileName = 'filename.jpg'; // We will use the name auto-generated by Node at the server side.
options.mimeType = "image/jpeg";
options.chunkedMode = false;
options.params = { // Whatever you populate options.params with, will be available in req.body at the server-side.
"description": "Uploaded from my phone"
};
ft.upload(imageURI, serverURL + "/images",
function (e) {
getFeed();
},
function (e) {
alert("Upload failed");
}, options);
},
// Take a picture using the camera or select one from the library
takePicture = function (e) {
var options = {
quality: 45,
targetWidth: 1000,
targetHeight: 1000,
destinationType: Camera.DestinationType.FILE_URI,
encodingType: Camera.EncodingType.JPEG,
sourceType: Camera.PictureSourceType.CAMERA
};
navigator.camera.getPicture(
function (imageURI) {
alert(imageURI);
upload(imageURI);
},
function (message) {
// We typically get here because the user cancelled the photo operation. Fail silently.
}, options);
return false;
};
$('.camera-btn').on('click', takePicture);
getFeed();
}());
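A "Cannot read property 'file' of undefined" error usually means the Node side never parsed the multipart body, so req.files (or req.file) is undefined when the handler touches it. As a point of comparison, here is a minimal sketch of a matching endpoint, assuming Express with the multer middleware (an assumption, since the posted code does not include the Node route that receives the upload):
// Sketch only: Express + multer handling the FileTransfer upload whose fileKey is "file".
var express = require('express');
var multer = require('multer');
var app = express();
var upload = multer({ dest: 'uploads/' });
// The field name passed to upload.single() must match options.fileKey = "file" on the Cordova side.
app.post('/images', upload.single('file'), function (req, res) {
  console.log(req.file); // metadata about the uploaded file
  console.log(req.body); // options.params, e.g. "description"
  res.json({ fileName: req.file.filename });
});
app.listen(3000);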
I'm trying to hack/personalize an HTML5/AJAX/JavaScript plugin for dragging, dropping and uploading a file. The problem with this plugin is that it uploads the file immediately after it is dropped on the drop zone, so I want to change this behaviour and control the moment at which the upload happens (the upload uses jquery-ajax) by externalizing the upload function.
The plugin's main script (html5Upload.js) is the following:
/*jslint unparam: true, browser: true, devel: true */
/*global define*/
define(function () {
'use strict';
var module = {},
noop = function () { },
console = window.console || { log: noop },
supportsFileApi;
function UploadManager(options) {
var self = this;
self.dropContainer = options.dropContainer;
self.inputField = options.inputField;
self.uploadsQueue = [];
self.activeUploads = 0;
self.data = options.data;
self.key = options.key;
self.maxSimultaneousUploads = options.maxSimultaneousUploads || -1;
self.onFileAdded = options.onFileAdded || noop;
self.uploadUrl = options.uploadUrl;
self.onFileAddedProxy = function (upload) {
console.log('Event: onFileAdded, file: ' + upload.fileName);
self.onFileAdded(upload);
};
self.initialize();
}
function FileUpload(file) {
var self = this;
self.file = file;
self.fileName = file.name;
self.fileSize = file.size;
self.uploadSize = file.size;
self.uploadedBytes = 0;
self.eventHandlers = {};
self.events = {
onProgress: function (fileSize, uploadedBytes) {
var progress = uploadedBytes / fileSize * 100;
console.log('Event: upload onProgress, progress = ' + progress + ', fileSize = ' + fileSize + ', uploadedBytes = ' + uploadedBytes);
(self.eventHandlers.onProgress || noop)(progress, fileSize, uploadedBytes);
},
onStart: function () {
console.log('Event: upload onStart');
(self.eventHandlers.onStart || noop)();
},
onCompleted: function (data) {
console.log('Event: upload onCompleted, data = ' + data);
file = null;
(self.eventHandlers.onCompleted || noop)(data);
}
};
}
FileUpload.prototype = {
on: function (eventHandlers) {
this.eventHandlers = eventHandlers;
}
};
UploadManager.prototype = {
initialize: function () {
console.log('Initializing upload manager');
var manager = this,
dropContainer = manager.dropContainer,
inputField = manager.inputField,
cancelEvent = function (e) {
e.preventDefault();
e.stopPropagation();
};
if (dropContainer) {
manager.on(dropContainer, 'dragover', cancelEvent);
manager.on(dropContainer, 'dragenter', cancelEvent);
manager.on(dropContainer, 'drop', function (e) {
cancelEvent(e);
manager.processFiles(e.dataTransfer.files);
});
}
if (inputField) {
manager.on(inputField, 'change', function () {
manager.processFiles(this.files);
});
}
},
processFiles: function (files) {
console.log('Processing files: ' + files.length);
var manager = this,
len = files.length,
file,
upload,
i;
for (i = 0; i < len; i += 1) {
file = files[i];
if (file.size === 0) {
alert('Files with files size zero cannot be uploaded or multiple file uploads are not supported by your browser');
break;
}
upload = new FileUpload(file);
manager.uploadFile(upload);
}
},
uploadFile: function (upload) {
var manager = this;
manager.onFileAdded(upload);
// Queue upload if maximum simultaneous uploads reached:
if (manager.activeUploads === manager.maxSimultaneousUploads) {
console.log('Queue upload: ' + upload.fileName);
manager.uploadsQueue.push(upload);
return;
}
manager.ajaxUpload(upload);
},
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// This is the function I want to externalize so it can be called from outside this script
ajaxUpload: function (upload) {
var manager = this,
xhr,
formData,
fileName,
file = upload.file,
prop,
data = manager.data,
key = manager.key || 'file';
console.log('Beging upload: ' + upload.fileName);
manager.activeUploads += 1;
xhr = new window.XMLHttpRequest();
formData = new window.FormData();
fileName = file.name;
xhr.open('POST', manager.uploadUrl);
// Triggered when upload starts:
xhr.upload.onloadstart = function () {
// File size is not reported during start!
console.log('Upload started: ' + fileName);
upload.events.onStart();
};
// Triggered many times during upload:
xhr.upload.onprogress = function (event) {
if (!event.lengthComputable) {
return;
}
// Update file size because it might be bigger than reported by the fileSize:
upload.events.onProgress(event.total, event.loaded);
};
// Triggered when upload is completed:
xhr.onload = function (event) {
console.log('Upload completed: ' + fileName);
// Reduce number of active uploads:
manager.activeUploads -= 1;
upload.events.onCompleted(event.target.responseText);
// Check if there are any uploads left in a queue:
if (manager.uploadsQueue.length) {
manager.ajaxUpload(manager.uploadsQueue.shift());
}
};
// Triggered when upload fails:
xhr.onerror = function () {
console.log('Upload failed: ', upload.fileName);
};
// Append additional data if provided:
if (data) {
for (prop in data) {
if (data.hasOwnProperty(prop)) {
console.log('Adding data: ' + prop + ' = ' + data[prop]);
formData.append(prop, data[prop]);
}
}
}
// Append file data:
formData.append(key, file);
// Initiate upload:
xhr.send(formData);
},
on: function (element, eventName, handler) {
if (!element) {
return;
}
if (element.addEventListener) {
element.addEventListener(eventName, handler, false);
} else if (element.attachEvent) {
element.attachEvent('on' + eventName, handler);
} else {
element['on' + eventName] = handler;
}
}
};
module.fileApiSupported = function () {
if (typeof supportsFileApi !== 'boolean') {
var input = document.createElement("input");
input.setAttribute("type", "file");
supportsFileApi = !!input.files;
}
return supportsFileApi;
};
module.initialize = function (options) {
return new UploadManager(options);
};
return module;
});
I was thinking about moving all the variables needed by that function into the global scope and defining the function outside the prototype. I'm not sure that will work, since the function calls the manager it belongs to. Honestly, I'm not expert enough to make this work; I hope you can help, guys.
Simply change it so that it returns whatever you need to the outside. For example, instead of:
return module;
Do:
return {
module: module,
fileUpload: FileUpload };
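With that change, code outside the module can reach FileUpload (and everything else you choose to expose) through the object the AMD module returns. For example (a sketch only; the module name 'html5Upload', the '/upload' URL and the element ids are placeholders):
// Sketch only: consuming the modified module via RequireJS/AMD.
require(['html5Upload'], function (html5Upload) {
  var manager = html5Upload.module.initialize({
    uploadUrl: '/upload',
    dropContainer: document.getElementById('dropzone'),
    inputField: document.getElementById('file-input')
  });
  console.log(typeof html5Upload.fileUpload); // "function"
});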
You could take the text define and replace it with var uploader =; this will give you an object with all of the characteristics and methods, including the one you are looking to extract. Now, if this code relies on some other externally defined object it might throw some errors, but on a basic reading the code doesn't appear to rely on anything. I would not change the scope of the variables or attempt to cut out one (or even a few) methods; JavaScript closures make removing arbitrary code much harder.
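Note that the factory function passed to define is normally invoked by the module loader, so if you drop define you also have to invoke the function yourself. A sketch of that idea (my reading of the suggestion above, not tested against the plugin):
// Sketch only: replace the AMD wrapper with an immediately invoked function expression.
var uploader = (function () {
  'use strict';
  var module = {};
  // ... the original body of html5Upload.js goes here unchanged ...
  return module;
}());
// Then, from anywhere on the page:
var manager = uploader.initialize({ uploadUrl: '/upload' }); // placeholder options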
As I said, I set the scope of the variables needed by the function to the global scope:
var monmanager;
var monupload;
/////////////////////////////////////////////////////////////////////////////////////
/*jslint unparam: true, browser: true, devel: true */
/*global define*/
define(function () {
'use strict';
var module = {},
noop = function () { },
console = window.console || { log: noop },
supportsFileApi;
function UploadManager(options) {
var self = this;
self.dropContainer = options.dropContainer;
self.inputField = options.inputField;
self.uploadsQueue = [];
self.activeUploads = 0;
self.data = options.data;
self.key = options.key;
self.maxSimultaneousUploads = options.maxSimultaneousUploads || -1;
self.onFileAdded = options.onFileAdded || noop;
self.uploadUrl = options.uploadUrl;
self.onFileAddedProxy = function (upload) {
console.log('Event: onFileAdded, file: ' + upload.fileName);
self.onFileAdded(upload);
};
self.initialize();
}
function FileUpload(file) {
var self = this;
self.file = file;
self.fileName = file.name;
self.fileSize = file.size;
self.uploadSize = file.size;
self.uploadedBytes = 0;
self.eventHandlers = {};
self.events = {
onProgress: function (fileSize, uploadedBytes) {
var progress = uploadedBytes / fileSize * 100;
console.log('Event: upload onProgress, progress = ' + progress + ', fileSize = ' + fileSize + ', uploadedBytes = ' + uploadedBytes);
(self.eventHandlers.onProgress || noop)(progress, fileSize, uploadedBytes);
},
onStart: function () {
console.log('Event: upload onStart');
(self.eventHandlers.onStart || noop)();
},
onCompleted: function (data) {
console.log('Event: upload onCompleted, data = ' + data);
file = null;
(self.eventHandlers.onCompleted || noop)(data);
}
};
}
FileUpload.prototype = {
on: function (eventHandlers) {
this.eventHandlers = eventHandlers;
}
};
UploadManager.prototype = {
initialize: function () {
console.log('Initializing upload manager');
var manager = this,
dropContainer = manager.dropContainer,
inputField = manager.inputField,
cancelEvent = function (e) {
e.preventDefault();
e.stopPropagation();
};
if (dropContainer) {
manager.on(dropContainer, 'dragover', cancelEvent);
manager.on(dropContainer, 'dragenter', cancelEvent);
manager.on(dropContainer, 'drop', function (e) {
cancelEvent(e);
manager.processFiles(e.dataTransfer.files);
});
}
if (inputField) {
manager.on(inputField, 'change', function () {
manager.processFiles(this.files);
});
}
},
processFiles: function (files) {
console.log('Processing files: ' + files.length);
var manager = this,
len = files.length,
file,
upload,
i;
for (i = 0; i < len; i += 1) {
file = files[i];
if (file.size === 0) {
alert('Files with files size zero cannot be uploaded or multiple file uploads are not supported by your browser');
break;
}
upload = new FileUpload(file);
manager.uploadFile(upload);
}
},
uploadFile: function (upload) {
var manager = this;
//////////my modification////////////////-----------------------------------------<<<<<<<<<<<<<<<
monmanager = manager;
//////////////////////////-----------------------------------------<<<<<<<<<<<<<<<
manager.onFileAdded(upload);
// Queue upload if maximum simultaneous uploads reached:
if (manager.activeUploads === manager.maxSimultaneousUploads) {
console.log('Queue upload: ' + upload.fileName);
manager.uploadsQueue.push(upload);
return;
}
//////////my modification////////////////-----------------------------------------<<<<<<<<<<<<<<<
monupload = upload;
//////////////////////////-----------------------------------------<<<<<<<<<<<<<<<
// manager.ajaxUpload(upload);
},
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// This is the function I want to externalize so it can be called from outside this script
ajaxUpload: function (upload) {
var manager = this,
xhr,
formData,
fileName,
file = upload.file,
prop,
data = manager.data,
key = manager.key || 'file';
console.log('Beging upload: ' + upload.fileName);
manager.activeUploads += 1;
xhr = new window.XMLHttpRequest();
formData = new window.FormData();
fileName = file.name;
xhr.open('POST', manager.uploadUrl);
// Triggered when upload starts:
xhr.upload.onloadstart = function () {
// File size is not reported during start!
console.log('Upload started: ' + fileName);
upload.events.onStart();
};
// Triggered many times during upload:
xhr.upload.onprogress = function (event) {
if (!event.lengthComputable) {
return;
}
// Update file size because it might be bigger than reported by the fileSize:
upload.events.onProgress(event.total, event.loaded);
};
// Triggered when upload is completed:
xhr.onload = function (event) {
console.log('Upload completed: ' + fileName);
// Reduce number of active uploads:
manager.activeUploads -= 1;
upload.events.onCompleted(event.target.responseText);
// Check if there are any uploads left in a queue:
if (manager.uploadsQueue.length) {
manager.ajaxUpload(manager.uploadsQueue.shift());
}
};
// Triggered when upload fails:
xhr.onerror = function () {
console.log('Upload failed: ', upload.fileName);
};
// Append additional data if provided:
if (data) {
for (prop in data) {
if (data.hasOwnProperty(prop)) {
console.log('Adding data: ' + prop + ' = ' + data[prop]);
formData.append(prop, data[prop]);
}
}
}
// Append file data:
formData.append(key, file);
// Initiate upload:
xhr.send(formData);
},
on: function (element, eventName, handler) {
if (!element) {
return;
}
if (element.addEventListener) {
element.addEventListener(eventName, handler, false);
} else if (element.attachEvent) {
element.attachEvent('on' + eventName, handler);
} else {
element['on' + eventName] = handler;
}
}
};
module.fileApiSupported = function () {
if (typeof supportsFileApi !== 'boolean') {
var input = document.createElement("input");
input.setAttribute("type", "file");
supportsFileApi = !!input.files;
}
return supportsFileApi;
};
module.initialize = function (options) {
return new UploadManager(options);
};
return module;
});
And then I can call the function from outside:
$("#testcontrol").click(function(){
console.log( "it's clicked !" );
monmanager.ajaxUpload(monupload);
});
Now it's fine :)