Zipping multiple PDFs and downloading the zip file? - javascript

As the title says, this is my code:
var zip = new JSZip();
var urls = ["https://www.link.ca/documents/PDF/Quarters.pdf",
    "https://www.link.ca/documents/PDF/Mills.pdf",
    "https://www.link.ca/documents/PDF/Stop.pdf"
];
var count = 0;
var zipFilename = "Check_This_Out.zip";

urls.forEach(function (url) {
    debugger;
    var filename = "filename";
    console.log(url);
    JSZipUtils.getBinaryContent(url, function (err, data) {
        if (err) {
            throw err; // or handle the error
        }
        zip.file(url, data, {
            binary: true
        });
        count++;
        if (count == urls.length) {
            zip.generateAsync({
                type: "blob"
            })
            .then(function (blob) {
                saveAs(blob, zipFilename);
            });
        }
    });
});
The problem I'm facing is that when I unzip it, it creates a folder structure like the following:
C:\Users\samuser\Downloads\https_\www.link.ca\documents\PDF
My question is: how do I fix this so that when I open the zip file I just see the three documents themselves?

If someone had the same issue, this is how I resolved it:
var filename = url.replace(/.*\//g, "");
zip.file(filename, data, { binary: true, createFolders: true });
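For completeness, the whole loop with that fix applied would look roughly like this (a sketch assembled from the snippets above; JSZip, JSZipUtils, FileSaver's saveAs and the urls array are taken from the question):

// Sketch: same flow as the question, but each entry is stored under its
// base filename instead of the full URL, so the zip has no folder structure.
var zip = new JSZip();
var urls = [
    "https://www.link.ca/documents/PDF/Quarters.pdf",
    "https://www.link.ca/documents/PDF/Mills.pdf",
    "https://www.link.ca/documents/PDF/Stop.pdf"
];
var count = 0;
var zipFilename = "Check_This_Out.zip";

urls.forEach(function (url) {
    // strip everything up to and including the last "/", e.g. "Quarters.pdf"
    var filename = url.replace(/.*\//g, "");
    JSZipUtils.getBinaryContent(url, function (err, data) {
        if (err) {
            throw err; // or handle the error
        }
        zip.file(filename, data, { binary: true });
        count++;
        if (count == urls.length) {
            zip.generateAsync({ type: "blob" }).then(function (blob) {
                saveAs(blob, zipFilename);
            });
        }
    });
});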

Related

Occasionally uploading larger files

On moving to the next step in the form I run some checks. One check stops photos over 10 MB, and another prevents .heic files from being uploaded. 90% of the time it works, but now and again large or .heic files are let through.
Any help with a better-written solution, or a reason why this may fail and let such files through, would be appreciated.
var upload_one = document.getElementById("image_one");

if (upload_one.files.length > 0) {
    if (upload_one.files.item(0).size >= '10485760') {
        upload_one.className += " invalid";
        valid = false;
        alert("Photo is too large. Photos need to be under 10mb")
    }
    fileName = document.querySelector('#image_one').value;
    extension = fileName.split('.').pop();
    if (extension == 'heic') {
        upload_one.className += " invalid";
        valid = false;
        alert("Files can only be .png, .jpg or .jpeg")
    }
}
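One thing that can let files through in the check above is the case-sensitive extension comparison (an uploaded .HEIC file does not match 'heic'). A more defensive version of the same checks might look like the sketch below; it is not part of the original question and assumes the same image_one input and valid flag:

var upload_one = document.getElementById("image_one");
var MAX_BYTES = 10 * 1024 * 1024; // 10 MB

if (upload_one.files.length > 0) {
    var file = upload_one.files[0];
    // compare the size as a number rather than a string
    if (file.size >= MAX_BYTES) {
        upload_one.className += " invalid";
        valid = false;
        alert("Photo is too large. Photos need to be under 10mb");
    }
    // lower-case the extension so .HEIC is caught as well
    var extension = file.name.split('.').pop().toLowerCase();
    if (extension === 'heic') {
        upload_one.className += " invalid";
        valid = false;
        alert("Files can only be .png, .jpg or .jpeg");
    }
}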
You should have a look at presigned URLs with an S3 bucket on AWS.
Basically you generate an upload URL to which you can upload big files directly to S3.
Personally I use a Lambda to generate this presigned URL and then return it to the front end.
Backend
const AWS = require("aws-sdk");
const S3 = new AWS.S3();
const { v4: uuidv4 } = require("uuid");

const getUrl = async (params) => {
    return await new Promise((resolve, reject) => {
        S3.getSignedUrl("putObject", params, (err, url) => {
            if (err) {
                reject(err);
            } else {
                resolve({
                    statusCode: 200,
                    url,
                });
            }
        });
    });
};

exports.handler = async (event, context) => {
    const id = uuidv4();
    const { userId } = event?.queryStringParameters;
    const params = {
        Bucket: process.env.INVOICE_BUCKET,
        Key: `${userId}/${id}.csv`,
        ContentType: `text/csv`,
        ACL: "public-read",
    };
    try {
        const { url } = await getUrl(params);
        // handleRes: helper (not shown here) that formats the Lambda HTTP response
        return handleRes({ message: `Successfully generated url`, url, key: `${id}.csv`, publicUrl: `https://yourBucket.s3.eu-west-1.amazonaws.com/${userId}/${id}.csv` }, 200);
    } catch (e) {
        console.error(e);
        return handleRes({ message: "failed" }, 400);
    }
};
Front end
$(function () {
    $("#theForm").on("submit", sendFile);
});

function sendFile(e) {
    e.preventDefault();
    var urlPresigned;
    var publicUrl;
    var key;
    $.ajax({
        type: "GET",
        url: `https://yourId.execute-api.eu-west-1.amazonaws.com/Prod/file-upload-to-bucket?userId=${userId}`,
        success: function (resp) {
            urlPresigned = resp.url;
            publicUrl = resp.publicUrl;
            key = resp.key;
            var theFormFile = $("#theFile").get()[0].files[0];
            $.ajax({
                type: "PUT",
                url: urlPresigned,
                contentType: "text/csv", // MIME type of the file being uploaded
                processData: false,
                // the actual file is sent raw
                data: theFormFile,
                success: function () {
                    // File uploaded
                },
                error: function (err) {
                    console.log(err);
                },
            });
        },
    });
}
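One detail that is easy to trip over with this approach (not mentioned above): if ContentType is included in the params used to sign the URL, the contentType header sent with the PUT has to match it exactly, otherwise S3 rejects the upload with a signature mismatch.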

WebShare Api sharing files?

Hi, I want to share images with the new API.
If I have an upload form for a file, I can share that file with the API, but I'm breaking my head trying to share a local (server-side) file. Here is my try:
function sharePage(){
    const title = document.title;
    var filesArray = [];
    $.get(baseurl + "images/cover.jpg", { async: false }, function(data) { filesArray.push(data); });
    setHashtag('share');
    if (navigator.share) {
        if (navigator.canShare && navigator.canShare({ files: filesArray })) {
            navigator.share({
                text: 'FILE',
                files: filesArray,
                title: title,
                url: baseurl
            });
        } else {
            navigator.share({
                text: 'NO FILE',
                title: title,
                url: baseurl
            });
        }
    } else {
        document.location.href = "whatsapp://send?text=" + baseurl;
    }
}
EDIT:
The problem is that I don't know how to pass a server-side file to this script, something like var file = baseurl + "images/cover.jpg";. I tried with jQuery $.get but it doesn't work.
I got it working by requesting a blob and generating a File object. Something like this:
fetch("url_to_the_file")
.then(function(response) {
return response.blob()
})
.then(function(blob) {
var file = new File([blob], "picture.jpg", {type: 'image/jpeg'});
var filesArray = [file];
if(navigator.canShare && navigator.canShare({ files: filesArray })) {
navigator.share({
text: 'some_text',
files: filesArray,
title: 'some_title',
url: 'some_url'
});
}
}
Same in TypeScript with async/await (assuming you checked navigator.share is available):
const image = await fetch(imageUrl);
const blob = await image.blob();
const file = new File([blob], 'image.jpg', { type: 'image/jpeg' });
navigator.share({ text: 'some_text', files: [file] } as ShareData);
I got it working by requesting a blob and generating a File object. Something like this:
fetch("Url-image-complete")
.then(function(response) {
return response.blob()
})
.then(function(blob) {
var file = new File([blob], "Name-image-whith-extension", {type: 'image/jpeg'});
var filesArray = [file];
var shareData = { files: filesArray };
if (navigator.canShare && navigator.canShare(shareData)) {
// Adding title afterwards as navigator.canShare just
// takes files as input
shareData.title = "Name"
navigator.share(shareData)
.then(() => console.log('Share was successful.'))
.catch((error) => console.log('Sharing failed', error));
} else {
console.log("Your system doesn't support sharing files.");
}
});
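A general caveat for all of the snippets above: browsers only allow navigator.share() to run as a result of a user gesture, such as a click or tap handler. If fetching the image takes too long, the transient activation can expire and the share() call will be rejected, so it is worth fetching the file ahead of time or keeping the handler short.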

cordovaFile.readAsArrayBuffer ENCODING_ERR

I'm trying to use $cordovaFile.readAsArrayBuffer, but I'm getting the error below.
I tried some solutions from the forum, but without success.
function getFileBlob(url, cb) {
    console.log(url);
    var path = url.substring(0, url.lastIndexOf('/') + 1);
    var filename = url.substring(url.lastIndexOf('/') + 1, url.length);
    console.log('path', path);
    console.log('file', filename);
    $cordovaFile.readAsArrayBuffer(path, filename)
        .then(function (success) {
            var blob = new Blob([success], { type: 'image/jpeg' });
            cb(blob);
        }, function (error) {
            console.error(error);
            cb(null);
        });
}
Error:
FileError code:5, message:"ENCODING_ERR"
Console.logs:
My url: /file:///storage/emulated/0/Android/data/com.ionicframework.xx443164/cache/.Pic.jpg
var path:/file:///storage/emulated/0/Android/data/com.ionicframework.xx443164/cache/
var file: .Pic.jpg
I'm testing on an Android device.
I think this can help you if you need to get your photo as a Blob:
var photo = url; // your photo
var mainDic = photo.substring(0, photo.lastIndexOf('/') + 1),
    mainArchive = photo.substring(photo.lastIndexOf('/') + 1, photo.length);

$cordovaFile.readAsArrayBuffer(mainDic, mainArchive).then(function(success) {
    var blob = new Blob([success], {
        type: 'image/jpeg'
    });
    // now blob is your photo as blob type
}, function(error) {
    console.error(error);
});
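One more thing worth checking against the logged values above (not covered by the answer): the directory string handed to readAsArrayBuffer starts with /file:/// (a stray leading slash before the scheme), and malformed URLs like that are a common cause of ENCODING_ERR (code 5) in the File plugin, so it may help to normalise the URL, for example by stripping anything before file:///, before splitting it into path and filename.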

Can open blob returned by XMLHTTPRequest, but can't upload to Azure

I am able to upload a file to my vendor's API, and the vendor responds with a .png file as binary data. I am able to write this out to a blob in the browser, but I can't get it to upload to Azure blob storage. I also tried uploading it to a web directory using fs.writeFile, but that produces a corrupt/non-bitmap image.
Ideally, I would like to upload my blob directly into Azure, but when I try it gives me the following error:
TypeError: must start with number, buffer, array or string
If I need to upload the blob to a Web directory and use Azure's createBlockBlobFromLocalFile, I would be more than happy to, but my attempts have failed thus far.
Here is the XMLHttpRequest that posts my file and opens the returned image in the browser:
var form = document.forms.namedItem("fileinfo");
form.addEventListener('submit', function (ev) {
    var oData = new FormData(form);
    var xhr = new XMLHttpRequest();
    xhr.responseType = "arraybuffer";
    xhr.open("POST", "http://myvendorsapi/Upload", true);
    xhr.onload = function (oEvent) {
        if (xhr.status == 200) {
            var blob = new Blob([xhr.response], { type: "image/png" });
            var objectUrl = URL.createObjectURL(blob);
            window.open(objectUrl);
            console.log(blob);
            var containerName = boxContainerName;
            var filename = 'Texture_0.png';
            $http.post('/postAdvanced', { containerName: containerName, filename: filename, file: blob }).success(function (data) {
                //console.log(data);
                console.log("success!");
            }, function (err) {
                //console.log(err);
            });
        } else {
            oOutput.innerHTML = "Error " + xhr.status + " occurred when trying to upload your file.<br \/>";
        }
    };
    xhr.send(oData);
    ev.preventDefault();
}, false);
Here is my Node backend for the /postAdvanced call:
app.post('/postAdvanced', function (req, res, next) {
    var containerName = req.body.containerName;
    var filename = req.body.filename;
    var file = req.body.file;
    if (!Buffer.isBuffer(file)) {
        // Convert 'file' to a binary buffer
    }
    var options = { contentType: 'image/png' };
    blobSvc.createBlockBlobFromText(containerName, filename, file, function (error, result, response) {
        if (!error) {
            res.send(result);
        } else {
            console.log(error);
        }
    });
})
If I can't upload directly to Azure, then if someone can show me how to upload this blob to a web directory, I can get it into Azure via createBlockBlobFromLocalFile.
I have solved the issue. I needed to base64-encode the data on the client side before passing it to Node to decode into a file. I needed to use XMLHttpRequest to get the binary data properly, as jQuery AJAX appears to have an issue with binary responses (see here: http://www.henryalgus.com/reading-binary-files-using-jquery-ajax/).
Here is my front end:
var form = document.forms.namedItem("fileinfo");
form.addEventListener('submit', function (ev) {
    var oData = new FormData(form);
    var xhr = new XMLHttpRequest();
    xhr.responseType = "arraybuffer";
    xhr.open("POST", "http://vendorapi.net/Upload", true);
    xhr.onload = function (oEvent) {
        if (xhr.status == 200) {
            var blob = new Blob([xhr.response], { type: "image/png" });
            //var objectUrl = URL.createObjectURL(blob);
            //window.open(objectUrl);
            console.log(blob);
            var blobToBase64 = function(blob, cb) {
                var reader = new FileReader();
                reader.onload = function() {
                    var dataUrl = reader.result;
                    var base64 = dataUrl.split(',')[1];
                    cb(base64);
                };
                reader.readAsDataURL(blob);
            };
            blobToBase64(blob, function(base64) { // encode
                var update = { 'blob': base64 };
                var containerName = boxContainerName;
                var filename = 'Texture_0.png';
                $http.post('/postAdvancedTest', { containerName: containerName, filename: filename, file: base64 }).success(function (data) {
                    //console.log(data);
                    console.log("success!");
                    // Clear previous 3D render
                    $('#webGL-container').empty();
                    // Generate new 3D render
                    $scope.generate3D();
                }, function (err) {
                    //console.log(err);
                });
            })
        } else {
            oOutput.innerHTML = "Error " + xhr.status + " occurred when trying to upload your file.<br \/>";
        }
    };
    xhr.send(oData);
    ev.preventDefault();
}, false);
Node Backend:
app.post('/postAdvancedTest', function (req, res) {
    var containerName = req.body.containerName;
    var filename = req.body.filename;
    var file = req.body.file;
    var buf = new Buffer(file, 'base64'); // decode
    // Folder used to save files downloaded from the vendor URL; it should
    // already exist in the root directory.
    var tmpBasePath = 'upload/';
    var tmpFolder = tmpBasePath + containerName + '/';
    // Create unique temp directory to store files
    mkdirp(tmpFolder, function (err) {
        if (err) console.error(err)
        else console.log('Directory Created')
    });
    // This is the location of the downloaded file, e.g. 'upload/Texture_0.png'
    var tmpFileSavedLocation = tmpFolder + filename;
    fs.writeFile(tmpFileSavedLocation, buf, function (err) {
        if (err) {
            console.log("err", err);
        } else {
            //return res.json({ 'status': 'success' });
            blobSvc.createBlockBlobFromLocalFile(containerName, filename, tmpFileSavedLocation, function (error, result, response) {
                if (!error) {
                    console.log("Uploaded" + result);
                    res.send(containerName);
                }
                else {
                    console.log(error);
                }
            });
        }
    })
})
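A side note, not part of the original answer: on current Node versions new Buffer(file, 'base64') is deprecated; Buffer.from(file, 'base64') is the drop-in replacement and produces the same result.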

How to upload a file to Dropbox with dropbox.js?

ORIGINAL
I'm having problems uploading a file (image) to Dropbox from Node.js using the official dropbox.js.
I want to upload a picture that I have on another server, for example the Dropbox icon (www.dropbox.com/static/images/new_logo.png).
client.writeFile(file, 'www.dropbox.com/static/images/new_logo.png', function(error, stat) {
    if (error) {
        return res.send(error.status); // Something went wrong.
    }
    res.send("File saved as revision " + stat.revisionTag);
});
I know that this only creates a text file containing the URL, but how can I upload the actual picture to Dropbox?
I also tried downloading the file with http.get and then uploading it to Dropbox, but that doesn't work.
Thanks.
UPDATE WITH MORE INFO
First I download the image from a remote url with this code:
var request = http.get(options, function(res) {
    var imagedata = ''
    res.setEncoding('binary')
    res.on('data', function(chunk) {
        imagedata += chunk
    })
    res.on('end', function() {
        console.log("Image downloaded!");
        fs.writeFile(local, imagedata, 'binary', function(err) {
            if (err) throw err
            console.log('File saved.')
        })
    })
})
The file is saved correctly.
Then I tried two things:
Sending the 'imagedata' to Dropbox:
console.log("Image downloaded!");
client.writeFile(file, imagedata, function(error, stat) {
if (error) {
return response.send(error.status); // Something went wrong.
}
response.send("File saved as revision " + stat.revisionTag);
});
And something is uploaded to Dropbox but it's nothing useful.
Then I also tried to read the file from disk and send it to Dropbox, but that doesn't work either:
fs.readFile(file, function(err, data) {
Use dropbox-js 0.9.1-beta1 or above to upload binary files from node.js. You need to pass it Buffer or ArrayBuffer instances. Try this code:
var req = http.get(options, function(res) {
    var chunks = [];
    res.on('data', function(chunk) {
        chunks.push(chunk);
    });
    res.on('end', function() {
        console.log("Image downloaded!");
        var imageData = Buffer.concat(chunks);
        client.writeFile(file, imageData, function(error, stat) {
            if (error) {
                return response.send(error.status);
            }
            response.send("File saved as revision " + stat.revisionTag);
        });
    });
});
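Under the same premise (dropbox-js 0.9.1-beta1 or above accepting Buffer instances), the read-from-disk variant the question attempted should also work, since fs.readFile hands back a Buffer. A rough sketch, reusing the question's local path, Dropbox file path and response object:

// Sketch: read the already-saved image from disk and upload the Buffer.
// 'local' is the path used with fs.writeFile earlier; 'file' is the Dropbox path.
fs.readFile(local, function(err, data) {
    if (err) throw err;
    client.writeFile(file, data, function(error, stat) {
        if (error) {
            return response.send(error.status);
        }
        response.send("File saved as revision " + stat.revisionTag);
    });
});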
Original answer: the dropbox-js README mentions that binary files don't work in node.js just yet.
I had this issue as well. I just copied and modified a bit of the old dropbox-node npm package (which is now deprecated), and added the following function to dropbox.js:
Client.prototype.writeFileNodejs = function(path, data, callback) {
    var self = this;
    fs.readFile(data.path, function(err, data) {
        if (err) return callback(err);
        var uri = "" + self.urls.putFile + "/" + (self.urlEncodePath(path));
        if (typeof data === 'function') callback = data, data = undefined;
        var oauth = {
            consumer_key: self.oauth.key
            , consumer_secret: self.oauth.secret
            , token: self.oauth.token
            , token_secret: self.oauth.tokenSecret
        };
        var requestOptions = { uri: uri, oauth: oauth };
        requestOptions.body = data;
        return request['put'](requestOptions, callback ?
            function(err, res, body) {
                if (err) return callback(err);
                var contentType = res.headers['content-type'];
                // check if the response body is in JSON format
                if (contentType === 'application/json' ||
                    contentType === 'text/javascript') {
                    body = JSON.parse(body);
                    if (body.error) {
                        var err = new Error(body.error);
                        err.statusCode = res.statusCode;
                        return callback(err);
                    }
                } else if (errors[res.statusCode]) {
                    var err = new Error(errors[res.statusCode]);
                    err.statusCode = res.statusCode;
                    return callback(err);
                }
                // check for metadata in headers
                if (res.headers['x-dropbox-metadata']) {
                    var metadata = JSON.parse(res.headers['x-dropbox-metadata']);
                }
                callback(null, body, metadata);
            } : undefined);
    });
};
You'll also need to require request and fs for this:
var request = require('request'),
fs = require('fs');
