Web Share API sharing files? - javascript

Hi, I want to share images with the new API.
If I have an upload form for a file, I can share that file with the API, but I'm breaking my head trying to share a local file. Here is my attempt:
function sharePage() {
    const title = document.title;
    var filesArray = [];
    // This is the part that doesn't work: $.get returns text, not a File
    $.get(baseurl + "images/cover.jpg", { async: false }, function (data) {
        filesArray.push(data);
    });
    setHashtag('share');
    if (navigator.share) {
        if (navigator.canShare && navigator.canShare({ files: filesArray })) {
            navigator.share({
                text: 'FILE',
                files: filesArray,
                title: title,
                url: baseurl
            });
        } else {
            navigator.share({
                text: 'NO FILE',
                title: title,
                url: baseurl
            });
        }
    } else {
        document.location.href = "whatsapp://send?text=" + baseurl;
    }
}
EDIT:
The problem is that I don't know how to get a server-side file into this script, something like var file = baseurl + "images/cover.jpg";. I tried with jQuery $.get but it doesn't work.

I got it working by requesting a blob and generating a File object. Something like this:
fetch("url_to_the_file")
    .then(function (response) {
        return response.blob();
    })
    .then(function (blob) {
        var file = new File([blob], "picture.jpg", { type: 'image/jpeg' });
        var filesArray = [file];
        if (navigator.canShare && navigator.canShare({ files: filesArray })) {
            navigator.share({
                text: 'some_text',
                files: filesArray,
                title: 'some_title',
                url: 'some_url'
            });
        }
    });

Same in TypeScript with async/await (assuming you checked navigator.share is available):
const image = await fetch(imageUrl);
const blob = await image.blob();
const file = new File([blob], 'image.jpg', { type: 'image/jpeg' });
navigator.share({ text: 'some_text', files: [file] } as ShareData);

I got it working the same way, by requesting a blob and generating a File object, building the share data before calling navigator.share:
fetch("complete-image-url")
    .then(function (response) {
        return response.blob();
    })
    .then(function (blob) {
        var file = new File([blob], "image-name-with-extension", { type: 'image/jpeg' });
        var filesArray = [file];
        var shareData = { files: filesArray };
        if (navigator.canShare && navigator.canShare(shareData)) {
            // Adding the title afterwards, as navigator.canShare just
            // takes files as input
            shareData.title = "Name";
            navigator.share(shareData)
                .then(() => console.log('Share was successful.'))
                .catch((error) => console.log('Sharing failed', error));
        } else {
            console.log("Your system doesn't support sharing files.");
        }
    });

Related

Occasionally uploading larger files

On moving to the next step in the form I run some checks. One is to stop photos over 10 MB and prevent .heic files from being uploaded. 90% of the time it works, but now and again files are let through.
I'd appreciate a better-written solution, or a reason why this may fail and let large or .heic files through.
var upload_one = document.getElementById("image_one");
if (upload_one.files.length > 0) {
    if (upload_one.files.item(0).size >= '10485760') {
        upload_one.className += " invalid";
        valid = false;
        alert("Photo is too large. Photos need to be under 10mb")
    }
    fileName = document.querySelector('#image_one').value;
    extension = fileName.split('.').pop();
    if (extension == 'heic') {
        upload_one.className += " invalid";
        valid = false;
        alert("Files can only be .png, .jpg or .jpeg")
    }
}
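For what it's worth, two things in this check can let files slip through: the extension comparison is case-sensitive (a file named photo.HEIC passes), and the size limit is written as a string (it happens to work through coercion, but it's fragile). A minimal hardened sketch, assuming the same element IDs and valid flag, and switching the extension check from a block-list to an allow-list:
var upload_one = document.getElementById("image_one");
if (upload_one.files.length > 0) {
    var file = upload_one.files.item(0);
    // Compare against a number, not a string literal
    if (file.size >= 10485760) {
        upload_one.className += " invalid";
        valid = false;
        alert("Photo is too large. Photos need to be under 10 MB");
    }
    // Lower-case the extension so .HEIC, .Heic etc. are also caught
    var extension = file.name.split('.').pop().toLowerCase();
    if (['png', 'jpg', 'jpeg'].indexOf(extension) === -1) {
        upload_one.className += " invalid";
        valid = false;
        alert("Files can only be .png, .jpg or .jpeg");
    }
}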
You should have a look at presigned URLs with an S3 bucket on AWS.
Basically you generate an upload URL to which you can upload big files directly.
Personally I use a Lambda to generate this presigned URL and then return it to the front end.
Backend
const AWS = require("aws-sdk");
const S3 = new AWS.S3();
const { v4: uuidv4 } = require("uuid");

const getUrl = async (params) => {
    return await new Promise((resolve, reject) => {
        S3.getSignedUrl("putObject", params, (err, url) => {
            if (err) {
                reject(err);
            } else {
                resolve({
                    statusCode: 200,
                    url,
                });
            }
        });
    });
};

exports.handler = async (event, context) => {
    const id = uuidv4();
    const { userId } = event?.queryStringParameters;
    const params = {
        Bucket: process.env.INVOICE_BUCKET,
        Key: `${userId}/${id}.csv`,
        ContentType: `text/csv`,
        ACL: "public-read",
    };
    try {
        const { url } = await getUrl(params);
        // handleRes is a response helper (not shown here)
        return handleRes({ message: `Successfully generated url`, url, key: `${id}.csv`, publicUrl: `https://yourBucket.s3.eu-west-1.amazonaws.com/${userId}/${id}.csv` }, 200);
    } catch (e) {
        console.error(e);
        return handleRes({ message: "failed" }, 400);
    }
};
Front end
$(function () {
    $("#theForm").on("submit", sendFile);
});

function sendFile(e) {
    e.preventDefault();
    var urlPresigned;
    var publicUrl;
    var key;
    $.ajax({
        type: "GET",
        url: `https://yourId.execute-api.eu-west-1.amazonaws.com/Prod/file-upload-to-bucket?userId=${userId}`,
        success: function (resp) {
            urlPresigned = resp.url;
            publicUrl = resp.publicUrl;
            key = resp.key;
            var theFormFile = $("#theFile").get()[0].files[0];
            $.ajax({
                type: "PUT",
                url: urlPresigned,
                contentType: "text/csv", // put the MIME type of the file
                processData: false,
                // the actual file is sent raw
                data: theFormFile,
                success: function () {
                    // File uploaded
                },
                error: function (err) {
                    console.log(err);
                },
            });
        },
    });
}

Zipping multiple PDFs and downloading the zip file?

As the title says. This is my code:
var zip = new JSZip();
var urls = ["https://www.link.ca/documents/PDF/Quarters.pdf",
    "https://www.link.ca/documents/PDF/Mills.pdf",
    "https://www.link.ca/documents/PDF/Stop.pdf"
];
var count = 0;
var zipFilename = "Check_This_Out.zip";
urls.forEach(function (url) {
    debugger;
    var filename = "filename";
    console.log(url);
    JSZipUtils.getBinaryContent(url, function (err, data) {
        if (err) {
            throw err; // or handle the error
        }
        zip.file(url, data, {
            binary: true
        });
        count++;
        if (count == urls.length) {
            zip.generateAsync({
                type: "blob"
            })
            .then(function (blob) {
                saveAs(blob, zipFilename);
            });
        }
    });
});
The problem I'm facing is that when I unzip it, it creates a folder structure like the following:
C:\Users\samuser\Downloads\https_\www.link.ca\documents\PDF
My question is how to fix it so that when I open the zip file I see the three documents directly.
In case someone has the same issue, this is how I resolved it. Because zip.file was given the full URL as the entry name, JSZip turned the slashes into folders; stripping everything up to the last slash leaves just the file name:
var filename = url.replace(/.*\//g, "");
zip.file(filename, data, { binary: true, createFolders: true });

How to send data to an API without losing it

In the code below I'm adding objects to a media array in ReactJS:
const formData = new FormData();
formData.append('file', file, file.name);
const obj = {
    'id': id,
    'type': 'doc',
    'data': formData,
    url: file
};
setID(id + 1);
setMedia([...media, obj]);
Now I send it to the API via axios:
function addNew() {
    setProgressing(true);
    axios.post(uri + '/api/add', {
        map_id: props.data.id,
        questions: JSON.stringify(media)
    })
    .then(res => {
        props.updateStep();
    })
    .catch(err => {
    });
}
But on the server end I receive the objects of the array like below; note that data and url are empty:
{id: 1, type: "image", data: {}, url: {}}
The setMedia code, which is called on each image upload:
function handleUploadChange(e) {
    handleClose();
    const file = e.target.files[0];
    if (!file) {
        return;
    }
    const reader = new FileReader();
    reader.readAsBinaryString(file);
    reader.onload = () => {
        const formData = new FormData();
        formData.append('file', file, file.name);
        const obj = { 'id': id, 'type': 'image', 'data': formData, url: file };
        setID(id + 1);
        setMedia([...media, obj]);
    };
    reader.onerror = function () {
        console.log("error on load image");
    };
}
I found a solution for it. FormData and File objects are not JSON-serializable, which is why they arrive as empty objects; so I convert the images to base64, send the strings in the request, and convert them back to images on the server side.
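A minimal sketch of that conversion, reusing the id/media state setters from the question: reading the file with FileReader.readAsDataURL yields a base64 data URL string, which survives JSON.stringify.
function handleUploadChange(e) {
    const file = e.target.files[0];
    if (!file) {
        return;
    }
    const reader = new FileReader();
    // readAsDataURL produces a plain string ("data:image/png;base64,..."),
    // which JSON.stringify can serialize, unlike FormData or File objects
    reader.readAsDataURL(file);
    reader.onload = () => {
        const obj = { 'id': id, 'type': 'image', 'data': reader.result, url: file.name };
        setID(id + 1);
        setMedia([...media, obj]);
    };
}
On the server side, decode the base64 part after the comma back into an image file.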

Blank images and pdfs downloaded from Dropbox

I have a problem when downloading files from my Dropbox.
While using the type 'text/csv' I can download and view txt files.
Changing the type to 'image/jpeg' or 'application/pdf' and downloading that file type gives me a blank file.
Am I on the right track here or is there another way this should be done?
main.service.ts
downloadFile(id) {
    const headers = new Headers();
    headers.append('Authorization', 'Bearer ' + this.accessToken);
    const path = `{"path": "${id}"}`;
    headers.append('Dropbox-API-Arg', path);
    return this.http.post('https://content.dropboxapi.com/2/files/download',
        null, { headers: headers });
}
main.component.ts
downloadFileBlob(data: any, name) {
    const blob = new Blob([data], { type: 'image/jpeg' });
    const url = window.URL.createObjectURL(blob);
    window.open(url);
}

saveFile(id, name) {
    this.dropbox.downloadFile(id).subscribe((data: any) => {
        this.downloadFileBlob(data._body, name);
    });
}
Turns out I was going at this the wrong way.
The Dropbox GitHub repo has an example using sharingGetSharedLinkFile, which works much the same as filesDownload.
Replace sharingGetSharedLinkFile with filesDownload and provide a file path instead of the URL.
Something like this:
function downloadFile() {
    var ACCESS_TOKEN = (<HTMLInputElement> document.getElementById('access-token')).value;
    var SHARED_LINK = (<HTMLInputElement> document.getElementById('shared-link')).value;
    var dbx = new Dropbox.Dropbox({ accessToken: ACCESS_TOKEN });
    dbx.filesDownload({ path: SHARED_LINK })
        .then(function (data) {
            var downloadUrl = URL.createObjectURL((<any> data).fileBlob);
            var downloadButton = document.createElement('a');
            downloadButton.setAttribute('href', downloadUrl);
            downloadButton.setAttribute('download', data.name);
            downloadButton.setAttribute('class', 'button');
            downloadButton.innerText = 'Download: ' + data.name;
            document.getElementById('results').appendChild(downloadButton);
        });
}

AWS SDK JavaScript: how to display upload progress of AWS.S3.putObject?

I'm developing a JavaScript client to upload files directly to Amazon S3.
<input type="file" id="file-chooser" />
<button id="upload-button">Upload to S3</button>
<div id="results"></div>
<script type="text/javascript">
    var bucket = new AWS.S3({params: {Bucket: 'myBucket'}});
    var fileChooser = document.getElementById('file-chooser');
    var button = document.getElementById('upload-button');
    var results = document.getElementById('results');
    button.addEventListener('click', function () {
        var file = fileChooser.files[0];
        if (file) {
            results.innerHTML = '';
            var params = {Key: file.name, ContentType: file.type, Body: file};
            bucket.putObject(params, function (err, data) {
                results.innerHTML = err ? 'ERROR!' : 'UPLOADED.';
            });
        } else {
            results.innerHTML = 'Nothing to upload.';
        }
    }, false);
</script>
The example from Amazon documentation works fine, but it doesn't provide any feedback on the upload progress.
Any ideas?
Thanks
Rather than using the s3.putObject function, why not use the ManagedUpload class instead?
It has been specifically developed to let you hook into an httpUploadProgress event, which should make updating your progress bar fairly easy.
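A minimal sketch of that approach, assuming the file variable and bucket name from the question:
var upload = new AWS.S3.ManagedUpload({
    params: { Bucket: 'myBucket', Key: file.name, Body: file }
});

// Fires repeatedly during the upload with { loaded, total }
upload.on('httpUploadProgress', function (progress) {
    var percent = Math.round(progress.loaded / progress.total * 100);
    console.log(percent + '% uploaded');
});

upload.send(function (err, data) {
    results.innerHTML = err ? 'ERROR!' : 'UPLOADED.';
});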
I have done some customisation for file upload progress. The same logic can be used in Node, Angular and plain JavaScript.
Here is the repository link: https://github.com/aviboy2006/aws-s3-file-upload-progress
Use this fiddle to test: https://jsfiddle.net/sga3o1h5/
Note: update the access key, bucket name and secret key.
var bucket = new AWS.S3({
    accessKeyId: "",
    secretAccessKey: "",
    region: 'us-east-1'
});

uploadfile = function (fileName, file, folderName) {
    const params = {
        Bucket: "fileuploadprocess",
        Key: folderName + fileName,
        Body: file,
        ContentType: file.type
    };
    return bucket.upload(params, function (err, data) {
        if (err) {
            console.log('There was an error uploading your file: ', err);
            return false;
        }
        console.log('Successfully uploaded file.', data);
        return true;
    });
};

uploadSampleFile = function () {
    var progressDiv = document.getElementById("myProgress");
    progressDiv.style.display = "block";
    var progressBar = document.getElementById("myBar");
    file = document.getElementById("myFile").files[0];
    folderName = "Document/";
    uniqueFileName = 'SampleFile';
    let fileUpload = {
        id: "",
        name: file.name,
        nameUpload: uniqueFileName,
        size: file.size,
        type: "",
        timeReference: 'Unknown',
        progressStatus: 0,
        displayName: file.name,
        status: 'Uploading..',
    };
    uploadfile(uniqueFileName, file, folderName)
        .on('httpUploadProgress', function (progress) {
            let progressPercentage = Math.round(progress.loaded / progress.total * 100);
            console.log(progressPercentage);
            progressBar.style.width = progressPercentage + "%";
            if (progressPercentage < 100) {
                fileUpload.progressStatus = progressPercentage;
            } else if (progressPercentage == 100) {
                fileUpload.progressStatus = progressPercentage;
                fileUpload.status = "Uploaded";
            }
        });
};
I bumped into this post, then I found this AWS npm package, which does exactly what you are asking for: @aws-sdk/lib-storage
import { Upload } from "@aws-sdk/lib-storage";
import { S3Client, S3 } from "@aws-sdk/client-s3";
const target = { Bucket, Key, Body };
try {
    const parallelUploads3 = new Upload({
        client: new S3({}) || new S3Client({}), // either client flavour works
        tags: [...], // optional tags
        queueSize: 4, // optional concurrency configuration
        partSize: 1024 * 1024 * 5, // optional size of each part in bytes (minimum 5 MB)
        leavePartsOnError: false, // optional manually handle dropped parts
        params: target,
    });
    parallelUploads3.on("httpUploadProgress", (progress) => {
        console.log(progress);
    });
    await parallelUploads3.done();
} catch (e) {
    console.log(e);
}
