Occasionally uploading larger files - JavaScript

On moving to the next step in the form I run some checks. One is to stop photos over 10MB and to prevent .heic files from being uploaded. 90% of the time it works, but now and again files are let through.
Any help with a better-written solution, or a reason why this may fail and let large files or .heic files through, would be appreciated.
var upload_one = document.getElementById("image_one");
if (upload_one.files.length > 0) {
    if (upload_one.files.item(0).size >= '10485760') {
        upload_one.className += " invalid";
        valid = false;
        alert("Photo is too large. Photos need to be under 10mb")
    }
    fileName = document.querySelector('#image_one').value;
    extension = fileName.split('.').pop();
    if (extension == 'heic') {
        upload_one.className += " invalid";
        valid = false;
        alert("Files can only be .png, .jpg or .jpeg")
    }
}
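One likely reason the extension check sometimes fails is that iOS typically names HEIC photos with an uppercase ".HEIC" extension, which the case-sensitive == comparison misses; the size check also compares against a string and only works through implicit coercion. Below is a minimal sketch of a stricter check, assuming the same element id and the same surrounding valid flag as above; the allow-list of extensions is an assumption, not part of the original code.
// Hedged sketch (not the original poster's code): the element id "image_one"
// and the "valid" flag come from the question; the allow-list is an assumption.
var uploadOne = document.getElementById("image_one");
if (uploadOne.files.length > 0) {
    var file = uploadOne.files.item(0);
    var maxBytes = 10 * 1024 * 1024; // compare numbers, not strings
    // Lower-case the extension so ".HEIC" from iOS is caught as well
    var ext = file.name.split('.').pop().toLowerCase();
    var allowed = ['png', 'jpg', 'jpeg'];
    if (file.size > maxBytes) {
        uploadOne.className += " invalid";
        valid = false;
        alert("Photo is too large. Photos need to be under 10MB");
    }
    if (allowed.indexOf(ext) === -1) {
        uploadOne.className += " invalid";
        valid = false;
        alert("Files can only be .png, .jpg or .jpeg");
    }
}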

You should have a look at presigned URLs with an S3 bucket on AWS.
Basically you generate an upload URL that lets you upload big files directly to S3.
Personally I use a Lambda to generate this presigned URL and then return it to the front end.
Backend
const AWS = require("aws-sdk");
const S3 = new AWS.S3();
const { v4: uuidv4 } = require("uuid");

const getUrl = async (params) => {
    return await new Promise((resolve, reject) => {
        S3.getSignedUrl("putObject", params, (err, url) => {
            if (err) {
                reject(err);
            } else {
                resolve({
                    statusCode: 200,
                    url,
                });
            }
        });
    });
};

exports.handler = async (event, context) => {
    const id = uuidv4();
    const { userId } = event?.queryStringParameters;
    const params = {
        Bucket: process.env.INVOICE_BUCKET,
        Key: `${userId}/${id}.csv`,
        ContentType: `text/csv`,
        ACL: "public-read",
    };
    try {
        const { url } = await getUrl(params);
        // handleRes is a response helper used elsewhere in this project (not shown)
        return handleRes(
            {
                message: `Successfully generated url`,
                url,
                key: `${id}.csv`,
                publicUrl: `https://yourBucket.s3.eu-west-1.amazonaws.com/${userId}/${id}.csv`,
            },
            200
        );
    } catch (e) {
        console.error(e);
        return handleRes({ message: "failed" }, 400);
    }
};
Front end
$(function () {
    $("#theForm").on("submit", sendFile);
});

function sendFile(e) {
    e.preventDefault();
    var urlPresigned;
    var publicUrl;
    var key;
    $.ajax({
        type: "GET",
        url: `https://yourId.execute-api.eu-west-1.amazonaws.com/Prod/file-upload-to-bucket?userId=${userId}`,
        success: function (resp) {
            urlPresigned = resp.url;
            publicUrl = resp.publicUrl;
            key = resp.key;
            var theFormFile = $("#theFile").get()[0].files[0];
            $.ajax({
                type: "PUT",
                url: urlPresigned,
                contentType: "text/csv", // MIME type of the file being uploaded
                processData: false,
                // the actual file is sent raw
                data: theFormFile,
                success: function () {
                    // File uploaded
                },
                error: function (err) {
                    console.log(err);
                },
            });
        },
    });
}

Related

Azure Function fails to create a file when deployed remotely

I want to download a file locally, create a stream from it, and then send it to an API.
On localhost the file gets created via blobClient.downloadToFile(defaultFile);
But when I deploy the function it cannot find the file to stream, so I think the download either does not happen or lands in the wrong location.
I get this error:
[Error: ENOENT: no such file or directory, open 'D:\home\site\wwwroot\importPbix\exampleName.pbix'
Here's my code:
// Assumes the usual imports elsewhere in the function, e.g.:
// const { BlobServiceClient } = require("@azure/storage-blob");
// const path = require("path"), fs = require("fs"), request = require("request");
const blobServiceClient = BlobServiceClient.fromConnectionString(
    process.env.CONNEXION_STRING
);
const containerClient = blobServiceClient.getContainerClient(
    params.containerName
);
const blobClient = containerClient.getBlobClient(process.env.FILE_LOCATION); // get file from storage
let blobData;
var defaultFile = path.join(params.baseDir, `${params.reportName}.pbix`); // use path module
let stream;
try {
    blobData = await blobClient.downloadToFile(defaultFile);
    console.log(blobData);
    stream = fs.createReadStream(defaultFile);
} catch (error) {
    params.context.log(error);
    console.log(error);
}
var options = {
    method: "POST",
    url: `https://api.powerbi.com/v1.0/myorg/groups/${params.groupId}/imports?datasetDisplayName=${params.reportName}`,
    headers: {
        "Content-Type": "multipart/form-data",
        Authorization: `Bearer ${params.accessToken} `,
    },
    formData: {
        "": {
            value: stream,
            options: {
                filename: `${params.reportName}.pbix`,
                contentType: null,
            },
        },
    },
};
// check if file is kept in memory
return new Promise(function (resolve, reject) {
    request(options, function (error, response) {
        if (error) {
            params.context.log(error);
            reject(error);
        } else {
            params.context.log(response);
            resolve(response.body);
        }
        fs.unlinkSync(defaultFile);
    });
});
I found this post with the same issue, which is why I use the path module and pass __dirname to the function as params.baseDir.
If you want to download a file from Azure Blob Storage and read it as a stream, just try the code below. In this demo, I download a .txt file to a temp folder (you should create it first on the Azure Function) and print its content from the stream for a quick test:
module.exports = async function (context, req) {
    const { BlockBlobClient } = require("@azure/storage-blob")
    const fs = require('fs')

    const connStr = '<connection string>'
    const container = 'files'
    const blobName = 'test.txt'
    const tempPath = 'd:/home/temp/'
    const tempFilePath = tempPath + blobName
    const blobClient = new BlockBlobClient(connStr, container, blobName);

    await blobClient.downloadToFile(tempFilePath).then(async function () {
        context.log("download successfully")
        let stream = fs.createReadStream(tempFilePath)
        // Print text content, just to check that the stream can be read successfully
        context.log("text file content:")
        context.log(await streamToString(stream))
        // You can call your API here...
    })

    function streamToString(stream) {
        const chunks = [];
        return new Promise((resolve, reject) => {
            stream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
            stream.on('error', (err) => reject(err));
            stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8')));
        })
    }

    context.res = {
        body: 'done'
    }
}
Result: the file downloads and is read as a stream successfully (output screenshots omitted).

Getting "No file" or empty file when trying to download PDF file with Node.js

The issue:
I need to download a PDF file from my server but I'm getting either "No file" or an empty file.
Details:
Here is my server-side code:
let fileBuffered = '';

// authentication for downloading a file from Dropbox API to my server
const dropbox = dropboxV2Api.authenticate({
    token: process.env.DEV_DROPBOX_SECRET_KEY
});

// configuring parameters
const params = Object.freeze({
    resource: "files/download",
    parameters: {
        path: `/${customerFileFolder}/${fileName}`
    }
});

let dropboxPromise = new Promise(function (resolve, reject) {
    dropbox(params, function (err, result) {
        if (err) {
            reject(err);
        } else {
            resolve(result);
        }
    }).on('data', function (data) {
        fileBuffered += data;
    });
});

const file = fileBuffered;
res.setHeader("Content-Type", "application/octet-stream");
res.send(file);
The PDF file I'm trying to download is 139,694 bytes, while the length of the fileBuffered variable is 132,597. The content of the variable, as shown in the debugger, looks like a legitimate PDF file (screenshot omitted).
Here is the client-side code:
function documentFileDownload(fileName) {
    const ip = location.host;
    let request = $.ajax({
        type: "POST",
        url: `${http() + ip}/documentFileDownload`,
        headers: {
            "Accept": "application/octet-stream"
        },
        data: {
            fileName: fileName
        },
        error: function (err) {
            console.log("ERROR: " + err);
        }
    });
    console.log(request);
    return request;
}
Problem:
When I get the response on the client side, the responseText is about 254 KB (screenshot omitted), but what I actually get in the browser is a "Failed - No file" message.
What else I tried:
I tried playing with different Content-Types (application/pdf, text/pdf) on the server side and tried converting the variable to a base64 buffer:
const file = `data:application/pdf;base64, ${Buffer.from(fileBuffered).toString("base64")}`;
and added res.setHeader("Accept-Encoding", "base64");
but I'm still getting the same result.
Any ideas?
I found a solution. I was missing an .on("end") handler while reading data from the Dropbox stream. Here is a working solution.
Server-side:
let chunk = [];
let fileBuffered = '';

// authentication for downloading a file from Dropbox API to my server
const dropbox = dropboxV2Api.authenticate({
    token: process.env.DEV_DROPBOX_SECRET_KEY
});

// configuring parameters
const params = Object.freeze({
    resource: "files/download",
    parameters: {
        path: `/${customerFileFolder}/${fileName}`
    }
});

let dropboxPromise = new Promise(function (resolve, reject) {
    dropbox(params, function (err, result) {
        if (err) {
            reject(err);
        } else {
            resolve(result);
        }
    }).on('data', function (data) {
        chunk.push(data);
    }).on('end', () => {
        // generate buffer once the whole stream has been received
        fileBuffered = Buffer.concat(chunk);
    });
});

// send the response only after the download promise has resolved
dropboxPromise.then(() => {
    const file = `data:application/pdf;base64, ${Buffer.from(fileBuffered).toString("base64")}`;
    res.setHeader("Content-Type", "application/pdf");
    res.send(file);
});
Client-side:
function documentFileDownload(fileName) {
    const ip = location.host;
    let request = $.ajax({
        type: "POST",
        url: `${http() + ip}/documentFileDownload`,
        responseType: "arraybuffer",
        headers: {
            "Accept": "application/pdf"
        },
        data: {
            fileName: fileName
        },
        error: function (err) {
            console.log("ERROR: " + err);
        }
    });
    // console.log(request);
    return request;
}
Try adding dataType: "blob" to your $.ajax options, and within the headers object add 'Content-Type': 'application/json'.
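For illustration, here is a sketch of the questioner's documentFileDownload with those suggestions applied. Since jQuery's built-in dataType values do not include "blob", this variation uses xhrFields to ask the underlying XMLHttpRequest for a Blob response; the endpoint and the http() helper are the question's own, everything else is an assumption.
// Hedged sketch, not the original answer's exact code: the binary response is
// requested via xhrFields.responseType, which the underlying XHR understands.
function documentFileDownload(fileName) {
    const ip = location.host;
    return $.ajax({
        type: "POST",
        url: `${http() + ip}/documentFileDownload`,
        headers: {
            "Content-Type": "application/json",
            "Accept": "application/pdf"
        },
        data: JSON.stringify({ fileName: fileName }), // body matches the JSON content type
        xhrFields: {
            responseType: "blob" // receive the PDF as a Blob instead of text
        },
        error: function (err) {
            console.log("ERROR: " + err);
        }
    });
}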

Amazon S3 Remote File Upload with Axios

I am trying to write a function that would:
- take a remote URL as a parameter,
- get the file using axios,
- upload the stream to Amazon S3,
- and finally, return the uploaded URL.
I found help here on Stack Overflow. So far, I have this:
/*
 * Method to pipe the stream
 */
const uploadFromStream = (file_name, content_type) => {
    const pass = new stream.PassThrough();
    const obj_key = generateObjKey(file_name);
    const params = { Bucket: config.bucket, ACL: config.acl, Key: obj_key, ContentType: content_type, Body: pass };
    s3.upload(params, function(err, data) {
        if (!err) {
            return data.Location;
        } else {
            console.log(err, data);
        }
    });
    return pass;
}

/*
 * Method to upload remote file to s3
 */
const uploadRemoteFileToS3 = async (remoteAddr) => {
    axios({
        method: 'get',
        url: remoteAddr,
        responseType: 'stream'
    }).then( (response) => {
        if (response.status === 200) {
            const file_name = remoteAddr.substring(remoteAddr.lastIndexOf('/')+1);
            const content_type = response.headers['content-type'];
            response.data.pipe(uploadFromStream(file_name, content_type));
        }
    });
}
But uploadRemoteFileToS3 does not return anything (because it's an asynchronous function). How can I get the uploaded URL?
UPDATE
I have further improved the code and written a class. Here is what I have now:
const config = require('../config.json');
const stream = require('stream');
const axios = require('axios');
const AWS = require('aws-sdk');

class S3RemoteUploader {
    constructor(remoteAddr) {
        this.remoteAddr = remoteAddr;
        this.stream = stream;
        this.axios = axios;
        this.config = config;
        this.AWS = AWS;
        this.AWS.config.update({
            accessKeyId: this.config.api_key,
            secretAccessKey: this.config.api_secret
        });
        this.spacesEndpoint = new this.AWS.Endpoint(this.config.endpoint);
        this.s3 = new this.AWS.S3({endpoint: this.spacesEndpoint});
        this.file_name = this.remoteAddr.substring(this.remoteAddr.lastIndexOf('/')+1);
        this.obj_key = this.config.subfolder+'/'+this.file_name;
        this.content_type = 'application/octet-stream';
        this.uploadStream();
    }
    uploadStream() {
        const pass = new this.stream.PassThrough();
        this.promise = this.s3.upload({
            Bucket: this.config.bucket,
            Key: this.obj_key,
            ACL: this.config.acl,
            Body: pass,
            ContentType: this.content_type
        }).promise();
        return pass;
    }
    initiateAxiosCall() {
        axios({
            method: 'get',
            url: this.remoteAddr,
            responseType: 'stream'
        }).then( (response) => {
            if (response.status === 200) {
                this.content_type = response.headers['content-type'];
                response.data.pipe(this.uploadStream());
            }
        });
    }
    dispatch() {
        this.initiateAxiosCall();
    }
    async finish() {
        //console.log(this.promise); /* return Promise { Pending } */
        return this.promise.then( (r) => {
            console.log(r.Location);
            return r.Location;
        }).catch( (e) => {
            console.log(e);
        });
    }
    run() {
        this.dispatch();
        this.finish();
    }
}
But I still have no clue how to catch the result when the promise is resolved. So far, I have tried these:
testUpload = new S3RemoteUploader('https://avatars2.githubusercontent.com/u/41177');
testUpload.run();
//console.log(testUpload.promise); /* Returns Promise { Pending } */
testUpload.promise.then(r => console.log); // does nothing
But none of the above works. I have a feeling I am missing something very subtle. Any clue, anyone?
After an upload you can call the getSignedUrl function in the S3 SDK to get the URL, and you can also specify the expiry of the URL. You need to pass the key to that function. I'm travelling now and will update with an example later.
To generate a simple pre-signed URL that allows any user to view the contents of a private object in a bucket you own, you can use the following call to getSignedUrl():
var s3 = new AWS.S3();
var params = {Bucket: 'myBucket', Key: 'myKey'};
s3.getSignedUrl('getObject', params, function (err, url) {
    console.log("The URL is", url);
});
Official documentation link
http://docs.amazonaws.cn/en_us/AWSJavaScriptSDK/guide/node-examples.html
The code should be something like this:
function uploadFileToS3AndGenerateUrl(cb) {
    // I have generated streams from a file. Using this since this is what you have used. Must be a valid one.
    const pass = new stream.PassThrough();
    var params = {
        Bucket: "your-bucket", // required
        Key: key, // required
        Body: pass,
        ContentType: 'your content type',
    };
    s3.upload(params, function(s3Err, data) {
        if (s3Err) {
            cb(s3Err)
        }
        console.log(`File uploaded successfully at ${data.Location}`)
        const params = {
            Bucket: 'your-bucket',
            Key: data.key,
            Expires: 180
        };
        s3.getSignedUrl('getObject', params, (urlErr, urlData) => {
            if (urlErr) {
                console.log('There was an error getting your files: ' + urlErr);
                cb(urlErr);
            } else {
                console.log(`url: ${urlData}`);
                cb(null, urlData);
            }
        })
    })
}
Please check, I have updated your code; it might help you.
/*
 * Method to upload remote file to s3
 */
const uploadRemoteFileToS3 = async (remoteAddr) => {
    const response = await axios({
        method: 'get',
        url: remoteAddr,
        responseType: 'stream'
    })
    if (response.status === 200) {
        const file_name = remoteAddr.substring(remoteAddr.lastIndexOf('/')+1);
        const content_type = response.headers['content-type'];
        // note: uploadFromStream below now returns a Promise, so to pipe into it
        // the PassThrough stream would also need to be exposed to the caller
        response.data.pipe(uploadFromStream(file_name, content_type));
    }
    return new Promise((resolve, reject) => {
        response.data.on('end', (response) => {
            console.log(response)
            resolve(response)
        })
        response.data.on('error', () => {
            console.log(response);
            reject(response)
        })
    })
};

/*
 * Method to pipe the stream
 */
const uploadFromStream = (file_name, content_type) => {
    return new Promise((resolve, reject) => {
        const pass = new stream.PassThrough();
        const obj_key = generateObjKey(file_name);
        const params = { Bucket: config.bucket, ACL: config.acl, Key: obj_key, ContentType: content_type, Body: pass };
        s3.upload(params, function(err, data) {
            if (!err) {
                console.log(data)
                return resolve(data.Location);
            } else {
                console.log(err)
                return reject(err);
            }
        });
    });
}

// call uploadRemoteFileToS3
uploadRemoteFileToS3(remoteAddr)
    .then((finalResponse) => {
        console.log(finalResponse)
    })
    .catch((err) => {
        console.log(err);
    });

AJAX not uploading images to backend service

I'm working on a requirement to upload images to an AWS instance. The UI and service are separate and connect via REST. The service is in Node.js. From the UI we make an AJAX call to the backend service to upload the images to AWS.
The problem:
When I upload the images via a Postman request, I see the response as uploaded and the files are properly uploaded to AWS.
Whereas when I upload images via the AJAX call, I get no response in the browser, and the images are not uploaded to AWS either.
Below is the AJAX code:
var formData = new FormData();
formData.append('image', $('#tx_file_programa')[0]);
$.ajax({
    method: 'POST',
    type: "POST",
    url: 'http://10.0.0.95:9999/photo/1',
    contentType: false,
    processData: false,
    async: false,
    cache: false,
    beforeSend: function (xhr) {
        xhr.setRequestHeader('Authorization', 'Bearer ' + access_token);
    },
    data: formData,
    success: function (data) {
        console.log('response from server is : ', data);
    }
    //dataType: 'json'
});
This is the backend service.
server.post('/photo/:count', function (req, res) {
    if (req.getContentType() == 'multipart/form-data') {
        var form = new formidable.IncomingForm(),
            files = [], fields = [];
        var result = [];
        var noOfFiles = req.params.count;
        var count = 0;
        console.log('noOfFiles', noOfFiles);
        form.on('field', function(field, value) {
            fields.push([field, value]);
            console.log(fields);
        })
        form.on('progress', function(bytesReceived, bytesExpected) {
            console.log('err');
        });
        form.on('error', function(err) {
            console.log('err', err);
        });
        form.on('aborted', function() {
            console.log('aborted', arguments);
        });
        new Promise(function(resolve, reject) {
            var result = [];
            form.onPart = function (part) {
                var data = null;
                const params = {
                    Bucket: 'xxxxx',
                    Key: uuidv4() + part.filename,
                    ACL: 'public-read'
                };
                var upload = s3Stream.upload(params);
                upload.on('error', function (error) {
                    console.log('errr', error);
                });
                upload.on('part', function (details) {
                    console.log('part', details);
                });
                upload.on('uploaded', function (details) {
                    let extension = details.Location.split('.');
                    if (['JPG', 'PNG'].indexOf(extension[extension.length - 1].toUpperCase()) > -1) {
                        var ext = extension[extension.length - 1];
                        count++;
                        result.push(details.Location);
                        if (count == noOfFiles) {
                            resolve(result);
                        }
                    }
                });
                part.pipe(upload);
            }
        }).then(function(result) {
            console.log('end', result);
            res.writeHead(200, {'content-type': 'text/plain'});
            res.end('received files:\n\n ' + util.inspect(result));
        })
        form.parse(req, function (err, fields, files) {
        })
        return;
    } else {
        BadRequestResponse(res, "Invalid request type!");
    }
})
@user3336194, can you check with this? This is working for me:
var appIconFormData = null
$(":file").change(function () {
    var file = this.files[0], name = file.name, size = file.size, type = file.type;
    var imageType = new Array("image/png", "image/jpeg", "image/gif", "image/bmp");
    if (jQuery.inArray(type, imageType) == -1) {
        return false;
    } else {
        appIconFormData = new FormData();
        appIconFormData.append('appimage', $('input[type=file]')[0].files[0]);
    }
});

$.ajax({
    url: 'your/api/destination/url',
    type: 'POST',
    data: appIconFormData,
    cache: false,
    contentType: false,
    processData: false,
    success: function (data) {
        console.log(data)
    },
    error: function (e) {
    }
});
I think the way you are sending the FormData is not correct.
Try these 2 ways:
You can give your whole form to FormData() for processing:
var form = $('form')[0]; // You need to use a standard JavaScript object here
var formData = new FormData(form);
or specify the exact data for FormData():
var formData = new FormData();
// Attach file
formData.append('image', $('input[type=file]')[0].files[0]);

AWS SDK JavaScript: how to display upload progress of AWS.S3.putObject?

I'm developing a JavaScript client to upload files directly to Amazon S3.
<input type="file" id="file-chooser" />
<button id="upload-button">Upload to S3</button>
<div id="results"></div>
<script type="text/javascript">
    var bucket = new AWS.S3({params: {Bucket: 'myBucket'}});
    var fileChooser = document.getElementById('file-chooser');
    var button = document.getElementById('upload-button');
    var results = document.getElementById('results');
    button.addEventListener('click', function() {
        var file = fileChooser.files[0];
        if (file) {
            results.innerHTML = '';
            var params = {Key: file.name, ContentType: file.type, Body: file};
            bucket.putObject(params, function (err, data) {
                results.innerHTML = err ? 'ERROR!' : 'UPLOADED.';
            });
        } else {
            results.innerHTML = 'Nothing to upload.';
        }
    }, false);
</script>
The example from the Amazon documentation works fine, but it doesn't provide any feedback on the upload progress.
Any ideas?
Thanks
Rather than using the s3.putObject function, why not use the ManagedUpload class instead?
It has been specifically developed to let you hook into an httpUploadProgress event, which should make updating your progress bar fairly easy to implement.
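For illustration, here is a minimal sketch of that approach using s3.upload(), which returns a ManagedUpload instance in the AWS SDK for JavaScript v2; the bucket name, key, and the results element are assumptions carried over from the question, not part of this answer.
// Hedged sketch of the ManagedUpload / httpUploadProgress idea; bucket name,
// key and DOM ids are placeholders, not from the original answer.
var s3 = new AWS.S3({params: {Bucket: 'myBucket'}});

function uploadWithProgress(file) {
    var managedUpload = s3.upload({
        Key: file.name,
        ContentType: file.type,
        Body: file
    });
    // httpUploadProgress fires repeatedly with { loaded, total }
    managedUpload.on('httpUploadProgress', function (progress) {
        var percent = Math.round((progress.loaded / progress.total) * 100);
        document.getElementById('results').innerHTML = percent + '% uploaded';
    });
    managedUpload.send(function (err, data) {
        document.getElementById('results').innerHTML = err ? 'ERROR!' : 'UPLOADED.';
    });
}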
I have done some customisation for file upload progress. You can use this same logic in Node, Angular and plain JavaScript.
Here is the repository link:
https://github.com/aviboy2006/aws-s3-file-upload-progress
Use this fiddle to test: https://jsfiddle.net/sga3o1h5/
Note: update the access key, bucket name and secret key.
var bucket = new AWS.S3({
    accessKeyId: "",
    secretAccessKey: "",
    region: 'us-east-1'
});

uploadfile = function(fileName, file, folderName) {
    const params = {
        Bucket: "fileuploadprocess",
        Key: folderName + fileName,
        Body: file,
        ContentType: file.type
    };
    return this.bucket.upload(params, function(err, data) {
        if (err) {
            console.log('There was an error uploading your file: ', err);
            return false;
        }
        console.log('Successfully uploaded file.', data);
        return true;
    });
}

uploadSampleFile = function() {
    var progressDiv = document.getElementById("myProgress");
    progressDiv.style.display = "block";
    var progressBar = document.getElementById("myBar");
    file = document.getElementById("myFile").files[0];
    folderName = "Document/";
    uniqueFileName = 'SampleFile';
    let fileUpload = {
        id: "",
        name: file.name,
        nameUpload: uniqueFileName,
        size: file.size,
        type: "",
        timeReference: 'Unknown',
        progressStatus: 0,
        displayName: file.name,
        status: 'Uploading..',
    }
    uploadfile(uniqueFileName, file, folderName)
        .on('httpUploadProgress', function(progress) {
            let progressPercentage = Math.round(progress.loaded / progress.total * 100);
            console.log(progressPercentage);
            progressBar.style.width = progressPercentage + "%";
            if (progressPercentage < 100) {
                fileUpload.progressStatus = progressPercentage;
            } else if (progressPercentage == 100) {
                fileUpload.progressStatus = progressPercentage;
                fileUpload.status = "Uploaded";
            }
        })
}
I bumped into this post, then I found this AWS npm package, which does exactly what you are asking for:
@aws-sdk/lib-storage
import { Upload } from "@aws-sdk/lib-storage";
import { S3Client, S3 } from "@aws-sdk/client-s3";

const target = { Bucket, Key, Body };
try {
    const parallelUploads3 = new Upload({
        client: new S3({}) || new S3Client({}),
        tags: [...], // optional tags
        queueSize: 4, // optional concurrency configuration
        partSize: 1024 * 1024 * 5, // optional size of each part, at least 5 MB
        leavePartsOnError: false, // optional manually handle dropped parts
        params: target,
    });

    parallelUploads3.on("httpUploadProgress", (progress) => {
        console.log(progress);
    });

    await parallelUploads3.done();
} catch (e) {
    console.log(e);
}
