I am getting the error 'axios post request 413 payload too large' when trying to upload images/screenshots of around 700 kB. I am able to upload small files, though. I am trying to do a POST operation to upload the file into a blob table in my Postgres database.
Upload functionality:
function readFileAsync(file) {
    return new Promise((resolve, reject) => {
        let reader = new FileReader();
        reader.onload = () => {
            var base64String = reader.result;
            console.log(base64String);
            console.log(base64String.substr(base64String.indexOf(',') + 1));
            // resolve with only the base64 payload, stripping the "data:...;base64," prefix
            resolve(base64String.substr(base64String.indexOf(',') + 1));
        };
        reader.onerror = reject;
        reader.readAsDataURL(file);
    });
}
async function uploadFile(path, data) {
    try {
        let contentBuffer = await readFileAsync(data.chosenfile);
        console.log('content buffer is ' + contentBuffer);
        let response = await axios({
            method: 'post',
            url: 'api/store/blob' + path,
            headers: {'session_id': data.sessionid},
            // send the file name and the base64 content in the JSON body
            data: {"id": data.chosenfile.name, "file": contentBuffer}
        });
        if (response.status == 200) {
            console.log(response.status);
        }
        return response.data;
    } catch (err) {
        console.error(err);
    }
}
As you can see from the code, I am converting the file to a base64 data URL and sending only the base64 string portion in the axios payload. Please help.
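A 413 is normally raised by the server (or a proxy in front of it) rather than by axios itself: base64 encoding inflates a ~700 kB image by roughly a third, which blows past typical default request-body limits. As a minimal sketch, assuming the API behind 'api/store/blob' is an Express app using its built-in JSON parser (whose default limit is 100 kb) — an assumption, since the backend is not shown in the post — raising the limit could look like this; a reverse proxy such as nginx would additionally need its client_max_body_size increased:

// Sketch under the assumption of an Express backend (not shown in the post).
const express = require('express');
const app = express();

// Accept JSON bodies up to 5 MB so base64-encoded images fit.
app.use(express.json({ limit: '5mb' }));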
I want to download the file locally, create a stream from it, and then send it to an API.
On localhost the file gets created via blobClient.downloadToFile(defaultFile).
But when I deploy the function, it cannot find the file to stream, so I think the download either does not happen or lands in the wrong location.
I get this error:
[Error: ENOENT: no such file or directory, open 'D:\home\site\wwwroot\importPbix\exampleName.pbix'
Here's my code:
const blobServiceClient = BlobServiceClient.fromConnectionString(
    process.env.CONNEXION_STRING
);
const containerClient = blobServiceClient.getContainerClient(
    params.containerName
);
const blobClient = containerClient.getBlobClient(process.env.FILE_LOCATION); // get file from storage
let blobData;
var defaultFile = path.join(params.baseDir, `${params.reportName}.pbix`); // use path module
let stream;
try {
    blobData = await blobClient.downloadToFile(defaultFile);
    console.log(blobData);
    stream = fs.createReadStream(defaultFile);
} catch (error) {
    params.context.log(error);
    console.log(error);
}
var options = {
    method: "POST",
    url: `https://api.powerbi.com/v1.0/myorg/groups/${params.groupId}/imports?datasetDisplayName=${params.reportName}`,
    headers: {
        "Content-Type": "multipart/form-data",
        Authorization: `Bearer ${params.accessToken}`,
    },
    formData: {
        "": {
            value: stream,
            options: {
                filename: `${params.reportName}.pbix`,
                contentType: null,
            },
        },
    },
};
// check if file is kept in memory
return new Promise(function (resolve, reject) {
    request(options, function (error, response) {
        if (error) {
            params.context.log(error);
            reject(error);
        } else {
            params.context.log(response);
            resolve(response.body);
        }
        fs.unlinkSync(defaultFile);
    });
});
I found a post describing the same issue; that's why I used the path module and passed __dirname to the function as params.baseDir.
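One thing worth checking (an assumption on my part, not something stated in the post): on Azure Functions the content under D:\home\site\wwwroot can be read-only, for example when the app runs from a deployment package, so a download written next to the function code can fail. Writing to the OS temp directory instead would look like this hypothetical tweak to the code above:

// Sketch: build defaultFile from the OS temp dir instead of params.baseDir.
const os = require('os');
const path = require('path');

var defaultFile = path.join(os.tmpdir(), `${params.reportName}.pbix`);
blobData = await blobClient.downloadToFile(defaultFile);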
If you want to download a file from an Azure blob and read it as a stream, just try the code below. In this demo, I download a .txt file to a temp folder (you should create it first in the Azure function) and print its content from the stream for a quick test:
module.exports = async function (context, req) {
    const { BlockBlobClient } = require("@azure/storage-blob");
    const fs = require('fs');

    const connStr = '<connection string>';
    const container = 'files';
    const blobName = 'test.txt';
    const tempPath = 'd:/home/temp/';
    const tempFilePath = tempPath + blobName;

    const blobClient = new BlockBlobClient(connStr, container, blobName);
    await blobClient.downloadToFile(tempFilePath).then(async function () {
        context.log("download successfully");
        let stream = fs.createReadStream(tempFilePath);
        // Print the text content, just to check that the stream can be read successfully
        context.log("text file content:");
        context.log(await streamToString(stream));
        // You can call your API here...
    });

    function streamToString(stream) {
        const chunks = [];
        return new Promise((resolve, reject) => {
            stream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
            stream.on('error', (err) => reject(err));
            stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8')));
        });
    }

    context.res = {
        body: 'done'
    };
}
Result: the file is downloaded and read as a stream successfully.
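As a side note (not part of the original answer): if the temp file is only needed to obtain a stream, @azure/storage-blob can also hand back the blob as a stream directly in Node.js, skipping the disk round trip. A sketch using the same connStr, container, and blobName as above:

const { BlockBlobClient } = require("@azure/storage-blob");

const blobClient = new BlockBlobClient(connStr, container, blobName);
const downloadResponse = await blobClient.download(); // download from offset 0
const stream = downloadResponse.readableStreamBody;   // a Node.js Readable stream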
The issue:
I need to download a PDF file from my server, but I get either a "No file" message or an empty file.
Details:
Here is my server-side code:
let fileBuffered = '';
// authentication for downloading a file from Dropbox API to my server
const dropbox = dropboxV2Api.authenticate({
    token: process.env.DEV_DROPBOX_SECRET_KEY
});
// configuring parameters
const params = Object.freeze({
    resource: "files/download",
    parameters: {
        path: `/${customerFileFolder}/${fileName}`
    }
});
let dropboxPromise = new Promise(function (resolve, reject) {
    dropbox(params, function (err, result) {
        if (err) {
            reject(err);
        } else {
            resolve(result);
        }
    }).on('data', function (data) {
        fileBuffered += data;
    });
});
const file = fileBuffered;
res.setHeader("Content-Type", "application/octet-stream");
res.send(file);
The PDF file that I'm trying to download is 139,694 bytes, but the length of the fileBuffered variable is 132,597. The content of the variable as shown in the debugger seems like a legitimate PDF file.
Here is the client-side code:
function documentFileDownload(fileName) {
    const ip = location.host;
    let request = $.ajax({
        type: "POST",
        url: `${http() + ip}/documentFileDownload`,
        headers: {
            "Accept": "application/octet-stream"
        },
        data: {
            fileName: fileName
        },
        error: function (err) {
            console.log("ERROR: " + err);
        }
    });
    console.log(request);
    return request;
}
Problem:
When I get the response on the client side, the size of the responseText is 254 kB.
What I actually get in the browser is a "Failed - No file" message.
What else I tried:
I tried playing with different Content-Types (application/pdf, text/pdf) on the server side and tried converting the variable to a base64 buffer:
const file = `data:application/pdf;base64, ${Buffer.from(fileBuffered).toString("base64")}`;
I also added res.setHeader("Accept-Encoding", "base64"); but still got the same result.
Any ideas?
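One detail that may explain the size mismatch (my reading of the code, not something confirmed in the post): fileBuffered += data coerces each binary chunk to a UTF-8 string, which is lossy for PDF bytes. The usual pattern for collecting binary data from a Node stream keeps the raw Buffers and concatenates them at the end; a minimal sketch, where stream stands for the download stream returned by dropbox(params, callback):

// Sketch: byte-accurate accumulation of a binary stream.
const chunks = [];
stream.on('data', (c) => chunks.push(c));        // keep raw Buffer chunks
stream.on('end', () => {
    const fileBuffered = Buffer.concat(chunks);  // exact byte count preserved
});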
I found a solution. I missed the .on('end') event while reading data from the Dropbox stream. Here is a working solution:
Here is the server-side:
let chunk = [];
let fileBuffered = '';
// authentication for downloading a file from Dropbox API to my server
const dropbox = dropboxV2Api.authenticate({
    token: process.env.DEV_DROPBOX_SECRET_KEY
});
// configuring parameters
const params = Object.freeze({
    resource: "files/download",
    parameters: {
        path: `/${customerFileFolder}/${fileName}`
    }
});
let dropboxPromise = new Promise(function (resolve, reject) {
    dropbox(params, function (err, result) {
        if (err) {
            reject(err);
        } else {
            resolve(result);
        }
    }).on('data', function (data) {
        // collect raw Buffer chunks; appending to a string corrupts binary data
        chunk.push(data);
    }).on('end', () => {
        // generate a single buffer from the collected chunks
        fileBuffered = Buffer.concat(chunk);
    });
});
// wait for the download to finish before building and sending the response
dropboxPromise.then(function () {
    const file = `data:application/pdf;base64, ${Buffer.from(fileBuffered).toString("base64")}`;
    res.setHeader("Content-Type", "application/pdf");
    res.send(file);
});
Client-side:
function documentFileDownload(fileName) {
    const ip = location.host;
    let request = $.ajax({
        type: "POST",
        url: `${http() + ip}/documentFileDownload`,
        responseType: "arraybuffer",
        headers: {
            "Accept": "application/pdf"
        },
        data: {
            fileName: fileName
        },
        error: function (err) {
            console.log("ERROR: " + err);
        }
    });
    // console.log(request);
    return request;
}
Try adding dataType: "blob" to your $.ajax call, and within the headers object add 'Content-Type': 'application/json'.
I am trying to upload an image from a URL to my Google Cloud Storage (Firebase). The following function should return the file, and a consecutive function will retrieve the actual signed/download URL to the new file. After all this, I update a document in my Firestore database with the new URL. That part works: the functions wait for the (unfortunately incomplete) image to upload, and my document gets updated with the newly created file URL. But the actual file/image is incomplete. :-(
async function saveToStorage(fileUrl) {
    var storage = admin.storage();
    var urlLib = require("url");
    var pathLib = require("path");
    // Get file name from provided URL
    var parsed = urlLib.parse(fileUrl);
    var fileName = pathLib.basename(parsed.pathname);
    // Create storage reference with new file name
    var bucket = storage.bucket('gs://myprojectname.appspot.com');
    // Path: folder
    var folderPath = 'data/photos/';
    // Path: folder + file
    var internalFilePath = folderPath + fileName;
    // Bucket file ref
    var file = bucket.file(internalFilePath);
    const request = require('request');
    const writeStream = file.createWriteStream({
        metadata: {
            contentType: 'image/jpg'
        }
    });
    return new Promise((resolve, reject) => {
        request.get(fileUrl)
            .pipe(writeStream)
            .on("error", (err) => {
                console.error(`Error occurred`);
                reject();
            })
            .on('finish', () => {
                console.info(`Photo saved`);
                resolve(file);
            });
    });
}
The image that is saved/uploaded/streamed to my Cloud Storage file comes out incomplete. I have tried using node-fetch and request and rewrote my function in several ways, but it always turns out with this result. I'm sure it has something to do with how I use my Promise, because if I omit the Promise the file actually completes, but then the main code keeps executing instead of waiting for it.
This has the same result (incomplete file):
return await fetch(fileUrl).then(res => {
    const contentType = res.headers.get('content-type');
    const writeStream = file.createWriteStream({
        metadata: {
            contentType
        }
    });
    let p = new Promise((resolve, reject) => {
        res.body.pipe(writeStream);
        writeStream.on('finish', function () {
            console.log("Stream finished");
            resolve(file);
        });
        writeStream.on('error', function () {
            reject(new Error("Whoops!"));
        });
    });
    return p.then(
        function (file) {
            console.log('Photo saved');
            return file;
        },
        function (error) {
            console.error(error);
            return;
        });
});
And outright returning the stream writes a complete file, but my main code does not wait for it (and I need to handle the file):
return res.body.pipe(writeStream)
    .on('finish', () => {
        console.log('Photo');
        return file;
    })
    .on('error', err => {
        return console.error(err);
    });
Thanks for any help on this!
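For what it's worth, a sketch of an alternative that avoids hand-rolling the Promise (assuming Node 15+ and node-fetch, neither of which is confirmed by the post): stream.pipeline wires up error handling on both streams and resolves once the write stream finishes, so the caller can simply await it. file here is the bucket file from the code above:

const { pipeline } = require('stream/promises');
const fetch = require('node-fetch');

async function saveToStorageSketch(fileUrl, file) {
    const res = await fetch(fileUrl);
    const writeStream = file.createWriteStream({
        metadata: { contentType: res.headers.get('content-type') }
    });
    // Resolves when writeStream emits 'finish'; rejects on errors from either stream.
    await pipeline(res.body, writeStream);
    return file;
}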
So this is the code that finally worked for me.
return new Promise((resolve, reject) => {
    const req = request(fileUrl);
    req.pause();
    req.on('response', res => {
        const writeStream = file.createWriteStream({
            metadata: {
                contentType: res.headers['content-type']
            }
        });
        req.pipe(writeStream)
            .on('finish', () => {
                console.log('Photo saved');
                resolve(file);
            })
            .on('error', err => {
                writeStream.end();
                console.error(err);
                reject();
            });
        req.resume();
    });
    req.on('error', err => console.error(err));
});
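The pause()/resume() pair appears to be the key detail here: the request is paused until the response headers arrive, the write stream is created with the real content-type from those headers, the pipe is attached, and only then is data allowed to flow, so no chunks can be emitted before a consumer exists.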
I'm trying to send a file object (image) as a multipart/form-data request using the JavaScript Fetch API.
The code below shows how I convert the file object to string format.
The image parameter is an array of File objects; I want to send the first image of that array to a web service.
onImageUpload = ((image) => {
    // File to arraybuffer
    let fr = new FileReader();
    fr.onload = function (e) {
        var text = e.target.result;
        AccountService.uploadProfileImage(localStorage.getItem('session-key'), text)
            .then((ro) => {
                if (ro.code == 200) {
                    this.showSnackbar("Profile image uploaded successfully!");
                }
                else {
                    this.showSnackbar("Error uploading your image. Please try again later.");
                }
            })
    }
    fr.readAsText(image[0]);
})
Below is my uploadProfileImage function, which makes the POST request using the Fetch API:
static async uploadProfileImage(sessionKey, image) {
    var reader = new FileReader();
    let ro = new ResponseObject();
    let fd = new FormData();
    let imgObj = new Image();
    imgObj.src = image;
    let blob = new Blob([new Uint8Array(image)], { type: "image/jpg" });
    fd.append("name", localStorage.getItem('username'));
    fd.append("file", blob);
    return new Promise((resolve, reject) => {
        fetch(UrlConstants.BASE_URL + this.urlUploadProfileImage, {
            method: "POST",
            headers: {
                'Authorization': 'Bearer ' + sessionKey,
                "Content-type": "multipart/form-data;"
            },
            body: fd
        }).then((response) => {
            ro.code = response.status;
            return response.text();
        })
        .then((text) => {
            ro.message = text;
            resolve(ro);
        })
        .catch((ex) => {
            reject(ex);
        })
    });
}
When I sent the image parameter to uploadProfileImage without converting it into a blob, the data was sent, but I need the blob so the request carries Content-Type: "image/jpg", as the API I'm working with can't handle the request without it.
The problem is that the image is not being included in the request.
How can I do that?
Thanks for your help.
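A hedged sketch of the usual fix (my suggestion, not confirmed by the post): when the body is a FormData, the browser has to generate the multipart boundary itself, so the Content-Type header must not be set manually; setting it to a bare multipart/form-data without a boundary is what typically makes the file part disappear. The per-part Content-Type can come from the Blob's own type, and passing the File straight into the Blob avoids the lossy readAsText round trip. Names like UrlConstants.BASE_URL and urlUploadProfileImage are carried over from the code above:

static async uploadProfileImage(sessionKey, file) {
    let fd = new FormData();
    fd.append("name", localStorage.getItem('username'));
    // Wrap the raw File in a Blob so this part is sent with Content-Type: image/jpg.
    fd.append("file", new Blob([file], { type: "image/jpg" }), file.name);
    const response = await fetch(UrlConstants.BASE_URL + this.urlUploadProfileImage, {
        method: "POST",
        // No Content-Type header here: the browser adds
        // "multipart/form-data; boundary=..." on its own.
        headers: { 'Authorization': 'Bearer ' + sessionKey },
        body: fd
    });
    return { code: response.status, message: await response.text() };
}

The caller would then pass image[0] (the File object) directly instead of the readAsText result.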
I am able to upload a file to my vendor's API, and the vendor responds with a .png file as binary data. I am able to write this out to a blob in the browser, but I can't get it to upload into Azure blob storage. I also tried uploading it to a web directory using fs.writeFile, but that produces a corrupt/non-bitmap image.
Ideally, I would like to upload my blob directly into Azure, but when I try, it gives me the following error:
TypeError: must start with number, buffer, array or string
If I need to upload the blob to a web directory and use Azure's createBlockBlobFromLocalFile, I would be more than happy to, but my attempts have failed thus far.
Here is my XMLHttpRequest, which opens the image (returned after I post my file) in the browser:
var form = document.forms.namedItem("fileinfo");
form.addEventListener('submit', function (ev) {
    var oData = new FormData(form);
    var xhr = new XMLHttpRequest();
    xhr.responseType = "arraybuffer";
    xhr.open("POST", "http://myvendorsapi/Upload", true);
    xhr.onload = function (oEvent) {
        if (xhr.status == 200) {
            var blob = new Blob([xhr.response], { type: "image/png" });
            var objectUrl = URL.createObjectURL(blob);
            window.open(objectUrl);
            console.log(blob);
            var containerName = boxContainerName;
            var filename = 'Texture_0.png';
            $http.post('/postAdvanced', { containerName: containerName, filename: filename, file: blob }).success(function (data) {
                //console.log(data);
                console.log("success!");
            }, function (err) {
                //console.log(err);
            });
        } else {
            oOutput.innerHTML = "Error " + xhr.status + " occurred when trying to upload your file.<br \/>";
        }
    };
    xhr.send(oData);
    ev.preventDefault();
}, false);
Here is my Node backend for the /postAdvanced call:
app.post('/postAdvanced', function (req, res, next) {
    var containerName = req.body.containerName;
    var filename = req.body.filename;
    var file = req.body.file;
    if (!Buffer.isBuffer(file)) {
        // Convert 'file' to a binary buffer
    }
    var options = { contentType: 'image/png' };
    blobSvc.createBlockBlobFromText(containerName, filename, file, function (error, result, response) {
        if (!error) {
            res.send(result);
        } else {
            console.log(error);
        }
    });
})
If uploading directly to Azure isn't possible, then if I can at least get this blob written to a directory, I can get it into Azure via createBlockBlobFromLocalFile.
I have solved the issue. I needed to base64-encode the data on the client side before passing it to Node to decode into a file. I needed to use XMLHttpRequest to receive the binary data properly, as jQuery AJAX appears to have an issue with returning binary responses (see here: http://www.henryalgus.com/reading-binary-files-using-jquery-ajax/).
Here is my front end:
var form = document.forms.namedItem("fileinfo");
form.addEventListener('submit', function (ev) {
    var oData = new FormData(form);
    var xhr = new XMLHttpRequest();
    xhr.responseType = "arraybuffer";
    xhr.open("POST", "http://vendorapi.net/Upload", true);
    xhr.onload = function (oEvent) {
        if (xhr.status == 200) {
            var blob = new Blob([xhr.response], { type: "image/png" });
            //var objectUrl = URL.createObjectURL(blob);
            //window.open(objectUrl);
            console.log(blob);
            var blobToBase64 = function (blob, cb) {
                var reader = new FileReader();
                reader.onload = function () {
                    var dataUrl = reader.result;
                    var base64 = dataUrl.split(',')[1];
                    cb(base64);
                };
                reader.readAsDataURL(blob);
            };
            blobToBase64(blob, function (base64) { // encode
                var update = { 'blob': base64 };
                var containerName = boxContainerName;
                var filename = 'Texture_0.png';
                $http.post('/postAdvancedTest', { containerName: containerName, filename: filename, file: base64 }).success(function (data) {
                    //console.log(data);
                    console.log("success!");
                    // Clear previous 3D render
                    $('#webGL-container').empty();
                    // Generate new 3D render
                    $scope.generate3D();
                }, function (err) {
                    //console.log(err);
                });
            })
        } else {
            oOutput.innerHTML = "Error " + xhr.status + " occurred when trying to upload your file.<br \/>";
        }
    };
    xhr.send(oData);
    ev.preventDefault();
}, false);
Node Backend:
app.post('/postAdvancedTest', function (req, res) {
    var containerName = req.body.containerName;
    var filename = req.body.filename;
    var file = req.body.file;
    var buf = Buffer.from(file, 'base64'); // decode (Buffer.from replaces the deprecated new Buffer)
    var tmpBasePath = 'upload/'; // this folder holds files downloaded from the vendor URL and should be created in the root directory beforehand
    var tmpFolder = tmpBasePath + containerName + '/';
    // Create unique temp directory to store files
    mkdirp(tmpFolder, function (err) {
        if (err) console.error(err);
        else console.log('Directory created');
    });
    // This is the location of the downloaded file, e.g. 'upload/Texture_0.png'
    var tmpFileSavedLocation = tmpFolder + filename;
    fs.writeFile(tmpFileSavedLocation, buf, function (err) {
        if (err) {
            console.log("err", err);
        } else {
            //return res.json({ 'status': 'success' });
            blobSvc.createBlockBlobFromLocalFile(containerName, filename, tmpFileSavedLocation, function (error, result, response) {
                if (!error) {
                    console.log("Uploaded" + result);
                    res.send(containerName);
                }
                else {
                    console.log(error);
                }
            });
        }
    })
})
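As a possible simplification (an assumption on my part, since I can't verify it against this codebase): the legacy azure-storage SDK's createBlockBlobFromText also accepts a Buffer, so the decoded image could be uploaded straight from memory, skipping the temp file and mkdirp entirely:

// Sketch: upload the decoded buffer directly, no temp file on disk.
app.post('/postAdvancedTest', function (req, res) {
    var buf = Buffer.from(req.body.file, 'base64'); // decode the base64 payload
    blobSvc.createBlockBlobFromText(req.body.containerName, req.body.filename, buf,
        { contentType: 'image/png' },
        function (error, result, response) {
            if (error) {
                console.log(error);
                res.status(500).send(error.message);
            } else {
                res.send(req.body.containerName);
            }
        });
})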