PDF Not Working After Uploading To Node Server - javascript

I am trying to upload a PDF from the frontend to my Node server. The PDF uploads to the Node server successfully, but when I go to open it, I am unable to. Instead, I see a message that says "File can't be opened. Something went wrong." Why is this happening?
Also, please don't suggest third-party PDF uploaders like multer, etc. I am aware of these third-party libraries, but I just want pure Node. Thank you so much.
Frontend code:
const uploadFile = document.getElementById("uploadFile");

uploadFile.addEventListener("change", (event) => {
  readFile(event.target.files[0]);
});

function readFile(file) {
  const uploadDesignPDF = `http://localhost:7000/api/upload/design`;
  let fileReader = new FileReader();
  fileReader.readAsDataURL(file);
  fileReader.addEventListener("load", async (event) => {
    let pdfStrChunk = event.target.result.replace(
      /^data:application\/[a-z]+;base64,/,
      ""
    );
    let fileSize = file.size;
    const chunk = 85000;
    let numOfChunkSet = Math.ceil(fileSize / chunk);
    let remainingChunk = fileSize;
    let currentChunk = 0;
    let chunkSet = [];
    let range = {};
    let data = {};
    for (let i = 0; i < numOfChunkSet; i++) {
      remainingChunk -= chunk;
      if (remainingChunk < 0) {
        remainingChunk += chunk;
        chunkSet.push(remainingChunk);
        range.start = currentChunk;
        range.end = currentChunk + chunk;
        currentChunk += remainingChunk;
      } else {
        chunkSet.push(chunk);
        range.start = currentChunk;
        range.end = (i + 1) * chunkSet[i];
        currentChunk += chunk;
      }
      const chunkRead = pdfStrChunk.slice(range.start, range.end);
      data.dataPDF = chunkRead;
      let response = await fetch(uploadDesignPDF, {
        method: "POST",
        body: JSON.stringify(data),
        headers: {
          "Content-Type": "application/json",
        },
        responseType: "arrayBuffer",
        responseEncoding: "binary",
      });
      let results = await response.json();
      console.log(results);
    }
  });
}
Backend route:
const { uploadDesigns } = require("./upload.designs.controller.js");
const router = require("express").Router();
router.post("/upload/design", uploadDesigns);
Backend:
uploadDesigns: async (req, res) => {
  try {
    fs.writeFileSync(`./designs/testingPDF6.pdf`, req.body.dataPDF, "base64");
    res.status(200).json({
      message: "done with chunk",
    });
  } catch (error) {
    res.status(500).json({
      message: "Something went wrong. Please refresh page.",
    });
  }
}

You are working with base64 data URLs in vain; it is much more effective to use an ArrayBuffer. The main advantage of an ArrayBuffer is its 1-byte unit, whereas base64 encodes every 3 bytes as 4 characters, so slicing the encoded string at an arbitrary offset breaks the byte representation three times out of four.
Instead of sending the file in chunks, I would suggest tracking progress through XMLHttpRequest.upload.onprogress. I would only use chunks if the upload went through a WebSocket.
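For illustration, a minimal sketch of that progress tracking, reusing the question's endpoint and the File object from the input (the switch to PUT is explained below):

const xhr = new XMLHttpRequest();
xhr.open("PUT", "http://localhost:7000/api/upload/design");
xhr.upload.onprogress = (e) => {
  if (e.lengthComputable) {
    console.log(`uploaded ${Math.round((e.loaded / e.total) * 100)}%`);
  }
};
xhr.onload = () => console.log("upload finished with status", xhr.status);
xhr.send(file); // a File is a Blob, so its raw bytes go on the wire untouched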
If the PDF file is the only information sent to the server, I'd prefer to send the file directly, without field names or any other FormData wrapping. In that case, it would be appropriate to change the POST method to PUT.
If you prefer to send the file directly, it would be ideal to use fs.createWriteStream() instead of fs.writeFileSync().
Then this approach will work:
const ws = fs.createWriteStream(tmpFilePath);
request.pipe(ws);
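Fleshed out, a minimal sketch of that server side, assuming an Express route (the route path and destination filename here are illustrative, not taken from the question):

const fs = require("fs");
const path = require("path");
const router = require("express").Router();

router.put("/upload/design", (req, res) => {
  const tmpFilePath = path.join(__dirname, "designs", "upload.pdf"); // illustrative destination
  const ws = fs.createWriteStream(tmpFilePath);
  req.pipe(ws); // the raw request body is the file itself, so it streams straight to disk
  ws.on("finish", () => res.status(200).json({ message: "upload complete" }));
  ws.on("error", () => res.status(500).json({ message: "write failed" }));
});

On the browser side the File object can then be sent as-is: fetch(url, { method: "PUT", body: file }).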
To verify the integrity of the data, you can add an md5 or sha hash to the request headers and, on the server, duplicate the data stream into an object created by crypto.createHash(). In case of a hash mismatch, the file can be uploaded again.
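A sketch of that integrity check, continuing the stream-to-disk route above (the x-file-sha256 header name is an illustrative choice, not a standard):

const crypto = require("crypto");

router.put("/upload/design", (req, res) => {
  const expected = req.headers["x-file-sha256"]; // hex digest computed by the client
  const hash = crypto.createHash("sha256");
  const ws = fs.createWriteStream("./designs/upload.pdf");

  req.on("data", (chunk) => hash.update(chunk)); // duplicate the stream into the hash
  req.pipe(ws);

  ws.on("finish", () => {
    if (hash.digest("hex") === expected) {
      res.status(200).json({ message: "upload verified" });
    } else {
      res.status(409).json({ message: "hash mismatch, please upload again" });
    }
  });
});

The client can compute the same digest from the file's bytes with crypto.subtle.digest("SHA-256", await file.arrayBuffer()) before sending.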

Related

Javascript, Chunks and NodeJs

I need to upload a file to a Postgres database using a NodeJs server. On the frontend (VueJs) I have an <input ref="file_upload" type="file" multiple="true" @change="changeFile"> element where I pick files. After I select the wanted file, I convert it to a base64 string with the following code:
var file_input = this.$refs.file_upload;
var base64String;

function changeFile() {
  for (let i = 0; i < file_input.files.length; i++) {
    var reader = new FileReader();
    reader.onloadend = () => {
      base64String = reader.result
        .replace('data:', '')
        .replace(/^.+,/, '');
      console.log(base64String);
      console.log("SIZE: " + base64String.length);
    };
    reader.readAsDataURL(file_input.files[i]);
  }
}
file_input.addEventListener('change', changeFile);
After I convert it to a base64 string, on button click I create a POST request with this code:
btnSubmit.addEventListener("click", () => {
  let dat_title = file_input.files[0].name;
  let url_files = "http://localhost:3000/blobFile/" + dat_title + "/" + base64String;
  console.log("URL:\n" + url_files);
  fetch(url_files, {
    method: "POST"
  })
    .then(response => {
      response.json().then(parsedJson => {
        console.log(parsedJson);
      });
    });
});
And that's where the problems start. If the size of the base64 string is less than 16 kB, the POST request works and the row is inserted into the database table (the column is of bytea type, so I decode the base64 string before the insert). But if the base64 string is larger than 16 kB, it fails with a fetch error. So I figured out that the URL is too big to fetch and I need to split it into chunks. My question is how to do that: how can I split that base64 string into chunks and receive those chunks on the NodeJs server? I've tried many solutions but nothing worked. If you know how to tackle this problem, please write it down. Below is the NodeJs server configuration:
app.js
require('dotenv').config();
var express = require('express');
var cors = require('cors');
var app = express();
const pool = require('./dbConnect');
const port = 3000;

app.use(cors());
app.post("/blobFile/:title/:url(*)", pool.postBlobFile);

app.listen(port, () => {
  var host = "localhost";
  console.log(`Server listening on port http://%s:%s`, host, port);
});
dbConnect.js
const postBlobFile = (req, res) => {
  const dat_title = req.params.title;
  var base64String = req.params.url;
  console.log("TITLE: " + dat_title);
  console.log("STRING: " + base64String);
  console.log("STRING_SIZE: " + base64String.length);
  pool.query(`insert into test_blob (dat_naziv, dat_blob)
              values ('${dat_title}', decode('${base64String}', 'base64'))`,
    (err, results) => {
      if (err) console.log(err);
      else {
        res.json(results.rows);
      }
    });
};

module.exports = {
  pool,
  postBlobFile,
};
Thanks in advance!
POST exists for a reason: you are effectively doing a GET by packing the data into the URL, so POST is just sitting useless in your code.
There are two problems that I see here:
I don't know exactly what you are trying to do, but note that there is a URL length limit, and you are exceeding it; that is why you are getting this error. I don't understand why you use POST yet still put the base64 data in the URL.
As a best practice, don't use Postgres for blob or byte-type storage; just a suggestion, use something like S3 or Spaces.
btnSubmit.addEventListener("click", () => {
  let dat_title = file_input.files[0].name;
  let url_files = "http://localhost:3000/blobFile/";
  console.log("URL:\n" + url_files);
  fetch(url_files, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    // send the payload in the request body instead of the URL
    body: JSON.stringify({ title: dat_title, data: base64String })
  })
    .then(response => {
      response.json().then(parsedJson => {
        console.log(parsedJson);
      });
    });
});
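On the server, the counterpart is to parse the JSON body and, while at it, use a parameterized query instead of string interpolation. A sketch under those assumptions (the title/data field names just mirror the fetch body above, and pool is assumed to be the pg Pool exported from dbConnect.js):

// app.js
app.use(express.json({ limit: "10mb" })); // the default body limit (~100kb) is too small for large files

app.post("/blobFile", (req, res) => {
  const { title, data } = req.body;
  pool.query(
    "insert into test_blob (dat_naziv, dat_blob) values ($1, decode($2, 'base64'))",
    [title, data],
    (err, results) => {
      if (err) return res.status(500).json({ error: err.message });
      res.json(results.rows);
    }
  );
});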

converting data url to a format that I can use to send to my backend web api and store in database

I am currently getting drag-and-drop / uploaded images as a data URL and displaying them with that URL.
What I am now trying to do is send those uploaded images to my backend web API (ASP.NET Core) to store them in a SQLite database; this is a requirement for my application.
Currently I am converting the data URL to an ArrayBuffer using the following code:
async srcToFile(context, asset) {
  const files = asset[0].files.fileList;
  let results = [];
  for (let i = 0; i < files.length; i++) {
    const file = files[i];
    const data = file.data;
    const name = file.name;
    const mimeType = file.type;
    await fetch(data)
      .then(function (res) {
        const r = res.arrayBuffer();
        console.warn('resource ', r);
        return r;
      })
      .then(function (buf) {
        console.warn('buffer: ', [buf]);
        const fileData = { data: [buf], name: name, type: mimeType };
        results.push(fileData);
        console.warn('results of file: ', fileData);
      });
  }
  console.warn(results);
  return results;
}
Then I put it in a data object to send to my server via axios; this is what that data object looks like:
const data = {
  Name: asset[0].name,
  Detail: asset[0].detail,
  Files: asset[0].files.fileList
};
When I console out the Files, it shows there is ArrayBuffer data in it. But when I send it to my server, it looks like that data is stripped out of the request: when I inspect it, the data is no longer there, and I cannot figure out why.
This is my axios call:
axios.post('https://localhost:5001/api/Assets', data)
.then(res => console.log(res))
.catch(error => console.log(error));
And my backend web API POST controller:
public async Task<ActionResult> PostAsset([FromBody] AssetSaveRequest request, [FromForm] List<IFormFile> files)
{
    foreach (var file in files)
    {
        if (file.Length > 0)
        {
            using (var ms = new MemoryStream())
            {
                file.CopyTo(ms);
                var fileBytes = ms.ToArray();
                string s = Convert.ToBase64String(fileBytes);
                // act on the Base64 data
            }
        }
    }
    var assetCreationDto = new AssetCreationDto(request);
    //var assetCreationDto = "";
    try
    {
        var asset = _mapper.Map<Asset>(assetCreationDto);
        _context.Assets.Add(asset);
        //await _context.SaveChangesAsync();
        var assetDto = _mapper.Map<AssetDto>(asset);
        return CreatedAtAction("GetAsset", new { assetDto.Id }, assetDto);
    }
    catch (DbUpdateException dbe)
    {
        var errorCode = ((Microsoft.Data.Sqlite.SqliteException)dbe.InnerException).SqliteErrorCode;
        switch (errorCode)
        {
            case 19:
                Console.WriteLine(((Microsoft.Data.Sqlite.SqliteException)dbe.InnerException).Message);
                break;
            default:
                Console.WriteLine("Something went wrong");
                break;
        }
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
        throw;
    }
    return null;
}
I don't know whether that part works, because I never receive the file data; I do, however, get the Name and Detail, which come in fine.
I am looking for advice on what I should do here to get this to work. I have tried converting the ArrayBuffer to a base64 string, but that does not come out right. Any help and suggestions would be great to get me back on track with this project.
UPDATE:
I have modified my srcToFile code to give me a File; now I am using axios to send the file and data to the backend (working with one file at this time), and all I'm getting in the request now is [object Object]. I've tried JSON.stringify on my data like so:
const data = JSON.stringify({
  Name: asset[0].name,
  Detail: asset[0].detail,
  Files: asset[0].files.fileList
});
It stringifies the Name and Detail but wipes out the file, and I get nothing on the backend.
I have tested with Postman and made several successful posts, but I can't seem to get the correct data from my Vue frontend.
That is where I am at now; any suggestions always help.
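One hedged suggestion: ASP.NET Core binds [FromForm] List<IFormFile> from a multipart form, not from JSON, so JSON.stringify can never carry the File objects. A sketch of sending FormData instead (field names are illustrative, and the [FromBody] AssetSaveRequest parameter would also need to become form-bound fields, since one request body cannot be both JSON and multipart):

const fd = new FormData();
fd.append("Name", asset[0].name);
fd.append("Detail", asset[0].detail);
for (const file of asset[0].files.fileList) {
  fd.append("files", file); // the field name must match the controller's parameter name
}
axios.post("https://localhost:5001/api/Assets", fd) // axios sets the multipart boundary itself
  .then(res => console.log(res))
  .catch(error => console.log(error));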

Handling chunks of Data from Node/Express Backend using React/Fetch

I created an Express backend which sends JSON data as chunks of text using res.write(JSON.stringify(JSONChunk)).
I want to handle and process each chunk of res.write in the React frontend, and am using the following method:
My backend pseudocode:
for (let i = 0; i < slices; i++) {
  var JSONChunck = await getData(i); // getData operation can take some time
  res.write(JSON.stringify(JSONChunck));
}
res.end();
FE pseudocode:
fetch(API, OPTS).then(async (response) => {
  const reader = response.body.getReader();
  while (true) {
    const { done, value } = await reader.read();
    if (done) {
      break;
    }
    var text = new TextDecoder("utf-8").decode(value);
    var result = JSON.parse(text);
    var processedResult = process(result);
  }
});
However, when I try the above code on some systems, JSON.parse(text) throws an error, and I can see that 'text' does not hold the full JSON string, only a partial one.
I was wondering if I am doing anything wrong or if there is a better way to do the above.
Any help/suggestions appreciated.
Thank you!
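One likely cause: chunks passed to res.write() are not delivered one-to-one to reader.read(); the network may split or coalesce them, so a partial JSON string on the client is expected. A common remedy is to frame each document, for example as newline-delimited JSON, and buffer on the client until a full line has arrived. A sketch of that idea, keeping the pseudocode's names:

// Backend: terminate each JSON document with a newline so the client can find boundaries
res.write(JSON.stringify(JSONChunck) + "\n");

// Frontend: accumulate bytes and parse only complete lines
fetch(API, OPTS).then(async (response) => {
  const reader = response.body.getReader();
  const decoder = new TextDecoder("utf-8");
  let buffer = "";
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });
    let newlineAt;
    while ((newlineAt = buffer.indexOf("\n")) >= 0) {
      const line = buffer.slice(0, newlineAt);
      buffer = buffer.slice(newlineAt + 1);
      if (line.trim()) process(JSON.parse(line));
    }
  }
});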

Attaching base64 encoded file nodejs

I am trying to send a SOAP request with an attachment. Everything works fine except that the attachment I send is always zero bytes. The SOAP server accepts a Base64-encoded file, and I had achieved this in Java using the code:
OutputStream outputStream = new ByteArrayOutputStream();
outputStream.writeTo(fileOutputStream);
Base64.encode(outputStream.toByteArray()); // argument passed to the function which sends this to the SOAP API
I want to replicate the same with Node but I am unable to do so. Below is the function I am using to achieve this. I am reading some files from the client and trying to send them to the SOAP API. I have marked (with comments) the places in the code responsible for reading and appending the data; the rest is just for reference.
function createSoapEntryWithAtt(req, response) {
  var form = new formidable.IncomingForm();
  form.parse(req, function (err, fields, files) {
    let filesArr = [];
    for (objkeys in files) {
      filesArr.push(files[objkeys]);
    }
    return Promise.all(filesArr.map(item => {
      return new Promise((res, rej) => {
        var oldpath = item.path;
        var newpath = 'C:/user/' + item.name;
        // (marked) read the file and build the byte array
        var data = fs.readFileSync(oldpath).toString('base64');
        let result = [];
        for (var i = 0; i < data.length; i += 2) // trying to create a 64bit byte array
          result.push('0x' + data[i] + '' + data[i + 1]);
        console.log(result);
        if (data)
          res({ [`${item.name}`]: result });
        rej("Error occured");
      });
    })).then(data => {
      let url = config.url;
      var credentials = {
        AuthenticationInfo: {
          userName: "user",
          password: "passwd"
        }
      };
      let args = {
        Notes: "Testing From Node App",
      };
      let count = 0;
      for (index in data) {
        if (count <= 3) {
          // (marked) attach the file that was read
          for (keys in data[index]) {
            //console.log(data[index][keys])
            args[`Attachment${++count}_Name`] = keys;
            args[`Attachment${++count}_Data`] = data[index][keys];
          }
        }
      }
      soap.createClient(url, function (err, client) {
        client.addSoapHeader(credentials);
        client.CreateWorkInfo(args, function (err, res) {
          if (err) {
            console.log("Error is ----->" + err);
          } else {
            console.log("Response is -----> " + res);
            response.end();
          }
        });
      });
    });
  });
}
Please ignore this question, and thanks (and sorry) to anyone who spent time on it. The error was a careless mistake on my side, in the lines args[`Attachment${++count}_Name`] = keys and args[`Attachment${++count}_Data`] = data[index][keys]. Because count is incremented in both lines, there is a mismatch: the attachment name gets index 1 while its data gets index 2, so the name never has any data attached to it.

Converting byte array output into Blob corrupts file

I am using the Office Javascript API to write an Add-in for Word using Angular.
I want to retrieve the Word document through the API, then convert it to a file and upload it via POST to a server.
The code I am using is nearly identical to the documentation code that Microsoft provides for this use case: https://dev.office.com/reference/add-ins/shared/document.getfileasync#example---get-a-document-in-office-open-xml-compressed-format
The server endpoint requires uploads to be POSTed through a multipart form, so I create a FormData object on which I append the file (a blob) as well as some metadata, when creating the $http call.
The file is being transmitted to the server, but when I open it, it has become corrupted and it can no longer be opened by Word.
According to the documentation, the Office.context.document.getFileAsync function returns a byte array. However, the resulting fileContent variable is a string. When I console.log this string it seems to be compressed data, like it should be.
My guess is I need to do some preprocessing before turning the string into a Blob. But which preprocessing? Base64 encoding through atob doesn't seem to be doing anything.
let sendFile = (fileContent) => {
  let blob = new Blob([fileContent], { type: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' }),
      fd = new FormData();
  blob.lastModifiedDate = new Date();
  fd.append('file', blob, 'uploaded_file_test403.docx');
  fd.append('case_id', caseIdReducer.data());
  $http.post('/file/create', fd, {
    transformRequest: angular.identity,
    headers: { 'Content-Type': undefined }
  })
    .success(() => {
      console.log('upload succeeded');
    })
    .error(() => {
      console.log('upload failed');
    });
};
function onGotAllSlices(docdataSlices) {
  let docdata = [];
  for (let i = 0; i < docdataSlices.length; i++) {
    docdata = docdata.concat(docdataSlices[i]);
  }
  let fileContent = new String();
  for (let j = 0; j < docdata.length; j++) {
    fileContent += String.fromCharCode(docdata[j]);
  }
  // Now all the file content is stored in 'fileContent' variable,
  // you can do something with it, such as print, fax...
  sendFile(fileContent);
}
function getSliceAsync(file, nextSlice, sliceCount, gotAllSlices, docdataSlices, slicesReceived) {
  file.getSliceAsync(nextSlice, (sliceResult) => {
    if (sliceResult.status === 'succeeded') {
      if (!gotAllSlices) { // Failed to get all slices, no need to continue.
        return;
      }
      // Got one slice, store it in a temporary array.
      // (Or you can do something else, such as
      // send it to a third-party server.)
      docdataSlices[sliceResult.value.index] = sliceResult.value.data;
      if (++slicesReceived === sliceCount) {
        // All slices have been received.
        file.closeAsync();
        onGotAllSlices(docdataSlices);
      } else {
        getSliceAsync(file, ++nextSlice, sliceCount, gotAllSlices, docdataSlices, slicesReceived);
      }
    } else {
      gotAllSlices = false;
      file.closeAsync();
      console.log(`getSliceAsync Error: ${sliceResult.error.message}`);
    }
  });
}
// User clicks button to start document retrieval from Word and uploading to server process
ctrl.handleClick = () => {
  Office.context.document.getFileAsync(Office.FileType.Compressed, { sliceSize: 65536 /* 64 KB */ },
    (result) => {
      if (result.status === 'succeeded') {
        // If the getFileAsync call succeeded, then
        // result.value will return a valid File Object.
        let myFile = result.value,
            sliceCount = myFile.sliceCount,
            slicesReceived = 0, gotAllSlices = true, docdataSlices = [];
        // Get the file slices.
        getSliceAsync(myFile, 0, sliceCount, gotAllSlices, docdataSlices, slicesReceived);
      } else {
        console.log(`Error: ${result.error.message}`);
      }
    }
  );
};
I ended up doing this with the fileContent string:
let bytes = new Uint8Array(fileContent.length);
for (let i = 0; i < bytes.length; i++) {
  bytes[i] = fileContent.charCodeAt(i);
}
I then proceed to build the Blob with these bytes:
let blob = new Blob([bytes], { type: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' });
If I then send this via a POST request, the file isn't mangled and can be opened correctly by Word.
I still get the feeling this can be achieved with less hassle / fewer steps. If anyone has a better solution, I'd be very interested to learn.
Thanks for your answer, Uint8Array was the solution. Just a little improvement, to avoid creating the string:
let bytes = new Uint8Array(docdata.length);
for (var i = 0; i < docdata.length; i++) {
  bytes[i] = docdata[i];
}
Pff! What is wrong with getting an instance of File and not using the FileReader API? C'mon, Microsoft!
You should take the byte array and throw it into the Blob constructor; turning a binary blob into a string in JavaScript is a bad idea that can lead to "out of range" errors or incorrect encoding.
Just do something along these lines:
var byteArray = new Uint8Array(3);
byteArray[0] = 97;
byteArray[1] = 98;
byteArray[2] = 99;
new Blob([byteArray]);
If the chunk is an instance of a typed array or an instance of Blob/File, you can just do:
blob = new Blob([blob, chunk])
And please... don't base64 encode it (~33% larger, and slower).
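Applied to the Office slices earlier in this thread, that means skipping the string stage entirely; a sketch of the idea, assuming each slice's data is a plain byte array:

let blob = new Blob(
  docdataSlices.map(slice => new Uint8Array(slice)),
  { type: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' }
);

sendFile() could then accept this Blob directly instead of a string.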
