JavaScript, Chunks and Node.js - javascript

I need to upload a file to a Postgres database using a Node.js server. On the frontend (Vue.js) I have an <input ref="file_upload" type="file" multiple="true" @change="changeFile"> element where I pick files. After I select the wanted file, I convert it to a base64 string with the following code:
var file_input = this.$refs.file_upload
var base64String
function changeFile() {
  for (let i = 0; i < file_input.files.length; i++) {
    var reader = new FileReader();
    reader.onloadend = () => {
      base64String = reader.result
        .replace('data:', '')
        .replace(/^.+,/, '');
      console.log(base64String)
      console.log("SIZE: " + base64String.length)
    }
    reader.readAsDataURL(file_input.files[i]);
  }
}
file_input.addEventListener('change', changeFile);
After converting it to a base64 string, on button click I create a POST request with this code:
btnSubmit.addEventListener("click", () => {
  let dat_title = file_input.files[0].name;
  let url_files = "http://localhost:3000/blobFile/" + dat_title + "/" + base64String
  console.log("URL:\n" + url_files)
  fetch(url_files, {
    method: "POST"
  })
    .then(response => {
      response.json().then(parsedJson => {
        console.log(parsedJson);
      })
    })
})
And that's where the problems start. If the base64 string is smaller than 16 kB, the POST request goes through and the row is inserted into the database table (the column is of type bytea, so I decode the base64 string before the insert). But if the base64 string is larger than 16 kB, I get an error saying the fetch failed. So I figured out that the URL is too long to fetch and I need to split it into chunks. My question is how to do that: how can I split that base64 string into chunks and receive those chunks on the Node.js server? I've tried millions of solutions but nothing worked. If you know how to tackle this problem, please write it down. Below is the Node.js server configuration:
app.js
require('dotenv').config();
var express = require('express');
var cors = require('cors');
var app = express();
const pool = require('./dbConnect');
const port = 3000;
app.use(cors());
app.post("/blobFile/:title/:url(*)", pool.postBlobFile)
app.listen(port, () => {
  var host = "localhost";
  console.log(`Server listening on http://%s:%s`, host, port);
})
dbConnect.js
const postBlobFile = (req, res) => {
  const dat_title = req.params.title
  var base64String = req.params.url
  console.log("TITLE: " + dat_title)
  console.log("STRING: " + base64String)
  console.log("STRING_SIZE: " + base64String.length)
  // parameterized query: interpolating the values directly would allow SQL injection
  pool.query(`insert into test_blob (dat_naziv, dat_blob)
              values ($1, decode($2, 'base64'))`,
    [dat_title, base64String],
    (err, results) => {
      if (err) console.log(err);
      else {
        res.json(results.rows)
      }
    })
}
module.exports = {
  pool,
  postBlobFile,
}
Thanks in advance.

POST exists for a reason; right now you are effectively doing a GET, and POST is just sitting useless in your code.
There are two problems that I see:
I don't know what you are trying to do, but do note that there is a URL length limit, and you are running into it; that's why you are getting this error. I don't understand why you are using POST if you then put the base64 data in the URL instead of the request body.
It is best practice not to use Postgres for blob or byte-type data. Just a suggestion: use something like S3 or Spaces.
Send the data in the request body instead:
btnSubmit.addEventListener("click", () => {
  let dat_title = file_input.files[0].name;
  let url_files = "http://localhost:3000/blobFile/"
  console.log("URL:\n" + url_files)
  fetch(url_files, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ title: dat_title, data: base64String }) // your data goes here
  })
    .then(response => {
      response.json().then(parsedJson => {
        console.log(parsedJson);
      })
    })
})
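On the server, the matching route then reads the payload from the request body instead of the URL. A minimal sketch, assuming Express's built-in JSON parser with a raised size limit (the default 100kb limit would reject larger files), and assuming the title/data field names used in the fetch above:
// app.js: sketch of the body-based route (field names are assumptions)
require('dotenv').config();
const express = require('express');
const cors = require('cors');
const { pool } = require('./dbConnect');
const app = express();

app.use(cors());
// raise the JSON body size limit; Express defaults to 100kb
app.use(express.json({ limit: '50mb' }));

app.post('/blobFile/', (req, res) => {
  const { title, data } = req.body; // fields sent by the fetch above
  pool.query(
    `insert into test_blob (dat_naziv, dat_blob) values ($1, decode($2, 'base64'))`,
    [title, data],
    (err, results) => {
      if (err) return res.status(500).json({ error: err.message });
      res.json(results.rows);
    }
  );
});

app.listen(3000);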

Related

PDF Not Working After Uploading To Node Server

I am trying to upload a PDF from the frontend to my Node server. The PDF successfully uploads to the Node server, but when I go to open it, I am unable to. Instead, I see a message that says "File can't be opened. Something went wrong." Why is this happening?
Also, please don't suggest third-party PDF upload libraries like multer, etc. I am aware of these third-party libraries but I just want pure Node. Thank you so much.
Frontend code:
const uploadFile = document.getElementById("uploadFile");
uploadFile.addEventListener("change", (event) => {
  readFile(event.target.files[0]);
});
function readFile(file) {
  const uploadDesignPDF = `http://localhost:7000/api/upload/design`;
  let fileReader = new FileReader();
  fileReader.readAsDataURL(file);
  fileReader.addEventListener("load", async (event) => {
    let pdfStrChunk = event.target.result.replace(
      /^data:application\/[a-z]+;base64,/,
      ""
    );
    let fileSize = file.size;
    const chunk = 85000;
    let numOfChunkSet = Math.ceil(fileSize / chunk);
    let remainingChunk = fileSize;
    let currentChunk = 0;
    let chunkSet = [];
    let range = {};
    let data = {};
    for (let i = 0; i < numOfChunkSet; i++) {
      remainingChunk -= chunk;
      if (remainingChunk < 0) {
        remainingChunk += chunk;
        chunkSet.push(remainingChunk);
        range.start = currentChunk;
        range.end = currentChunk + chunk;
        currentChunk += remainingChunk;
      } else {
        chunkSet.push(chunk);
        range.start = currentChunk;
        range.end = (i + 1) * chunkSet[i];
        currentChunk += chunk;
      }
      const chunkRead = pdfStrChunk.slice(range.start, range.end);
      data.dataPDF = chunkRead;
      let response = await fetch(uploadDesignPDF, {
        method: "POST",
        body: JSON.stringify(data),
        headers: {
          "Content-Type": "application/json",
        },
        responseType: "arrayBuffer",
        responseEncoding: "binary",
      });
      let results = await response.json();
      console.log(results);
    }
  });
}
Backend route:
const { uploadDesigns } = require("./upload.designs.controller.js");
const router = require("express").Router();
router.post("/upload/design", uploadDesigns);
Backend:
uploadDesigns: async (req, res) => {
  try {
    fs.writeFileSync(`./designs/testingPDF6.pdf`, req.body.dataPDF, "base64");
    res.status(200).json({
      message: "done with chunk",
    });
  } catch (error) {
    res.status(500).json({
      message: "Something went wrong. Please refresh page.",
    });
  }
}
You are working with a base64 URL in vain. It is much more effective to use an ArrayBuffer: its unit is one byte, whereas base64 spends four characters on every three bytes it represents.
Instead of sending the file in chunks, I would suggest tracking progress through XMLHttpRequest.upload.onprogress. I would only use chunks if the upload went through a WebSocket.
If the PDF file is the only information sent to the server, I'd prefer to send the file directly, without field names or any other FormData wrapping. In that case, it would be appropriate to change the POST method to PUT.
If you send the file directly, it is ideal to use fs.createWriteStream() instead of fs.writeFileSync().
Then this approach will work:
const ws = fs.createWriteStream(tmpFilePath);
request.pipe(ws);
To control the integrity of the data, you can add an md5 or sha hash to the request headers and, on the server, duplicate the data stream into a hash object created by crypto.createHash(). In case of a hash mismatch, the file can be uploaded again. A sketch of that combination follows.
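A minimal sketch of the streaming PUT handler with hash verification, assuming a plain http server, a client-supplied x-file-hash header (sha256, hex) and an illustrative target path; these names are assumptions, not from the question:
const fs = require('fs');
const http = require('http');
const crypto = require('crypto');

http.createServer((request, response) => {
  if (request.method !== 'PUT') {
    response.writeHead(405);
    return response.end();
  }

  const tmpFilePath = './designs/upload.tmp.pdf'; // illustrative path
  const hash = crypto.createHash('sha256');
  const ws = fs.createWriteStream(tmpFilePath);

  request.on('data', (chunk) => hash.update(chunk)); // duplicate the stream into the hash
  request.pipe(ws); // and stream it to disk at the same time

  ws.on('finish', () => {
    const ok = hash.digest('hex') === request.headers['x-file-hash'];
    response.writeHead(ok ? 200 : 422, { 'Content-Type': 'application/json' });
    response.end(JSON.stringify({ ok })); // on mismatch, the client can upload again
  });
}).listen(7000);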

NodeJS + ldapsj-client: problem saving thumbnailPhoto

Using the ldapsj-client module, I'm trying to save the thumbnailPhoto attribute to a file:
const auth = async () => {
  const client = new LdapClient({ url: 'myaddomain' })
  await client.bind('someemail@domain.com.br', 'password')
  const opts = {
    filter: `(sAMAccountName=credential)`,
    scope: "sub"
  }
  const s = await client.search(myBaseDN, opts)
  console.log('thumbnailPhoto', s[0].thumbnailPhoto)
}
The console.log() outputs something like '����JFIF��C...'
I cannot figure out how to save this binary data into a file. The several approaches I tried, as explained here, do not work. It seems the data from AD is not in the expected "format".
I tried to convert it into a Buffer and then to base64:
const buffer = Buffer.from(s[0].thumbnailPhoto, 'binary')
var src = "data:image/png;base64," + Buffer.from(s[0].thumbnailPhoto).toString('base64')
But the output is not valid base64.
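No answer was posted here, but the mojibake ('����JFIF...') suggests the attribute arrived as a JavaScript string rather than raw bytes. A hedged sketch of the usual workaround, assuming the library hands back a binary ("latin1") string; if it can return the attribute as a raw Buffer, prefer that:
const fs = require('fs');

// Inside auth(), after the search. Assumption: s[0].thumbnailPhoto is a
// binary ("latin1") string rather than a Buffer. 'latin1' maps char codes
// 0-255 back to single bytes; the default 'utf8' mangles every byte >= 0x80.
const photo = Buffer.from(s[0].thumbnailPhoto, 'latin1');
fs.writeFileSync('thumbnail.jpg', photo); // the JFIF header means JPEG, not PNG
If the module has already decoded the value as UTF-8, the high bytes are lost and no re-encoding recovers them; in that case, getting the raw entry buffer from the client is the only reliable fix.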

Attaching base64 encoded file nodejs

I am trying to send a SOAP request with an attachment. Everything works fine except that the attachment I send is always zero bytes. The SOAP server accepts a Base64-encoded file, and I had achieved this in Java using the code
OutputStream outputStream = new ByteArrayOutputStream()
outputStream.writeTo(fileOutputStream);
Base64.encode(outputStream.toByteArray())//argument passed to the function which sends this to the SOAP API
I want to replicate the same with Node but I am unable to do so. Below is the function I am using to achieve this. I am reading some files from the client and trying to send them to the SOAP API. I have marked the lines responsible for reading and appending the data with // <-- comments; the rest is just for reference.
function createSoapEntryWithAtt(req, response) {
  var form = new formidable.IncomingForm();
  form.parse(req, function (err, fields, files) {
    let filesArr = []
    for (objkeys in files) {
      filesArr.push(files[objkeys])
    }
    return Promise.all(filesArr.map(item => {
      return new Promise((res, rej) => {
        var oldpath = item.path;
        var newpath = 'C:/user/' + item.name;
        var data = fs.readFileSync(oldpath).toString('base64'); // <-- reads the file
        let result = []
        for (var i = 0; i < data.length; i += 2) // trying to create a 64bit byte array
          result.push('0x' + data[i] + '' + data[i + 1])
        console.log(result)
        if (data)
          res({ [`${item.name}`]: result })
        rej("Error occured")
      })
    })).then(data => {
      let url = config.url
      var credentials = {
        AuthenticationInfo: {
          userName: "user",
          password: "passwd"
        }
      }
      let args = {
        Notes: "Testing From Node App",
      }
      let count = 0
      for (index in data) {
        if (count <= 3) {
          for (keys in data[index]) {
            //console.log(data[index][keys])
            args[`Attachment${++count}_Name`] = keys
            args[`Attachment${++count}_Data`] = data[index][keys] // <-- attaches the file read
          }
        }
      }
      soap.createClient(url, function (err, client) {
        client.addSoapHeader(credentials)
        client.CreateWorkInfo(args, function (err, res) {
          if (err) {
            console.log("Error is ----->" + err)
          } else {
            console.log("Response is -----> " + res)
            response.end();
          }
        })
      })
    })
  });
}
Please ignore this question, and thanks (and sorry) to anyone who spent time on it. The error was a careless mistake on my side, in the lines args[`Attachment${++count}_Name`] = keys and args[`Attachment${++count}_Data`] = data[index][keys]. Because count is incremented in both lines, the keys get out of step: the attachment name lands on Attachment1 while its data lands on Attachment2, so the named attachment never contains any data. The corrected loop is sketched below.

How to save the downloadable file link in zombie.js

I am scraping a website using node.js and zombie.js. I am facing an issue where a page contains an anchor which holds the link to download a PDF file.
If I click it using the browser.clickLink() function, the result that I get in the console is beyond my understanding. Is there a way to save this PDF file and keep its link, like in PHP? I want to save it for further processing. Here is my test JS code:
var http = require('http');
var browser = require('zombie');
var assert = require('assert');
const hostname = '127.0.0.1';
const port = 3000;
const server = http.createServer((req, res) => {
  res.statusCode = 200;
  //res.setHeader('Content-Type', 'text/plain');
  //res.end('Hello World\n');
});
server.listen(port, hostname, () => {
  console.log(`Server running at http://${hostname}:${port}/`);
});
var url = 'http://localhost/Node/HM_LandRegistry/downloadPdf.html'
browser.visit(url, function(error, browser) {
  //browser.dump();
  //console.log('browser.text (".link")', browser.text(".link"));
  browser.clickLink("a.link");
  browser.wait().then(function() {
    console.log(browser.text());
    browser.dump();
  });
});
Here is something I found on Google Groups. It has solved my problem:
function getLinks(browser) {
  var links = browser.querySelectorAll('.link');
  return Array.prototype.map.call(links, function(e) {
    return e.getAttribute('href');
  }); // returns an array of hrefs. Use .toString() to get a string only
}
Save the link; downloading the file with it is sketched below.
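The snippet above only extracts the href; to actually save the PDF for further processing, the link can be fetched directly and piped to disk. A minimal sketch, assuming the extracted link is absolute and served over http (the output file name is illustrative):
const fs = require('fs');
const http = require('http'); // use require('https') for https:// links

const pdfUrl = getLinks(browser)[0]; // first link found by the helper above

http.get(pdfUrl, (res) => {
  // stream the response straight to disk instead of buffering it as text
  res.pipe(fs.createWriteStream('downloaded.pdf'))
    .on('finish', () => console.log('saved downloaded.pdf'));
});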

Writing an image to file, received over an HTTP request in Node

I'm certain I'm missing something obvious, but the gist of the problem is that I'm receiving a PNG from a Mapbox call with the intent of writing it to the file system and serving it to the client. I've successfully relayed the call, received a response of raw data, and written a file. The problem is that the file ends up truncated no matter what path I take, and I've exhausted the answers I've found skirting the subject. I've dumped the raw response to the log and it's robust, but any file I make tends to be about one chunk's worth of unreadable data.
Here's the code I've got at present for writing the file. I tried this buffer move as a last-ditch effort after several failed and comparably fruitless iterations. Any help would be greatly appreciated.
module.exports = function(req, res, cb) {
  var cartography = function() {
    return https.get({
      hostname: 'api.mapbox.com',
      path: '/v4/mapbox.wheatpaste/' + req.body[0] + ',' + req.body[1] + ',6/750x350.png?access_token=' + process.env.MAPBOX_API
    }, function(res) {
      var body = '';
      res.on('data', function(chunk) {
        body += chunk;
      });
      res.on('end', function() {
        var mapPath = 'map' + req.body[0] + req.body[1] + '.png';
        var map = new Buffer(body, 'base64');
        fs.writeFile(__dirname + '/client/images/maps/' + mapPath, map, 'base64', function(err) {
          if (err) throw err;
          cb(mapPath);
        })
      })
    });
  };
  cartography();
};
It is possible to rewrite your code as a more compact subroutine:
const fs = require('fs');
const https = require('https');
https.get(url, (response) => { // the request itself
  if (response) {
    let imageName = 'image.png'; // for this purpose I usually use crypto
    response.pipe( // pipe the response to a write stream (file)
      fs.createWriteStream( // create the write stream
        './public/' + imageName // creates a file named image.png
      )
    );
    return imageName; // if the public folder is set as default in app.js
  } else {
    return false;
  }
})
You could get the original name and extension from the url, but it is safer to generate a new name with crypto and take the file extension from the url, or with the read-chunk and file-type modules.
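For completeness: the truncation in the question's code most likely comes from accumulating the binary chunks into a string (body += chunk) and then re-decoding that string as base64, which the raw PNG bytes are not. If the whole image is needed in memory rather than piped, a sketch that collects the chunks losslessly (assuming the same https.get call and url variable as in the snippet above):
const fs = require('fs');
const https = require('https');

https.get(url, (response) => {
  const chunks = [];
  response.on('data', (chunk) => chunks.push(chunk)); // keep each chunk as a Buffer
  response.on('end', () => {
    const png = Buffer.concat(chunks); // lossless, unlike string concatenation
    fs.writeFile('./public/image.png', png, (err) => {
      if (err) throw err;
    });
  });
});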
