I am trying to send a SOAP request with an attachment. Everything works fine except that the attachment I send is always zero bytes. The SOAP server accepts a Base64-encoded file, and I managed to do this in Java using the following code:
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
outputStream.writeTo(fileOutputStream);
Base64.encode(outputStream.toByteArray()); // argument passed to the function which sends this to the SOAP API
I want to replicate the same thing in Node, but I am unable to do so. Below is the function I am using. I am reading some files from the client and trying to send them to the SOAP API. I have marked (with comments) the parts of the code responsible for reading and appending the data; the rest is just for reference.
function createSoapEntryWithAtt(req, response) {
  var form = new formidable.IncomingForm();
  form.parse(req, function (err, fields, files) {
    let filesArr = []
    for (let objkeys in files) {
      filesArr.push(files[objkeys])
    }
    return Promise.all(filesArr.map(item => {
      return new Promise((res, rej) => {
        var oldpath = item.path;
        var newpath = 'C:/user/' + item.name;
        // >>> marked section: read the file and build the attachment data
        var data = fs.readFileSync(oldpath).toString('base64');
        let result = []
        for (var i = 0; i < data.length; i += 2) // trying to create a 64bit byte array
          result.push('0x' + data[i] + '' + data[i + 1])
        // <<< end marked section
        console.log(result)
        if (data)
          res({ [`${item.name}`]: result })
        else
          rej("Error occured")
      })
    })).then(data => {
      let url = config.url
      var credentials = {
        AuthenticationInfo: {
          userName: "user",
          password: "passwd"
        }
      }
      let args = {
        Notes: "Testing From Node App",
      }
      let count = 0
      for (let index in data) {
        if (count <= 3) {
          // >>> marked section: attach the files that were read
          for (let keys in data[index]) {
            //console.log(data[index][keys])
            args[`Attachment${++count}_Name`] = keys
            args[`Attachment${++count}_Data`] = data[index][keys] // Attaching the file read
          }
          // <<< end marked section
        }
      }
      soap.createClient(url, function (err, client) {
        client.addSoapHeader(credentials)
        client.CreateWorkInfo(args, function (err, res) {
          if (err) {
            console.log("Error is ----->" + err)
          } else {
            console.log("Response is -----> " + res)
            response.end();
          }
        })
      })
    })
  });
}
Please ignore this question... and thanks, and sorry if anyone wasted time on it. The error was a careless mistake on my side, in the lines args[`Attachment${++count}_Name`] = keys and args[`Attachment${++count}_Data`] = data[index][keys]. Because I increment count in both lines, there is a mismatch: the attachment name gets index 1, and then on the second line the attachment data gets index 2, so the name never has any data associated with it.
I am trying to upload a PDF from the frontend to my Node server. The PDF uploads to the Node server successfully, but when I go to open it, I am unable to: instead I see a message that says "File can't be opened. Something went wrong." Why is this happening?
Also, please don't suggest third-party upload libraries like multer, etc. I am aware of these libraries, but I just want pure Node. Thank you so much.
Frontend code:
const uploadFile = document.getElementById("uploadFile");
uploadFile.addEventListener("change", (event) => {
  readFile(event.target.files[0]);
});
function readFile(file) {
  const uploadDesignPDF = `http://localhost:7000/api/upload/design`;
  let fileReader = new FileReader();
  fileReader.readAsDataURL(file);
  fileReader.addEventListener("load", async (event) => {
    let pdfStrChunk = event.target.result.replace(
      /^data:application\/[a-z]+;base64,/,
      ""
    );
    let fileSize = file.size;
    const chunk = 85000;
    let numOfChunkSet = Math.ceil(fileSize / chunk);
    let remainingChunk = fileSize;
    let currentChunk = 0;
    let chunkSet = [];
    let range = {};
    let data = {};
    for (let i = 0; i < numOfChunkSet; i++) {
      remainingChunk -= chunk;
      if (remainingChunk < 0) {
        remainingChunk += chunk;
        chunkSet.push(remainingChunk);
        range.start = currentChunk;
        range.end = currentChunk + chunk;
        currentChunk += remainingChunk;
      } else {
        chunkSet.push(chunk);
        range.start = currentChunk;
        range.end = (i + 1) * chunkSet[i];
        currentChunk += chunk;
      }
      const chunkRead = pdfStrChunk.slice(range.start, range.end);
      data.dataPDF = chunkRead;
      let response = await fetch(uploadDesignPDF, {
        method: "POST",
        body: JSON.stringify(data),
        headers: {
          "Content-Type": "application/json",
        },
        responseType: "arrayBuffer",
        responseEncoding: "binary",
      });
      let results = await response.json();
      console.log(results);
    }
  });
}
Backend route:
const { uploadDesigns } = require("./upload.designs.controller.js");
const router = require("express").Router();
router.post("/upload/design", uploadDesigns);
Backend:
uploadDesigns: async (req, res) => {
  try {
    fs.writeFileSync(`./designs/testingPDF6.pdf`, req.body.dataPDF, "base64");
    res.status(200).json({
      message: "done with chunk",
    });
  } catch (error) {
    res.status(500).json({
      message: "Something went wrong. Please refresh page.",
    });
  }
}
You are working with base64 data URLs in vain. It is much more effective to use an ArrayBuffer: its unit is a single byte, while base64 encodes every 3 bytes as 4 characters, so the character stream breaks the byte representation three out of four times.
Instead of sending the file in chunks, I would suggest tracking progress through XMLHttpRequest.upload.onprogress, as in the sketch below. I would only use chunks if the upload goes over a WebSocket.
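A minimal sketch of that approach, assuming the same /api/upload/design endpoint as in the question; the raw File object goes out in a single request, so no base64 conversion or chunking is needed:
const xhr = new XMLHttpRequest();
xhr.upload.onprogress = (e) => {
  if (e.lengthComputable) {
    console.log(`uploaded ${Math.round((e.loaded / e.total) * 100)}%`);
  }
};
xhr.open("POST", "http://localhost:7000/api/upload/design"); // or PUT, see below
xhr.setRequestHeader("Content-Type", "application/pdf");
xhr.send(file); // the File object from the <input>, sent as raw bytes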
If the PDF file is the only information sent to the server, I'd prefer to send the file directly without any field names or other FormData information provided. In that case, it would be appropriate to change the POST method to PUT.
If you prefer to send the file directly, it would be ideal to use fs.createWriteStream() instead of fs.writeFileSync().
Then this approach will work
const ws = fs.createWriteStream(tmpFilePath);
request.pipe(ws);
To verify the integrity of the data, you can add an md5 or sha hash to the request headers and, on the server, duplicate the data stream into an object created by crypto.createHash(). In case of a hash mismatch, the file can be uploaded again.
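A hedged server-side sketch combining these suggestions: the raw request body is streamed to disk with fs.createWriteStream() and duplicated into a hash. The x-file-hash header name and target path are illustrative assumptions, and it assumes no body-parsing middleware consumes the stream first.
const fs = require("fs");
const crypto = require("crypto");

function uploadDesign(req, res) {
  const hash = crypto.createHash("md5");
  const ws = fs.createWriteStream("./designs/upload.pdf"); // example path
  req.on("data", (chunk) => hash.update(chunk)); // duplicate the stream into the hash
  req.pipe(ws);
  ws.on("finish", () => {
    const ok = hash.digest("hex") === req.headers["x-file-hash"];
    res.status(ok ? 200 : 400).json({
      message: ok ? "done" : "hash mismatch, please upload again",
    });
  });
}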
I need to upload a file to a Postgres database through a Node.js server. On the frontend (Vue.js) I have an <input ref="file_upload" type="file" multiple="true" @change="changeFile"> element where I pick files. After I select the wanted file, I convert it to a base64 string with the following code:
var file_input = this.$refs.file_upload
var base64String
function changeFile() {
  for (let i = 0; i < file_input.files.length; i++) {
    var reader = new FileReader();
    reader.onloadend = () => {
      base64String = reader.result
        .replace('data:', '')
        .replace(/^.+,/, '');
      console.log(base64String)
      console.log("SIZE: " + base64String.length)
    }
    reader.readAsDataURL(file_input.files[i]);
  }
}
file_input.addEventListener('change', changeFile);
After I convert it to a base64 string, on a button click I create a POST request with this code:
btnSubmit.addEventListener("click", () => {
  let dat_title = file_input.files[0].name;
  let url_files = "http://localhost:3000/blobFile/" + dat_title + "/" + base64String
  console.log("URL:\n" + url_files)
  fetch(url_files, {
    method: "POST"
  })
    .then(response => {
      response.json().then(parsedJson => {
        console.log(parsedJson);
      })
    })
})
And that's where the problems start. If the base64 string is smaller than 16 kB, the POST request goes through and the row is inserted into the database table (the column is of type bytea, so I decode the base64 string before the insert). But if the base64 string is larger than 16 kB, I get an error saying the fetch failed. So I figured out that the URL is too long to fetch and that I need to split the string into chunks. My question is how to do that: how can I split the base64 string into chunks and receive those chunks on the Node.js server? I've tried millions of solutions but nothing worked. If you know how to tackle this problem, please write it down. Below is the Node.js server configuration:
app.js
require('dotenv').config();
var express = require('express');
var cors = require('cors');
var app = express();
const pool = require('./dbConnect');
const port = 3000;
app.use(cors());
app.post("/blobFile/:title/:url(*)", pool.postBlobFile)
app.listen(port, () => {
  var host = "localhost";
  console.log(`Server listening on port http://%s:%s`, host, port);
})
dbConnect.js
const postBlobFile = (req, res) => {
  const dat_title = req.params.title
  var base64String = req.params.url
  console.log("TITLE: " + dat_title)
  console.log("STRING: " + base64String)
  console.log("STRING_SIZE: " + base64String.length)
  pool.query(`insert into test_blob (dat_naziv, dat_blob)
              values ('${dat_title}', decode('${base64String}', 'base64'))`,
    (err, results) => {
      if (err) console.log(err);
      else {
        res.json(results.rows)
      }
    })
}
module.exports = {
  pool,
  postBlobFile,
}
Thanks in advance!
POST exists for a reason. You are effectively doing a GET: the data travels in the URL, while POST is just sitting useless in your code.
There are two problems I am seeing:
1. There is a URL length limit, and you are exceeding it; that is why you are getting this error. I don't understand why you are using POST at all if you then put the base64 data in the URL anyway.
2. As a best practice, don't use Postgres for blob or byte-type data; use something like S3 or Spaces instead. Just a suggestion.
btnSubmit.addEventListener("click", () => {
  let dat_title = file_input.files[0].name;
  let url_files = "http://localhost:3000/blobFile/"
  console.log("URL:\n" + url_files)
  fetch(url_files, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ dat_title, base64String }) // your data goes here, in the body
  })
    .then(response => {
      response.json().then(parsedJson => {
        console.log(parsedJson);
      })
    })
})
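A hedged sketch of the matching server side: parse the JSON body with express.json() and read the fields from req.body instead of req.params (field names mirror the client code above):
const postBlobFile = (req, res) => {
  const { dat_title, base64String } = req.body
  pool.query(`insert into test_blob (dat_naziv, dat_blob)
              values ($1, decode($2, 'base64'))`,
    [dat_title, base64String], // parameterized query, which also avoids SQL injection
    (err, results) => {
      if (err) console.log(err);
      else res.json(results.rows)
    })
}
app.use(express.json({ limit: '50mb' })) // raise the default body-size limit for large files
app.post("/blobFile", postBlobFile) // title and data now travel in the body, not the URL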
I'm writing a Telegram bot to report fail2ban bans. It's very simple and dirty, written hastily, but it can be used to report any message to a single Telegram user:
var TelegramBot = require('node-telegram-bot-api');
var fs = require('fs');
var store = {
  get: function (key) {
    return fs.readFileSync(__dirname + '/' + key, { encoding: 'utf-8' });
  },
  set: function (key, value) {
    fs.writeFileSync(__dirname + '/' + key, value, { encoding: 'utf-8' });
  }
};
var token = store.get('token');
var args = process.argv.slice(2);
if (args.length == 0) {
  console.error('No mode specified');
  process.exit(0);
}
TelegramBot.prototype.unregisterText = function (regexp) {
  for (var i = 0; i < bot.textRegexpCallbacks.length; ++i) {
    if (bot.textRegexpCallbacks[i].regexp.toString() == regexp) {
      bot.textRegexpCallbacks.splice(i, 1);
      return;
    }
  }
};
fs.appendFileSync(__dirname + '/logs',
  '[' + (new Date().toISOString().replace(/T/, ' ').replace(/\..+/, '')) + '] '
  + args.join(' ') + '\n',
  { encoding: 'utf-8' });
switch (args[0]) {
  case 'setup':
    var bot = new TelegramBot(token, { polling: true });
    var step = 'none';
    bot.onText(/\/setup/, function (msg, match) {
      var fromId = msg.from.id;
      step = 'setup-started';
      bot.sendMessage(fromId, 'Starting setup. Please enter the verification key.');
      bot.onText(/(.+)/, function (msg, match) {
        if (step == 'setup-started') {
          var key = match[1];
          var verification = store.get('key');
          if (key == verification) {
            store.set('owner', msg.from.id);
            step = 'verified';
            bot.sendMessage(msg.from.id, 'Correct. Setup complete.');
          } else {
            step = 'none';
            bot.unregisterText(/(.+)/);
            bot.sendMessage(msg.from.id, 'Wrong. Setup aborted.');
          }
        }
      });
    });
    break;
  case 'report':
    var bot = new TelegramBot(token, { polling: false });
    var owner = store.get('owner');
    var subject = args[1];
    if (subject == 'message') {
      var message = args.slice(2).join(' ');
      bot.sendMessage(owner, message);
    } else if (subject == 'file') {
      var content = fs.readFileSync(args[2], { encoding: 'utf-8' });
      bot.sendMessage(owner, content);
    }
    break;
  default:
    console.error('Unrecognized mode', args[0]);
    break;
}
On my developer machine it works fine. I invoke:
node bot.js report message whatever message i want
And I correctly received "whatever message i want" on Telegram. However, once I pulled it onto my DigitalOcean VPS with git, it no longer worked. It turns out the problem is with the Telegram library:
Unhandled rejection Error: Error parsing Telegram response: <!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Bots: An introduction for developers</title>
...
Which apparently returns an HTML page instead of JSON... I also tried to contact the same endpoint (api.telegram.org/bot<hash>/sendMessage) with curl on my VPS, and it returned JSON (with an error message, because I didn't send any parameters, but still JSON).
I cannot fathom why this happens. Any help?
It seems like either you don't have the token file on your VPS, or the token is incorrect.
You can check it by yourself:
When you make a request to api.telegram.org/{token}/sendMessage and {token} is incorrect, it redirects you to this page, which responds with the HTML you've mentioned in your question.
So you have to debug the behavior of your store.get and store.set functions, along with the files and tokens, to make sure you are using the correct one.
Also, I'd recommend running bot.getMe() before using any other Telegram API methods, to ensure you've specified a correct bot token.
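A minimal sketch of that check, using the same node-telegram-bot-api client as in the question (getMe() returns a promise there):
var bot = new TelegramBot(token, { polling: false });
bot.getMe().then(function (me) {
  console.log('Token OK, bot username:', me.username);
}).catch(function (err) {
  console.error('Token check failed:', err.message);
});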
Here is server.js
var express = require("express"),
    http = require("http"),
    mongoose = require("mongoose"),
    app = express();
app.use(express.static(__dirname + "/client"));
app.use(express.urlencoded());
mongoose.connect('mongodb://localhost/PvdEnroll', function (err) {
  if (err) {
    console.log(err);
  } else {
    console.log('Connected to mongodb!');
  }
});
var CheckBoxSchema = mongoose.Schema({
  npi: String,
  boxes: [String]
});
var CheckBox = mongoose.model("CheckBox", CheckBoxSchema);
http.createServer(app).listen(3000);
// here's where we get something from the client.
app.get("/checkbox.json", function (req, res) {
  CheckBox.find({}, function (err, checkBoxes) {
    console.log("STUBB2", checkBoxes);
    res.json(checkBoxes);
  });
});
app.post("/checkbox", function (req, res) {
  console.log("POSTING TO DB: ", req.body);
  var newCkBoxData = new CheckBox({ "npi": req.body.npi, "boxes": req.body.boxes });
  newCkBoxData.save(function (err, results) {
    if (err !== null) {
      console.log(err);
      res.send("ERROR");
    } else {
      CheckBox.find({}, function (err, result) {
        if (err !== null) {
          // the element did not get saved
          res.send("ERROR");
        }
        res.json(result);
      });
    }
  });
});
The client, secA.js, pertains to a single HTML page.
var main = function (checkBoxObjects) {
  "use strict";
  $.getJSON("../data/checkBoxesA.json", function (checkBoxTxt) {
    checkBoxTxt.forEach(function (data) {
      $(".checkbox-input").append("<input type='checkbox' unchecked/>");
      $(".checkbox-input").append(' ' + data.label + "<br/>");
      $(".checkbox-input").append(' ' + data.note + "<br/>");
      $(".checkbox-input").append(' ' + "<br/>");
    });
  });
};
$(document).ready(main);
providerNPI_ckBs = [];
NPI_number = [];
var loopForm = function (form) {
  for (var i = 0; i < form.elements.length; i++) {
    if (form.elements[i].type == 'checkbox')
      if (form.elements[i].checked == true) {
        providerNPI_ckBs += 1 + ' ';
      } else {
        providerNPI_ckBs += 0 + ' ';
      }
  }
  if (providerNPI_ckBs.length > 0)
    if (NPI_number.length > 0)
      createJSONobj();
}
var getNPI = function () {
  NPI_number = document.getElementById("text_field1").value;
  if (NPI_number.length > 0)
    if (providerNPI_ckBs.length > 0) {
      createJSONobj();
    }
}
var createJSONobj = function () {
  var JSONobj = '{' + JSON.stringify(NPI_number) + ':' +
    JSON.stringify(providerNPI_ckBs) + '}';
  JSON.stringify(JSONobj);
  console.log(JSONobj);
  // here we'll do a quick post to our todos route
  $.post("npi_checks", JSONobj, function (response) {
    console.log("We posted and the server responded!");
    console.log(response);
  });
}
// Note: This is temporary as I'm only intending to send JSON data one way
// to the server. I'd just like to verify that I can send data both ways.
$(document).ready(function (checkBoxObjects) {
  $.getJSON("checkbox.json", function (checkBoxObjects) {
    console.log("Client Received Array from Server: ", checkBoxObjects);
    main(checkBoxObjects);
  });
});
The Chrome console responds immediately with GET http://127.0.0.1:3000/html/checkbox.json 404 (Not Found)
The page loads and will accept data which the secA.js script formats as JSON. The database has been started by the server. All I need to know is how to send the data over to the server!
I'm clearly new to JavaScript, and producing this application is part of learning the language along with MongoDB. I've structured this application similarly to an example from a tutorial book. One difference is that in the tutorial the traffic goes two ways between client and server.
Any help is appreciated!
If the first argument to $.post on the client side is changed from "npi_checks" to "/checkbox", matching the first argument of app.post, the data gets to the server and is loaded into MongoDB. This is the simple solution.
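That is, inside createJSONobj the post becomes (a sketch, everything else unchanged):
$.post("/checkbox", JSONobj, function (response) {
  console.log("We posted and the server responded!");
  console.log(response);
});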
I want to download a zip file from the internet and unzip it in memory without saving to a temporary file. How can I do this?
Here is what I tried:
var url = 'http://bdn-ak.bloomberg.com/precanned/Comdty_Calendar_Spread_Option_20120428.txt.zip';
var request = require('request'), fs = require('fs'), zlib = require('zlib');
request.get(url, function (err, res, file) {
  if (err) throw err;
  zlib.unzip(file, function (err, txt) {
    if (err) throw err;
    console.log(txt.toString()); //outputs nothing
  });
});
[EDIT]
As suggested, I tried using the adm-zip library, and I still cannot make this work:
var ZipEntry = require('adm-zip/zipEntry');
request.get(url, function (err, res, zipFile) {
  if (err) throw err;
  var zip = new ZipEntry();
  zip.setCompressedData(new Buffer(zipFile.toString('utf-8')));
  var text = zip.getData();
  console.log(text.toString()); // fails
});
You need a library that can handle buffers. The latest version of adm-zip will do:
npm install adm-zip
My solution uses the http.get method, since it returns Buffer chunks.
Code:
var file_url = 'http://notepad-plus-plus.org/repository/7.x/7.6/npp.7.6.bin.x64.zip';
var AdmZip = require('adm-zip');
var http = require('http');
http.get(file_url, function (res) {
  var data = [], dataLen = 0;
  res.on('data', function (chunk) {
    data.push(chunk);
    dataLen += chunk.length;
  }).on('end', function () {
    var buf = Buffer.alloc(dataLen);
    for (var i = 0, len = data.length, pos = 0; i < len; i++) {
      data[i].copy(buf, pos);
      pos += data[i].length;
    }
    var zip = new AdmZip(buf);
    var zipEntries = zip.getEntries();
    console.log(zipEntries.length)
    for (var i = 0; i < zipEntries.length; i++) {
      if (zipEntries[i].entryName.match(/readme/))
        console.log(zip.readAsText(zipEntries[i]));
    }
  });
});
The idea is to create an array of buffers and concatenate them into a new one at the end. This is due to the fact that buffers cannot be resized.
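As a side note, Node's built-in Buffer.concat() performs the same copy in a single call, so the manual loop above could be written as:
var buf = Buffer.concat(data, dataLen);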
Update
This is a simpler solution that uses the request module to obtain the response in a buffer, by setting encoding: null in the options. It also follows redirects and resolves http/https automatically.
var file_url = 'https://github.com/mihaifm/linq/releases/download/3.1.1/linq.js-3.1.1.zip';
var AdmZip = require('adm-zip');
var request = require('request');
request.get({ url: file_url, encoding: null }, (err, res, body) => {
  var zip = new AdmZip(body);
  var zipEntries = zip.getEntries();
  console.log(zipEntries.length);
  zipEntries.forEach((entry) => {
    if (entry.entryName.match(/readme/i))
      console.log(zip.readAsText(entry));
  });
});
The body of the response is a buffer that can be passed directly to AdmZip, simplifying the whole process.
Sadly you can't pipe the response stream straight into the unzip job the way the Node zlib library allows; you have to cache it and wait for the end of the response. For big files I suggest piping the response to an fs stream instead, otherwise you will fill up your memory in a blink!
I don't completely understand what you are trying to do, but IMHO this is the best approach: keep your data in memory only for as long as you really need it, and then stream it to the csv parser.
If you want to keep all your data in memory, you can replace the csv parser method fromPath with from, which takes a buffer instead, and have getData return the unzipped data directly, as sketched below.
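A hedged sketch of that in-memory variant, assuming the old csv 0.x API used in the code below (from in place of fromPath; newer csv packages differ):
function getData(url, callback) {
  request.get({ url: url, encoding: null }, function (err, res, body) {
    var zip = new NodeZip(body.toString('base64'), { base64: true })
    var filename = Object.keys(zip.files)[0] // assumption: the zip holds a single file
    callback(err, zip.files[filename].data)  // hand back the unzipped text directly
  })
}
getData(url, function (err, unzipped) {
  csv().from(unzipped, { delimiter: '|', columns: true })
  // ...then the same .transform()/.on() chain as in the code below
})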
You can use adm-zip (as @mihai said) instead of node-zip; just pay attention, because adm-zip is not yet published on npm, so you need:
$ npm install git://github.com/cthackers/adm-zip.git
N.B. Assumption: the zip file contains only one file
var request = require('request'),
    fs = require('fs'),
    csv = require('csv'),
    NodeZip = require('node-zip')

function getData(tmpFolder, url, callback) {
  var tempZipFilePath = tmpFolder + new Date().getTime() + Math.random()
  var tempZipFileStream = fs.createWriteStream(tempZipFilePath)
  request.get({
    url: url,
    encoding: null
  }).on('end', function () {
    fs.readFile(tempZipFilePath, 'base64', function (err, zipContent) {
      var zip = new NodeZip(zipContent, { base64: true })
      Object.keys(zip.files).forEach(function (filename) {
        var tempFilePath = tmpFolder + new Date().getTime() + Math.random()
        var unzipped = zip.files[filename].data
        fs.writeFile(tempFilePath, unzipped, function (err) {
          callback(err, tempFilePath)
        })
      })
    })
  }).pipe(tempZipFileStream)
}

getData('/tmp/', 'http://bdn-ak.bloomberg.com/precanned/Comdty_Calendar_Spread_Option_20120428.txt.zip', function (err, path) {
  if (err) {
    return console.error('error: %s', err.message)
  }
  var metadata = []
  csv().fromPath(path, {
    delimiter: '|',
    columns: true
  }).transform(function (data) {
    // do things with your data
    if (data.NAME[0] === '#') {
      metadata.push(data.NAME)
    } else {
      return data
    }
  }).on('data', function (data, index) {
    console.log('#%d %s', index, JSON.stringify(data, null, ' '))
  }).on('end', function (count) {
    console.log('Metadata: %s', JSON.stringify(metadata, null, ' '))
    console.log('Number of lines: %d', count)
  }).on('error', function (error) {
    console.error('csv parsing error: %s', error.message)
  })
})
If you're on macOS or Linux, you can use the unzip command to unzip from stdin.
In this example I'm reading the zip file from the filesystem into a Buffer object, but it works with a downloaded file as well:
// Get a Buffer with the zip content
var fs = require("fs")
  , zip = fs.readFileSync(__dirname + "/test.zip");
// Now the actual unzipping:
var spawn = require('child_process').spawn
  , fileToExtract = "test.js"
  // -p tells unzip to extract to stdout
  , unzip = spawn("unzip", ["-p", "/dev/stdin", fileToExtract])
  ;
// Write the Buffer to stdin
unzip.stdin.write(zip);
// Handle errors
unzip.stderr.on('data', function (data) {
  console.log("There has been an error: ", data.toString("utf-8"));
});
// Handle the unzipped stdout
unzip.stdout.on('data', function (data) {
  console.log("Unzipped file: ", data.toString("utf-8"));
});
unzip.stdin.end();
Which is actually just the node version of:
cat test.zip | unzip -p /dev/stdin test.js
EDIT: It's worth noting that this will not work if the input zip is too big to be read in one chunk from stdin. If you need to read bigger files, and your zip file contains only one file, you can use funzip instead of unzip:
var unzip = spawn("funzip");
If your zip file contains multiple files (and the file you want isn't the first one), I'm afraid you're out of luck: unzip needs to seek in the .zip file, since zip files are just a container, and from a pipe it may only manage to unzip the last file in it. In that case you have to save the file temporarily (node-temp comes in handy).
Two days ago the module node-zip was released, which is a wrapper for the JavaScript-only version of Zip: JSZip.
var NodeZip = require('node-zip')
  , zip = new NodeZip(zipBuffer.toString("base64"), { base64: true })
  , unzipped = zip.files["your-text-file.txt"].data;