Stack Overflow JS geniuses!
I have an issue with my current project. It uses Node's http.createServer, with Formidable parsing the body data.
See code below. (http-listener.js)
var listenport = 7200;
const server = http.createServer((req, res) => {
// Set vars ready
var data = '';
var plateImg = '';
var overview1 = '';
var overview2 = '';
new formidable.IncomingForm().parse(req)
// I originally thought it was sent as files, but it isn't; it's fields.
.on('file', function(name, file) {
console.log('Got file:', name);
})
// This is the correct procedure for my issue.
.on('field', function(name, field) {
console.log('Got a field:', name);
if(name.toLowerCase() === "anpr.xml")
{
// DO PARSE INTO JSON! This works, all is well.
xml2js.parseString(field, {explicitArray:false, ignoreAttrs:true}, function (err, result)
{
if(err)
{
console.error('Parse: ' + err);
}
// Console log parsed json data.
console.log("Read: "+result.EventNotificationAlert.ANPR.licensePlate);
console.log(result);
data = result;
});
}
if(name.toLowerCase() === "licenseplatepicture.jpg")
{
plateImg = field;
// This doesn't work?
// I need to store this field as an image. Is that possible when it's sent as a field rather than as a file upload?
// This is the only option I have as I can't control the client sending this data (It's a camera)
fs.writeFile(config.App.ImageDir+'/Plate.jpg', plateImg, function(err) {
if(err)console.log(err);
});
}
if(name.toLowerCase() === "detectionpicture.jpg")
{
if(overview1 == '')
{
overview1 = field;
}
else if(overview2 == '')
{
overview2 = field;
}
else
{
// do nothing else.
console.log("Couldn't send images to variable.");
}
}
})
.on('error', function(err) {
console.error(err);
})
.on('end', function() {
// Once finished, send the ANPR data off to a function that handles it and inserts it into the database. WORKS
// Call anpr function.
ANPR_ListenData(data, plateImg, overview1, overview2, function(result) {
if(result.Status > 0)
{
console.log("Accepted by: "+result.Example);
// reset var
data = '';
plateImg = '';
overview1 = '';
overview2 = '';
res.writeHead(200, {'content-type':'text/html'});
res.end();
}
});
});
});
server.listen(listenport, () => {
console.log('ANPR Server listening on port: ' + listenport);
});
Basically, I want to store the images that are sent in the fields (licenseplatepicture.jpg etc.) directly in my app's image directory.
Unfortunately I have no control over how the data is sent to this server, since it comes from a network camera; I simply need to write a procedure to handle it.
The full request is quite large, so I have uploaded a capture to OneDrive for you to glance at to understand the request.
Any help with this will be appreciated. I've tried everything I can think of, but the file always saves as unreadable :(. I don't know where else to look or what else I can try.
Request Txt File: https://1drv.ms/t/s!AqAIyFoqrBTO6hTwCimcHDHODqEi?e=pxJY00
Ryan.
I fixed this by using the Busboy package instead of Formidable.
This is what my HTTP listener looks like using Busboy.
var inspect = util.inspect;
var Busboy = require('busboy');
http.createServer(function(req, res) {
if (req.method === 'POST') {
//vars
var ref = Math.random().toString(36).substring(5) + Math.random().toString(36).substring(2, 15);
var xml = '';
var parseXml = '';
var over1, over2 = '';
var i = 0;
var busboy = new Busboy({ headers: req.headers });
busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
console.log('File [' + fieldname + ']: filename: ' + filename + ', encoding: ' + encoding + ', mimetype: ' + mimetype);
if(filename.toLowerCase() === "licenseplatepicture.jpg")
{
var saveTo = config.App.ImageDir+"/"+ref+"_Plate.jpg";
if (!fs.existsSync(saveTo)) {
// only write the file if it doesn't already exist
file.pipe(fs.createWriteStream(saveTo));
}
}
if(filename.toLowerCase() === "detectionpicture.jpg")
{
i++;
var saveTo = config.App.ImageDir+"/"+ref+"_Front_"+i+".jpg";
if (!fs.existsSync(saveTo)) {
// only write the file if it doesn't already exist
file.pipe(fs.createWriteStream(saveTo));
}
}
file.on('data', function(data) {
if(filename.toLowerCase() === "anpr.xml")
{
xml += data;
}
console.log('File [' + fieldname + '] got ' + data.length + ' bytes');
});
file.on('end', function() {
console.log('File [' + fieldname + '] Finished');
});
});
busboy.on('field', function(fieldname, val, fieldnameTruncated, valTruncated, encoding, mimetype) {
console.log('Field [' + fieldname + ']: value: ' + inspect(val));
// No fields according to busboy
});
busboy.on('finish', function() {
// DO PARSE INTO JSON! This works, all is well.
xml2js.parseString(xml, {explicitArray:false, ignoreAttrs:true}, function (err, result)
{
if(err)
{
console.error('Parse: ' + err);
}
// Set parsed var
parseXml = result;
});
var images = '';
if(i === 2)
{
images = `{"Plate":"${ref}_Plate.jpg", "Front":"${ref}_Front_1.jpg", "Overview":"${ref}_Front_2.jpg"}`;
} else {
images = `{"Plate":"${ref}_Plate.jpg", "Front":"${ref}_Front_1.jpg", "Overview":"null"}`;
}
// Once parsed, send on to ANPR listen function.
ANPR_ListenData(ref, parseXml, images, function(result) {
if(result.Status == 1)
{
console.log('Data transferred for: '+parseXml.EventNotificationAlert.ANPR.licensePlate);
console.log('Accepted Camera: '+result.Example);
res.writeHead(200, { Connection: 'close', Location: '/' });
res.end();
}
});
});
req.pipe(busboy);
}
}).listen(7200, function() {
console.log('Listening for requests');
});
Hope this helps someone else in the future. It certainly cost me a lot of wasted time.
Once I read into it more, Busboy was the better package to use; it makes more sense for what I was trying to achieve.
Ryan :).
All the best.
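Edit: if you land here using a current version of busboy (1.x or newer), note that the API has changed since I wrote this: the package now exports a plain function instead of a constructor, and the 'file' event passes an info object instead of separate filename/encoding/mimetype arguments. Roughly, based on my reading of the newer busboy docs (treat this as a sketch, not a drop-in):
const busboy = require('busboy');
const fs = require('fs');
// inside the request handler:
const bb = busboy({ headers: req.headers });
bb.on('file', (name, stream, info) => {
  // info.filename / info.encoding / info.mimeType replace the old positional arguments
  if (info.filename.toLowerCase() === 'licenseplatepicture.jpg') {
    stream.pipe(fs.createWriteStream(config.App.ImageDir + '/' + ref + '_Plate.jpg'));
  } else {
    stream.resume(); // always drain file streams you don't use
  }
});
bb.on('close', () => {
  // all parts have been processed
});
req.pipe(bb);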
Related
I am facing a "write after end" problem in Node.js:
I have a server.js file, which sends the request on to another js file (say abc.js); that file sends the response back to server.js, and server.js then writes the response and ends it.
My problem is that if I write and end the response in abc.js itself, it works fine, but if I do it in server.js it doesn't.
Let me make it clear that I only get this bug when I send 20-30 requests at a time. I want to know the logic behind it; I searched a lot but found no good answer. Any help will be appreciated.
server.js full code:
/* create HTTP server */
var http = require('http');
var fs = require('fs');
var httpd = http.createServer(function(req, res) {
res.setHeader('Access-Control-Allow-Origin', '*');
res.writeHead(200, {"Content-Type" : "application/json"});
}).listen(3800);
/* send request to the file mentioned in url*/
httpd.on('request', function(req, res) {
urll = __dirname + '/..' + req.url;
fs.exists(urll, function (exists) {
if(exists){
var server = require(urll);
server.get(req,res);
}
});
module.exports = {
result : function(result){
if(Array.isArray(result)){
for(var key in result){
result[key] = JSON.parse(result[key]);
}
}
result = JSON.stringify(result);
res.write(result ,function(err) { if(!err) res.end(); });
},
};
});
**apps.js code**:
var fs = require('fs');
var constants = require('./lib/constant.js');
var APP_PATH = constants.APP_PATH;
module.exports = {
get : function(req) {
req.on('data', function(chunk) {
var hash = chunk;
hash = JSON.parse(hash);
var id = hash.id;
dirPath = APP_PATH + id;
fs.exists( dirPath, function (exists) {
if(exists)
read_app_dir(dirPath);
else
taskDone([]);
});
});
}
};
function read_app_dir(app_dir){
fs.readdir(app_dir,function(err, list){
if (err) {
httpd.log.info('cannot read apps dir at s_apps = '+err);
}else{
create_new_obj(list,app_dir);
}
});
}
function create_new_obj(list, app_dir){
appFilesObj = [];
var i = 0;
list.forEach(function(file) {
i=i+1;
file = app_dir +'/' +file;
appFilesObj.push(file);
if(i == Object.keys(list).length)
read_app_files(appFilesObj);
});
}
function read_app_files(appFilesObj,app_dir){
var apps = [];
var i = 0;
if(Object.keys(appFilesObj).length > 0){
appFilesObj.forEach(function(appfile) {
read_file(appfile,function(data){ i=i+1;
apps.push(data);
if(i == Object.keys(appFilesObj).length)
taskDone(apps);
});
});
}else{
taskDone([]);
}
}
function read_file(file,callback){
fs.readFile(file,'utf8', function (err, data) {
if (err)
httpd.log.info('cannot read file at s_apps = '+err);
else
callback(data);
});
}
function taskDone(apps){
var httpd = require(__dirname + '/server.js');
httpd.result(apps);
}
If I do res.write and res.end in this file, inside taskDone(), then it works fine.
Thanks in advance :)
The problem with the above code was that I was sending the response back by calling an exported function of server.js,
like this:
var httpd = require(__dirname + '/server.js');
httpd.result(apps);
where result() is the function I exported in server.js to write and end the response.
Instead of this, I added callback support when calling the functions of the other files (e.g. apps.js), so that I only call res.write and res.end() when the called function actually gives back its result.
(I am not writing out the whole code; please refer to the code above to see the difference.)
httpd.on('request', function(req, res) {
urll = __dirname + '/..' + req.url;
fs.exists(urll, function (exists) {
if(exists){
var server = require(urll);
server.get(req,res,function(result){
res.write(result);
res.end();
});
}
});
});
**apps.js**
get : function(req, callback) {
req.on('data', function(chunk) {
// when the task is done and taskDone() is called, I just callback() the result
function taskDone(result){
callback(result);
}
});
}
When I was sending the result back by calling a function of server.js and then writing the response, I don't know exactly how, but somehow the server was getting confused between multiple requests and throwing the "write after end" error, because end had already been called for some other user's request.
I may be wrong, but this is what I concluded from this :)
I hope this may help others.
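For anyone hitting the same "write after end" error: the underlying rule is that each response object belongs to exactly one request, so it has to be written and ended from within that request's own flow, never through a module-level export that every request shares. Putting the two halves above together, a minimal self-contained sketch of the callback pattern (the { ok: true } payload and file paths are just placeholders):
// server.js (sketch)
var http = require('http');
var fs = require('fs');

var httpd = http.createServer().listen(3800);

httpd.on('request', function(req, res) {
  var urll = __dirname + '/..' + req.url;
  fs.exists(urll, function (exists) {
    if (exists) {
      var handler = require(urll);
      // this callback closes over THIS request's res, so responses cannot get mixed up
      handler.get(req, function(result) {
        res.writeHead(200, { 'Content-Type': 'application/json' });
        res.end(JSON.stringify(result));
      });
    } else {
      res.statusCode = 404;
      res.end();
    }
  });
});

// apps.js (sketch)
module.exports = {
  get: function(req, callback) {
    req.on('data', function(chunk) {
      // ... do the real work for this chunk, then hand the result back ...
      callback({ ok: true });
    });
  }
};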
I have a node application that reads an uploaded file like so:
router.route('/moduleUpload')
.post(function (request, response) {
request.files.file.originalname = request.files.file.originalname.replace(/ +?/g, '');
var media = new Media(request.files.file, './user_resources/module/' + request.body.module_id + '/');
if (!fs.existsSync(media.targetDir)) {
fs.mkdirSync(media.targetDir, 0777, function (err) {
if (err) {
console.log(err);
response.send("ERROR! Can't make the directory! \n"); // echo the result back
}
});
fs.chmodSync(media.targetDir, 0777);
}
moveFile(media);
var token = jwt.encode({
mediaObject: media
}, require('../secret')());
response.status(200).json(token);
});
Now, when this file is uploaded and status code 200 is received, my system calls the following route:
router.route('/resourcePath/:encodedString')
.all(function (req, res) {
var decoded = jwt.decode(req.params.encodedString, require('../secret')());
var mediaObject = decoded.mediaObject;
var ext = mediaObject.file.originalname.substr(mediaObject.file.originalname.lastIndexOf('.'));
var path = 'app_server' + mediaObject.targetDir.substring(1) + mediaObject.fileName + ext;
var fileExist = false;
res.status(200).send(path)
});
Now, for some reason this route is called before the file is actually in place, which means that sometimes my users cannot see the content.
To make sure the file is in the folder, I thought of adding the following code:
var fileExist = false;
while (!fileExist) {
if (fs.existsSync('/var/www/learningbankapp/'+path)) {
fileExist = true;
}
}
However, I'm not sure this is a good solution, namely because it goes against Node.js's nature. So my question is: is there a better way to do it?
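If you really have to poll for the file, a less blocking variant looks something like the sketch below (retry count and delay are arbitrary); the cleaner fix, though, is to only send the 200 from /moduleUpload once moveFile has actually finished, so the second route can assume the file exists.
function waitForFile(filePath, retriesLeft, done) {
  fs.access(filePath, function (err) {
    if (!err) return done(null); // the file is there
    if (retriesLeft <= 0) return done(new Error('file never appeared: ' + filePath));
    setTimeout(function () {
      waitForFile(filePath, retriesLeft - 1, done); // retry later without blocking the event loop
    }, 200);
  });
}

// usage inside the route:
waitForFile('/var/www/learningbankapp/' + path, 25, function (err) {
  if (err) return res.status(500).send(err.message);
  res.status(200).send(path);
});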
Here is server.js
var express = require("express"),
http = require("http"),
mongoose = require( "mongoose" ),
app = express();
app.use(express.static(__dirname + "/client"));
app.use(express.urlencoded());
mongoose.connect('mongodb://localhost/PvdEnroll', function(err) {
if (err) {
console.log(err);
} else {
console.log('Connected to mongodb!');
}
});
var CheckBoxSchema = mongoose.Schema({
npi: String,
boxes:[ String]
});
var CheckBox = mongoose.model("CheckBox", CheckBoxSchema);
http.createServer(app).listen(3000);
// here's where we get something from the client.
app.get("/checkbox.json", function (req, res) {
CheckBox.find({}, function(err, checkbox) {
console.log("STUBB2", checkbox);
res.json(checkbox);
});
});
app.post("/checkbox", function (req, res)
console.log("POSTING TO DB: ",req.body);
var newCkBoxData = new npiChecks({"npi": req.body.npi, "boxes":req.boxes});
newCkBoxData.save(function(err, results) {
if (err !== null) {
console.log(err);
res.send("ERROR");
} else {
CheckBox.find({}, function(err, result) {
if (err !== null) {
// the element dir not get saved
res.send("ERROR");
}
res.json(result);
});
}
});
});
The client, secA.js, pertains to a single HTML page.
var main = function (checkBoxObjects) {
"use strict";
$.getJSON("../data/checkBoxesA.json", function(checkBoxTxt) {
checkBoxTxt.forEach(function (data) {
$(".checkbox-input").append("<input type='checkbox' unchecked/>");
$(".checkbox-input").append(' ' + data.label + "<br/>");
$(".checkbox-input").append(' ' + data.note + "<br/>");
$(".checkbox-input").append(' '+ "<br/>");
});
});
};
$(document).ready(main);
providerNPI_ckBs = [];
NPI_number = [];
var loopForm = function(form) {
for ( var i = 0; i < form.elements.length; i++) {
if (form.elements[i].type == 'checkbox')
if (form.elements[i].checked == true) {
providerNPI_ckBs += 1 + ' ';
} else {
providerNPI_ckBs += 0 + ' ';
}
}
if (providerNPI_ckBs.length > 0)
if (NPI_number.length > 0)
createJSONobj();
}
var getNPI = function() {
NPI_number = document.getElementById("text_field1").value;
if (NPI_number.length > 0)
if (providerNPI_ckBs.length > 0) {
createJSONobj();
}
}
var createJSONobj = function() {
var JSONobj = '{' + JSON.stringify(NPI_number) + ':' +
JSON.stringify(providerNPI_ckBs) + '}';
JSON.stringify(JSONobj);
console.log(JSONobj);
// here we'll do a quick post to our todos route
$.post("npi_checks", JSONobj, function (response) {
console.log("We posted and the server responded!");
console.log(response);
});
}
// Note: This is temporary as I'm only intending to send JSON data one way
// to the server. I'd just like to verify that I can send data both ways
$(document).ready(function (checkBoxObjects) {
$.getJSON("checkbox.json", function (checkBoxObjects) {
console.log("Client Recieved Array from Server: ", checkBoxObjects);
main(checkBoxObjects);
});
});
The Chrome console responds immediately with GET http://127.0.0.1:3000/html/checkbox.json 404 (Not Found)
The page loads and will accept data which the secA.js script formats as JSON. The database has been started by the server. All I need to know is how to send the data over to the server!
I'm clearly new to javascript and producing this application is part of learning the language along with MongoDB. I've structured this application similarly to an example tutorial book. One difference is that in the tutorial the traffic is two ways between client and server.
Any help is appreciated!
If the first argument to $.post on the client side is changed from "npi_checks" to "/checkbox", to match the first argument of app.post, the data gets to the server and is loaded into MongoDB. This is the simple solution.
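For completeness, a sketch of what the client call could look like once the URL matches (using the field names already defined above); with express.urlencoded() in place, the values then arrive as req.body.npi and req.body.boxes on the server:
$.post("/checkbox", { npi: NPI_number, boxes: providerNPI_ckBs }, function (response) {
  console.log("We posted and the server responded!");
  console.log(response);
});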
I've been working on a Node.js-based server for a project. Currently, I have a script on an HTML page that POSTs data to a .jsp page on the same server, which works. However, I can't seem to send data back down the pipe. I've noted the same with various Node.js posting scripts.
EDIT: Where I say JSP, I just mean standard JavaScript; I gave the pages .jsp extensions instead of .js so I could still serve scripts as .js.
EDIT 2: The headers /do/ get sent. It just appears that response.write() or response.end() aren't actually sending anything. I've tried gzipping the data and changing the Transfer-Encoding header to match, with no luck.
Think I might wireshark it.
EDIT 3: Wireshark can't detect the POSTs for some reason >.>
EDIT 4: Noticed that response.write() is returning false. Not sure how to debug. I'll add some more code at the end.
Here's the JSP page's code.
/**
* This POST parser takes XML data and translates it as follows..
*
* XML -> JS Object -> XML
*
* This is so I can test some libraries.
*/
// Eyes, to inspect variables
var inspect = require('eyes').inspector({maxLength: false});
// This is an XML parser, that parses XML into standard JS arrays
var xml2js = require('xml2js');
// This one converts JSON (Or JS objects) to XML.
var jsontoxml = require('jsontoxml');
exports.separateHeader = false; // We have no separate header function
exports.separateEnd = true;
exports.GET = function(query, request, response) { // Our GET function.
response.writeHead(200, { // Write the header
'Content-Type': 'text/plain; charset=utf-8'
});
response.end("No POST data here!"); // Tell the user that there was no POST data.
}
exports.POST = function(query, postdata, request, response) { // Our POST function.
var rootnode = undefined;
var realdata = undefined;
var result = undefined;
if( postdata["data"].startsWith("<?xml") ) // It's XML, parse it
{
console.log(" | Detected XML.");
var parser = new xml2js.Parser(); // Create an XML parser
parser.parseString(postdata["data"], function (err, data) { // Parse the XML from the POST data
if(err){
inspect(err); // If we have an error, inspect it with eyes
}
else
{
// inspect(result); // Removed; printed the XML data as an array
for (var prop in data) { // Get the root node of our XML; this is the command
rootnode = prop;
break;
}
realdata = data[rootnode]; // Get the data without the root node
result = data;
}
});
}
else // Try to parse it as JSON
{
console.log(" | Detected JSON.");
result = JSON.parse(postdata["data"]);
for (var prop in result) { // Get the root node of our JSON; this is the command
rootnode = prop;
break;
}
realdata = result[rootnode]; // Get the data without the root node
}
console.log(" | Before: ")
inspect(postdata["data"]); // Inspect the data we've got (XML or JSON)
console.log(" | Native object: ")
inspect(result); // Inspect the data that's been parsed to an object
console.log(" | XML: ")
xmldata = jsontoxml.obj_to_xml(result) // Object -> XML
xmldata = '<?xml version="1.0" encoding="UTF-8"?><Request>' + xmldata + "</Request>"; // WUPOS extra XML stuff and the XML header
inspect(xmldata); // Inspect the XML created from the object
response.writeHead(200, { // Write the header
'Content-Type': 'text/plain; charset=utf-8',
'Content-Length': xmldata.length
});
response.write(xmldata);
}
And here is the internal code for the HTTP response:
var fs = require('fs');
var url = require('url');
var path = require('path');
var querystring = require("querystring")
var ext = require("./ext.js").ext // For getting MIME types (I know, there's another module for this)
// Logging function
function Log(message, prefix, isSecure)
{
if (!prefix)
{
prefix = " ";
}
else
{
if (isSecure)
{
prefix = "HTTPS";
}
else
{
prefix = "HTTP ";
}
}
console.log(prefix + " | " + message);
}
exports.Log = Log;
// httpRequest; this function serves standard HTTP requests
function httpRequest(request, response, isSecure) {
request.setEncoding('utf-8'); // Set the encoding
requrl = url.parse(request.url, true); // Parse the URL
reqhost = request.connection.address();// Get the IP and port of the user
if (requrl.pathname == "/") // If they were requesting the root..
{
if (path.existsSync("../html/index.jsp")) // If index.jsp exists..
{
reqfile = "/index.jsp"; // Remember that we want that file
}
else // Otherwise, index.html
{
reqfile = "/index.html";
}
// Log it
if (requrl.search) {
Log(
"[" + reqhost.address + ":" + reqhost.port + "] " + request.method + " " + reqfile + requrl.search + " (Redirected from \"/\")",
true, isSecure
);
}
else {
Log(
"[" + reqhost.address + ":" + reqhost.port + "] " + request.method + " " + reqfile + " (Redirected from \"/\")",
true, isSecure
);
}
}
else // If not,
{ // Log it,
Log(
"[" + reqhost.address + ":" + reqhost.port + "] " + request.method + " " + requrl.href
, true, isSecure
);
reqfile = requrl.pathname; // Remember which file was requested
}
if (reqfile.endsWith(".jsp")) { // If the file is a JS page
try { // Try..
reqjs = require("../html/" + reqfile); // ..to import the code from our script
if (reqjs.separateHeader) { // If the script has a separate function for sending the header..
reqjs.headers(request, response); // Send it
}
if (request.method == 'GET') // If we have a GET
{
reqjs.GET(requrl.query, request, response); // Run the script's GET function
}
else if (request.method == 'POST') // If we have a POST
{
// Grab all the POST data
var fullBody = '';
request.on('data', function(chunk) {
if(fullBody.length > 1e6) // If we're getting a massive amount of data, kill the connection
{
Log("POST flood attack / faulty client detected. Connection closed.", false, isSecure);
request.connection.destroy();
return;
}
fullBody += chunk.toString();
});
request.on('end', function() {
var postdata = querystring.parse(fullBody); // Parse the POST data
if (reqjs.POST) // If the script has a POST function,
{
reqjs.POST(requrl.query, postdata, request, response); // Call it
}
else
{ // Otherwise, just call the GET function
reqjs.GET(requrl.query, request, response);
}
});
}
}
catch(e) // If there's an error..
{
response.writeHead(500, {
'Content-Type': 'text/plain'
});
response.write("Error: " + e); // Send it to the browser
Log("Error: " + e, false, isSecure); // Log it
}
response.end(); // Finish the response
}
else // If the file is not a JS page,
{
fs.readFile("html" + reqfile, function(err, data) { // Read the file in
if(err) { // If there's an error..
errortype = err.message.split(",")[0]; // ..get the error's code
if (errortype == "ENOENT") // File not found
{
response.statusCode = 404;
response.end("File not found: " + reqfile); // Send them a 404
Log("File not found.", false, isSecure); // Log it
}
else if (errortype == "EISDIR") // File is actually a directory
{
if (path.existsSync("html" + reqfile + "/index.jsp")) // If there's an index.jsp file here..
{ // Redirect the browser
Log("Found index.jsp", false, isSecure);
response.writeHead(301, "Moved Permanently", {
"Location" : reqfile + "/index.jsp"
});
response.end("Please click here.")
return; // Return, so we don't have to wrap the next section of code in braces
}
else if (path.existsSync("html" + reqfile + "/index.html")) // Or, if there's an index.html file here..
{ // Redirect the browser
Log("Found index.html", false, isSecure);
response.writeHead(301, "Moved Permanently", {
"Location" : reqfile + "/index.html"
});
response.end("Please click here.")
return; // Return, so we don't have to wrap the next section of code in braces
}
// If we don't have those files, list them
Log("Listing files in html/"+reqfile, false, isSecure); // Log it
response.statusCode = 200; // Use Node.js's standard "OK" header
// Write out some HTML
response.write("<html><head></head><body>\n");
response.write("<h1>Directory listing: " + reqfile + "</h1>\n");
response.write("<ul>\n");
// List off the files
var filelist = fs.readdirSync("html" + reqfile);
// For every file..
for (element in filelist)
{
// Compile some HTML
var datastr = "";
datastr += "<li>";
datastr += "<a href=\"" + reqfile + "/" + filelist[element] + "\">";
if (filelist[element].endsWith(".jsp") || filelist[element].endsWith(".html"))
{ // If it ends in html or js, it's a normal page, so colour it green
datastr += "<span style=\"color: green;\">";
datastr += filelist[element];
datastr += "</span></a>";
}
else
{ // Otherwise, just put it in the list
datastr += filelist[element];
datastr += "</a>";
}
datastr += "</li>\n";
response.write(datastr); // Write out the HTML and go around again
}
response.end("</ul></body></html>"); // Finish the response
}
else
{ // There was some other problem when opening the file
Log("Could not open file: " + err, false, isSecure); // Log it
response.statusCode = 500 // Internal server error code
response.end("Could not open file: " + err.message); // Tell the browser
}
}
else
{ // No problems or anomalies. Serve it!
var contenttype = ext.getContentType(ext.getExt(reqfile).replace(".", "")); // Get the MIME type
Log("Content-Type: " + contenttype, false, isSecure); // Log it
response.writeHead(200, "OK", {
'Content-Type': contenttype
// 'Access-Control-Allow-Origin': 'http://b.localhost:25566',
// 'Access-Control-Allow-Methods': 'POST, GET',
// 'Access-Control-Allow-Headers': 'Content-Type'
});
response.write(data); // Send the data (TODO: Send in chunks?)
response.end() // End
}
});
}
}
exports.httpRequest = httpRequest;
And the code for the HTML page:
<html>
<head>
<title>JS test A</title>
<script src="js/jquery.js"></script>
</head>
<body style="margin-left: 30%; margin-right: 30%;">
<div id="tests" style="float:left; width=40%;">
Test A
</div>
<div id="output" style="float:right; width=60%;">
<form id="form">
<textarea id="output" name="output"></textarea>
</form>
</div>
<script>
$(document).ready(function(){
$("a#a").click(function(event){
text = $("textarea").val();
$("textarea").val(text + "POST test.\n");
text = $("textarea").val();
var http = new XMLHttpRequest();
jsonobj = {
array: {
obj1: "obj1",
obj2: "obj2"
},
obj3: "obj3"
}
var url = "postTest3.jsp";
var params = "data="+JSON.stringify(jsonobj);
http.open("POST", url, true);
//Send the proper header information along with the request
http.setRequestHeader("Content-type", "application/x-www-form-urlencoded");
http.onreadystatechange = function() {//Call a function when the state changes.
if(http.readyState == 4 && http.status == 200) {
$("textarea").val(text + http.responseText);
}
else
{
$("textarea").val(text + "Repsponded: " + http.status + "\n");
}
}
http.send(params);
});
});
</script>
</body>
</html>
EDIT 4: Additional code
{
// Grab all the POST data
var fullBody = '';
request.on('data', function(chunk) {
if(fullBody.length > 1e6) // If we're getting a massive amount of data, kill the connection
{
Log("POST flood attack / faulty client detected. Connection closed.", false, isSecure);
request.connection.destroy();
return;
}
fullBody += chunk.toString();
});
request.on('end', function() {
var postdata = querystring.parse(fullBody); // Parse the POST data
if (reqjs.POST) // If the script has a POST function,
{
postout = reqjs.POST(requrl.query, postdata, request, response); // Call it
if (postout) {
inspect(response.write(postout, 'utf8'));
}
}
else
{ // Otherwise, just call the GET function
reqjs.GET(requrl.query, request, response);
}
});
}
Does anyone have any ideas on this?
Okay, so, I fixed the problem. Thought I'd share my solution here.
Basically, after adding some inspect() calls, it turned out that because the POST handler (and its response.write()) was being carried out asynchronously, while response.end() wasn't, response.end() was being called first. This is why response.write() returned false.
I fixed it by moving the response.end() into my asynchronous blocks.
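Boiled down to a self-contained example, the shape of the fix looks roughly like this (port and payload are arbitrary); the key point is that end() only happens inside the same asynchronous callback that does the writing:
var http = require('http');
var querystring = require('querystring');

http.createServer(function (request, response) {
  var fullBody = '';
  request.on('data', function (chunk) { fullBody += chunk.toString(); });
  request.on('end', function () {
    var postdata = querystring.parse(fullBody);
    response.writeHead(200, { 'Content-Type': 'text/plain; charset=utf-8' });
    response.write(JSON.stringify(postdata));
    response.end(); // end inside the async callback, after the write
  });
  // note: no unconditional response.end() out here; that is what was cutting the response short
}).listen(8080);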
I'm trying to create a file downloader as a background service, but when a large file is scheduled, it's first held in memory and then, at the end of the download, written to disk.
How can I make the file be written to disk gradually, preserving memory, considering that I may have lots of files being downloaded at the same time?
Here's the code I'm using:
var sys = require("sys"),
http = require("http"),
url = require("url"),
path = require("path"),
fs = require("fs"),
events = require("events");
var downloadfile = "http://nodejs.org/dist/node-v0.2.6.tar.gz";
var host = url.parse(downloadfile).hostname
var filename = url.parse(downloadfile).pathname.split("/").pop()
var theurl = http.createClient(80, host);
var requestUrl = downloadfile;
sys.puts("Downloading file: " + filename);
sys.puts("Before download request");
var request = theurl.request('GET', requestUrl, {"host": host});
request.end();
var dlprogress = 0;
setInterval(function () {
sys.puts("Download progress: " + dlprogress + " bytes");
}, 1000);
request.addListener('response', function (response) {
response.setEncoding('binary')
sys.puts("File size: " + response.headers['content-length'] + " bytes.")
var body = '';
response.addListener('data', function (chunk) {
dlprogress += chunk.length;
body += chunk;
});
response.addListener("end", function() {
fs.writeFileSync(filename, body, 'binary');
sys.puts("After download finished");
});
});
I changed the callback to:
request.addListener('response', function (response) {
var downloadfile = fs.createWriteStream(filename, {'flags': 'a'});
sys.puts("File size " + filename + ": " + response.headers['content-length'] + " bytes.");
response.addListener('data', function (chunk) {
dlprogress += chunk.length;
downloadfile.write(chunk, 'binary');
});
response.addListener("end", function() {
downloadfile.end();
sys.puts("Finished downloading " + filename);
});
});
This worked perfectly.
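(Side note for readers on newer Node versions: the same thing can be done with a pipe, which also handles backpressure for you. A minimal sketch using http.get instead of the old createClient API:)
var http = require('http');
var fs = require('fs');

http.get('http://nodejs.org/dist/node-v0.2.6.tar.gz', function (response) {
  var out = fs.createWriteStream('node-v0.2.6.tar.gz');
  response.pipe(out); // chunks stream straight to disk
  out.on('finish', function () {
    console.log('Download finished');
  });
});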
Does the request package work for your use case?
It lets you do things like this:
request(downloadurl).pipe(fs.createWriteStream(downloadtohere))
Take a look at http-request:
// shorthand syntax, buffered response
http.get('http://localhost/get', function (err, res) {
if (err) throw err;
console.log(res.code, res.headers, res.buffer.toString());
});
// save the response to 'myfile.bin' with a progress callback
http.get({
url: 'http://localhost/get',
progress: function (current, total) {
console.log('downloaded %d bytes from %d', current, total);
}
}, 'myfile.bin', function (err, res) {
if (err) throw err;
console.log(res.code, res.headers, res.file);
});
When downloading a large file, please use fs.write and not fs.writeFile, as writeFile will overwrite the previously written content.
function downloadfile(res) {
var size = 0; // running byte count
// 'options' and the sendstatus/sendendstatus helpers are assumed to be defined elsewhere
var requestserver = http.request(options, function(r) {
console.log('STATUS: ' + r.statusCode);
console.log('HEADERS: ' + JSON.stringify(r.headers));
var fd = fs.openSync('sai.tar.gz', 'w');
r.on('data', function (chunk) {
size += chunk.length;
console.log(size+'bytes received');
sendstatus(res,size);
fs.write(fd, chunk, 0, chunk.length, null, function(er, written) {
});
});
r.on('end',function(){
console.log('\nended from server');
fs.closeSync(fd);
sendendstatus(res);
});
});
}
Instead of holding the content in memory in the "data" event listener, you should write to the file in append mode.
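A minimal sketch of that append-mode idea (the URL and file name here are just placeholders):
var http = require('http');
var fs = require('fs');

http.get('http://example.com/big.tar.gz', function (res) {
  var out = fs.createWriteStream('big.tar.gz', { flags: 'a' }); // append mode
  res.on('data', function (chunk) {
    out.write(chunk); // each chunk goes to disk instead of piling up in memory
  });
  res.on('end', function () {
    out.end();
    console.log('done');
  });
});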
Use streams, as Carter Cole suggested. Here is a more complete example:
var inspect = require('eyespect').inspector();
var request = require('request');
var filed = require('filed');
var temp = require('temp');
var downloadURL = 'http://upload.wikimedia.org/wikipedia/commons/e/ec/Hazard_Creek_Kayaker.JPG';
var downloadPath = temp.path({prefix: 'singlePageRaw', suffix: '.jpg'});
var downloadFile = filed(downloadPath);
var r = request(downloadURL).pipe(downloadFile);
r.on('data', function(data) {
inspect('binary data received');
});
downloadFile.on('end', function () {
inspect(downloadPath, 'file downloaded to path');
});
downloadFile.on('error', function (err) {
inspect(err, 'error downloading file');
});
You may need to install the modules, which you can do via:
npm install filed request eyespect temp