Node.js Asynchronous read and write - javascript

I am having a problem with asynchronous file read and write operations: only the last file is written to the server.
js:
function uploadassignment(req, res){
    var path;
    var multiparty = require("multiparty");
    var form = new multiparty.Form();
    console.log(req.query);
    var filelength = req.query.filecount;
    console.log(filelength);
    form.parse(req, function(err, fields, files){
        console.log(req.body);
        for(i = 0; i < filelength; i++){
            var img = files.file[i];
            console.log(img);
            console.log('divide');
            var fs = require('fs');
            fs.readFile(img.path, function(err, data){
                var originalfile = img.originalFilename.split('.');
                console.log(originalfile);
                var file_ext = originalfile[1];
                path = "public/assignments/" + img.originalFilename;
                console.log(path);
                fs.writeFile(path, data, function(error){
                    if(error) console.log(error);
                });
            });
        }
    });
};

This is a common bug caused by using a loop variable inside an asynchronous callback without a closure. By the time the read callback is invoked, the loop has already terminated, so the shared img variable refers to the last file. Create a function (a closure) that accepts the index as a parameter and call this function from the loop:
function blah(i) {
    var img = files.file[i];
    console.log(img);
    console.log('divide');
    var fs = require('fs');
    fs.readFile(img.path, function(err, data){
        var originalfile = img.originalFilename.split('.');
        console.log(originalfile);
        var file_ext = originalfile[1];
        path = "public/assignments/" + img.originalFilename;
        console.log(path);
        fs.writeFile(path, data, function(error){
            if(error) console.log(error);
        });
    });
}
for(var i = 0; i < filelength; i++) blah(i);
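As an aside, in an ES2015+ environment you don't need the helper at all: let gives every loop iteration its own binding of i, and Array.prototype.forEach achieves the same by passing each element into its own function scope. A sketch of the forEach variant over the same files.file array (with a minimal error check added):
// each call gets its own img, so nothing is shared between iterations
files.file.forEach(function (img) {
    fs.readFile(img.path, function (err, data) {
        if (err) return console.log(err);
        var path = "public/assignments/" + img.originalFilename;
        fs.writeFile(path, data, function (error) {
            if (error) console.log(error);
        });
    });
});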

This isn't quite an answer, but it is too long for a comment.
What is not working? The file reading/writing bit of your code works fine:
var fs = require("fs");
var img = {
    path: "./test.txt",
    originalFilename: "test.txt"
};
fs.readFile(img.path, function(err, data){
    if(err) console.log(err);
    var originalfile = img.originalFilename.split('.');
    console.log(originalfile);
    var file_ext = originalfile[1];
    var path = "public/assignments/" + img.originalFilename;
    console.log(path);
    fs.writeFile(path, data, function(error){
        if(error) console.log(error);
    });
});
With a directory structure like:
script.js
test.txt
public/
    assignments/
I think your problem might be that you are assigning fs locally, then trying to call it from an async function. That might be why only the last one works (maybe).
Try moving var fs = require('fs'); to the top of your code.

Related

Node.js - Callback is not a function

I am trying to retrieve a variable hash to use in another module, but I have a problem with the callback: I get the error "callback is not a function". I use a callback because my variable hash is otherwise undefined, so I guess it's an asynchrony problem.
hash.js
var fs = require('fs');
var crypto = require('crypto');
var algorithm = 'sha256';
var hash = function(filename, callback){
    var shasum = crypto.createHash(algorithm);
    var s = fs.ReadStream(filename);
    s.on('data', function(data) {
        shasum.update(data);
    });
    s.on('end', function() {
        var hash = shasum.digest('hex');
        callback(hash);
    });
};
exports.hash = hash;
app.js
app.post('/upload', upload.single('userfile'), function(req, res){
    res.cookie('filename', req.file.originalname);
    res.cookie('filesize', req.file.size);
    var filename = __dirname + '/' + req.file.path;
    console.log(hash.hash(filename));
    //res.cookie('hash', hash.hash(filename));
    res.redirect('/hash');
});
Your hash() function is defined with this signature:
var hash = function(filename, callback)
That means you need to pass it BOTH a filename and a callback. But you are passing it only a filename:
console.log(hash.hash(filename))
As you can see from the implementation, the callback is not optional and is the only way to actually get the result:
hash.hash(filename, function(result) {
    console.log(result);
});
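Applied to the route in the question, that means the cookie and the redirect have to move inside the callback, because the digest only exists once it fires. A sketch with the same handlers:
app.post('/upload', upload.single('userfile'), function(req, res){
    res.cookie('filename', req.file.originalname);
    res.cookie('filesize', req.file.size);
    var filename = __dirname + '/' + req.file.path;
    hash.hash(filename, function(result) {
        res.cookie('hash', result); // the digest is only available here
        res.redirect('/hash');
    });
});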

Why is this unreachable code?

I'm calling the function getKeywords from another function and get an "unreachable code detected" warning on the marked lines, and I don't understand why. Any help?
var env = require('dotenv').config();
var request = require('request');
var getKeywords = function(){
    request.get('URI', //URI IS CORRECT IN MY CODE
        function(err, httpResponse, body){
            if(err){ //UNREACHABLE CODE DETECTED
                console.error("request.post Error:", err);
                return false;
            } //UNREACHABLE CODE DETECTED
            else{
                console.log('Im here');
                return JSON.parse(httpResponse.body).keywords;
            }
        });
};
module.export = getKeywords;
Here is the calling code.
var getKeywords = require('./getKeywords.js');
var keywords = new getKeywords();
var env = require('dotenv').config();
var difflib = require('difflib');
var postMention = require('./postMention.js');
var detection = function(obj, i){
    var keyword = keywords[i];
    var mentionObject = {
        //some json
        //postMention(mentionObject);
    };
};
module.exports = detection;
Some tools can analyze every call to your function. It may be that, across all the call sites in your code, the err parameter is never set, so the analyzer concludes the if branch can never execute.

Nodejs non-blocking and Event Emitter. Refresh URL

First of all, I'm not sure if the following is a non-blocking problem.
I'm getting started with https://github.com/sahat/hackathon-starter
Currently I try to read all files out of a folder and process them later...
I used EventEmitter to manage the workflow.
I want to clear all arrays when the URL is refreshed or loaded again, but somehow after a reload there seems to be something left inside the arrays, which causes multiple outputs of the same data.
At the moment I would just be happy to get correct console.log output.
/**
 * GET /
 * Home page.
 */
var fs = require('fs');
//XML
var jsxml = require("node-jsxml");
var Namespace = jsxml.Namespace,
    QName = jsxml.QName,
    XML = jsxml.XML,
    XMLList = jsxml.XMLList;
//EventEmitter
var EventEmitter = require('events').EventEmitter;
var dateinamenEE = new EventEmitter();
var dateiinhaltEE = new EventEmitter();
var dateinamen = [];
var dateiinhalt = [];
exports.index = function(req, res) {
    fs.readdir('./data', function (err, files) {
        if (!err) {
            files.forEach(function(value) {
                dateinamen.push(value);
            });
            dateinamenEE.emit('dateinamen_ready');
        } else {
            throw err;
        }
    });
    dateinamenEE.on('dateinamen_ready', function(){
        dateinamen.forEach(function(value) {
            var buf = fs.readFileSync('./data/' + value, "utf8");
            var xml = new XML(buf);
            var list = xml.descendants("suggestion");
            var ergebnis = "";
            var basiswort = "";
            var buchstabe = "";
            var obj = null;
            list.each(function(item, index){
                ergebnis = item.attribute('data').toString();
                //basiswort = value.replace("%2B", " ");
                //basiswort = basiswort.replace(".xml", "");
                //var pieces = buchstabe.split(" ");
                obj = {k: basiswort, b: buchstabe, e: ergebnis};
                dateiinhalt.push(obj);
            });
        });
        dateiinhaltEE.emit('dateiinhalt_ready');
    });
    dateiinhaltEE.on('dateiinhalt_ready', function(){
        //console.log(dateiinhalt);
        console.log("dateinamen:" + dateinamen.length);
        console.log("dateiinhalt:" + dateiinhalt.length);
    });
    res.render('home', {
        title: 'Home'
    });
};
If I log the length of the two arrays, the output grows on every reload. First time loading the URL:
Express server listening on port 3000 in development mode
dateinamen:2
dateiinhalt:20
Second time, refreshing the URL:
GET / 200 898.198 ms - -
GET /fonts/fontawesome-webfont.woff2?v=4.3.0 304 12.991 ms - -
GET /favicon.ico 200 4.516 ms - -
dateinamen:4
dateiinhalt:60
dateinamen:4
dateiinhalt:60
dateinamen:4
dateiinhalt:100
dateinamen:4
dateiinhalt:100
GET / 200 139.259 ms - -
What causes the arrays to keep growing when the page is reloaded?
The non-blocking problem is due to your for(...) loops.
Change them to: array.forEach(function(elem, index){});
EDIT
The arrays should be initialized inside the index function; module-level variables live for the lifetime of the process, so their contents survive across requests. (Likewise, every request registers fresh 'ready' listeners on the module-level emitters, which is why the totals get logged several times.)
exports.index = function(req, res) {
    var dateinamen = [];
    var dateiinhalt = [];
    ...
Also, I'm not sure you need EventEmitter at all. Something like:
fs.readdir('./data', function (err, files) {
    if (!err) {
        files.forEach(function(file){
            var buf = fs.readFileSync('./data/' + file, "utf8");
            var xml = new XML(buf);
            var list = xml.descendants("suggestion");
            var ergebnis = null;
            var obj = null;
            list.each(function(item, index){
                ergebnis = item.attribute('data').toString();
                obj = {k: file, v: ergebnis};
                dateiinhalt.push(obj);
            });
        });
        console.log(dateiinhalt);
    } else {
        throw err;
    }
});
could do the job, no?
(I wanted to post this as a comment, but I don't have enough reputation yet.)
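For what it's worth, if you want to stay non-blocking, the same idea works with fs.readFile and a plain completion counter instead of EventEmitter. A rough sketch, reusing the node-jsxml calls from the question (the counter and the per-request array are my additions):
exports.index = function(req, res) {
    var dateiinhalt = []; // per-request, so a reload starts empty
    fs.readdir('./data', function (err, files) {
        if (err) throw err;
        var pending = files.length; // how many files are still being read
        files.forEach(function (file) {
            fs.readFile('./data/' + file, 'utf8', function (err, buf) {
                if (err) throw err;
                var xml = new XML(buf);
                xml.descendants("suggestion").each(function (item, index) {
                    dateiinhalt.push({k: file, v: item.attribute('data').toString()});
                });
                if (--pending === 0) { // last read finished
                    console.log("dateiinhalt:" + dateiinhalt.length);
                }
            });
        });
    });
    res.render('home', { title: 'Home' });
};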

node.js write filestream disordered in p2p file download

I'm trying to use Node.js to create a P2P file-sharing application. While downloading a file, it downloads the file block by block.
The block size used in the following code is 1 KB. But there is a problem: when the available socket count MAX_SOCKET_CNT is set to 30, it does not work.
How to run the code:
First run node server.js, then run node client.js; this downloads the file named fav.mp3 to fav-local.mp3.
After downloading, run diff fav.mp3 fav-local.mp3 to check whether the file was downloaded completely.
Could you help me figure out where the problem is?
Any answer or suggestion is welcome. Thanks in advance.
Here is the source code:
server.js
var http = require("http");
var url = require("url");
var fs = require('fs');
function downloadBlock(request, response){
    var urlParts = url.parse(request.url, true);
    var query = urlParts.query;
    if('block_id' in query){
        response.writeHead(200, {"Content-Type": "audio/mpeg"});
        var startHere = parseInt(query["block_id"]);
        var BLOCK_SIZE = 1024;
        var currentPosition = startHere * BLOCK_SIZE;
        var readStream = fs.createReadStream('fav.mp3', {start: currentPosition, end: currentPosition + BLOCK_SIZE - 1});
        readStream.pipe(response);
    }else{
        response.writeHead(200, {"Content-Type": "text/html"});
        response.write("refused");
        response.end();
        console.log("Warning: not a file block download request...");
    }
}
http.createServer(downloadBlock).listen(8801);
console.log("Server has started. please ensure that the fav.mp3 file(size=439355B) is here.");
client.js
var http = require('http');
var fs = require('fs');
var remoteFile = 'fav.mp3';
var fileSize = 439355;
var localFile = 'fav-local.mp3';
var totalBlocks = Math.floor((fileSize + 1023) / 1024);
/**************************************************/
//KEY POINT
var MAX_SOCKET_CNT = totalBlocks; //worked
//var MAX_SOCKET_CNT = 30; //does not work //????because of recursive downloadBlock function????
/*************************************************/
for(var i = 0; i < MAX_SOCKET_CNT; ++i){
    downloadBlock('127.0.0.1', 8801, remoteFile, localFile, i, totalBlocks);
}
function downloadBlock(IP, PORT, remoteFile, localFile, blockID, totalBlocksNum){
    if(blockID >= totalBlocksNum) return;
    var BLOCK_SIZE = 1024;
    var file = fs.createWriteStream(localFile, {start: blockID * BLOCK_SIZE});
    var request = http.get("http://localhost:" + PORT + "/download_block?block_id=" + blockID, function(response) {
        response.pipe(file);
        file.on('finish', function() {
            var callback = function downloadBlockOver(){
                console.log("complete download blockID:" + blockID);
                var nextBlockID = blockID + MAX_SOCKET_CNT;
                if(nextBlockID < totalBlocksNum){
                    downloadBlock(IP, PORT, remoteFile, localFile, nextBlockID, totalBlocksNum); //why does this not work if MAX_SOCKET_CNT=30???
                }
            };
            file.close(callback);
        });
    });
}
Aha.
Finally, I found the solution. You have to create the file first, and then open the write stream with the 'r+' flag before downloading. (The default 'w' flag truncates the file on open, so each of the concurrent write streams was wiping out the blocks the others had already written.)
function touchFile(fileName, fileSize){
    var BLOCK_SIZE = 1024;
    var totalBlocks = Math.floor((fileSize + BLOCK_SIZE - 1) / BLOCK_SIZE);
    var file = fs.createWriteStream(fileName);
    var blockBuffer = new Buffer(BLOCK_SIZE);
    for(var i = 0; i < totalBlocks - 1; ++i){
        file.write(blockBuffer);
    }
    var leftToFill = new Buffer(fileSize - (totalBlocks - 1) * BLOCK_SIZE);
    file.write(leftToFill);
    file.end();
}
Add a call to touchFile(localFile, fileSize); before downloadBlock.
Then change:
var file = fs.createWriteStream(localFile,{start: blockID*BLOCK_SIZE});
to
var file = fs.createWriteStream(localFile,{start: blockID*BLOCK_SIZE,flags:'r+',autoClose: true});
It will work now.
In the end, though, the touchFile approach only worked when MAX_SOCKET_CNT == 3.
The final solution is to use the node module random-access-file: https://github.com/mafintosh/random-access-file
The client.js code:
var http = require('http');
var fs = require('fs');
var randomAccessFile = require('random-access-file');
var remoteFile = 'fav.mp3';
var fileSize = 439355;
//var fileSize = 6000; //363213; //439355;
var localFile = 'fav-local.mp3';
var totalBlocks = Math.floor((fileSize + 1023) / 1024);
/**************************************************/
//KEY POINT
//var MAX_SOCKET_CNT = totalBlocks; //worked
var MAX_SOCKET_CNT = 20; //worked
/*************************************************/
var file = randomAccessFile('fav-local.mp3');
for(var i = 0; i < MAX_SOCKET_CNT; ++i){
    downloadBlock('127.0.0.1', 8801, remoteFile, localFile, i, totalBlocks);
}
file.close();
function downloadBlock(IP, PORT, remoteFile, localFile, blockID, totalBlocksNum){
    if(blockID >= totalBlocksNum) return;
    var BLOCK_SIZE = 1024;
    var chunks = [];
    var request = http.get("http://localhost:" + PORT + "/download_block?block_id=" + blockID, function(response) {
        response.on('data', function (chunk) {
            chunks.push(chunk);
        });
        response.on('error', function (e) {
            console.log('error.......' + e);
        });
        response.on('close', function () {
            console.log('close ....');
        });
        response.on('end', function () {
            var dataToProcess = Buffer.concat(chunks);
            console.log('block data size: ' + dataToProcess.length);
            file.write(blockID * BLOCK_SIZE, dataToProcess, function(err) {
                if(err){
                    console.log("error in write...");
                }else{
                    var nextBlockID = blockID + MAX_SOCKET_CNT;
                    if(nextBlockID < totalBlocksNum){
                        console.log("download next block:" + nextBlockID);
                        downloadBlock(IP, PORT, remoteFile, localFile, nextBlockID, totalBlocksNum);
                    }
                }
            });
        });
    });
}

How to download and unzip a zip file in memory in NodeJs?

I want to download a zip file from the internet and unzip it in memory without saving to a temporary file. How can I do this?
Here is what I tried:
var url = 'http://bdn-ak.bloomberg.com/precanned/Comdty_Calendar_Spread_Option_20120428.txt.zip';
var request = require('request'), fs = require('fs'), zlib = require('zlib');
request.get(url, function(err, res, file) {
    if(err) throw err;
    zlib.unzip(file, function(err, txt) {
        if(err) throw err;
        console.log(txt.toString()); //outputs nothing
    });
});
[EDIT]
As suggested, I tried using the adm-zip library, and I still cannot make this work:
var ZipEntry = require('adm-zip/zipEntry');
request.get(url, function(err, res, zipFile) {
    if(err) throw err;
    var zip = new ZipEntry();
    zip.setCompressedData(new Buffer(zipFile.toString('utf-8')));
    var text = zip.getData();
    console.log(text.toString()); // fails
});
You need a library that can handle buffers. The latest version of adm-zip will do:
npm install adm-zip
My solution uses the http.get method, since it returns Buffer chunks.
Code:
var file_url = 'http://notepad-plus-plus.org/repository/7.x/7.6/npp.7.6.bin.x64.zip';
var AdmZip = require('adm-zip');
var http = require('http');
http.get(file_url, function(res) {
    var data = [], dataLen = 0;
    res.on('data', function(chunk) {
        data.push(chunk);
        dataLen += chunk.length;
    }).on('end', function() {
        var buf = Buffer.alloc(dataLen);
        for (var i = 0, len = data.length, pos = 0; i < len; i++) {
            data[i].copy(buf, pos);
            pos += data[i].length;
        }
        var zip = new AdmZip(buf);
        var zipEntries = zip.getEntries();
        console.log(zipEntries.length);
        for (var i = 0; i < zipEntries.length; i++) {
            if (zipEntries[i].entryName.match(/readme/))
                console.log(zip.readAsText(zipEntries[i]));
        }
    });
});
The idea is to create an array of buffers and concatenate them into a new one at the end. This is due to the fact that buffers cannot be resized.
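(Side note: if your Node version has it, Buffer.concat does the same allocate-and-copy in one call; passing the total length saves it an extra pass over the chunks:)
var buf = Buffer.concat(data, dataLen); // one new Buffer containing all chunks in order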
Update
This is a simpler solution that uses the request module to obtain the response in a buffer, by setting encoding: null in the options. It also follows redirects and resolves http/https automatically.
var file_url = 'https://github.com/mihaifm/linq/releases/download/3.1.1/linq.js-3.1.1.zip';
var AdmZip = require('adm-zip');
var request = require('request');
request.get({url: file_url, encoding: null}, (err, res, body) => {
    var zip = new AdmZip(body);
    var zipEntries = zip.getEntries();
    console.log(zipEntries.length);
    zipEntries.forEach((entry) => {
        if (entry.entryName.match(/readme/i))
            console.log(zip.readAsText(entry));
    });
});
The body of the response is a buffer that can be passed directly to AdmZip, simplifying the whole process.
Sadly you can't pipe the response stream straight into the unzip job the way the node zlib library allows; you have to buffer it and wait for the end of the response. For big files I suggest piping the response to an fs stream instead, otherwise you will fill up your memory in a blink!
I don't completely understand what you are trying to do, but IMHO this is the best approach. You should keep your data in memory only for the time you really need it, and then stream it to the csv parser.
If you want to keep all your data in memory, you can replace the csv parser method fromPath with from, which takes a buffer instead, and in getData return unzipped directly.
You can use adm-zip (as @mihai said) instead of node-zip; just pay attention, because adm-zip is not yet published on npm, so you need:
$ npm install git://github.com/cthackers/adm-zip.git
N.B. Assumption: the zip file contains only one file
var request = require('request'),
    fs = require('fs'),
    csv = require('csv'),
    NodeZip = require('node-zip')
function getData(tmpFolder, url, callback) {
    var tempZipFilePath = tmpFolder + new Date().getTime() + Math.random()
    var tempZipFileStream = fs.createWriteStream(tempZipFilePath)
    request.get({
        url: url,
        encoding: null
    }).on('end', function() {
        fs.readFile(tempZipFilePath, 'base64', function (err, zipContent) {
            var zip = new NodeZip(zipContent, { base64: true })
            Object.keys(zip.files).forEach(function (filename) {
                var tempFilePath = tmpFolder + new Date().getTime() + Math.random()
                var unzipped = zip.files[filename].data
                fs.writeFile(tempFilePath, unzipped, function (err) {
                    callback(err, tempFilePath)
                })
            })
        })
    }).pipe(tempZipFileStream)
}
getData('/tmp/', 'http://bdn-ak.bloomberg.com/precanned/Comdty_Calendar_Spread_Option_20120428.txt.zip', function (err, path) {
    if (err) {
        return console.error('error: %s', err.message)
    }
    var metadata = []
    csv().fromPath(path, {
        delimiter: '|',
        columns: true
    }).transform(function (data){
        // do things with your data
        if (data.NAME[0] === '#') {
            metadata.push(data.NAME)
        } else {
            return data
        }
    }).on('data', function (data, index) {
        console.log('#%d %s', index, JSON.stringify(data, null, ' '))
    }).on('end', function (count) {
        console.log('Metadata: %s', JSON.stringify(metadata, null, ' '))
        console.log('Number of lines: %d', count)
    }).on('error', function (error) {
        console.error('csv parsing error: %s', error.message)
    })
})
If you're on macOS or Linux, you can use the unzip command to unzip from stdin.
In this example I'm reading the zip file from the filesystem into a Buffer object, but it works with a downloaded file as well:
// Get a Buffer with the zip content
var fs = require("fs")
  , zip = fs.readFileSync(__dirname + "/test.zip");
// Now the actual unzipping:
var spawn = require('child_process').spawn
  , fileToExtract = "test.js"
  // -p tells unzip to extract to stdout
  , unzip = spawn("unzip", ["-p", "/dev/stdin", fileToExtract]);
// Write the Buffer to stdin
unzip.stdin.write(zip);
// Handle errors
unzip.stderr.on('data', function (data) {
    console.log("There has been an error: ", data.toString("utf-8"));
});
// Handle the unzipped stdout
unzip.stdout.on('data', function (data) {
    console.log("Unzipped file: ", data.toString("utf-8"));
});
unzip.stdin.end();
Which is actually just the node version of:
cat test.zip | unzip -p /dev/stdin test.js
EDIT: It's worth noting that this will not work if the input zip is too big to be read in one chunk from stdin. If you need to read bigger files and your zip file contains only one file, you can use funzip instead of unzip:
var unzip = spawn("funzip");
If your zip file contains multiple files (and the file you want isn't the first one), I'm afraid you're out of luck: unzip needs to seek in the .zip file, since the zip format is just a container with its index at the end of the archive. In that case you have to save the file temporarily (node-temp comes in handy).
Two days ago the module node-zip was released, which is a wrapper for the JavaScript-only zip implementation JSZip:
var NodeZip = require('node-zip')
  , zip = new NodeZip(zipBuffer.toString("base64"), { base64: true })
  , unzipped = zip.files["your-text-file.txt"].data;
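Here zipBuffer is assumed to be a Buffer holding the raw archive bytes, for example read from disk (or collected from an HTTP response as in the answers above):
var fs = require('fs');
var zipBuffer = fs.readFileSync(__dirname + '/test.zip'); // any Buffer with zip content will do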
