JavaScript (Node) error: Unexpected token function

I am trying the learnyounode workshop, the "make it modular" step to be precise. I wrote the code at the link below and am getting the error in the title. I have checked brackets and parentheses but cannot seem to find where I got it wrong. Any help would be appreciated (I just started with Node).
My code is this, and you can also find it at this link:
http://pastebin.com/G8x2GH7h
module.exports = function (directoryPath, extention, function(error, arrayOfNames) {
    var fs = require('fs');
    var path = require('path');
    var FileNamesArray = [];
    fs.readdir(directoryPath, function (error, list) {
        list.forEach(function (file) {
            if (path.extname(file) === '.' + extention) {
                FileNamesArray.push(file);
            }
        });
    });
    return FileNamesArray;
}) {
    return ArrayOfNames;
}

Your problem is that you are declaring a function inside another function's parameter list.

It should be like this:
exports.functionName = function () {
    // your code
};
And then in your app.js you can use:
var abc = require('path to file name');
abc.functionName();
Example:
// square.js
var PI = Math.PI;
exports.area = function (r) {
    return PI * r * r;
};
exports.circumference = function (r) {
    return 2 * PI * r;
};

// app.js
var square = require('./square.js');
console.log('The area for radius 2 is ' + square.area(2));
Remove the anonymous function from your function's parameter list and it should work for you.

You are missing the name of your function in the export section; it should be this way:
module.exports = {
    getFileNamesList: function (directoryPath, extention, callback) {
        var fs = require('fs');
        var path = require('path');
        fs.readdir(directoryPath, function (error, list) {
            if (error) {
                return callback(error);
            }
            var fileNamesArray = [];
            list.forEach(function (file) {
                if (path.extname(file) === '.' + extention) {
                    fileNamesArray.push(file);
                }
            });
            callback(null, fileNamesArray);
        });
    }
};
Note that fs.readdir is asynchronous, so the result has to be handed back through the callback; returning the array directly would run before readdir has finished.
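For completeness, a minimal sketch of how the exported function might be called (the file name mymodule.js and the use of command-line arguments are my assumptions, not part of the original answer):
// caller.js - hypothetical usage of the module above
var getFileNamesList = require('./mymodule.js').getFileNamesList;

getFileNamesList(process.argv[2], process.argv[3], function (error, list) {
    if (error) {
        return console.error(error);
    }
    list.forEach(function (file) {
        console.log(file);
    });
});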

Related

How to read and write multiple files using Node.js?

In an array I have filenames; I want to first read one file and perform some operation, then store the result in a separate file. Then read the 2nd file, perform the operation again, and save the result in a new 2nd file. The same procedure for all files. Below I have written code to read and write files.
TextReader.js
var fs = require('fs');
const readline = require('readline');
var headerIndex = [];
var isFirstLine = true;
var finalList = [];

module.exports = {
    readTextFile: (filename) => {
        console.log('inside textreader');
        readline.createInterface({
            input: fs.createReadStream(`./s3/${filename}`)
        }).on('line', function (line) {
            console.log(line);
            console.log("-----------------------------");
            if (isFirstLine) {
                headerIndex = line.split('|');
            } else if (!isFirstLine) {
                let rowValues = line.split('|');
                let valueIndex = 0;
                var singlePerson = {};
                headerIndex.forEach(currentval => {
                    singlePerson[currentval] = rowValues[valueIndex];
                    valueIndex++;
                });
                finalList.push(singlePerson);
            }
            isFirstLine = false;
        }).on('close', function () {
            //console.log(finalList);
            var data = '';
            var header = "Employee ID" + '\t' + headerIndex[0] + '\t' + headerIndex[2] + '\t' + headerIndex[1] + '\t' + headerIndex[4]
                + '\t' + headerIndex[3] + '\t' + headerIndex[5] + '\n';
            for (var i = 0; i < finalList.length; i++) {
                function split(name) {
                    var conv = name.split(' ');
                    var result = [conv.slice(0, -1).join(' '), conv.slice(-1)[0]].join(conv.length < 2 ? '' : ',');
                    return result;
                }
                split(finalList[i].UserName);
                data = data + finalList[i].LoginID + '\t' + split(finalList[i].UserName) + '\t' + finalList[i].Email + '\t'
                    + finalList[i].LoginID + '\t' + 'A&G Professional' + '\t' + finalList[i].Title + '\t' + finalList[i].State + '\n';
            }
            var newFilename = filename.substr(0, filename.lastIndexOf("."));
            var alldata = header + data;
            //console.log(alldata)
            fs.appendFile(`./s3/${filename}.xlsx`, alldata, (err) => {
                if (err) throw err;
                console.log('File created');
            });
        });
    }
}
I am calling readTextFile(); from another file.
demo.js
const { readTextFile } = require("./textReader");
var array = ['UserRoleDetails_12102021063206.txt',
    'UserRoleDetails_12102021064706 (1).txt',
    'UserRoleDetails_12102021064706.txt',
    'UserRoleDetails_12102021070206.txt'];
array.forEach(function (currentItem) {
    readTextFile(currentItem);
});
The problem I am facing is that all files are processed at the same time and the data from all files gets stored together.
First, Node.js does not work sequentially here as you expect, and second, array.forEach is not useful for doing the operation sequentially.
You need to use:
const { readTextFile } = require("./textReader");
var array = ['UserRoleDetails_12102021063206.txt',
    'UserRoleDetails_12102021064706 (1).txt',
    'UserRoleDetails_12102021064706.txt',
    'UserRoleDetails_12102021070206.txt'];
for (const element of array) {
    readTextFile(element); // was readTextFile(currentItem), which is undefined in this loop
}
NOTE: readTextFile is not async, so a for...of loop alone does not make the processing sequential; you may need to make readTextFile async (return a Promise) and await each call.
If anything is not clear, just ask.
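A minimal sketch of that Promise-based version, assuming the per-file work from the 'close' handler above moves inside the Promise (the condensed body, the .out extension, and the reuse of array from demo.js are illustrative, not from the original answer):
// textReader.js (sketch): resolve only when the file has been fully processed
const fs = require('fs');
const readline = require('readline');

module.exports = {
    readTextFile: (filename) =>
        new Promise((resolve, reject) => {
            const lines = [];
            readline.createInterface({
                input: fs.createReadStream(`./s3/${filename}`)
            }).on('line', (line) => {
                lines.push(line); // collect (or transform) each line here
            }).on('close', () => {
                // write the result, then signal completion
                fs.appendFile(`./s3/${filename}.out`, lines.join('\n'), (err) =>
                    err ? reject(err) : resolve());
            });
        })
};

// demo.js (sketch): await makes each file finish before the next starts
const { readTextFile } = require('./textReader');
(async () => {
    for (const element of array) {
        await readTextFile(element);
    }
})();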

JavaScript - export and import

I am developing a JavaScript Missing Part evaluation tool for an Excel add-in. For this purpose I first developed a class, Part.js, in Microsoft Visual Studio 2015:
export class Part {
    function Part(number) {
        if (this instanceof Part) {
            this.number = number;
        } else {
            return new Person(firstName)
        }
    }
};
module.exports = Part;
Additionally, I wrote the class MissingPartSummary.js:
(function () {
    "use strict";
    import * as Part from 'Part';
    Office.initialize = function (reason) {
        $(document).ready(function () {
            //app.initialize();
            $('#create-difference-maps').click(createDifferenceMaps)
        });
    };
    function createDifferenceMaps() {
        Excel.run(function (context) {
            var part = new Part("N1022181");
            var dataSheet = context.workbook.worksheets.getActiveWorksheet();
            //dataSheet.getRange("A1:C3").values = 7;
            var rangeAddress = "F11 : J12";
            var range = dataSheet.getRange(rangeAddress);
            //range.load('cellCount');
            range.load('values');
            return context.sync().then(function () {
                //console.log("Number of cells: " + range.cellCount);
                console.log("Text: " + range.values);
            });
            context.sync();
        }).catch(function (error) {
            console.log("Error: " + error);
            if (error instanceof OfficeExtension.Error) {
                console.debug("Debug info:" + JSON.stringify(error.debugInfo));
            }
        });
    }
})();
Unfortunately, if I try to execute this code, MS Visual Studio says there is a syntax error in line 4: the import of Part fails. Why? Both files, Part.js and MissingPartSummary.js, are in the same folder ../App/Home/.
I've already solved the problem myself. The point is that in JavaScript there exist two script types, "text/javascript" and "module". Because I had defined type="text/javascript" in MissingPartSummary.html, it led to the import error. If I change the type to "module", there is no error, but there are problems in other places. So I decided in favour of text/javascript rather than module. After this I changed Part.js:
var p = p || {};
p.Part = function (row) {
    this.bestellstatus = row[0];
    this.bezeichnung = row[1];
    this.teilenummer = row[2];
    this.teilenummer = this.teilenummer.concat(row[3]);
    this.teilenummer = this.teilenummer.concat(row[4]);
    this.teilenummer = this.teilenummer.concat(row[5]);
    this.teilenummer = this.teilenummer.concat(row[6]);
    this.prozesssteurer = row[19];
    this.bemerkung = row[22];
};
p.Part.prototype = (function () {
    var _isValid = function () {
        var valid = false;
        if (typeof this.bestellstatus == 'string') {
            valid = (this.bestellstatus != "Info");
        }
        if (typeof this.teilenummer == 'string') {
            valid = valid && (this.teilenummer != ""); // && rather than bitwise &, so valid stays a boolean
        }
        return valid;
    };
    return {
        isValid: _isValid
    };
}());
In the next step, I added
<script src="Part.js" type="text/javascript"></script>
to MissingPartSummary.html. This way, I can use Part.js in MissingPartSummary.js in the following way:
var rangeAddressA = "D8 : Z30";
var rangeA = dataSheet.getRange(rangeAddressA);
rangeA.load(['values', 'text', 'rowCount', 'columnCount']);
return context.sync().then(function () {
    var text = rangeA.text;
    var numOfRows = rangeA.rowCount; // was misspelled numOrRows, leaving numOfRows undefined below
    for (var i = 1; i < numOfRows; i++) {
        var part = new p.Part(text[i]);
    }
});
Since you're using ES6 modules (export class Part and import * as Part from 'Part';), you don't need to specify the CommonJS export in Part.js, so you shouldn't need the last line, module.exports = Part;.
Furthermore, in MissingPartSummary.js, you'll need to specify the relative path to Part.js in your import statement. Instead of import * as Part from 'Part';, it should be import * as Part from './Part.js';.
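For reference, a minimal sketch of the ES-module pairing this answer describes (the constructor body is simplified from the question's code, and it assumes both scripts are loaded with type="module"):
// Part.js - export the class directly
export class Part {
    constructor(number) {
        this.number = number;
    }
}

// MissingPartSummary.js - import via a relative path
import { Part } from './Part.js';
const part = new Part('N1022181');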

posting to node from angular controller

OK, I do not understand what is going on here: it works locally but not on my server.
I have an Angular controller that posts to my Node server.
Each time I try to run the function that triggers the post, I get:
POST http://www.mysite.co.uk/mm3/back-end/savephotos 404 (Not Found)
I'm honestly lost; I've rewritten the post 5 times and I can't find the problem.
If anyone can see where I've gone wrong, please help.
Angular controller:
mm3\js\controller.js
//all photos've been pushed now sending it to back end
$timeout(function () {
    $http.post('back-end/savephoto', $scope.photosToPhp).then(function (success) {
        $scope.generating = false;
        $scope.generateBtn = 'Generate';
        //creating mock up gallery
        for (var x = 0; x < success.data.photos; x++) {
            var file = '/mm3/tmp/' + success.data.folder + "/out" + x + ".png";
            $scope.gallery.push(file);
        }
        $scope.photosToPhp = [];
    }, function (error) {
    });
}, 800);
Then my Node back end.
UPDATED: I have added a few console logs in my function to see where it's going wrong and how far it gets. I keep getting:
test 1 function started
error saving photo
mm3\back-end\controller.js
app.post('/mm3/back-end/savePhoto', function (req, res) {
    console.log('test 1 function started');
    var folder = Math.random().toString(36).substr(2, 20);
    var photos = req.body;
    var counts = 0;
    var callback = function (counts) {
        if (counts < photos.length) {
            saveBase64(photos[counts], folder, counts, callback);
            console.log('test 2 save photo');
        } else {
            var counts = 0;
            var response = {"folder": folder, "photos": photos.length};
            console.log('test 3 save photo else');
            res.send(response);
        }
    };
    saveBase64(photos[counts], folder, counts, callback);
});

app.post('/mm3/downloadZip', function (req, res) {
    var photos = req.body;
    var out = photos[0];
    var test = out.split('/');
    var loc = test.pop();
    var end = test.join('/');
    console.log('test 3 function Generate zip file');
    console.log(end);
    var outName = '/' + end + '/mm3/MockUp.zip';
    var output = fs.createWriteStream(outName);
    var archive = archiver('zip', {store: true});
    var zip = function (photos, f) {
        for (var t = 0; t < photos.length; t++) {
            var file = 'mockUp' + t + '.jpg';
            var from = '/var/www/html' + photos[t];
            archive.file(from, {name: file});
        }
        f();
    };
    output.on('close', function () {
        var photos = req.body;
        var out = photos[0];
        var test = out.split('/');
        var loc = test.pop();
        var end = test.join('/');
        res.send(end + '/MockUp.zip');
        console.log('archiver has been finalized and the output file descriptor has closed.');
    });
    archive.on('error', function (err) {
        throw err;
    });
    archive.pipe(output);
    zip(photos, f);
    function f() {
        archive.finalize();
    }
});

function saveBase64(photo, folder, counts, callback) {
    var result = photo.split(',')[1];
    var path = '/mm3/tmp/' + folder;
    var filename = path + "/out" + counts + ".png";
    mkdirp(path, function () {
        fs.writeFile(filename, result, 'base64', function (error) {
            if (error) {
                console.log('error saving photo');
            } else {
                console.log('photo saved');
                counts++;
                callback(counts);
            }
        });
    });
}
I think this is the problem:
app.post('back-end/savephoto', function (req, res) {
    // skipped some lines
});
Change it to:
app.post('/back-end/savephoto', function (req, res) {
    // skipped some lines
});
In Angular, the below:
$http.post('back-end/savephoto......
Becomes:
$http.post('/back-end/savephoto.....
In Node, the below:
app.post('back-end/savephoto.....
Becomes:
app.post('/back-end/savephoto....
Then you need to add a console.log under the Node route to see if it even executes; this will narrow it down. Also, you can move the $http.post call outside of the timeout to eliminate the obvious.
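As an aside, a minimal sketch of a matching pair, assuming the site is served from under /mm3 (that prefix is my inference from the 404 URL in the question, not from the answer):
// Angular: a relative URL resolves against the page's base, here /mm3/
$http.post('back-end/savephoto', $scope.photosToPhp);

// Node/Express: the route must match the fully resolved path
app.post('/mm3/back-end/savephoto', function (req, res) {
    console.log('savephoto route hit'); // the console.log suggested above
    res.send({ ok: true });
});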
Let me know how you get on.
Shayan

Node.JS - want to move output off console into log/err files

Can someone give me a hand with converting the following code from console output to file output? I'm struggling with logging and the asynchronous nature of Node. The script works great in a console, but I'd like to pipe the sorted output into individual server sections within a file with STDERR going to another file.
var rexec = require('remote-exec');
var fs = require('fs');
var lineReader = require('line-reader');
var streamBuffers = require('stream-buffers');
var _ = require('lodash');

var conn_options = {
    port: 22,
    username: '*****',
    privateKey: fs.readFileSync('R:/nodeJS/sshkey.priv')
}

// something that dumps out a bunch of data...
var cmds = ['df']
var filename = 'servers.txt';

lineReader.eachLine(filename, function (line, last, cb) {
    var buffer = new streamBuffers.WritableStreamBuffer();
    var my_conn_options = _.clone(conn_options);
    rexec(line, cmds, my_conn_options, function (err) {
        if (err) {
            console.log(line, err);
        } else {
            console.log('>>>> Start: ' + line + '<<<<')
            console.log(buffer.getContentsAsString());
            console.log('>>>> End: ' + line + '<<<<')
        };
    });
    if (last) {
        cb(false); // stop reading
    } else {
        cb();
    }
});
Check this example, that should help:
var fs = require('fs');
var util = require('util');
var logFile = fs.createWriteStream('log.txt', { flags: 'a' });
// Or 'w' to truncate the file every time the process starts.
var logStdout = process.stdout;

console.log = function () {
    logFile.write(util.format.apply(null, arguments) + '\n');
    logStdout.write(util.format.apply(null, arguments) + '\n');
}
console.error = console.log;
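If STDERR should go to its own file rather than being merged into the log, a variation on the same pattern (a sketch; the err.txt name is my assumption):
var errFile = fs.createWriteStream('err.txt', { flags: 'a' });
console.error = function () {
    errFile.write(util.format.apply(null, arguments) + '\n');
    process.stderr.write(util.format.apply(null, arguments) + '\n');
}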

node.js: read a text file into an array. (Each line an item in the array.)

I would like to read a very, very large file into a JavaScript array in node.js.
So, if the file is like this:
first line
two
three
...
...
I would have the array:
['first line','two','three', ... , ... ]
The function would look like this:
var array = load(filename);
Therefore the idea of loading it all as a string and then splitting it is not acceptable.
Synchronous:
var fs = require('fs');
var array = fs.readFileSync('file.txt').toString().split("\n");
for (var i in array) {
    console.log(array[i]);
}
Asynchronous:
var fs = require('fs');
fs.readFile('file.txt', function (err, data) {
    if (err) throw err;
    var array = data.toString().split("\n");
    for (var i in array) {
        console.log(array[i]);
    }
});
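One caveat worth adding (my note, not part of the original answer): if the file ends with a newline, split("\n") leaves a trailing empty string in the array. A small guard:
var array = fs.readFileSync('file.txt').toString().split("\n");
if (array[array.length - 1] === '') {
    array.pop(); // drop the empty element produced by a trailing newline
}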
If you can fit the final data into an array then wouldn't you also be able to fit it in a string and split it, as has been suggested?
In any case if you would like to process the file one line at a time you can also try something like this:
var fs = require('fs');

function readLines(input, func) {
    var remaining = '';
    input.on('data', function (data) {
        remaining += data;
        var index = remaining.indexOf('\n');
        while (index > -1) {
            var line = remaining.substring(0, index);
            remaining = remaining.substring(index + 1);
            func(line);
            index = remaining.indexOf('\n');
        }
    });
    input.on('end', function () {
        if (remaining.length > 0) {
            func(remaining);
        }
    });
}

function func(data) {
    console.log('Line: ' + data);
}

var input = fs.createReadStream('lines.txt');
readLines(input, func);
EDIT: (in response to comment by phopkins) I think (at least in newer versions) substring does not copy data but creates a special SlicedString object (from a quick glance at the v8 source code). In any case here is a modification that avoids the mentioned substring (tested on a file several megabytes worth of "All work and no play makes Jack a dull boy"):
function readLines(input, func) {
    var remaining = '';
    input.on('data', function (data) {
        remaining += data;
        var index = remaining.indexOf('\n');
        var last = 0;
        while (index > -1) {
            var line = remaining.substring(last, index);
            last = index + 1;
            func(line);
            index = remaining.indexOf('\n', last);
        }
        remaining = remaining.substring(last);
    });
    input.on('end', function () {
        if (remaining.length > 0) {
            func(remaining);
        }
    });
}
Using the Node.js readline module:
var fs = require('fs');
var readline = require('readline');

var filename = process.argv[2];
readline.createInterface({
    input: fs.createReadStream(filename),
    terminal: false
}).on('line', function (line) {
    console.log('Line: ' + line);
});
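To end up with the array the question asks for, a small extension of the above (the lines array and the 'close' handler are my additions):
var lines = [];
readline.createInterface({
    input: fs.createReadStream(filename),
    terminal: false
}).on('line', function (line) {
    lines.push(line);
}).on('close', function () {
    console.log(lines); // the whole file, one array element per line
});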
js:
var fs = require('fs');
var array = fs.readFileSync('file.txt', 'utf8').split('\n');
ts:
import * as fs from 'fs';
const array = fs.readFileSync('file.txt', 'utf8').toString().split('\n');
Essentially this will do the job: .replace(/\r\n/g,'\n').split('\n').
This works on Mac, Linux & Windows.
Code Snippets
Synchronous:
const { readFileSync } = require('fs');
const array = readFileSync('file.txt').toString().replace(/\r\n/g, '\n').split('\n');
for (let i of array) {
    console.log(i);
}
Asynchronous:
With the fs.promises API, which provides an alternative set of asynchronous file system methods that return Promise objects rather than using callbacks. (No need to promisify; you can use async-await with this too, available on and after Node.js version 10.0.0.)
const { readFile } = require('fs').promises;
readFile('file.txt').then((data) => {
    // the promise-based readFile takes no callback; handle the data in .then
    const arr = data.toString().replace(/\r\n/g, '\n').split('\n');
    for (let i of arr) {
        console.log(i);
    }
}).catch((err) => {
    console.error(err);
});
More about \r & \n here: \r\n, \r and \n what is the difference between them?
Use readline (documentation). Here's an example reading a CSS file, parsing for icons and writing them to JSON:
var fs = require('fs'); // needed below for fs.writeFile
var results = [];
var rl = require('readline').createInterface({
    input: fs.createReadStream('./assets/stylesheets/_icons.scss')
});

// for every new line, if it matches the regex, add it to an array
// this is ugly regex :)
rl.on('line', function (line) {
    var re = /\.icon-icon.*:/;
    var match;
    if ((match = re.exec(line)) !== null) {
        results.push(match[0].replace(".", '').replace(":", ''));
    }
});

// readline emits a close event when the file is read.
rl.on('close', function () {
    var outputFilename = './icons.json';
    fs.writeFile(outputFilename, JSON.stringify(results, null, 2), function (err) {
        if (err) {
            console.log(err);
        } else {
            console.log("JSON saved to " + outputFilename);
        }
    });
});
file.lines with my JFile package.
Pseudo:
var JFile = require('jfile');
var myF = new JFile("./data.txt");
myF.lines // ["first line", "second line"] ....
Don't forget to install it first:
npm install jfile --save
With a BufferedReader, but the function should be asynchronous:
// BufferedReader comes from the third-party "buffered-reader" npm package
var load = function (file, cb) {
    var lines = [];
    new BufferedReader(file, { encoding: "utf8" })
        .on("error", function (error) {
            cb(error, null);
        })
        .on("line", function (line) {
            lines.push(line);
        })
        .on("end", function () {
            cb(null, lines);
        })
        .read();
};

load("file", function (error, lines) {
    if (error) return console.log(error);
    console.log(lines);
});
To read a big file into an array you can read it line by line or chunk by chunk.
Line by line (refer to my answer here):
var fs = require('fs'),
    es = require('event-stream');
var lines = [];

var s = fs.createReadStream('filepath')
    .pipe(es.split())
    .pipe(es.mapSync(function (line) {
        //pause the readstream
        s.pause();
        lines.push(line);
        s.resume();
    })
    .on('error', function (err) {
        console.log('Error:', err);
    })
    .on('end', function () {
        console.log('Finish reading.');
        console.log(lines);
    })
    );
Chunk by chunk (refer to this article):
var lines = [];
var offset = 0;
var chunkSize = 2048;
var chunkBuffer = Buffer.alloc(chunkSize); // new Buffer() is deprecated
var fp = fs.openSync('filepath', 'r');
var bytesRead = 0;
while (bytesRead = fs.readSync(fp, chunkBuffer, 0, chunkSize, offset)) {
    offset += bytesRead;
    var str = chunkBuffer.slice(0, bytesRead).toString();
    var arr = str.split('\n');
    if (bytesRead === chunkSize) { // was "=", an accidental assignment
        // the last item of arr may not be a full line, leave it to the next chunk
        offset -= arr.pop().length;
    }
    lines.push.apply(lines, arr); // append the lines themselves, not a nested array
}
console.log(lines);
This is a variation on the answer above by #mtomis.
It creates a stream of lines. It emits 'data' and 'end' events, allowing you to handle the end of the stream.
var events = require('events');

var LineStream = function (input) {
    var remaining = '';
    input.on('data', function (data) {
        remaining += data;
        var index = remaining.indexOf('\n');
        var last = 0;
        while (index > -1) {
            var line = remaining.substring(last, index);
            last = index + 1;
            this.emit('data', line);
            index = remaining.indexOf('\n', last);
        }
        remaining = remaining.substring(last);
    }.bind(this));
    input.on('end', function () {
        if (remaining.length > 0) {
            this.emit('data', remaining);
        }
        this.emit('end');
    }.bind(this));
}

LineStream.prototype = new events.EventEmitter;

Use it as a wrapper:
var lineInput = new LineStream(input);
lineInput.on('data', function (line) {
    // handle line
});
lineInput.on('end', function () {
    // wrap it up
});
I just want to add to #finbarr's great answer a little fix in the asynchronous example:
Asynchronous:
var fs = require('fs');
fs.readFile('file.txt', function (err, data) {
    if (err) throw err;
    var array = data.toString().split("\n");
    for (var i in array) {
        console.log(array[i]);
    }
    done();
});
#MadPhysicist, done() is what releases the async call.
Node.js v8 and later have a new feature that converts a normal callback-style function into an async (Promise-returning) one:
util.promisify
It's an awesome feature. Here's an example of parsing 10000 numbers from a txt file into an array, counting inversions using merge sort on the numbers:
// read from txt file
const util = require('util');
const fs = require('fs');
fs.readFileAsync = util.promisify(fs.readFile);

let result = [];
const parseTxt = async (csvFile) => {
    const data = await fs.readFileAsync(csvFile);
    const str = data.toString();
    const lines = str.split('\r\n');
    console.log("lines", lines);
    lines.map(line => {
        if (!line) { return null; }
        result.push(Number(line));
    });
    console.log("result", result);
    return result;
};

parseTxt('./count-inversion.txt').then(() => {
    // mergeSort comes from elsewhere in the original project
    console.log(mergeSort({ arr: result, count: 0 }));
});
I had the same problem, and I solved it with the module line-by-line:
https://www.npmjs.com/package/line-by-line
At least for me it works like a charm, in both synchronous and asynchronous mode.
Also, the problem with lines not terminating in \n can be solved with the option:
{ encoding: 'utf8', skipEmptyLines: false }
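Passed as the second constructor argument, that would look like this (a sketch based on the package's documented options):
var lr = new LineByLineReader('big_file.txt', { encoding: 'utf8', skipEmptyLines: false });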
Synchronous processing of lines:
var LineByLineReader = require('line-by-line'),
    lr = new LineByLineReader('big_file.txt');

lr.on('error', function (err) {
    // 'err' contains error object
});
lr.on('line', function (line) {
    // 'line' contains the current line without the trailing newline character.
});
lr.on('end', function () {
    // All lines are read, file is closed now.
});
Another answer, using an npm package. The nexline package allows one to asynchronously read a file line by line:
"use strict";
import fs from 'fs';
import nexline from 'nexline';

const lines = [];
const reader = nexline({
    input: fs.createReadStream(`path/to/file.ext`)
});

// note: top-level await requires this file to run as an ES module
while (true) {
    const line = await reader.next();
    if (line === null) break; // line is null if we reach the end
    if (line.length === 0) continue; // ignore empty lines
    // Process the line here - below is just an example
    lines.push(line);
}
This approach will work even if your text file is larger than the maximum allowed string length, thereby avoiding the Error: Cannot create a string longer than 0x1fffffe8 characters error.
To put each line as an item inside an array, a new function was added in Node.js v18.11.0 to read files line by line:
filehandle.readLines([options])
This is how you use it with a text file you want to read, putting each line into an array:
import { open } from 'node:fs/promises';

const arr = [];
myFileReader(); // was myFilereader(), which didn't match the function name below

async function myFileReader() {
    const file = await open('./TextFileName.txt');
    for await (const line of file.readLines()) {
        arr.push(line);
    }
    console.log(arr);
}
To understand more, read the Node.js documentation; here is the link for the file system readLines():
https://nodejs.org/api/fs.html#filehandlereadlinesoptions
