OK, I do not understand what is going on here; it works locally but not on my server.
I have an Angular controller that posts to my Node server.
Each time I try to run the function that triggers the post, I get:
POST http://www.mysite.co.uk/mm3/back-end/savephotos 404 (Not Found)
I'm honestly lost; I've rewritten the post five times and I can't find the problem.
If anyone can see where I've gone wrong, please help.
Angular controller:
mm3\js\controller.js
// all photos have been pushed; now send them to the back end
$timeout(function () {
    $http.post('back-end/savephoto', $scope.photosToPhp).then(function (success) {
        $scope.generating = false;
        $scope.generateBtn = 'Generate';
        // creating the mock-up gallery
        for (var x = 0; x < success.data.photos; x++) {
            var file = '/mm3/tmp/' + success.data.folder + "/out" + x + ".png";
            $scope.gallery.push(file);
        }
        $scope.photosToPhp = [];
    }, function (error) {
    });
}, 800);
Then my Node back end.
UPDATED:
I have added a few console logs to the function to see where it is going wrong and how far it gets. I keep getting:
test 1 function started
error saving photo
mm3\back-end\controller.js
app.post('/mm3/back-end/savePhoto', function (req, res) {
    console.log('test 1 function started');
    var folder = Math.random().toString(36).substr(2, 20);
    var photos = req.body;
    var counts = 0;
    var callback = function (counts) {
        if (counts < photos.length) {
            saveBase64(photos[counts], folder, counts, callback);
            console.log('test 2 save photo');
        } else {
            var counts = 0;
            var response = { "folder": folder, "photos": photos.length };
            console.log('test 3 save photo else');
            res.send(response);
        }
    };
    saveBase64(photos[counts], folder, counts, callback);
});
app.post('/mm3/downloadZip', function (req, res) {
    var photos = req.body;
    var out = photos[0];
    var test = out.split('/');
    var loc = test.pop();
    var end = test.join('/');
    console.log('test 3 function Generate zip file');
    console.log(end);
    var outName = '/' + end + '/mm3/MockUp.zip';
    var output = fs.createWriteStream(outName);
    var archive = archiver('zip', { store: true });
    var zip = function (photos, f) {
        for (var t = 0; t < photos.length; t++) {
            var file = 'mockUp' + t + '.jpg';
            var from = '/var/www/html' + photos[t];
            archive.file(from, { name: file });
        }
        f();
    };
    output.on('close', function () {
        var photos = req.body;
        var out = photos[0];
        var test = out.split('/');
        var loc = test.pop();
        var end = test.join('/');
        res.send(end + '/MockUp.zip');
        console.log('archiver has been finalized and the output file descriptor has closed.');
    });
    archive.on('error', function (err) {
        throw err;
    });
    archive.pipe(output);
    zip(photos, f);
    function f() {
        archive.finalize();
    }
});
function saveBase64(photo, folder, counts, callback) {
    var result = photo.split(',')[1];
    var path = '/mm3/tmp/' + folder;
    var filename = path + "/out" + counts + ".png";
    mkdirp(path, function () {
        fs.writeFile(filename, result, 'base64', function (error) {
            if (error) {
                console.log('error saving photo');
            } else {
                console.log('photo saved');
                counts++;
                callback(counts);
            }
        });
    });
}
I think this is the problem:
app.post('back-end/savephoto', function (req, res) {
    // skipped some lines
});
change it to
app.post('/back-end/savephoto', function (req, res) {
    // skipped some lines
});
In Angular, the below:
$http.post('back-end/savephoto......
Becomes:
$http.post('/back-end/savephoto.....
In Node, the below:
app.post('back-end/savephoto.....
Becomes:
app.post('/back-end/savephoto....
Then, add a console.log at the top of the Node route to see whether it is even executed; this will narrow it down. Also, you can move the $http.post call outside of the $timeout to eliminate the obvious.
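For example (a sketch with hypothetical paths): spell the URL identically on both sides, starting from the site root, so the client request and the Express route line up:
// Angular (client)
$http.post('/mm3/back-end/savephoto', $scope.photosToPhp);

// Node/Express (server) -- the path must match the client URL exactly
app.post('/mm3/back-end/savephoto', function (req, res) {
    console.log('route hit'); // if this never logs, the paths still don't match
    res.sendStatus(200);
});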
Let me know how you get on.
Shayan
Related
I am trying to build a file of JSON data from repeated calls to a REST API. The final file to be written is the sum of the data received from all the calls. At present the file is written with the contents of the first call, then overwritten by the contents of the first + second call (see the console output below the code).
As I have to make many calls, once the code is working I would like to write the file only after the requests have finished and the JSON string has been built. Does anyone know how I would go about doing this? Maybe with a callback (which I still don't have the hang of) that fires once the requests have finished or the JSON string has finished being built?
"use strict";
const fs = require('fs');
const request = require('request');
var parse = require('csv-parse');
const path = "../path tocsv.csv";
const pathJSON = "../pathtoJSON.json";
var shapes = "https://url";
var options = {
url: '',
method: 'GET',
accept: "application/json",
json: true,
};
var csvData = [];
var jsonData = "[";
fs.createReadStream(path)
.pipe(parse({delimiter: ','}))
.on('data', function(data) {
csvData.push(data[1]);
})
.on('end',function() {
var start = Date.now();
var records = csvData.length //2212 objects
console.log(records);
var dataLength = 2 //set low at moment
for (var i = 0; i < dataLength; i += 1) {
var url = shapes + csvData[i];
options.url = url; //set url query
request(options, function(error, response, body) {
var time = Date.now() - start;
var s = JSON.stringify(body.response);
console.log( '\n' + (Buffer.byteLength(s)/1000).toFixed(2)+
" kilobytes downloaded in: " + (time/1000) + " sec");
console.log(i)
buildJSON(s);
});
}
function buildJSON(s) {
var newStr = s.substring(1, s .length-1);
jsonData += newStr + ',';
writeFile(jsonData);
}
function writeFile(jsonData) {
fs.writeFile(pathJSON, jsonData, function(err) {
if (err) {
return console.log(err);
} else {
console.log("file complete")
}
});
}
});
128.13 kilobytes downloaded in: 2.796 sec
2
file complete
256.21 kilobytes downloaded in: 3.167 sec
2
file complete
Perhaps writing to the file after all requests are complete will help. In the current code, the writeFile function is called each time a request completes, which overwrites the file each time.
A quick way to fix this is to count requests (and failures) and write the file only after all the requests are complete.
"use strict";
const fs = require('fs');
const request = require('request');
var parse = require('csv-parse');
const path = "../path tocsv.csv";
const pathJSON = "../pathtoJSON.json";
var shapes = "https://url";
var options = {
url: '',
method: 'GET',
accept: "application/json",
json: true,
};
var csvData = [];
var jsonData = "[";
fs.createReadStream(path)
.pipe(parse({
delimiter: ','
}))
.on('data', function (data) {
csvData.push(data[1]);
})
.on('end', function () {
var start = Date.now();
var records = csvData.length //2212 objects
console.log(records);
var dataLength = 2 //set low at moment
var jsonsDownloaded = 0; // Counter to track complete JSON requests
var jsonsFailed = 0; // Counter to handle failed JSON requests
for (var i = 0; i < dataLength; i += 1) {
var url = shapes + csvData[i];
options.url = url; //set url query
request(options, function (error, response, body) {
if(error){
jsonsFailed++;
writeFile(jsonData);
return;
}
jsonsDownloaded++;
var time = Date.now() - start;
var s = JSON.stringify(body.response);
console.log('\n' + (Buffer.byteLength(s) / 1000).toFixed(2) +
" kilobytes downloaded in: " + (time / 1000) + " sec");
console.log(i)
buildJSON(s);
});
}
function buildJSON(s) {
var newStr = s.substring(1, s.length - 1);
jsonData += newStr + ',';
writeFile(jsonData);
}
function writeFile(jsonData) {
if(dataLength - (jsonsDownloaded + jsonsFailed) > 0){
return;
}
fs.writeFile(pathJSON, jsonData, function (err) {
if (err) {
return console.log(err);
} else {
console.log("file complete")
}
});
}
});
Note:
Firing requests in quick succession (e.g. 2000 requests in a for loop) does not, in my experience, work well; try batching them, as in the sketch below. Also, doing it this way does not guarantee order (if that is important in your use case).
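A rough sketch of batching (fetchOne is a hypothetical helper that takes a URL and returns a promise for its parsed body):
async function fetchInBatches(urls, batchSize) {
    const results = [];
    for (let i = 0; i < urls.length; i += batchSize) {
        // fire one batch, wait for it to settle, then move on to the next
        const batch = urls.slice(i, i + batchSize).map(fetchOne);
        results.push(...await Promise.all(batch)); // order is preserved within each batch
    }
    return results;
}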
An alternative would be to open your file in append mode, by passing an options object with flag set to 'a' to your fs.writeFile call. (In append mode you would write only the newly received chunk, e.g. newStr, rather than the accumulated jsonData; otherwise the file will contain duplicates.)
fs.writeFile(pathJSON, jsonData, {
    flag: 'a'
}, function (err) {
    if (err) {
        return console.log(err);
    }
});
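Whichever approach you take, note that jsonData starts as "[" and every chunk appends a trailing comma, so the result still is not valid JSON until it is closed. A small finishing step (a sketch, run once before the final write) could be:
function finalizeJSON(jsonData) {
    // strip the trailing comma and close the array
    return jsonData.replace(/,\s*$/, '') + ']';
}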
References:
fs.writeFile Docs
File system flags
I have a tab-separated file with thousands of rows of data. How can I use Node.js to read the file line by line, parse each line, create an object, and insert the objects into MongoDB?
I am just learning Node and Mongo, and I come from a different background, so how can this be done such that the Mongo DB ends up populated with the proper data?
I searched the net but could not find a complete solution.
Thanks.
I had an issue with the answer by Juvenik: my problem was that the database would not be populated by the time readline had completed, because the lines were being read synchronously while the DB insertion was asynchronous.
Instead, I found a simpler solution with the line-reader package: it reads a line and waits for a callback before continuing.
var MongoClient = require('mongodb').MongoClient
var dbName = 'yourDbName'
var url = 'mongodb://localhost:27017/' + dbName
var collectionName = 'yourCollectionName'
var filename = 'yourFileName.txt'
var printLine = 1000
MongoClient.connect(url, function(err, db) {
    if (err) {
        console.error('Problem connecting to database')
    } else {
        console.log('Connected correctly to server.')
        var lineReader = require('line-reader')
        var collection = db.collection(collectionName)
        var lineNum = -1
        var headers = []
        lineReader.eachLine(filename, function(line, last, cb) {
            lineNum++
            try {
                var split = line.split('\t')
                var object = {}
                if (lineNum > 0) {
                    for (var i = 0; i < split.length; i += 1) {
                        object[headers[i]] = split[i]
                    }
                    collection.insert(object, function (insertErr, insertObj) {
                        if (insertErr) console.error(insertErr)
                        if (lineNum % printLine === 0) console.log('Line ' + lineNum)
                        if (last) {
                            console.log('Done with ' + filename + ' (' + lineNum + ' records)')
                            process.exit(0)
                        } else {
                            cb()
                        }
                    })
                } else {
                    headers = line.split('\t')
                    cb()
                }
            } catch (lineError) {
                console.error(lineError)
            }
        })
    }
})
I came across a similar problem, and this approach worked for me. Have a look; it might be helpful.
var mongoDb = require('mongodb');
var mongoClient = mongoDb.MongoClient;
var dbname = 'YOUR_DB_NAME';
var collectionName = 'YOUR_COLLECTION_NAME';
var url = 'mongodb://localhost:27017/' + dbname;
var filename = 'FIle_Name.txt';
console.log('***************Process started');
mongoClient.connect(url, function(err, db) {
    if (err) {
        console.log('error on connection ' + err);
    } else {
        console.log('***************Successfully connected to mongodb');
        var collection = db.collection(collectionName);
        var fs = require('fs');
        var readline = require('readline');
        var stream = require('stream');
        var instream = fs.createReadStream(filename);
        var outstream = new stream;
        var rl = readline.createInterface(instream, outstream);
        console.log('***************Parsing, please wait ...');
        rl.on('line', function(line) {
            try {
                var arr = line.split('\t');
                var object = {};
                // Parse them here
                // Example
                object['name'] = arr[0]; // Just an example
                var res = collection.insert(object);
            } catch (err) {
                console.log(err);
            }
        });
        rl.on('close', function() {
            db.close();
            console.log('***************completed');
        });
    }
});
I am a learner too; if someone can make this better, that would be good.
Here is a more performant (inserting batches of objects) and updated (using async and the latest mongo driver) version of frank-0's answer:
const lineReader = require('line-reader');
// `collection` is passed in here and assumed to be an already-open MongoDB
// collection handle (the original snippet referenced it without defining it)
async function readFileAndInsertInMongo(file, collection) {
    let total = 0;
    return new Promise((resolve, reject) => {
        let buffer = [];
        lineReader.eachLine(file, (line, last, cb) => {
            // prepare your object based on the line content
            let insertObject = {'some_content': 'some_value'};
            buffer.push(insertObject);
            total++;
            // flush the buffer every 10000 lines, and on the last line
            if (total % 10000 === 0 || last) {
                collection.insertMany(buffer, function(err, res) {
                    if (err) return reject(err);
                    buffer = [];
                    if (last) {
                        resolve(res);
                    } else {
                        return cb();
                    }
                });
            } else {
                return cb();
            }
        });
    });
}
This really is the best solution I have found for parsing huge files and inserting them into the database without exhausting Node's memory. Hope this can help ;)
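For completeness, a hypothetical usage sketch (placeholder names, assuming the 3.x driver's connect callback):
const { MongoClient } = require('mongodb');

MongoClient.connect('mongodb://localhost:27017', function (err, client) {
    if (err) throw err;
    const collection = client.db('yourDbName').collection('yourCollectionName');
    readFileAndInsertInMongo('yourFileName.txt', collection)
        .then(function () { console.log('file complete'); client.close(); })
        .catch(console.error);
});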
For my project I have a server.js that calls a helper function in place-search.js, as shown below.
var express = require('express');
var server = express.Router();
var placeSearch = require("./helpers/place-search");
var obj = "hello";
server.use(function(req, res, next) {
    console.log(req.method, req.url);
    next();
});
server.post('/', function(req, res) {
    /* get the object passed by the client's post request */
    obj = req.body;
    //console.log("Obj: " + obj);
    /* send the confirmation back to the client */
    res.status(200).send("body");
    placeSearch.placeSearch(obj);
});
module.exports.server = server;
Here is my place-search.js:
var config = require("./config.js");
var Promise = require('bluebird');
var DistanceMatrix = require("./distance-matrix.js");
var GooglePlaces = Promise.promisifyAll(require("googleplaces"));
var googlePlaces = new GooglePlaces(config.apiKey, config.outputFormat);
var extract = require('./extract.js');
var combination = require('./combination_ver2.js');
var permutation = require('./permutation.js');
function placeSearch(obj) {
    console.log("Inside place search!");
    /**
     * Place search - https://developers.google.com/places/documentation/#PlaceSearchRequests
     */
    var arr = [];
    var count = 0;
    var rad = obj["radius"];
    console.log("radius: " + rad);
    var loc = obj["location"];
    console.log("Location: " + loc);
    var mode = obj["mode"];
    var params = obj["params"];
    /* client's keywords */
    var ar = [];
    for (var i = 0; i < params; i++) {
        arr[i] = obj[i];
        console.log(arr[i]);
        var param = {
            location: loc,
            radius: rad,
            mode: mode,
            keyword: arr[i]
        };
        ar.push(param);
    }
    console.log("before promises");
    var promises = ar.map(function(name) {
        return googlePlaces.placeSearch(name, function(response) {
            arr.push(response);
            console.log(response);
            console.log(count++);
            // If all responses have been returned,
            // find combos and pass to distance-matrix
            if (count == ar.length) {
                var Matrix = new Array();
                var result = new Array();
                // to extract only lat and lng from arr.results
                //Matrix = extract.extract(arr);
                result = combination.combination(arr);
                // NOW RESULT IS THE ARRAY OF COMBINATIONS
                // OF latlng PAIRS; PASS IT TO THE FRONT END
                /*result.forEach(function(combo, index) {
                    console.log("combo" + combo)
                    DistanceMatrix.distanceMatrix(mode, combo, result.length);
                });*/
                // IF YOU WANT TO SEE PERMUTATION
                //permutation.permutation(result);
                console.log("combination results: " + result);
            }
        })
    });
}
module.exports.placeSearch = placeSearch;
My problem is that I do not know how to pass the result variable back to server.js so that I can use it as input for another helper function. I cannot for the life of me figure out how to do this. Any help at all would be greatly appreciated.
Well, I don't see your placeSearch function returning anything at all right now, nor doing any kind of callback. Your placeSearch function should expose a callback parameter, which then gets called once you have the answer you want to send back.
Your server file will then take action on that callback. Abbreviating your code, it'd look something like this:
server.post('/', function(req, res) {
    /* get the object passed by the client's post request */
    obj = req.body;
    //console.log("Obj: " + obj);
    placeSearch.placeSearch(obj, function(error, data) {
        /* send the data back to the client */
        res.status(200).send(data);
    });
});
To support that, your placeSearch function will have to call its callback when appropriate:
function placeSearch(obj, callback) {
    /* all the stuff you do to assemble your data */
    // if there is some error:
    if (err) return callback(err);
    // when you have all your data available and no error has occurred:
    return callback(null, data);
}
Something else you might notice is that your ar.map won't work as you seem to expect: ar.map is a synchronous function, and you're calling async code inside, so it's not going to work the way you think. It's a bit long for this post, but you should look at the async library from npm to manage an array of asynchronous requests and collect one combined result; there is a rough sketch below.
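For illustration, a sketch with async.map (this keeps the single-argument response callback your googleplaces code already uses):
var async = require('async');

async.map(ar, function (param, done) {
    googlePlaces.placeSearch(param, function (response) {
        done(null, response); // pass an Error as the first argument on failure
    });
}, function (err, responses) {
    // responses holds one entry per element of ar, in order;
    // combination.combination(responses) could run here
});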
Using a callback, your code looks like this:
function placeSearch(obj, callback) {
    console.log("Inside place search!");
    /**
     * Place search - https://developers.google.com/places/documentation/#PlaceSearchRequests
     */
    var arr = [];
    var count = 0;
    var rad = obj["radius"];
    console.log("radius: " + rad);
    var loc = obj["location"];
    console.log("Location: " + loc);
    var mode = obj["mode"];
    var params = obj["params"];
    /* client's keywords */
    var ar = [];
    for (var i = 0; i < params; i++) {
        arr[i] = obj[i];
        console.log(arr[i]);
        var param = {
            location: loc,
            radius: rad,
            mode: mode,
            keyword: arr[i]
        };
        ar.push(param);
    }
    console.log("before promises");
    var promises = ar.map(function(name) {
        return googlePlaces.placeSearch(name, function(response) {
            arr.push(response);
            console.log(response);
            console.log(count++);
            // If all responses have been returned,
            // find combos and pass to distance-matrix
            if (count == ar.length) {
                var Matrix = new Array();
                var result = new Array();
                // to extract only lat and lng from arr.results
                //Matrix = extract.extract(arr);
                result = combination.combination(arr);
                /*result.forEach(function(combo, index) {
                    console.log("combo" + combo)
                    DistanceMatrix.distanceMatrix(mode, combo, result.length);
                });*/
                // IF YOU WANT TO SEE PERMUTATION
                //permutation.permutation(result);
                console.log("combination results: " + result);
                callback(null, result);
            }
        })
    });
}
In server.js:
server.post('/', function(req, res) {
    /* get the object passed by the client's post request */
    obj = req.body;
    //console.log("Obj: " + obj);
    /* send the confirmation back to the client */
    res.status(200).send("body");
    placeSearch.placeSearch(obj, function(err, result) {
        if (!err) {
            console.log(result);
        }
    });
});
It seems like you're having trouble with the async operation. You'll want to return the promise from your place-search module. You'll also need to convert the callbacks from placeSearch into a promise.
EDIT: updated since googlePlaces.placeSearch doesn't return a promise
Inside placeSearch:
function placeSearch(obj) {
    //...
    var promises = ar.map(function(name) {
        var placeDefer = Q.defer();
        googlePlaces.placeSearch(name, function(response) {
            placeDefer.resolve(response); // or placeDefer.reject if a failure occurs
        });
        return placeDefer.promise;
    });
    return promises;
}
and in your route:
// I'm going to just assume Q promise library here
var Q = require("q");
server.post('/', function(req, res) {
    /* get the object passed by the client's post request */
    obj = req.body;
    //console.log("Obj: " + obj);
    /* send the confirmation back to the client */
    res.status(200).send("body");
    Q.all(placeSearch.placeSearch(obj))
        .spread(function() {
            var places = Array.prototype.slice.call(arguments);
            // possibly call res.status(200).send("body"); here?
            // only if you're trying to use the places in your response
        });
});
I'm trying to import a file using JavaScript, and every time I run it, it flashes the same error. I already tried re-downloading the file system module, and I just downloaded RequireJS.
var importFile = function() {
    console.log("started import");
    var fs = require(['fs']);
    console.log("required!");
    fs.exists('articles.txt', function(exists) {
        if (exists) console.log("found file");
    });
    fs.readFile('articles.txt', function(err, data) {
        if (err) {
            throw err;
            console.log("error thrown");
        }
        var rawFileData = data.toString().split("\n");
        for (i in rawFileData) {
            console.log(articles[i]);
        }
    });
    for (var i = 0; i < rawFileData.length; i += 4) {
        var title = rawDataFiles[i];
        var tags = rawDataFiles[i + 1].split(",");
        var content = rawDataFiles[i + 2];
        var date = rawDataFiles[i + 3];
        articles.append(new Article(title, tags, content, date));
    }
}
I solved this using jQuery:
var importFile = function() {
    console.log("started import");
    var file = "/path/to/file.txt";
    console.log(file);
    $.get(file, function(data) {
        var lines = data.split("\n");
        var id = 0;
        $.each(lines, function(n, elem) {
            parseLine(elem, id);
            id++;
        });
        console.log("done parsing.");
    }, "text");
    console.log("done getting");
};
though it never prints "done getting". I don't know why.
If fs hasn't loaded before and require is really requirejs, you have to have it in a callback.
require(['fs'], function(fs) { ... });
I suspect you are using node and its node's require. You should just drop the square brackets.
var fs = require('fs');
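For reference, a minimal sketch of the same flow with plain Node (no RequireJS). Note that the parsing has to happen inside the readFile callback, because the data only exists once the callback fires:
var fs = require('fs');

fs.readFile('articles.txt', 'utf8', function (err, data) {
    if (err) throw err;
    var rawFileData = data.split('\n');
    // parse rawFileData here, inside the callback
});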
I'm pulling text from N URLs. First I get the N URLs in linksOnPage, and then I run a doOnPage function to get the text from each URL. When I run the code, only one of the N URLs gets processed through the function. I assume this is because the processing function runs asynchronously. How do I stack these up in a queue and run them all, and what is a better way to do this?
Here's the main JS code:
var nodeio, linksOnPage, lyricsFromLink, db;
nodeio = require('node.io');
db = require('./db');
db.loadDB();
var loadSong = function(artist, title, lyrics) {
    console.log("loadSong being called");
    var newSongObj = {};
    newSongObj['artist'] = artist;
    newSongObj['title'] = title;
    newSongObj['lyrics'] = lyrics;
    // store the lyrics in a mongo table
    var newSong = new db.Song(newSongObj);
    newSong.save(function(err) {
        if (err) {
            throw err;
        } else {
            console.log("saved with no errors!");
        }
    });
};
// generic utility for getting links on a page and running a function on each one
exports.linksOnPage = function(pageObj, linkSelector, doOnPage, contentSelector) {
    nodeio.scrape(function() {
        this.getHtml(pageObj.pageUrl, function(err, $) {
            var links = [];
            var i = 0;
            $(linkSelector).each(function(link) {
                var fullLink = pageObj.rootUrl + link.attribs.href;
                links.push(fullLink);
                // run a function on each link
                console.log('getting lyrics for song: ', i);
                doOnPage(pageObj.artist, fullLink, contentSelector);
                i = i + 1;
            });
            //this.emit(links);
        });
    });
}
// get the lyrics for a specific song
exports.lyricsFromLink = function(artist, pageUrl, lyricsSelector) {
    nodeio.scrape(function() {
        this.getHtml(pageUrl, function(err, $) {
            var lyrics = "";
            console.log('before each statement');
            $(lyricsSelector).each(function(lyricParagraph) {
                lyrics = lyrics + " " + lyricParagraph.text;
            });
            console.log('after each statement');
            loadSong(artist, pageUrl, lyrics);
            this.emit(lyrics);
        });
    });
}
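One way to queue the work would be the async library's queue (a sketch only; it assumes lyricsFromLink is changed to accept a completion callback as its last argument):
var async = require('async');

// process two links at a time; the concurrency value is an arbitrary choice
var queue = async.queue(function (task, done) {
    exports.lyricsFromLink(task.artist, task.url, task.selector, done);
}, 2);

// inside linksOnPage, instead of calling doOnPage directly:
// queue.push({ artist: pageObj.artist, url: fullLink, selector: contentSelector });

queue.drain = function () {
    console.log('all songs processed');
};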