const fs = require('fs');
const express = require('express');
const app = express();
app.use(express.json());
app.get('/submit', (req, res) => {
let Com_Title = req.query.ComTitle;
let Com_Text = req.query.ComText;
let data = {
Title: Com_Title,
Text: Com_Text,
}
console.log(data);
let jsonData = JSON.stringify(data);
// fs.writeFileSync('notes.json', dataJSON)
// let MyData = JSON.parse(jsonData);
fs.appendFileSync('ComplaintFile.json', jsonData, err => {
if (err) {
console.log(err);
res.sendStatus(404).end();
}
console.log('Data Added');
res.send('Added');
})
});
let port = 8080;
app.listen(port, () => {
console.log("Listening to 8080");
})
{
"Title": "Canteen Issues",
"Text": "A paragraph"
}{
"Title": "Canteen ",
"Text": "Topic sentences are similar "
}
I have issues on saving data in JSON. Actually is saving the data on JSON file but it have comma issues like if I add one or more data that next data storing without the comma.
Anyone faced this issue?
Using appendfilesync is not appropriate for storing JSON data like that because JSON is structured data and appendfilesync will just add data to the end of the file. You need to:
Read the data that is already in the file.
Parse the data as JSON.
Add the new data to the JSON object/array.
Stringify the JSON data.
Save the data to the file.
In this example the initial file would look like this:
[{"Title":"Title","Text":"Content"}]
So, it is an array. And you can then add objects to that array.
var fs = require("fs");
fs.readFile("data.json", function(err, buf) {
let dataStr = buf.toString();
let dataObj = JSON.parse(dataStr);
let newData = {Title: "Title", Text: "Content"};
dataObj.push(newData); // assuming that this is an array
let newDataStr = JSON.stringify(dataObj);
fs.writeFile("data.json", newDataStr, (err) => {
if (err) console.log(err);
console.log("Successfully Written to File.");
});
});
You are adding data correctly, you just need to get data from file if exist then append new data with previous data.
Related
I get a Syntax Error: "Unexpected end of JSON input at JSON.parse () at IncomingMessage.". I don't know why I am getting that error.
Here is my code:
const express = require("express");
const https = require("http");
const app = express();
app.get("/", function(req, res){
const url = "http://api.openweathermap.org/data/2.5/forecast?q=Accra&id=524901&appid=18a15324259a5d6f30f8e6610f8a4310";
https.get(url, function(response){
response.on("data", function(data){
const weatherData = JSON.parse(data);
console.log(weatherData);
// console.log(data);
});
});
});
Here you are trying to get the data before confirming wether it's completely received or not.
instead of reading at the on('data'), wait for on('end')
something like this :
body = [];
req.on('data', (chunk) => {
// Storing the chunk data
body.push(chunk);
});
req.on('end', () => {
// combining and parsing the chunk data to normal data
const parsedBody = Buffer.concat(body).toString();
const message = parsedBody.split('=')[1];
// Now you can parse the **message** to find the data
console.log(JSON.parse(message))
});
This will collect and parse the data once after collecting all the chunks.
I'm using n-readline npm package to read text files from the server.
I have a use case, where I had to read at least 300MB of data from those text files and will be using Redis or Mongoose to save around 40mb of data to be transferred to the browser where I use vue to process that or load data whenever needed.
Now I'm running into a problem where I'm unable to get the results when I call that function for the first time, if I call that again, the function provides me with results.
const express = require('express');
const multer = require('multer');
const fs = require('fs');
const NreadLine = require('n-readline');
const app = express();
var asyncres = [];
var lines = []
const upload = multer({ storage });
async function nreadline(){
let linenumbers = [];
var rl = new NreadLine({
filepath: './uploads/6789765/serverout1.txt',
limit: 50
});
await rl.start();
await rl.on('line', (line, linenumer) => {
linenumbers.push(linenumer);
});
await rl.on('end', () => {
console.log('done');
asyncres = [...linenumbers];
});
//This is where I'm expecting the results to be returned
return asyncres;
}
function getresults() {
let ress = nreadline();
console.log(ress);
return ress;
}
// Express APIs defined below
app.post('/upload', upload.single('file'), (req, res) => {
res.json({ "status": "success", file: req.file });
});
// API for multiple form upload
app.post('/multiple', upload.array('files'), (req, res) => {
// ticket = req.body.ticket
res.json({ "status": "success", files: req.files });
});
//Get Request for reading the files
app.get('/reader', (req, res) => {
let results = getresults();
res.json({ "status": results });
});
app.listen(port, () => {
console.log("running on 3344 port");
});
Please Help. I am not sure what I'm doing wrong.
To answer my own question, I found another npm package, that kinda solves my problem.
The package name is n-readlines and below the code that I modified.
const lineByLine = require('n-readlines');
function nreadline(filePath){
let logs = [];
try{
let rl = new lineByLine('./uploads/6789765/serverout0.txt');
let line;
while(line=rl.next()){
logs.push(line.toString('utf8'));
}
return logs;
}catch(err){
}
So the file uploaded is an excel file that sheetJS needs to read, otherwise it will show as {}.
app.post('/sendExcel', function(req, res) {
let data = req.body;
var workbook = sheetJS.read(data, {type: 'buffer'});
console.log(workbook.Sheets['Sheet1); //prints... "{ A1: { t: 's', v: '[object Object]' }, '!ref': 'A1' }"
let excel = workbook.Sheets['Sheet1']['A1']['v'][0]; //prints... "["
So I've tried various things including changing the type client side as I had problems with it being of type buffer. So now it works partially, but I still can't access the data in the sheet.
As an example, I used the file path instead here, and it's shown to work as normal.
app.get('/excel', function(err, res, data) {
var wb = sheetJS.readFile("data.xlsx");
let excel = wb.Sheets['Sheet1']['A1']['v'];
console.log(excel); //this prints "vehicle", which is what is supposed to happen, not "[".
res.send(excel)
});
I am supposed to get the excel data from the form upload. That's the issue. It is is now successful when sending to the db, but will not access the whole data. I believe I need to change it back to an array.
You can use:
var fileReader = new FileReader();
fileReader.readAsArrayBuffer(workbook);
But this will not run in app.js
Here is my other answer with client-side and server-side. It might be helpful to others.
Javascript Read Excel file on server with SheetJS
Don't use the file reader. Append the excel sheet to the form in the body normally.
Client side:
let excelInput = document.getElementById("fileToUpload");
//excelInput: this html element allows you to upload the excel sheet to it
let excelFile = excelInput.files[0];
let form = new FormData();
form.append("excel", excelFile);
fetch('/sendExcel', {method: "POST", body: form})
.then((data) => {
console.log('Success:', data);
})
.catch((error) => {
console.error('Error:', error);
});
Then use formidable server side.
Server side:
const sheetJS = require('xlsx');
const formidable = require('formidable');
app.post('/excel', function(req, res) {
let data = req.body;
const form = formidable({ multiples: true });
form.parse(req, (err, fields, files, next) => {
if (err) {
next(err);
return;
}
var f = files[Object.keys(files)[0]];
var workbook = sheetJS.readFile(f.path);
res.send(workbook);
});
});
So formidable has to be used otherwise it won't work. Then you can use sheetJS.readFile instead of sheetJS.read.
I was just wondering how to add(push) data into an array via an express router endpoint. Suppose I have an array inside data/data.js directory and my router code look this:
const express = require('express');
const bodyParser = require('body-parser');
const productRouter = express.Router();
//! bring data
const { products } = require('../data/data');
productRouter.use(bodyParser.json());
productRouter
.route('/')
.post((req, res, next) => {
if (req.body !== null) {
//some logic
} else {
products.push(req.body);
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
res.json(products);
}
} else {
//handle error
}
});
module.exports = productRouter;
When I involve POST method in the endpoint of my route then it push the new data and response with an updated array. But when I check my express file then it still the old array. Why I lost the data?
I heartily thank if anyone helps me to figure out this.
As #Aditya toke and #Andre Nuechter suggested I update my code like this:
.post((req, res, next) => {
if (req.body !== null) {
if (products.some((product) => product.id === req.body.id)) {
err = new Error('Product with id:' + req.body.id + ' already exit');
err.status = 404;
return next(err);
} else {
const data_path = path.join(__dirname, '../data/data.js');
fs.appendFileSync(data_path, JSON.stringify(req.body));
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
res.json(products);
}
} else {
err = new Error('Body didnot contain product information');
err.status = 404;
return next(err);
}
});
But it pushes the new data like this:
exports.products = [
{
title: 'camera',
imageUrl: 'https://source.unsplash.com/gGgoDJRD2WM/300x300',
id: 1,
},
...
]
exports.orders = [];
{"title":"mens","imageUrl":"https://source.unsplash.com/1-nx1QR5dTE/300x300","id":"6"}
Which is not what I want. Is there any way to add this to the products array? Or any better approach?
As you mention about the saving data in the file,
that is only possible by using the filesystem to write data in the file,
I would recommend using the JSON extension file, as it's easy to parse and read.
You have to use the filesystem to write data in the file.
And to add further it can be utilised as a global variable all over the project.
There are multiple ways to play with the filesystem using node js.
https://nodejs.org/api/fs.html
Updated The Answer
#falamiw Follow these steps
1. Don't use data.js start using data.json
Structure inside data.json will be like this
{
products : [
{"title":"mens","imageUrl":"https://source.unsplash.com/1-nx1QR5dTE/300x300","id":"6"},
{"title":"women","imageUrl":"https://source.unsplash.com/1-nx1QR5dTE/300x300","id":"7"}
]
}
Code to make changes in this JSON file
const fs = require('fs');
let rawdata = fs.readFileSync('data.json');
let productArray = JSON.parse(rawdata);
// push changes to your array
productArray ['product'].push({ any thing you want to add })
After pushing object inside array now we will make changes in the data.json file using fileSystem.
let data = JSON.stringify(productArray );
fs.writeFileSync('data.json', data);
That's it now you can see the changes in the file.
I have not tested this code but I am sure it will work you just to debug and check its working.
The data is lost, because push does not alter the file on your disk.
To do that you need to use something like fs.writeFile.
I'm writing an application that scrapes fan sites for characters as a practice exercise. Currently I have an array of URLs that I am looping through and scraping the data I want, then outputting this data to a output.json file to store for later. I am having issues with my formatting when writing to this file.
Maybe I should store my data differently, I am open to suggestions on best practices/other methods. I would just like this data accessible later.
server.js
var express = require('express');
var cheerio = require('cheerio');
var app = express();
var rp = require('request-promise');
var fsp = require('fs-promise');
app.get('/', function(req, res){
urls = [
'fansite.com/boss1', 'fansite.com/boss2'
];
function parse(html) {
var bosses = require('./output.json');
var $ = cheerio.load(html);
$('.page-header__title').filter(function () {
var data = $(this);
name = data.text();
bosses.name = name;
})
return bosses;
}
var append = file => content => fsp.appendFile(file, JSON.stringify(content, null, 2));
urls.forEach(function (url) {
rp(url)
.then(parse)
.then(append('output.json'))
.then(() => console.log('Success'))
.then(res.send('Bosses Updated.'))
.catch(err => console.log('Error:', err));
});
})
app.listen('8081')
console.log('Running on port 8081');
exports = module.exports = app;
output.json
{
}{
"name": "Boss1"
}{
"name": "Boss2"
}
You're better off just modifying the in-memory javascript object, and then saving it all to the file in an overwrite / replace kind of approach, rather than appending to the file (unless you expect the file to become so huge that it breaks memory limits).
To do that, just maintain an in-memory copy of the data and then just write it out: fs.writeFile(fileName, JSON.stringify(content, null, 4));
Otherwise, you have to figure out how to insert the new object inside the old one, or risk making it invalid json.