download csv to browser downloads node.js - javascript

I have this code:
// Export the PRODUCTS table to CSV, then re-render the edit page.
// NOTE(review): `fs` is assigned without var/let/const — implicit global.
fs = require("fs");
var downloadData = "select * from PRODUCTS"
// Open a DB2 connection using the connection string stored in the session.
ibmdb.open(req.session.ibmdbconnDash, function (err, conn) {
if (err) return console.log(err);
conn.query(downloadData, function (err, rows) {
// NOTE(review): missing `return` here — on a query error, execution falls
// through and uses an undefined `rows` below.
if (err) {
console.log(err);
}
// NOTE(review): this writes the CSV to the server's working directory, not
// to the user's browser. To trigger a browser download, set a
// Content-Disposition: attachment header and pipe the CSV into `res`
// instead of a file stream — which is what the question is asking about.
const ws = fs.createWriteStream("productsDownloaded.csv");
// Round-trip through JSON to get plain objects from the driver's row type.
const jsonData = JSON.parse(JSON.stringify(rows));
console.log("jsonData", jsonData);
fastcsv
.write(jsonData, { headers: true })
.on("finish", function() {
console.log("Write to productsDownloaded.csv successfully!");
})
.pipe(ws);
var value = true;
// Render the page immediately; the CSV write above completes asynchronously.
res.render("edit-products", {
page_title: "edit-products",
data: rows,
userName: req.session.username,
FN: req.session.firstname,
LN: req.session.lastname,
CO: req.session.company,
value: value,
});
conn.close(function () {
console.log("closing function p1 of delete product");
});
});
});
However, the file downloads but it doesn't go anywhere useful. When I'm testing locally it ends up in my VS Code project directory, but I want it to be downloaded by the browser. Do I have to send something to the front end? In other words, when I press the download button and trigger this function, the file should be saved to the user's downloads directory. How can I achieve this?

Related

zip and download folders to local using nodejs

How to zip and download folder from D:/downloads path. As a 1st step, I was able to create a folder inside 'downloads' with dummy content. As a next step I want to zip and download that folder.
async downloadFolder(selectedProduct) {
try {
let completeZip = await this.jobService.zipBlobs(selectedProduct.path, this.role).toPromise();
if(completeZip['status']=='success'){
let download = await this.jobService.downloadBlobs(selectedProduct.path, this.role).toPromise();
console.log(download)
}
} catch (error) {
console.log(error);
}
}
API:
Once the file is written, I want to zip that folder and download it locally, but nothing happens.
exports.zipBlobs = async function (req, res) {
var userrole = req.body.userrole;
var path = req.body.path;
fileUploadPath="d:/downloads";
blobService.listBlobsSegmentedWithPrefix(containerName, path, null, (err, data) => {
if (err) {
reject(err);
} else {
data.entries.forEach(entry => {
console.log(entry.name);//'155ce0e4-d763-4153-909a-407dc4e328d0/63690689-e183-46ae-abbe-bb4ba5507f1a_MULTI_0_3/output/res2/res2.fcs';
if (fs.existsSync(fileUploadPath)) {
var sourceFilePath = fileUploadPath +'/'+entry.name ;
if (!fs.existsSync(sourceFilePath)) {
fs.mkdir(require('path').dirname(sourceFilePath), { recursive: true }, (err) => {
if (err) {
console.log("Failed :" + err);
}
else{
console.log('folder created,create file');
const fstream = fs.createWriteStream(sourceFilePath);
fstream.write('fileContent');
fstream.end();
fstream.on("finish", f => {
console.log('finish',f) ;
});
fstream.on("error", e => {
console.log('error',e);
});
}
});
}else{
console.log('folders already exists,create file');
const fstream = fs.createWriteStream(sourceFilePath);
fstream.write('fileContent');
fstream.end();
fstream.on("finish", f => {
console.log('finish',f) ;
});
fstream.on("error", e => {
console.log('error',e);
});
}
}else{
console.log('downloads folder does not exists!')
}
});
}
});
}
API to zip and download folder :
exports.downloadFolders = async function (req, res) {
var userrole = req.body.userrole;
var path = req.body.path;
try {
const folderpath = 'D:\downloads\622b6a148a813f18b8b2de81';
require('child_process').execSync(`zip -r archive *`, {
cwd: folderpath
});
// does not create zip, neither downloads
res.download(folderpath + '/archive.zip');
return;
}catch(error_1) {
res.status(200).json({
status: error_1
});
return;
}
}
In Javascript strings, backslashes must be doubled:
const folderpath = 'D:\\downloads\\622b6a148a813f18b8b2de81';
Without doubling them, you effectively get
const folderpath = 'D:downloads22b6a148a813f18b8b2de81'
because '\d' === 'd' and '\6' is a non-printable character.
You can also write the result of zip to the standard output and pipe it into the response object:
// Tell the browser to save the response body as a file named archive.zip.
res.set("Content-Disposition", "attachment;filename=archive.zip");
// `zip -r - *` writes the archive to stdout instead of a file; piping that
// stream into the response avoids creating a temp file on disk.
require("child_process").exec("zip -r - *", {
cwd: folderpath
}).stdout.pipe(res);
This is something I used in one of my projects where I needed the whole directory downloaded as zip:
require the following library:
const zipdir = require('zip-dir')
Then, when you need to download the zip, call it as follows:
// Zip the whole folder; zip-dir writes the archive to `saveTo` and also
// passes the zipped bytes to the callback as `buffer`.
zipdir(
'D:/downloads/622b6a148a813f18b8b2de81',
{ saveTo: 'D:/downloads/622b6a148a813f18b8b2de81/archive.zip' },
(err, buffer) => {
if (err) throw err;
console.log('New zip file created!');
}
);
Following is the API signature:
app.get('/api/zip', function (req, res) {
  // Re-create the archive on every request, then send it as a download.
  const sendArchive = function (err, buffer) {
    if (err) throw err;
    console.log('New zip file created!');
    res.download('D:/downloads/622b6a148a813f18b8b2de81/archive.zip');
  };
  zipdir(
    'D:/downloads/622b6a148a813f18b8b2de81',
    { saveTo: 'D:/downloads/622b6a148a813f18b8b2de81/archive.zip' },
    sendArchive
  );
});

Node File upload issue

I am constantly getting the printed-out message "No file uploaded" when I select my image and hit upload. It never goes to true.
As you can see, I am not actually uploading here. Just testing the req.files is there something wrong in my router.post? Any input would be appreciated.
router.post('/upload', async (req, res) => {
  // Sanity-check endpoint: reports whether the request carried any files.
  // NOTE(review): req.files is only populated when a file-upload middleware
  // (e.g. express-fileupload) is registered — confirm it is mounted.
  try {
    if (!req.files) {
      res.send({
        status: false,
        message: 'No file uploaded'
      });
    } else {
      res.send({
        status: true,
        message: 'Files are uploaded',
        // BUG FIX: the original referenced an undefined `data` variable here,
        // so this branch always threw a ReferenceError and the client got a
        // 500 instead of the success message — which is why "it never goes
        // to true".
        data: req.files
      });
    }
  } catch (err) {
    res.status(500).send(err);
  }
})
module.exports=router
Can you share your controller file for it? Also, you are using an async function but never using await inside it — you must use 'await' when calling async functions.
here is the controller file AAmir
let fs = require('fs');
let async = require('async');

/**
 * Transfers every file (not folder) in `fromFolder` to `toFolder`, one at a
 * time, over an already-established SSH connection.
 *
 * @param {Object} connection - ssh2-style connection exposing .sftp().
 * @param {string} fromFolder - source directory on the local machine.
 * @param {string} toFolder - destination directory.
 * @param {'put'|'get'} sftpmethod - 'put' uploads, 'get' downloads.
 * @returns {Promise<void>} resolves when all files transferred; rejects on
 *   the first error.
 */
function uploaddownFiles(connection, fromFolder, toFolder, sftpmethod) {
  return new Promise((resolve, reject) => {
    // Getting all file list in given folder of local machine
    let fileList = fs.readdirSync(fromFolder);
    // filter only files not folders (heuristic: names containing a dot)
    fileList = fileList.filter(file => file.includes('.'));
    console.log('Total files: ', fileList.length)
    if (!fileList.length) return reject('No file to send')
    connection.sftp(function (err, sftp) {
      // BUG FIX: the original did `if (err) return;`, silently dropping the
      // error and leaving the returned Promise pending forever.
      if (err) return reject(err);
      async.eachOfSeries(fileList, (file, key, cb) => {
        let moveFrom = `${fromFolder}/${file}`;
        let moveTo = `${toFolder}/${file}`;
        if (sftpmethod === 'put')
          sftp.fastPut(moveFrom, moveTo, {}, function (uploadError) {
            if (uploadError) return cb(uploadError);
            console.log("Successfully Uploaded", file);
            cb();
          });
        else if (sftpmethod === 'get')
          sftp.fastGet(moveFrom, moveTo, {}, function (uploadError) {
            if (uploadError) return cb(uploadError);
            console.log("Successfully Downloaded", file);
            cb();
          });
        else
          // BUG FIX: an unrecognized method previously never invoked cb(),
          // stalling the series and hanging the Promise.
          cb(new Error('Unknown sftpmethod: ' + sftpmethod));
      }, function (err) {
        if (err) {
          console.log(err);
          return reject(err);
        } else {
          console.log('all files have been uploaded/downloaded');
          return resolve();
        }
      })
    });
  });
}

Processing a file then uploading it : nodejs / formidable

I'm using the npm package formidable to:
Check the content of a file
If the content satisfies the condition, the file upload can be done; otherwise, reject the request with an error message
Here's the layout of my code:
import Formidable from 'formidable';
const form = new Formidable({
uploadDir: 'myFolder',
allowEmptyFiles: false,
keepExtensions: true,
});
form.on('fileBegin', (formname, file) => {
// Changing the filename
});
let canUploadFile = false;
form.onPart = (part) => {
// Checking if the filename is mentioned in the file content
// If so, upload the file
// Else, reject and send a 400 HTTP response
const dmnFileName = path.basename(part?.filename ?? '', '.dmn');
part.on('data', (buffer) => {
// do whatever you want here
const bufferData = Buffer.from(buffer).toString();
const parser = new htmlparser2.Parser({
onopentag(name: any, attributes: any) {
// check is done here by setting canUploadFile
},
});
parser.write(bufferData);
parser.end();
});
};
form.parse(req, (err) => {
if (err) {
next(err);
return;
}
logger.info(`File is being parsed...`);
});
form.on('end', async function () {
if(canUploadFile) {
res.send({ message: 'File uploaded!' });
} else {
res.status(400).send({
message: 'The file cannot be uploaded',
});
}
}
When I execute this code, the file doesn't get uploaded, but when I remove the form.onPart, the file gets uploaded!
What am I missing?

Using node-thumbnail to generate thumbnail from image

I am trying to generate a thumbnail from image using node-thumbnail, the thumbnail is being uploaded to my container in azure storage but it looks like the original file not like a thumbnail. Here's my code, first I am uploading the original image, then reading it and generating a thumbnail from it, then uploading the thumbnail to container. What am I doing wrong? I couldn't find much resources online on how to do this, please help!
// Upload the original image to blob storage, download it back to a local
// file, resize it with node-thumbnail, and upload the thumbnail.
// NOTE(review): this snippet appears truncated — the app.post callback and
// call are never closed (unbalanced braces at the end).
app.post('/upload', function(req, res) {
if (!req.files)
return res.status(400).send('No files were uploaded.');
// The name of the input field (i.e. "sampleFile") is used to retrieve the uploaded file
let sampleFile = req.files.sampleFile;
let name = sampleFile.name;
let data = sampleFile.data;
//var options = { contentSettings: { contentType: 'image/jpeg' } }
blobSvc.createBlockBlobFromText('test-container', name, data, function(error, result, response){
if (error){
return res.status(500).send(error);
} else {
console.log('Uploaded to container');
}
// Pull the blob back down to a local file named after the blob itself.
var info = blobSvc.getBlobToLocalFile ("test-container", name, name,
function (error, blockBlob, response) {
// NOTE(review): callback here is written as (files, err) — verify against
// the node-thumbnail API; if the order is actually (err, files), the error
// check below inspects the wrong argument and a failed resize would go
// unnoticed, which could explain the "thumbnail" being the original image.
thumb({
source: name, // could be a filename: dest/path/image.jpg
destination: './',
concurrency: 4,
width: 100
}, function(files, err){
if (err) throw err;
console.log("resized");
//Delete the downloaded BIG one
//Upload the thumbnail
blobSvc.createBlockBlobFromLocalFile("test-container", files[0].dstPath, files[0].dstPath,
function (error, blockBlob, response) {
if (!error) {
console.log("thumbnail uploaded: " + name);
} else{
console.log(error);
}
});
});
});
});
This isn't really an Azure Storage issue, it's more of a node-thumbnail issue.
How about using Jimp:
var azure = require('azure-storage');
var Jimp = require("jimp");
var path = require('path');
// ...
// Download the blob to a local file, shrink it with Jimp, and push the
// resulting thumbnail back into the same container.
var info = blobSvc.getBlobToLocalFile("test-container", name, name, function(error, blockBlob, response) {
  if (error) {
    return;
  }
  var parsed = path.parse(name);
  var dstName = parsed.name + "_thumb" + parsed.ext;
  Jimp.read(name, function(err, image) {
    if (err) throw err;
    // 100px wide, height derived automatically, moderate JPEG quality.
    image
      .resize(100, Jimp.AUTO)
      .quality(60)
      .write(dstName, function(err, ret) {
        // Upload the thumbnail
        blobSvc.createBlockBlobFromLocalFile("test-container", dstName, dstName, function(error, blockBlob, response) {
          if (error) {
            console.log(error);
          } else {
            console.log("thumbnail uploaded: " + dstName);
          }
        });
      });
  });
});

Using Node.js to connect to a REST API

Is it sensible to use Node.js to write a stand alone app that will connect two REST API's?
One end will be a POS - Point of sale - system
The other will be a hosted eCommerce platform
There will be a minimal interface for configuration of the service. nothing more.
Yes, Node.js is perfectly suited to making calls to external APIs. Just like everything in Node, however, the functions for making these calls are based around events, which means doing things like buffering response data as opposed to receiving a single completed response.
For example:
// get walking directions from central park to the empire state building
var http = require("http");
url = "http://maps.googleapis.com/maps/api/directions/json?origin=Central Park&destination=Empire State Building&sensor=false&mode=walking";
// get is a simple wrapper for request()
// which sets the http method to GET
var request = http.get(url, function (response) {
// data is streamed in chunks from the server
// so we have to handle the "data" event
var buffer = "",
data,
route;
response.on("data", function (chunk) {
buffer += chunk;
});
response.on("end", function (err) {
// finished transferring data
// dump the raw data
console.log(buffer);
console.log("\n");
data = JSON.parse(buffer);
route = data.routes[0];
// extract the distance and time
console.log("Walking Distance: " + route.legs[0].distance.text);
console.log("Time: " + route.legs[0].duration.text);
});
});
It may make sense to find a simple wrapper library (or write your own) if you are going to be making a lot of these calls.
Sure. The node.js API contains methods to make HTTP requests:
http.request
http.get
I assume the app you're writing is a web app. You might want to use a framework like Express to remove some of the grunt work (see also this question on node.js web frameworks).
/*Below logics covered in below sample GET API
-DB connection created in class
-common function to execute the query
-logging through bunyan library*/
const { APIResponse } = require('./../commonFun/utils');
const createlog = require('./../lib/createlog');

//Test API
routes.get('/testapi', (req, res) => {
  res.status(201).json({ message: 'API microservices test' });
});

// BUG FIX: `dbObj` was assigned without a declaration (implicit global), and
// a duplicate unused `var obj = new DB()` created a second connection object.
const dbObj = new DB();

routes.get('/getStore', (req, res) => {
  try {
    const store_id = req.body.storeID;
    // BUG FIX: the original passed an undefined `whereField` to the query
    // helper and ignored store_id entirely.
    const promiseReturnwithResult = selectQueryData('tablename', store_id, dbObj.conn);
    promiseReturnwithResult.then((result) => {
      APIResponse(200, 'Data fetched successfully', result).then((result) => {
        res.send(result);
      });
    }).catch((err) => { console.log(err); throw err; })
  } catch (err) {
    console.log('Exception caught in getuser API', err);
    const e = new Error();
    if (err.errors && err.errors.length > 0) {
      e.Error = 'Exception caught in getuser API';
      e.message = err.errors[0].message;
      e.code = 500;
      // BUG FIX: APIResponse is async — the original sent the pending
      // Promise itself (serialized as {}); resolve it before responding.
      APIResponse(e.code, e.message, e.Error).then((body) => {
        res.status(404).send(body);
      });
      createlog.writeErrorInLog(err);
    }
  }
});
//create connection
"use strict"
const mysql = require("mysql");

/**
 * Thin wrapper owning a single mysql connection.
 * The connection object is created eagerly in the constructor but is not
 * opened until connect() is called.
 */
class DB {
  constructor() {
    const config = {
      host: 'localhost',
      user: 'root',
      password: 'pass',
      database: 'db_name'
    };
    this.conn = mysql.createConnection(config);
  }

  /** Opens the underlying connection, logging success or failure. */
  connect() {
    this.conn.connect(function (err) {
      if (!err) {
        console.log("connected to DBB");
        return;
      }
      console.error("error connecting: " + err.stack);
    });
  }
//End class
}
module.exports = DB
//queryTransaction.js File
/**
 * Runs `SELECT * FROM <table> WHERE id = <where>` on the given connection.
 *
 * @param {string} table - table name (escaped as an identifier).
 * @param {*} where - value matched against the id column.
 * @param {Object} db_conn - an open mysql connection.
 * @returns {Promise<Array>} resolves with the result rows; rejects on error.
 */
selectQueryData = (table, where, db_conn) => {
  return new Promise(function (resolve, reject) {
    try {
      // SECURITY FIX: the original interpolated `table` and `where` straight
      // into the SQL string (injection risk). node-mysql placeholders escape
      // identifiers (`??`) and values (`?`) safely.
      db_conn.query('SELECT * FROM ?? WHERE id = ?', [table, where], function (err, result) {
        if (err) {
          reject(err);
        } else {
          resolve(result);
        }
      });
    } catch (err) {
      console.log(err);
      // BUG FIX: a synchronous throw was previously logged but the Promise
      // never settled, leaving callers hanging forever.
      reject(err);
    }
  });
}
module.exports = { selectQueryData };
//utils.js file

/**
 * Builds the standard API response envelope.
 *
 * @param {number} status - HTTP-style status code; a falsy value (e.g. 0)
 *   yields undefined, preserved from the original contract.
 * @param {string} msg - human-readable message.
 * @param {*} [data=''] - payload returned to the client.
 * @param {*} [error=null] - error detail, if any.
 * @returns {Promise<Object|undefined>} the response envelope.
 */
const APIResponse = async (status, msg, data = '', error = null) => {
  // BUG FIX: the original assigned to an undeclared `APIResponse` (implicit
  // global; ReferenceError under strict mode) and wrapped a simple object
  // literal in a try/catch that could never fire.
  if (status) {
    return { statusCode: status, message: msg, PayLoad: data, error: error };
  }
}
module.exports={
// Default bunyan logger configuration. `path` is intentionally empty here;
// createlogs.js fills in a date-stamped file path before building the logger.
logsSetting: {
name: "USER-API",
streams: [
{
level: 'error',
path: '' // log ERROR and above to a file
}
],
},APIResponse
}
//createlogs.js File
var bunyan = require('bunyan');
const dateFormat = require('dateformat');
const { logsSetting } = require('./../commonFun/utils');

/**
 * Writes `customError` to a date-stamped error log file via bunyan.
 * @param {*} customError - the error (or any value) to log.
 */
module.exports.writeErrorInLog = (customError) => {
  // BUG FIX: `{...logsSetting}` is a shallow copy, so the stream objects were
  // still shared — mutating streams[0].path leaked the per-day path back
  // into the shared logsSetting config. Clone each stream entry as well.
  let logConfig = {
    ...logsSetting,
    streams: logsSetting.streams.map((s) => ({ ...s })),
  };
  console.log('reached in writeErrorInLog', customError)
  const currentDate = dateFormat(new Date(), 'yyyy-mm-dd');
  logConfig.streams[0].path = `${__dirname}/../log/${currentDate}error.log`;
  const log = bunyan.createLogger(logConfig);
  log.error(customError);
}
An easier and more convenient tool is a library like Unirest; Unirest is an npm package that is very simple to use, just like this:
// Proxy an external REST API and render its payload through the view engine.
// NOTE(review): this snippet is truncated — the closing `});` for app.get is
// missing.
app.get('/any-route', function(req, res){
unirest.get("https://rest.url.to.consume/param1/paramN")
.header("Any-Key", "XXXXXXXXXXXXXXXXXX")
.header("Accept", "text/plain")
.end(function (result) {
res.render('name-of-the-page-according-to-your-engine', {
layout: 'some-layout-if-you-want',
// NOTE(review): `result.body.any-property` is not valid property access —
// it parses as a subtraction; bracket notation (result.body['any-property'])
// is required for hyphenated keys.
markup: result.body.any-property,
});
});

Categories

Resources