Processing a file then uploading it: Node.js / formidable

I'm using the npm package formidable to:
Check the content of a file
If the content satisfies the condition, the file upload can be done; otherwise, reject the request with an error message
Here's the layout of my code:
import Formidable from 'formidable';
import path from 'path';
import * as htmlparser2 from 'htmlparser2';

const form = new Formidable({
  uploadDir: 'myFolder',
  allowEmptyFiles: false,
  keepExtensions: true,
});

form.on('fileBegin', (formname, file) => {
  // Changing the filename
});

let canUploadFile = false;

form.onPart = (part) => {
  // Checking if the filename is mentioned in the file content
  // If so, upload the file
  // Else, reject and send a 400 HTTP response
  const dmnFileName = path.basename(part?.filename ?? '', '.dmn');
  part.on('data', (buffer) => {
    // do whatever you want here
    const bufferData = Buffer.from(buffer).toString();
    const parser = new htmlparser2.Parser({
      onopentag(name: any, attributes: any) {
        // check is done here by setting canUploadFile
      },
    });
    parser.write(bufferData);
    parser.end();
  });
};

form.parse(req, (err) => {
  if (err) {
    next(err);
    return;
  }
  logger.info(`File is being parsed...`);
});

form.on('end', async function () {
  if (canUploadFile) {
    res.send({ message: 'File uploaded!' });
  } else {
    res.status(400).send({
      message: 'The file cannot be uploaded',
    });
  }
});
When I execute this code, the file doesn't get uploaded, but when I remove the form.onPart override, the file is uploaded.
What am I missing?
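For reference, here is a minimal sketch of the fix that usually applies, assuming formidable v2/v3: overriding form.onPart tells formidable that you will handle the part yourself, so nothing is written to uploadDir unless you delegate back to the built-in handler via form._handlePart(part) (this is the pattern shown in the formidable README).

form.onPart = (part) => {
  // Inspect the incoming data yourself...
  part.on('data', (buffer) => {
    // run the htmlparser2 check here and set canUploadFile
  });
  // ...but still hand the part back to formidable's built-in handler,
  // otherwise no file is ever written to uploadDir.
  form._handlePart(part);
};

Note that the file is then saved even when the check fails, so the 'end' handler would also need to remove it (for example with fs.unlink) before sending the 400 response.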

Related

Download CSV to browser downloads (Node.js)

I have this code:
var fs = require("fs");
var fastcsv = require("fast-csv");

var downloadData = "select * from PRODUCTS";
ibmdb.open(req.session.ibmdbconnDash, function (err, conn) {
  if (err) return console.log(err);
  conn.query(downloadData, function (err, rows) {
    if (err) {
      console.log(err);
    }
    const ws = fs.createWriteStream("productsDownloaded.csv");
    const jsonData = JSON.parse(JSON.stringify(rows));
    console.log("jsonData", jsonData);
    fastcsv
      .write(jsonData, { headers: true })
      .on("finish", function () {
        console.log("Write to productsDownloaded.csv successfully!");
      })
      .pipe(ws);
    var value = true;
    res.render("edit-products", {
      page_title: "edit-products",
      data: rows,
      userName: req.session.username,
      FN: req.session.firstname,
      LN: req.session.lastname,
      CO: req.session.company,
      value: value,
    });
    conn.close(function () {
      console.log("closing function p1 of delete product");
    });
  });
});
However, the file is written but it doesn't go anywhere useful. When I test locally it ends up in my VS Code project directory, but I want the browser to download it, so do I have to send something to the front end? If I press the download button and trigger this function, it should download to the user's downloads directory. How can I achieve this?
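A minimal sketch of the usual approach, assuming an Express route and the fast-csv package already used above: instead of writing the CSV to the server's filesystem, set a Content-Disposition header and pipe the CSV straight into the HTTP response, so the browser treats it as a download. The route path below is illustrative.

const fastcsv = require("fast-csv");

router.get("/download-products", (req, res) => {
  ibmdb.open(req.session.ibmdbconnDash, function (err, conn) {
    if (err) return res.status(500).send(err);
    conn.query("select * from PRODUCTS", function (err, rows) {
      if (err) return res.status(500).send(err);
      // Tell the browser this response is a file to download
      res.setHeader("Content-Type", "text/csv");
      res.setHeader(
        "Content-Disposition",
        'attachment; filename="productsDownloaded.csv"'
      );
      // Stream the rows as CSV directly into the HTTP response
      fastcsv.write(rows, { headers: true }).pipe(res);
      conn.close(function () {});
    });
  });
});

On the client, pointing the download button at that route (for example window.location = '/download-products' or a plain link) is enough to make the browser save the file to the user's downloads folder.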

Node File upload issue

I constantly get the "No file uploaded" failure message when I select my image and hit upload. It never goes to true.
As you can see, I am not actually uploading anything here, just testing req.files. Is there something wrong in my router.post? Any input would be appreciated.
router.post('/upload', async (req, res) => {
  try {
    if (!req.files) {
      res.send({
        status: false,
        message: 'No file uploaded'
      });
    } else {
      res.send({
        status: true,
        message: 'Files are uploaded',
        data: req.files // note: the original had `data: data`, but `data` is not defined in this scope
      });
    }
  } catch (err) {
    res.status(500).send(err);
  }
});

module.exports = router;
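For what it's worth, req.files is only populated when a multipart parser runs before this route. A minimal sketch of that wiring, assuming the express-fileupload package (multer would work as well); the mount path and file name are illustrative:

const express = require('express');
const fileUpload = require('express-fileupload');

const app = express();
// Parses multipart/form-data requests and populates req.files
app.use(fileUpload());
// Mount the router shown above (path is illustrative)
app.use('/', require('./routes/upload'));

app.listen(3000);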
Can you share its controller file? Also, you are using an async function but never use await inside it; you must use 'await' when executing an async function.
Here is the controller file, AAmir:
let fs = require('fs');
let async = require('async');

function uploaddownFiles(connection, fromFolder, toFolder, sftpmethod) {
  return new Promise((resolve, reject) => {
    // Getting all file list in given folder of local machine
    let fileList = fs.readdirSync(fromFolder);
    // filter only files not folders
    fileList = fileList.filter(file => {
      if (file.includes('.')) return true;
      return false;
    });
    console.log('Total files: ', fileList.length);
    if (!fileList.length) return reject('No file to send');
    connection.sftp(function (err, sftp) {
      if (err) return reject(err); // reject so the promise does not hang on SFTP errors
      async.eachOfSeries(fileList, (file, key, cb) => {
        let moveFrom = `${fromFolder}/${file}`;
        let moveTo = `${toFolder}/${file}`;
        if (sftpmethod === 'put')
          sftp.fastPut(moveFrom, moveTo, {}, function (uploadError) {
            if (uploadError) return cb(uploadError);
            console.log("Successfully Uploaded", file);
            cb();
          });
        else if (sftpmethod === 'get')
          sftp.fastGet(moveFrom, moveTo, {}, function (uploadError) {
            if (uploadError) return cb(uploadError);
            console.log("Successfully Downloaded", file);
            cb();
          });
      }, function (err) {
        if (err) {
          console.log(err);
          return reject(err);
        } else {
          console.log('all files have been uploaded/downloaded');
          return resolve();
        }
      });
    });
  });
}
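For context, a hypothetical usage sketch, assuming the connection comes from the ssh2 package, whose connection.sftp(callback) signature matches the code above; the host and credentials are placeholders:

const { Client } = require('ssh2');

const conn = new Client();
conn
  .on('ready', () => {
    // Upload every file in ./outbox to /remote/inbox over SFTP
    uploaddownFiles(conn, './outbox', '/remote/inbox', 'put')
      .then(() => console.log('done'))
      .catch((err) => console.error(err))
      .finally(() => conn.end());
  })
  .connect({ host: 'example.com', username: 'user', password: 'secret' });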

Wait for function to finish to do something else node.js formidable

My issue here is that the console.log() at the end of the code always prints before the formidable form parse is finished, and I don't know how to avoid this.
const formidable = require('formidable');
const path = require('path');
const fs = require('fs');

exports.scrape = (req, res) => {
  let form = new formidable.IncomingForm();
  form.keepExtensions = true;
  form.parse(req, (err, fields, files) => {
    if (err) {
      return res.status(400).json({
        error: 'Image could not be uploaded'
      });
    }
    const { endpoint } = fields;
    if (!endpoint) {
      return res.status(400).json({
        error: 'All fields are required'
      });
    }
    if (files.file) {
      if (files.file.size > 1000000) {
        return res.status(400).json({
          error: 'Image should be less than 1mb in size'
        });
      }
      var oldPath = files.file.path;
      console.log(oldPath);
      var newPath = path.join(__dirname, '../src/files/csv')
        + '/' + files.file.name;
      var rawData = fs.readFileSync(oldPath);
      fs.writeFile(newPath, rawData, function (err) {
        if (err) console.log(err);
        return console.log(newPath);
      });
    }
  });
  console.log("test!");
};
Then, when the parse is complete, I want to perform another action, because I need the file that was just created for another script that should only start once parsing is over; that would be where the console.log("test!") currently sits.
Get a basic understanding of how callbacks work; then you will realize that putting console.log at the bottom of the function is not chronologically the end.
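To make that concrete, here is a minimal sketch of one way to restructure it, wrapping form.parse in a Promise so the follow-up work can be awaited; the processFile name is a placeholder for whatever should run once the file exists:

exports.scrape = async (req, res) => {
  const form = new formidable.IncomingForm();
  form.keepExtensions = true;

  try {
    const { fields, files } = await new Promise((resolve, reject) => {
      form.parse(req, (err, fields, files) => {
        if (err) return reject(err);
        resolve({ fields, files });
      });
    });

    // ... move the validation and fs.writeFile logic here ...

    // Only runs after parsing (and any file handling above) is done
    console.log('test!');
    // processFile(files); // placeholder for the follow-up script
  } catch (err) {
    return res.status(400).json({ error: 'Image could not be uploaded' });
  }
};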

send text data with formdata via Axios post

I am sending over a PDF file in formdata with an Axios post request. So that file will get uploaded/saved to a folder on the server. I'm using multer on the server to save the file. And that works great.
Now I also want to add some fields to the DB related to the file. One is the file name that gets generated right before the file is saved. I don't want to make a round trip back to the client and then make another call out to the server to update the DB, so I want to send a few text strings along with the formdata. But no matter what I try, I cannot read any text data from that formdata object in my Node code. FYI, I am using Express on my Node server.
Client-side code that kicks off the upload process (notice I am attempting to append additional fields to the FormData object):
const uploadFilesAsync = () => {
  const data = new FormData();
  const filenames = [];
  uploadedFiles.forEach((f) => {
    filenames.push(f.name);
    data.append('file', f);
  });
  const fileInfo = {
    customer: selectedCustomer,
    load: selectedLoad,
    filenames
  };
  data.append('customer', selectedCustomer);
  data.append('load', selectedLoad);
  data.append('filenames', filenames.toString());
  // I also tried the following and then passing fileInfo in with data and setLoaded
  // (commented out here because fileInfo is already declared above):
  // const fileInfo = { customer: selectedCustomer, load: selectedLoad,
  //   filenames: filenames.toString() };
  uploadFiles(data, setLoaded)
    .then((res) => {
      console.log('uploadFiles res: ', res);
      if (res.status === 200) {
        // addFileInfoToDB(fileInfo)
        //   .then((r) => console.log('addFileInfoToDB: ', r))
        //   .catch((e) => console.log({ e }));
      }
    })
    .catch((e) => console.log(e));
};
And then the client side function uploadFiles:
export const uploadFiles = (data, setLoaded) => {
  console.log({ data });
  const config = {
    onUploadProgress: function (progressEvent) {
      const percentCompleted = Math.round(
        (progressEvent.loaded * 100) / progressEvent.total
      );
      setLoaded(percentCompleted);
    },
    headers: {
      'Content-Type': 'multipart/form-data'
    }
  };
  // and then here if I passed in the fileInfo object, I tried sending `{data, fileInfo}`
  // instead of just data, but that broke the file upload portion too
  return axios
    .post(baseURL + '/SDS/upload', data, config)
    .then((response) => {
      console.log({ response });
      return response;
    })
    .catch((e) => {
      return Promise.reject(e);
    });
};
And finally the server side function that does all the work:
static async uploadSDS(req, res) {
  console.log(req.body);
  let uploadSuccess = false;
  upload(req, res, async function (err) {
    if (err instanceof multer.MulterError) {
      // return res.status(500).json({ Error1: err });
      // return { status: 500 };
    } else if (err) {
      // return res.status(500).json({ Error2: err });
      // return { status: 500 };
    } else {
      uploadSuccess = true;
    }
    console.log(uploadSuccess);
    // return res.status(200).send(req.file);
    // return { status: 200 };
    // if (uploadSuccess) {
    //   try {
    //     const result = await SDS.addFileInfoToDB(req.fileInfo);
    //     if (result) {
    //       return res.status(200).json({ result });
    //     }
    //   } catch (e) {
    //     console.log(e);
    //   }
    // }
  });
}
When I console.log the req.body it is always empty.
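A minimal sketch of what typically resolves this, assuming multer is the upload middleware here: text fields appended to a FormData object are parsed by multer itself, so they only appear in req.body inside the upload callback (i.e. after multer has run), not before it. Also, axios sets the multipart boundary automatically, so the hand-written Content-Type header on the client can be dropped.

// Inside the same controller class as above
static async uploadSDS(req, res) {
  upload(req, res, async function (err) {
    if (err) {
      return res.status(500).json({ error: err.message });
    }
    // The text fields from the FormData object are available here,
    // after multer has parsed the multipart body
    const { customer, load, filenames } = req.body;
    console.log(customer, load, filenames);

    // const result = await SDS.addFileInfoToDB({ customer, load, filenames });
    return res.status(200).send(req.file);
  });
}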

Why is my file from image "request" incomplete when saving to Google Cloud Storage?

I am trying to upload an image from a URL to my Google Cloud Storage (Firebase). The following function should return the file, and a subsequent function will retrieve the actual signed/download URL for the new file. After all this I update a document in my Firestore database with the new URL. That part works; the functions wait for the (unfortunately incomplete) image to be uploaded and my document gets updated with the newly created file URL. But the actual file/image is incomplete. :-(
async function saveToStorage(fileUrl) {
  var storage = admin.storage();
  var urlLib = require("url");
  var pathLib = require("path");
  // Get file name from provided URL
  var parsed = urlLib.parse(fileUrl);
  var fileName = pathLib.basename(parsed.pathname);
  // Create storage reference with new file name
  var bucket = storage.bucket('gs://myprojectname.appspot.com');
  // Folder path
  var folderPath = 'data/photos/';
  // Folder path + file
  var internalFilePath = folderPath + fileName;
  // Bucket file ref
  var file = bucket.file(internalFilePath);
  const request = require('request');
  const writeStream = file.createWriteStream({
    metadata: {
      contentType: 'image/jpg'
    }
  });
  return new Promise((resolve, reject) => {
    request.get(fileUrl)
      .pipe(writeStream)
      .on("error", (err) => {
        console.error(`Error occurred`);
        reject();
      })
      .on('finish', () => {
        console.info(`Photo saved`);
        resolve(file);
      });
  });
}
The image that is saved/uploaded/streamed to my Cloud Storage looks like this (screenshot of the truncated image not included here).
I have tried using node-fetch and request and rewrote my function in several ways, but it always turns out with this result. I'm sure it has something to do with how I use my Promise, because if I omit the Promise the file actually completes, but then the main code keeps executing instead of waiting for this Promise.
This has the same result (incomplete file):
return await fetch(fileUrl).then(res => {
  const contentType = res.headers.get('content-type');
  const writeStream = file.createWriteStream({
    metadata: {
      contentType
    }
  });
  let p = new Promise((resolve, reject) => {
    res.body.pipe(writeStream);
    writeStream.on('finish', function () {
      console.log("Stream finished");
      resolve(file);
    });
    writeStream.on('error', function () {
      reject(new Error("Whoops!"));
    });
  });
  return p.then(
    function (file) {
      console.log('Photo saved');
      return file;
    },
    function (error) {
      console.error(error);
      return;
    });
});
And outright returning the stream writes a complete file, but my main code does not wait for the file (and I need to handle the file).
return res.body.pipe(writeStream)
  .on('finish', () => {
    return file;
    console.log('Photo');
  })
  .on('error', err => {
    return console.error(err);
  });
Thanks for any help on this!
So this is the code that finally worked for me.
return new Promise((resolve, reject) => {
  const req = request(fileUrl);
  req.pause();
  req.on('response', res => {
    const writeStream = file.createWriteStream({
      metadata: {
        contentType: res.headers['content-type']
      }
    });
    req.pipe(writeStream)
      .on('finish', () => {
        console.log('Photo saved');
        resolve(file);
      })
      .on('error', err => {
        writeStream.end();
        console.error(err);
        reject();
      });
    req.resume();
  });
  req.on('error', err => console.error(err));
});
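As a side note, a possibly simpler alternative sketch, assuming Node 15+ and node-fetch, where file is the bucket.file(...) reference from the question: stream/promises pipeline wires up the finish and error handling for you and returns a promise, which avoids the manual pause()/resume() dance.

const fetch = require('node-fetch');
const { pipeline } = require('stream/promises');

async function streamToStorage(fileUrl, file) {
  const res = await fetch(fileUrl);
  const writeStream = file.createWriteStream({
    metadata: { contentType: res.headers.get('content-type') },
  });
  // Resolves when the write stream finishes, rejects on any stream error
  await pipeline(res.body, writeStream);
  console.log('Photo saved');
  return file;
}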
