rimraf Error: invalid rimraf options. Unable to delete files automatically - javascript

I have an issue where multer leaves some files behind after they have been uploaded to Cloudinary. I then found the post In Node, delete all files older than an hour? which introduced me to rimraf. I have not been able to implement it, and this is the error I get in the console.
Please have a look at the code; the error is at line 16 of the stack trace, which is => return rimraf(path.join(uploadsDir, file), function (err)
const rimraf = require("rimraf");
const path = require("path");
const fs = require("fs");

var uploadsDir = path.normalize(__dirname + "/public/images");

fs.readdir(uploadsDir, function (err, files) {
  files.forEach(function (file, index) {
    fs.stat(path.join(uploadsDir, file), function (err, stat) {
      var endTime, now;
      if (err) {
        return console.error(err);
      }
      now = new Date().getTime();
      endTime = new Date(stat.ctime).getTime() + 3600000;
      if (now > endTime) {
        return rimraf(path.join(uploadsDir, file), function (err) {
          if (err) {
            return console.error(err);
          }
          console.log("successfully deleted");
        });
      }
    });
  });
});
I am quite amazed because I can't find the issue, and no one else seems to have reported my experience. Please help.

From the documentation, it seems the second parameter that should be passed to the function (if you are using version 4) is the options object, while you are passing a function; check this URL.
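For example, a minimal sketch of the version 4 style, where the named rimraf export returns a Promise instead of taking a callback (assuming rimraf v4+):

const { rimraf } = require("rimraf"); // v4+ exposes a promise-based named export

if (now > endTime) {
  rimraf(path.join(uploadsDir, file))
    .then(() => console.log("successfully deleted"))
    .catch((err) => console.error(err));
}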

Related

How to prevent an if condition from executing until readdir finishes in nodejs/express?

app.get('/students/:record_id', function (req, res) {
  var found = false;
  var record_id = req.params.record_id;
  var dirname = "students/";
  filenames = fs.readdirSync(dirname);
  filenames.forEach(filename => {
    fs.readFile(dirname + filename, "utf-8", function (err, data) {
      if (err) {
        onError(err);
        return;
      }
      if (data.includes(record_id)) {
        found = true;
        return res.status(200).send(data);
      }
    });
  });
  if (found == false) {
    return res.status(500).send({ "message": "error - no student found" });
  }
});
I am using GET, searching by a student name, to retrieve a student's file. If I remove the if condition it is successful, but if the student does not exist then it does not work.
The if condition fixes this by setting found to true when the record is found in a file. The problem is that the if condition executes before the files are read.
I tried using a callback, but after a couple of hours of research I cannot figure out how to implement one here, since the readFile callback is already being used to retrieve the student info.
I tried using a promise, but the promise is then only fulfilled if the student is found, and I do not know how to implement it so that it is also fulfilled when the student is not found.
Since you've tried with Promises, I'll show you how to do it with Promises. Using async/await especially makes this really easy.
Note: since I don't know how else you are using fs, the import/require below only gets the promise versions of readdir and readFile, and uses them as readdir rather than fs.readdir - so other existing code won't break.
const { readFile, readdir } = require('fs/promises');
// if you use import (modules) instead of require (commonjs)
// import { readFile, readdir } from 'fs/promises';

app.get('/students/:record_id', async function (req, res) {
  var record_id = req.params.record_id;
  var dirname = "students/";
  try {
    const filenames = await readdir(dirname);
    for (let filename of filenames) {
      const data = await readFile(dirname + filename, "utf-8");
      if (data.includes(record_id)) {
        return res.status(200).send(data);
      }
    }
    res.status(404).send({ "message": "error - student not found" });
  } catch (err) {
    onError(err);
    res.status(500).send({ "message": "internal server error" });
  }
});
Note: I wouldn't send a 500 (Internal Server Error) if a student is not found - I changed the logic to send a 404 (Not Found) instead, which is more appropriate.
If you want pure Promises (no async/await), I believe the following is one way to do it - probably not the nicest code, but it should work (I haven't tested it, though).
const { readFile, readdir } = require('fs/promises');
// if you use import (modules) instead of require (commonjs)
// import { readFile, readdir } from 'fs/promises';

app.get('/students/:record_id', function (req, res) {
  var record_id = req.params.record_id;
  var dirname = "students/";
  readdir(dirname)
    .then(filenames => {
      // chain one readFile per file; once a match is found, skip further reads
      let p = Promise.resolve(false);
      for (let filename of filenames) {
        p = p.then(found =>
          found ? found : readFile(dirname + filename, "utf-8")
            .then(data => data.includes(record_id) ? data : false)
        );
      }
      return p;
    })
    .then(data => data
      ? res.status(200).send(data)
      : res.status(404).send({ "message": "error - student not found" }))
    .catch(err => {
      onError(err);
      res.status(500).send({ "message": "internal server error" });
    });
});
And, finally - without using Promises at all
app.get('/students/:record_id', function (req, res) {
  const record_id = req.params.record_id;
  const dirname = "students/";
  const filenames = fs.readdirSync(dirname);
  const count = filenames.length;
  const checkFile = index => {
    if (index < count) {
      const filename = filenames[index];
      fs.readFile(dirname + filename, "utf-8", function (err, data) {
        if (err) {
          onError(err);
          res.status(500).send({ "message": "internal server error" });
        } else if (data.includes(record_id)) {
          res.status(200).send(data);
        } else {
          checkFile(index + 1);
        }
      });
    } else {
      res.status(404).send({ "message": "error - student not found" });
    }
  };
  checkFile(0);
});

FTP in AWS Lambda - Issues Downloading Files (Async/Await)

I have been struggling with various FTP Node modules to try and get anything working in AWS Lambda. The best and most popular seems to be "Basic-FTP", which also supports async/await. But I just cannot get it to download files when any code is added beneath the FTP function.
I don't want to add the fs functions within the FTP async function, as I need to solve whatever causes the break when any code below is added; I also have other bits of code to add that work with the downloaded file and its content later:
FTP SUCCESS - when the async function is used alone, with no fs code beneath it
FTP FAILURE - when the fs readdir/readFile functions, or any other code, are added below:
ERROR Error: ENOENT: no such file or directory, open '/tmp/document.txt'
https://github.com/patrickjuchli/basic-ftp
const ftp = require("basic-ftp");
const fs = require("fs");

var FileNameWithExtension = "document.txt";
var ftpTXT;

exports.handler = async (event, context, callback) => {
  example();

  async function example() {
    const client = new ftp.Client();
    //client.ftp.verbose = true;
    try {
      await client.access({
        host: host,
        user: user,
        password: password,
        //secure: true
      });
      console.log(await client.list());
      await client.download(fs.createWriteStream('/tmp/' + FileNameWithExtension), FileNameWithExtension);
    }
    catch (err) {
      console.log(err);
    }
    client.close();
  }

  // Read the content from the /tmp/ directory to check FTP was successful
  fs.readdir("/tmp/", function (err, data) {
    if (err) {
      return console.error("There was an error listing the /tmp/ contents.");
    }
    console.log('Contents of AWS Lambda /tmp/ directory: ', data);
  });

  // Read TXT file and convert into string format
  fs.readFile('/tmp/' + FileNameWithExtension, 'utf8', function (err, data) {
    if (err) throw err;
    ftpTXT = data;
    console.log(ftpTXT);
  });

  // Do other Node.js coding with the downloaded txt file and its contents
};
The problem is that you are getting lost when creating an async function inside your handler. Since example() is async, it returns a Promise, but you don't await it, so the way it has been coded it's a fire-and-forget thing. Also, your Lambda is terminated before your callbacks are triggered, so even if the download succeeded you would not be able to see it.
I suggest you wrap your callbacks in Promises so you can easily await them from your handler function.
I have managed to make it work: I used https://dlptest.com/ftp-test/ for testing, so change it accordingly. Furthermore, note that I uploaded the file myself. So if you want to replicate this example, just create a readme.txt at the root of your project and upload it. If you already have this readme.txt file on your FTP server, just delete the line where it uploads the file.
Here's a working example:
const ftp = require("basic-ftp");
const fs = require("fs");

const FileNameWithExtension = "readme.txt";

module.exports.hello = async (event) => {
  const client = new ftp.Client();
  try {
    await client.access({
      host: 'ftp.dlptest.com',
      user: 'dlpuser@dlptest.com',
      password: 'puTeT3Yei1IJ4UYT7q0r'
    });
    console.log(await client.list());
    await client.upload(fs.createReadStream(FileNameWithExtension), FileNameWithExtension);
    await client.download(fs.createWriteStream('/tmp/' + FileNameWithExtension), FileNameWithExtension);
  }
  catch (err) {
    console.log('logging err');
    console.log(err);
  }
  client.close();

  console.log(await readdir('/tmp/'));
  console.log(await readfile('/tmp/', FileNameWithExtension));

  return {
    statusCode: 200,
    body: JSON.stringify({ message: 'File downloaded successfully' })
  };
};

const readdir = dir => {
  return new Promise((res, rej) => {
    fs.readdir(dir, function (err, data) {
      if (err) {
        return rej(err);
      }
      return res(data);
    });
  });
};

const readfile = (dir, filename) => {
  return new Promise((res, rej) => {
    fs.readFile(dir + filename, 'utf8', function (err, data) {
      if (err) {
        return rej(err);
      }
      return res(data);
    });
  });
};
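As an aside, on newer Node.js runtimes the two hand-rolled wrappers above could be replaced with Node's built-in promise-based fs API. A minimal sketch, assuming a runtime that ships fs/promises (Node.js 10+):

const { readdir, readFile } = require('fs/promises');

// then, inside the async handler, after client.close():
console.log(await readdir('/tmp/'));
console.log(await readFile('/tmp/' + FileNameWithExtension, 'utf8'));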
(Screenshots of the Lambda function's output and the complete CloudWatch logs omitted.) My file contains nothing but a 'hello' in it; you can see it in the logs.
Do keep in mind that, in Lambda functions, you have a 512 MB limit when downloading anything to /tmp. You can see the limits in the docs.

Download file via FTP, write to /tmp/ and output .txt contents to the console with AWS Lambda

I am using just a single Node package, basic-ftp, to try and download a TXT file and write its contents to the console. Further down the line I will be editing the text, so I will need to use fs. I am just struggling to work with the output of createWriteStream from within the FTP program.
Can anyone help me write a TXT file to the /tmp/ directory within AWS Lambda, and then show the correct syntax to open and edit the file after createWriteStream has been used?
var fs = require('fs');
const ftp = require("basic-ftp");
var path = require('path');

exports.handler = (event, context, callback) => {
  var fullPath = "/home/example/public_html/_uploads/15_1_5c653e6f6780f.txt"; // File Name FULL PATH -------
  const extension = path.extname(fullPath); // Used to calculate filenames below
  const wooFileName = path.basename(fullPath, extension); // Uploaded filename with no path or extension eg. filename
  const myFileNameWithExtension = path.basename(fullPath); // Uploaded filename with the file extension eg. filename.txt
  const FileNameWithExtension = path.basename(fullPath); // Uploaded filename with the file extension eg. filename.txt

  example();

  async function example() {
    const client = new ftp.Client();
    client.ftp.verbose = true;
    try {
      await client.access({
        host: "XXXX",
        user: "XXXX",
        password: "XXXX",
        //secure: true
      });
      await client.download(fs.createWriteStream('./tmp/' + myFileNameWithExtension), myFileNameWithExtension);
    }
    catch (err) {
      console.log(err);
    }
    client.close();
  }

  // Read the content from the /tmp directory to check it's empty
  fs.readdir("/tmp/", function (err, data) {
    console.log(data);
    console.log('Contents of AWS Lambda /tmp/ directory');
  });

  /*
  downloadedFile = fs.readFile('./tmp/' + myFileNameWithExtension)
  console.log(downloadedFile)
  console.log("Raw text:\n" + downloadedFile.Body.toString('ascii'));
  */
};
Pretty sure your fs.createWriteStream() has to use an absolute path to /tmp in Lambdas; your actual working directory is /var/task, not /.
Also, if you're using fs.createWriteStream() you'll need to wait for the finish event before reading from the file. Something like this...
async function example() {
  var finalData = '';
  const client = new ftp.Client();
  client.ftp.verbose = true;
  try {
    await client.access({
      host: "XXXX",
      user: "XXXX",
      password: "XXXX",
      //secure: true
    });
    let writeStream = fs.createWriteStream('/tmp/' + myFileNameWithExtension);
    await client.download(writeStream, myFileNameWithExtension);
    // wait for the stream to finish before reading the file back
    finalData = await new Promise((resolve, reject) => {
      writeStream
        .on('finish', () => {
          fs.readFile("/tmp/" + myFileNameWithExtension, function (err, data) {
            if (err) {
              reject(err);
            } else {
              console.log('Contents of AWS Lambda /tmp/ directory', data);
              resolve(data);
            }
          });
        })
        .on('error', (err) => {
          console.log(err);
          reject(err);
        });
    });
  }
  catch (err) {
    console.log(err);
  }
  client.close();
  return finalData;
}
You'll also need to access the file using fs.readFile(). What you were using, fs.readdir(), gives you a list of files in the directory, not the file's contents.
If you want to use readdir() you could do it like this, but as you can see it is redundant in your case. To handle errors I would suggest just handling the error event on the initial createWriteStream() instead of adding this extra overhead (added to the previous example)...
writeStream
  .on('finish', () => {
    fs.readdir('/tmp', (err, files) => {
      let saved = files.find(file => file === myFileNameWithExtension);
      fs.readFile("/tmp/" + saved, function (err, data) {
        if (err) throw new Error();
        console.log(data);
        console.log('Contents of AWS Lambda /tmp/ directory');
      });
    });
  })
  .on('error', (err) => {
    console.log(err);
    throw new Error();
  });
NOTE: Please log out the result of saved; I can't remember if the files array contains absolute or relative paths.
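(For what it's worth, fs.readdir passes bare file names relative to the directory, not absolute paths, so the find() comparison above should match as written.)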

Error: ENOENT: no such file or directory, uv_chdir at process.chdir when creating a directory and changing into it

I'm trying to write a small app that installs some files and modules in a new folder, but I keep getting this error:
{ Error: ENOENT: no such file or directory, uv_chdir
    at process.chdir (/home/aboardwithabag/LaunchProject/node_modules/graceful-fs/polyfills.js:20:9)
    at cd (/home/aboardwithabag/LaunchProject/index.js:26:13)
Below is my code. Can someone help me out?
// node LaunchProject projectName
// Installs a server, node modules, and index page.
// not working due to issues with chdir.
const cp = require('child_process');
const fse = require('fs-extra');
// const path = require('path');

const project = process.argv[2];
let server = "";
let home = "";

function make(cb) {
  fse.mkdirs(project, function (err) {
    if (err) {
      console.error(err);
    }
  });
  cb;
}

function cd(cb) {
  try {
    process.chdir('/' + project);
    cb;
  } catch (err) {
    console.error(err);
    return;
  }
}

function install(cb) {
  cp.exec('npm install express', function (err) {
    if (err) {
      console.error(err);
    } else {
      console.log('Express Installed.');
      cp.exec('npm install ejs', function (err) {
        if (err) {
          console.error(err);
        } else {
          console.log('Ejs Installed.');
          fse.outputFile('index.js', server);
          fse.outputFile('public/index.html', home);
        }
      });
    }
  });
  cb;
}

make(cd(install(console.log(project + ' created.'))));
Unless the folder name you assign to the project variable is located at the root of your HDD, the line below will give that error:
process.chdir('/' + project);
Make sure you give the correct path in the program arguments (in this case, argv[2]).
Or you may remove the leading '/' and make the path relative.
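For example, a minimal sketch of that second fix (assuming the project folder is meant to be created under the current working directory):

const path = require('path');

// resolve the project directory against the current working directory
// instead of against the filesystem root
process.chdir(path.resolve(process.cwd(), project));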
It seems there are some issues with this code.
The cb callbacks provided as function arguments need to be called not after the async calls, but inside the callbacks of those calls. For example:

function make(cb) {
  fse.mkdirs(project, function (err) {
    if (err) {
      console.error(err);
    }
    cb();
  });
}

The last call chain, make(cd(install(console.log(project + ' created.'))));, would work only with sync calls in reversed order, and only if they returned the needed callbacks.
That is why your new dir is not ready when you try to use it: your async functions do not actually wait for each other.
Also, you do not call your callbacks as cb(); you just mention them as cb. You should call them.
With minimal changes, your code can be refactored in this way:
'use strict';

const cp = require('child_process');
const fse = require('fs-extra');

const project = process.argv[2];
let server = '';
let home = '';

make(cd, install, () => { console.log(project + ' created.'); });

function make(cb1, cb2, cb3) {
  fse.mkdirs(project, (err) => {
    if (err) {
      console.error(err);
    }
    cb1(cb2, cb3);
  });
}

function cd(cb1, cb2) {
  try {
    process.chdir('/' + project);
    cb1(cb2);
  } catch (err) {
    console.error(err);
  }
}

function install(cb1) {
  cp.exec('npm install express', (err) => {
    if (err) {
      console.error(err);
    } else {
      console.log('Express Installed.');
      cp.exec('npm install ejs', (err) => {
        if (err) {
          console.error(err);
        } else {
          console.log('Ejs Installed.');
          fse.outputFile('index.js', server);
          fse.outputFile('public/index.html', home);
          cb1();
        }
      });
    }
  });
}
But it is rather brittle and unnecessarily complicated in this form. Maybe it would be simpler to inline your functions one inside the other.
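For instance, here is a possible simplification as a sketch, assuming Node.js 8+ (for util.promisify) and fs-extra's built-in promise support; it awaits each step instead of threading callbacks through:

const { promisify } = require('util');
const exec = promisify(require('child_process').exec);
const fse = require('fs-extra');

const project = process.argv[2];
let server = '';
let home = '';

async function launchProject() {
  await fse.mkdirs(project);  // create the project folder
  process.chdir(project);     // relative path, avoiding the leading-'/' issue
  await exec('npm install express');
  console.log('Express Installed.');
  await exec('npm install ejs');
  console.log('Ejs Installed.');
  await fse.outputFile('index.js', server);
  await fse.outputFile('public/index.html', home);
}

launchProject()
  .then(() => console.log(project + ' created.'))
  .catch(console.error);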
When I used PM2, I got this error: "no such file or directory, uv_chdir".
The fix was:
First, use 'pm2 delete' to delete the old process.
Second, use 'pm2 start'; then it works.
PS: just changing your code, or using 'pm2 reload' or 'pm2 restart', will not fix it.
For more detail, see https://blog.csdn.net/u013934914/article/details/51145134

Permissions trouble on AWS Lambda, can't spawn child process

So I've created this nice little Lambda, which runs great locally, however not so much when actually out in the wild.
The Lambda takes an event with HTML in the event source, converts that HTML to a PDF (using the html-pdf node module), passes that PDF to an S3 bucket, and then hands back a signed URL that expires in 60 seconds.
Or at least that is what ought to happen (again, it works locally). When testing on Lambda, I get the following error:
{
  "errorMessage": "spawn EACCES",
  "errorType": "Error",
  "stackTrace": [
    "exports._errnoException (util.js:870:11)",
    "ChildProcess.spawn (internal/child_process.js:298:11)",
    "Object.exports.spawn (child_process.js:362:9)",
    "PDF.PdfExec [as exec] (/var/task/node_modules/html-pdf/lib/pdf.js:87:28)",
    "PDF.PdfToFile [as toFile] (/var/task/node_modules/html-pdf/lib/pdf.js:83:8)",
    "/var/task/index.js:72:43",
    "Promise._execute (/var/task/node_modules/bluebird/js/release/debuggability.js:272:9)",
    "Promise._resolveFromExecutor (/var/task/node_modules/bluebird/js/release/promise.js:473:18)",
    "new Promise (/var/task/node_modules/bluebird/js/release/promise.js:77:14)",
    "createPDF (/var/task/index.js:71:19)",
    "main (/var/task/index.js:50:5)"
  ]
}
Here's the code itself (not compiled, there's a handy gulp task for that)
if (typeof regeneratorRuntime === 'undefined') {
  require("babel/polyfill")
}

import fs from 'fs'
import pdf from 'html-pdf'
import md5 from 'md5'
import AWS from 'aws-sdk'
import Promise from 'bluebird'
import moment from 'moment'

const tempDir = '/tmp'
const config = require('./config')
const s3 = new AWS.S3()

export const main = (event, context) => {
  console.log("Got event: ", event)

  AWS.config.update({
    accessKeyId: config.awsKey,
    secretAccessKey: config.awsSecret,
    region: 'us-east-1'
  })

  const filename = md5(event.html) + ".pdf"

  createPDF(event.html, filename).then(function (result) {
    uploadToS3(filename, result.filename).then(function (result) {
      getOneTimeUrl(filename).then(function (result) {
        return context.succeed(result)
      }, function (err) {
        console.log(err)
        return context.fail(err)
      })
    }, function (err) {
      console.log(err)
      return context.fail(err)
    })
  }, function (err) {
    console.log(err)
    return context.fail(err)
  })
}

const createPDF = (html, filename) => {
  console.log("Creating PDF")
  var promise = new Promise(function (resolve, reject) {
    pdf.create(html).toFile(filename, function (err, res) {
      if (err) {
        reject(err)
      } else {
        resolve(res)
      }
    })
  })
  return promise
}

const uploadToS3 = (filename, filePath) => {
  console.log("Pushing to S3")
  var promise = new Promise(function (resolve, reject) {
    var fileToUpload = fs.createReadStream(filePath)
    var expiryDate = moment().add(1, 'm').toDate()
    var uploadParams = {
      Bucket: config.pdfBucket,
      Key: filename,
      Body: fileToUpload
    }
    s3.upload(uploadParams, function (err, data) {
      if (err) {
        reject(err)
      } else {
        resolve(data)
      }
    })
  })
  return promise
}

const getOneTimeUrl = (filename) => {
  var promise = new Promise(function (resolve, reject) {
    var params = {
      Bucket: config.pdfBucket,
      Key: filename,
      Expires: 60
    }
    s3.getSignedUrl('getObject', params, function (err, url) {
      if (err) {
        reject(err)
      } else {
        resolve(url)
      }
    })
  })
  return promise
}
It seems like a problem within html-pdf. I thought it might be a problem with PhantomJS (which html-pdf depends on), due to some reading I did here: https://engineering.fundingcircle.com/blog/2015/04/09/aws-lambda-for-great-victory/ . However, since Lambda has bumped the max zip size to 50 MB, I don't have a problem uploading the binary.
Any thoughts?
html-pdf uses PhantomJS under the hood, which sets up a platform-specific binary when being installed. I guess your problem is that you are deploying binaries that were set up locally, whereas Lambda needs binaries built for Amazon Linux.
You can solve this problem by building your deployment package on an EC2 instance running Amazon Linux and then, e.g., deploying it directly from there, as explained in this tutorial.
Also check out this answer on a similar problem.
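If you do end up bundling a Linux PhantomJS binary in the deployment package yourself, html-pdf can be pointed at it via its phantomPath option. A minimal sketch, assuming a hypothetical bin/phantomjs built for Amazon Linux and shipped inside the zip:

import path from 'path'
import pdf from 'html-pdf'

const options = {
  // hypothetical location of a PhantomJS binary built for Amazon Linux
  phantomPath: path.join(__dirname, 'bin', 'phantomjs')
}

pdf.create(html, options).toFile(filename, callback)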
