How to download a file through FTP with a firebase function? - javascript

QUESTION:
Unfortunately, my function finishes execution within seconds instead of running to completion. This is apparently because the data is streamed through event listeners, which, to my knowledge, are not promises I can await.
How can I have my Firebase function execute in full?
CODE:
exports.fifteenMinutesData = functions
  .runWith(runtimeOpts)
  .pubsub
  .schedule('*/15 * * * *')
  .timeZone('Etc/UTC')
  .onRun((context) => {
    return (async () => {
      try {
        const Client = require('ftp');
        const c = new Client();
        c.connect({
          host: "...",
          user: "..."
        });
        c.on('ready', async function () {
          c.get('text.txt', async function (err, stream) {
            if (err)
              throw err;
            var content = '';
            stream.on('data', function (chunk) {
              content += chunk.toString();
            });
            stream.on('end', function () {
              (async () => {
                try {
                  let data = content;
                  //etc....
                }
                catch (err) {
                  console.log("ERR: " + err);
                }
              })()
            })
          })
        })
      }
      catch (err) {
        console.log("ERR: " + err)
      }
    })()
  });

You will need to promisify the result so the functions module knows the work is asynchronous. Currently, your callbacks don't feed anything back into the value you return, so the returned promise resolves immediately and execution exits. You will want a shape like:
exports.fifteenMinutesData = functions
  .runWith(runtimeOpts)
  .pubsub
  .schedule('*/15 * * * *')
  .timeZone('Etc/UTC')
  .onRun((context) => new Promise((resolve, reject) => {
    // do the FTP work here
  }));
Where you call resolve(data); for the success path and reject(err); for all error execution paths.
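For example, a fuller sketch of that shape, reusing the ftp client and the buffering logic from the question (host and user stay elided), might look like:
exports.fifteenMinutesData = functions
  .runWith(runtimeOpts)
  .pubsub
  .schedule('*/15 * * * *')
  .timeZone('Etc/UTC')
  .onRun((context) => new Promise((resolve, reject) => {
    const Client = require('ftp');
    const c = new Client();
    c.on('error', reject);
    c.on('ready', () => {
      c.get('text.txt', (err, stream) => {
        if (err) return reject(err);
        let content = '';
        stream.on('data', chunk => { content += chunk.toString(); });
        stream.on('error', reject);
        stream.on('end', () => {
          c.end();
          resolve(content); // the function now stays alive until this fires
        });
      });
    });
    c.connect({ host: "...", user: "..." });
  }));
Because the promise only settles inside the stream listeners, the function instance stays alive until the download finishes (or fails).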

Related

s3.getObject not working from dev environment

This is my code, which works fine if I run it from my local machine using a local AWS account, but it doesn't work from my dev environment. The S3.getObject API doesn't get executed and the code prints the next log, skipping the getObject call:
const unzipFromS3 = (key) => {
  return new Promise(async (resolve, reject) => {
    log.info("inside unzipfroms3");
    var zlib = require('zlib');
    // let fileName = _.replace(key, 'Root/', '');
    let options = {
      'Bucket': config.bucketName,
      'Key': "Root/" + key,
    }
    log.info("Key:", options);
    await s3.getObject(options).on('error', error => {
      log.error(error)
    }).promise().then((res) => {
      yauzl.fromBuffer(res.body, { lazyEntries: true }, function (err, zipfile) {
        log.info("Inside Yauzl")
        if (err) throw err;
        zipfile.readEntry();
        zipfile.on("entry", function (entry) {
          if (/\/$/.test(entry.fileName)) {
            zipfile.readEntry();
          } else {
            zipfile.openReadStream(entry, function (err, readStream) {
              if (err) throw err;
              // readStream.pipe(fs.createWriteStream(`result/${entry.fileName}`));
              readStream
                .pipe(uploadFromStream(s3));
              function uploadFromStream(s3) {
                log.info("Inside uploadFromStream")
                var pass = new Stream.PassThrough();
                let options = {
                  'Bucket': config.bucketName,
                  'Key': entry.fileName,
                }
                var params = { ...options, Body: pass };
                s3.upload(params, function (err, data) {
                  log.error(err, data);
                });
                return pass;
              }
              readStream.on("end", function () {
                zipfile.readEntry();
              });
            });
          }
        });
      });
    });
  });
};
In order to use await, i.e. the promise-based version of S3.getObject(), you must add the promise() method to your method call, as explained in the Using JavaScript Promises chapter of the AWS SDK developer guide. There is also a Using async/await chapter that you can look into.
In your case, the code can be modified to something like:
await s3.getObject(options).promise()
  .then((res) => {
    yauzl.fromBuffer(/* more code */);
  });
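As a side note, mixing await with .then() is redundant, and the SDK exposes the payload as res.Body (capital B). A cleaner sketch of the same call:
const res = await s3.getObject(options).promise();
yauzl.fromBuffer(res.Body, { lazyEntries: true }, (err, zipfile) => {
  /* more code */
});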

ffmpeg - on end function probably not working correctly?

I've got this code to loop through some .mp4 files and create a screenshot of each:
files.forEach(file => {
  console.log(file);
  if (!fs.existsSync('img/' + file.replace('.mp4', '.png'))) {
    ffmpeg({ source: 'movies/' + file })
      .takeScreenshots({ timemarks: ['50'], size: '150x100' }, 'img/', function(err, filenames) {
      })
      .on('end', function() {
        fs.rename('img/tn.png', 'img/' + file.replace('.mp4', '.png'), function(err) { if (err) console.log('Error: ' + err) });
        sleep(1000);
      })
  }
});
Now I have the problem that .on('end') sometimes doesn't seem to fire, and I get this error:
ENOENT: no such file or directory, rename
I think it's because saving tn.png is slower than the renaming...
I wouldn't mix callbacks, sync calls, sleep and loops together. You can use the fs promises API to convert all your callback-style code to promise style, and then run the work either sequentially or in parallel.
Also, I would wrap the screenshot code in a promise.
Here is the code:
const fs = require("fs").promises;

function takeScreenshot(file) {
  return new Promise((resolve, reject) => {
    ffmpeg({"source": `movies/${file}`})
      .takeScreenshots({"timemarks": ["50"], "size": "150x100"}, "img/", function (err, filenames) {
      })
      .on("end", function () {
        resolve();
      })
      .on("error", err => {
        reject(err);
      });
  });
}

// fs.stat rejects when the file is missing, so wrap it to get a boolean
async function screenshotExists(file) {
  try {
    await fs.stat(`img/${file.replace(".mp4", ".png")}`);
    return true;
  } catch (err) {
    return false;
  }
}

// execute one by one
async function sequential(files) {
  for (const file of files) {
    if (!(await screenshotExists(file))) {
      await takeScreenshot(file);
      await fs.rename("img/tn.png", `img/${file.replace(".mp4", ".png")}`);
    }
  }
}

// execute in parallel
// note: every screenshot is written to img/tn.png first, so the parallel
// variant can race on that shared file; prefer sequential here
async function parallel(files) {
  return Promise.all(files.map(async file => {
    if (!(await screenshotExists(file))) {
      await takeScreenshot(file);
      await fs.rename("img/tn.png", `img/${file.replace(".mp4", ".png")}`);
    }
  }));
}
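To run either variant, a small driver (assuming the movies live in movies/, as in the question) could look like:
(async () => {
  const files = (await fs.readdir("movies/")).filter(f => f.endsWith(".mp4"));
  await sequential(files); // or: await parallel(files)
})().catch(console.error);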
Hope this helps.

I called then() on a TypeScript promise but it is still pending. Why is this? How can I get it to resolve?

Here is the index.ts script I am running (based on something I found on reddit):
const path = require("path");
const sql = require("mssql");
const config = require(path.resolve("./config.json"));

let db1;

const connect = () => {
  return new Promise((resolve, reject) => {
    db1 = new sql.ConnectionPool(config.db, err => {
      if (err) {
        console.error("Connection failed.", err);
        reject(err);
      } else {
        console.log("Database pool #1 connected.");
        resolve();
      }
    });
  });
};

const selectProjects = async (name) => {
  const query = `
    select * from [Time].ProjectData where [Name] like concat('%', concat(@name, '%'))`;
  const request = new sql.Request(db1);
  const result = await request
    .input("name", name)
    .query(query);
  return result.recordset;
};

module.exports = {
  connect,
  selectProjects
};

connect().then(function() {
  console.log(selectProjects('General'));
}).catch(function(err) {
  console.log(err);
});
When I run the script using node index (after compiling it of course), I get this in the console:
Database pool #1 connected.
Promise { <pending> }
And then the script hangs.
Apparently every async function returns an implicit promise; I had to change the last function call to wait for selectProjects' promise as well:
connect().then(function() {
  selectProjects('General').then(function(data) {
    console.log(data);
  });
}).catch(function(err) {
  console.log(err);
});
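Equivalently, the final call can use async/await throughout:
(async () => {
  try {
    await connect();
    const data = await selectProjects('General');
    console.log(data);
  } catch (err) {
    console.log(err);
  }
})();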

Async await of a promise

I have to wait for func1 to finish before running func2. But since func1/2/3 contain promises, it prints "termined" too early.
async function executeAsyncTask () {
  const res1 = await func1(a,b,c)
  const res2 = await func2(a,b,c)
  const res3 = await func3(a,b,c)
  return console.log(res1, res2, res3)
}
executeAsyncTask()
func1
class A {
  promise_API_CALL(params) {
    //some code here..
  }
  func1(a,b,c) {
    //so work here...
    this.promise_API_CALL(params, function( data, err ) {
      if (err) { console.error(err) }
      console.log( data );
      return data;
    });
    //so work here...
    console.log("termined")
  }
}
EDIT: promise_API_CALL is a function of an external library
Try wrapping the api call in a promise. Otherwise I can't see this working the way you want it to:
func1(a, b, c) {
  return new Promise((resolve, reject) => {
    this.promise_API_CALL(params, function(data, err) {
      if (err) {
        console.error(err)
        reject(err);
      }
      console.log(data);
      resolve(data);
    });
    //so work here...
    console.log("termined")
  });
}
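With func1 wrapped like this, a quick usage sketch (assuming func1 lives on class A from the question and a, b, c are in scope):
new A().func1(a, b, c)
  .then(data => console.log("resolved with", data))
  .catch(err => console.error(err));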
In order to improve your code, the definition of executeAsyncTask should be like this:
async function executeAsyncTask () {
  try {
    const res1 = await func1(a,b,c)
    const res2 = await func2(a,b,c)
    const res3 = await func3(a,b,c)
    return [res1, res2, res3]; // Return all values from 'each await' as an array
  } catch (err) {
    throw 'Promise Rejected';
  }
}
As you can see, it uses try and catch to handle the errors. In other words, if one of the awaited functions rejects, the catch block picks up the error automatically.
// This 'func1 code' from 'Carl Edwards' is the same
func1(a, b, c) {
  return new Promise((resolve, reject) => {
    promise_API_CALL(params, function(data, err) {
      if (err) {
        console.error(err)
        reject(err);
      }
      console.log(data);
      resolve(data);
    });
    //so work here...
    console.log("termined")
  });
}
And finally you call executeAsyncTask like this:
executeAsyncTask().then(function(result) {
  console.log("result => " + result); // Result of 'res1, res2, res3'
}).catch(function(error) {
  console.log("error => " + error); // Throws 'Promise Rejected'
});
And remember:
Every async function returns a Promise object. The await statement operates on a Promise, waiting until the Promise
resolves or rejects.
You can use await as many times as you like.
BONUS:
If you want all your promises (func1, func2, func3) to execute in parallel (not one after another), you can modify your executeAsyncTask function like this:
async function executeAsyncTask () {
  try {
    // Promise.all resolves to an array [res1, res2, res3]
    return await Promise.all([
      func1(a,b,c),
      func2(a,b,c),
      func3(a,b,c)
    ])
  } catch (err) {
    throw 'Promise Rejected';
  }
}
In order for your code to work, func1 would have to be like this:
async func1(a,b,c){
  const res = await promise_API_CALL(params, function( data, err ) {
    if (err) { console.error(err) }
    console.log( data );
    return data;
  });
  console.log("termined");
  return res;
}
Then running this would work:
async function executeAsyncTask () {
  const res1 = await func1(a,b,c);
  const res2 = await func2(a,b,c);
  const res3 = await func3(a,b,c);
  //yada yada yada
}
This answer is very closely related to Carl Edwards' answer but builds on Node.js conventions.
It's really unfortunate that promise_API_CALL()'s callback doesn't pass the error first. Otherwise you could have used util.promisify() directly. One alternative is to follow Node.js' Custom promisified functions. It would look something like this:
const util = require("util");

promise_API_CALL[util.promisify.custom] = function (params) {
  return new Promise((resolve, reject) => {
    promise_API_CALL(params, function (data, err) {
      if (err) {
        return reject(err);
      }
      resolve(data);
    });
  });
};
The only issue that I see is that doing this mutates the original function (which isn't yours, and mutating it is arguably bad practice). But the issue is slightly mitigated since it uses ES6's Symbol type, which should mean that you won't clobber anything else.
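If mutating the library's function bothers you, a standalone wrapper achieves the same thing without touching it (promisifiedApiCall is a hypothetical name):
// Hypothetical standalone wrapper; the library object is left untouched
function promisifiedApiCall(params) {
  return new Promise((resolve, reject) => {
    promise_API_CALL(params, (data, err) => {
      if (err) {
        return reject(err);
      }
      resolve(data);
    });
  });
}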
Here is a complete example:
const util = require("util");

/**
 * Use to force the API along the failure path
 * @constant {Boolean}
 */
const SHOULD_FAIL = false;

/**
 * Callback to deal with API responses
 * @callback apiCallback
 * @param {Object} data The data of the response
 * @param {Error} [err] Optional error that says something went wrong
 */

/**
 * Dummy API calling function
 * @param {Object} kwargs api arguments
 * @param {apiCallback} cb The callback that handles the response
 */
function apiCall(kwargs, cb) {
  setTimeout(() => {
    // Allow testing of failure path
    if (SHOULD_FAIL) {
      return cb(undefined, new Error("Purposeful failure"));
    }
    // Success path
    cb({
      foo: "bar"
    });
  }, 1000);
}

/*
 * Create a function that wraps the apiCall function in a Promise
 * and attach it to apiCall's util.promisify.custom Symbol
 */
apiCall[util.promisify.custom] = function (kwargs) {
  return new Promise((resolve, reject) => {
    apiCall(kwargs, (data, err) => {
      if (err) {
        return reject(err);
      }
      resolve(data);
    });
  });
};

// Create shorthand function to the promisified function
const asyncApiCall = util.promisify(apiCall);

// Sanity check to make sure that they are the same
console.log(`Are promisifies the same? ${asyncApiCall === apiCall[util.promisify.custom]}`);

// Run tester function
(async function main() {
  // Do some stuff
  console.log("Started");

  // Use the async func
  let some_data_from_api;
  try {
    some_data_from_api = await asyncApiCall({
      fizz: "buzz"
    });
  } catch (err) {
    console.error(err);
  }

  // Print the data after we have it
  console.log(some_data_from_api);

  //so work here...
  console.log("Done")
}());

s3.getObject().createReadStream() : How to catch the error?

I am trying to write a program to get a zip file from S3, unzip it, then upload it back to S3.
But I found two exceptions that I cannot catch:
1. StreamContentLengthMismatch: Stream content length mismatch. Received 980323883 of 5770104761 bytes. This occurs irregularly.
2. NoSuchKey: The specified key does not exist. This happens when I input the wrong key.
When these exceptions occur, the program crashes. I'd like to catch and handle both of them correctly and prevent the crash.
const unzipUpload = () => {
  return new Promise((resolve, reject) => {
    let rStream = s3.getObject({Bucket: 'bucket', Key: 'hoge/hoge.zip'})
      .createReadStream()
      .pipe(unzip.Parse())
      .on('entry', function (entry) {
        if (entry.path.match(/__MACOSX/) == null) {
          // pause
          if (currentFileCount - uploadedFileCount > 10) rStream.pause()
          currentFileCount += 1
          var fileName = entry.path;
          let up = entry.pipe(uploadFromStream(s3, fileName))
          up.on('uploaded', e => {
            uploadedFileCount += 1
            console.log(currentFileCount, uploadedFileCount)
            //resume
            if (currentFileCount - uploadedFileCount <= 10) rStream.resume()
            if (uploadedFileCount === allFileCount) resolve()
            entry.autodrain()
          }).on('error', e => {
            reject()
          })
        }
      }).on('error', e => {
        console.log("unzip error")
        reject()
      }).on('finish', e => {
        allFileCount = currentFileCount
      })
    rStream.on('error', e => {
      console.log(e)
      reject(e)
    })
  })
}

function uploadFromStream(s3, fileName) {
  var pass = new stream.PassThrough();
  var params = {Bucket: "bucket", Key: "hoge/unzip/" + fileName, Body: pass};
  let request = s3.upload(params, function(err, data) {
    if (err) pass.emit('error')
    if (!err) pass.emit('uploaded')
  })
  request.on('httpUploadProgress', progress => {
    console.log(progress)
  })
  return pass
}
This is the library I use when unzipping.
https://github.com/mhr3/unzip-stream
Help me!!
If you'd like to catch the NoSuchKey error thrown by createReadStream, you have two options:
Check if the key exists before reading it.
Catch the error from the stream.
First:
s3.headObject({ Bucket: bucket, Key: key })
  .promise()
  .then(() => {
    // This will not throw error anymore
    s3.getObject().createReadStream();
  })
  .catch(error => {
    if (error.statusCode === 404) {
      // Catching NoSuchKey
    }
  });
The only case you won't catch is when the file is deleted in the split second between the headObject response and the createReadStream call.
Second:
s3.getObject().createReadStream().on('error', error => {
  // Catching NoSuchKey & StreamContentLengthMismatch
});
This is a more generic approach and will catch all other errors, like network problems.
You need to listen for the emitted error earlier. Your error handler is only looking for errors during the unzip part.
A simplified version of your script:
s3.getObject(params)
  .createReadStream()
  .on('error', (e) => {
    // handle aws s3 error from createReadStream
  })
  .pipe(unzip)
  .on('data', (data) => {
    // retrieve data
  })
  .on('end', () => {
    // stream has ended
  })
  .on('error', (e) => {
    // handle error from unzip
  });
This way, you do not need to make an additional call to AWS to find out if the object exists.
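If you also want the whole pipeline to be awaitable, a minimal sketch wrapping the same pattern in a promise (assuming unzip is unzip-stream's Parse() transform, as in the question):
function streamAndUnzip(params) {
  return new Promise((resolve, reject) => {
    s3.getObject(params)
      .createReadStream()
      .on('error', reject) // S3 errors: NoSuchKey, length mismatch, network
      .pipe(unzip.Parse())
      .on('entry', entry => entry.autodrain()) // process each entry as needed
      .on('error', reject) // unzip errors
      .on('finish', resolve);
  });
}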
You can listen to events (like error, data, finish) on the stream you receive back. Read more on events.
function getObjectStream (filePath) {
  return s3.getObject({
    Bucket: bucket,
    Key: filePath
  }).createReadStream()
}

let readStream = getObjectStream('/path/to/file.zip')
readStream.on('error', function (error) {
  // Handle your error here.
})
Tested for "No Key" error.
it('should not be able to get stream of unavailable object', function (done) {
  let filePath = 'file_not_available.zip'
  let readStream = s3.getObjectStream(filePath)
  readStream.on('error', function (error) {
    expect(error instanceof Error).to.equal(true)
    expect(error.message).to.equal('The specified key does not exist.')
    done()
  })
})
Tested for success.
it('should be able to get stream of available object', function (done) {
  let filePath = 'test.zip'
  let receivedBytes = 0
  let readStream = s3.getObjectStream(filePath)
  readStream.on('error', function (error) {
    expect(error).to.equal(undefined)
  })
  readStream.on('data', function (data) {
    receivedBytes += data.length
  })
  readStream.on('finish', function () {
    expect(receivedBytes).to.equal(3774)
    done()
  })
})
To prevent a crash, check the object's head metadata first: headObject does not return the whole object, so it takes much less time. Try this one!
const AWS = require('aws-sdk');

const s3bucket = new AWS.S3({
  accessKeyId: '',
  secretAccessKey: ''
});

const params = {
  Bucket: 'your bucket name',
  Key: 'path to object'
};

const isObjectExists = async () => {
  try {
    // headObject returns only the metadata, not the whole object;
    // adding promise() lets you await until the request completes
    await s3bucket.headObject(params).promise();
    return true;
  } catch (err) {
    return false; // headObject threw, e.g. the key does not exist
  }
};

const yourFunction = async () => {
  if (await isObjectExists()) {
    s3bucket.getObject(params).createReadStream(); // works smoothly
  }
};
