Issue at teardown with Jest + mongodb-memory-server - javascript

I'm trying to run a typescript + jest + mongodb-memory-server test environment but I'm getting the following error at teardown:
events.js:189
throw err; // Unhandled 'error' event
^
Error [ERR_UNHANDLED_ERROR]: Unhandled error. ({ MongoNetworkError: read ECONNRESET
at Socket.<anonymous> (C:\Plataforma\Estrutura\Final\node_modules\mongodb\lib\core\connection\connection.js:321:24)
at Object.onceWrapper (events.js:286:20)
at Socket.emit (events.js:198:13)
at emitErrorNT (internal/streams/destroy.js:91:8)
at emitErrorAndCloseNT (internal/streams/destroy.js:59:3)
at process._tickCallback (internal/process/next_tick.js:63:19)
errno: 'ECONNRESET',
code: 'ECONNRESET',
syscall: 'read',
name: 'MongoNetworkError',
errorLabels: [ 'TransientTransactionError' ],
[Symbol(mongoErrorContextSymbol)]: {} })
at Connection.emit (events.js:187:17)
at Socket.<anonymous> (C:\Plataforma\Estrutura\Final\node_modules\mongodb\lib\core\connection\connection.js:321:10)
at Object.onceWrapper (events.js:286:20)
at Socket.emit (events.js:198:13)
at emitErrorNT (internal/streams/destroy.js:91:8)
at emitErrorAndCloseNT (internal/streams/destroy.js:59:3)
at process._tickCallback (internal/process/next_tick.js:63:19)
error Command failed with exit code 1.
Here are my config files:
Jest config:
//jest.config.js
module.exports = {
globalSetup: './setup',
globalTeardown: './teardown.js',
testEnvironment: './mongo-environment.js',
transform: {
'^.+\\.jsx?$': 'babel-jest',
'^.+\\.ts?$': 'ts-jest',
},
roots: ['<rootDir>/test'],
moduleFileExtensions: ['ts', 'js', 'json', 'node'],
preset: 'ts-jest',
};
Here's the setup config
//setup.js
// Jest global setup: boots one in-memory mongod for the entire test run and
// shares its connection info with the per-test environments.
const path = require('path');
const fs = require('fs');
const MongodbMemoryServer = require('mongodb-memory-server');
// Tests run in separate contexts, so connection info is shared via a JSON
// file on disk (read back in mongo-environment.js).
const globalConfigPath = path.join(__dirname, 'globalConfig.json');
// Single shared mongod instance for the whole run.
// NOTE(review): binary 3.2.18 is pinned here; newer mongodb-memory-server
// releases rename getConnectionString() to getUri() — confirm the installed
// version before upgrading.
const mongod = new MongodbMemoryServer.default({
instance: {
dbName: 'StructureGenerator',
debug: true,
},
binary: {
version: '3.2.18',
}
});
module.exports = async function () {
const mongoConfig = {
mongoDBName: 'StructureGenerator',
// Starts mongod lazily and resolves with its URI.
mongoUri: await mongod.getConnectionString(),
};
// Write global config to disk because all tests run in different contexts.
fs.writeFileSync(globalConfigPath, JSON.stringify(mongoConfig));
console.log('Config is written');
// Set reference to mongod in order to close the server during teardown.
global.MONGOD = mongod;
process.env.MONGO_URL = mongoConfig.mongoUri;
};
Here's the teardown config
//teardown.js
module.exports = async function () {
await global.MONGOD.stop();
};
Here's the mongo environment
//mongo-environment.js
// Custom Jest environment: hands every test context the MongoDB connection
// info that the global setup persisted to disk.
const NodeEnvironment = require('jest-environment-node');
const path = require('path');
const fs = require('fs');

const globalConfigPath = path.join(__dirname, 'globalConfig.json');

class MongoEnvironment extends NodeEnvironment {
  async setup() {
    console.log('Setup MongoDB Test Environment');
    await super.setup();
    // Read the URI / db name that setup.js wrote for this run.
    const mongoConfig = JSON.parse(fs.readFileSync(globalConfigPath, 'utf-8'));
    this.global.MONGO_URI = mongoConfig.mongoUri;
    this.global.MONGO_DB_NAME = mongoConfig.mongoDBName;
  }

  async teardown() {
    console.log('Teardown MongoDB Test Environment');
    await super.teardown();
  }

  // Pass-through; kept for parity with the base environment's API.
  runScript(script) {
    return super.runScript(script);
  }
}

module.exports = MongoEnvironment;
I tried to find help on Google but I didn't manage to find any similar issues. I'm hoping someone can shed some light on this issue.
Thanks

Related

How to create file on SFTP server in Cypress test

When I call the createFile() function in my Cypress test, I am trying to create a file on a hosted SFTP server.
Currently, when I call it, I get the following error message:
The following error was thrown by a plugin. We stopped running your tests because a plugin crashed. Please check your plugins file (C:\Dev\SFTP_POC\cypress\plugins\index.js)
Error: put: Internal server error.
sftp://myHost.com#sftp.next.rec-test.com:2022/reports/
at fmtError (C:\Dev\SFTP_POC\node_modules\ssh2-sftp-client\src\utils.js:55:18)
at WriteStream. (C:\Dev\SFTP_POC\node_modules\ssh2-sftp-client\src\index.js:728:18)
at Object.onceWrapper (events.js:418:26)
at WriteStream.emit (events.js:323:22)
at Object.cb (C:\Dev\SFTP_POC\node_modules\ssh2\lib\protocol\SFTP.js:3629:12)
at 101 (C:\Dev\SFTP_POC\node_modules\ssh2\lib\protocol\SFTP.js:2622:11)
at SFTP.push (C:\Dev\SFTP_POC\node_modules\ssh2\lib\protocol\SFTP.js:278:11)
at CHANNEL_DATA (C:\Dev\SFTP_POC\node_modules\ssh2\lib\client.js:525:23)
at 94 (C:\Dev\SFTP_POC\node_modules\ssh2\lib\protocol\handlers.misc.js:859:16)
at Protocol.onPayload (C:\Dev\SFTP_POC\node_modules\ssh2\lib\protocol\Protocol.js:2025:10)
at AESGCMDecipherNative.decrypt (C:\Dev\SFTP_POC\node_modules\ssh2\lib\protocol\crypto.js:987:26)
at Protocol.parsePacket [as _parse] (C:\Dev\SFTP_POC\node_modules\ssh2\lib\protocol\Protocol.js:1994:25)
at Protocol.parse (C:\Dev\SFTP_POC\node_modules\ssh2\lib\protocol\Protocol.js:293:16)
at Socket. (C:\Dev\SFTP_POC\node_modules\ssh2\lib\client.js:713:21)
at Socket.emit (events.js:311:20)
at addChunk (_stream_readable.js:294:12)
at readableAddChunk (_stream_readable.js:275:11)
at Socket.Readable.push (_stream_readable.js:209:10)
at TCP.onStreamRead (internal/stream_base_commons.js:186:23)
Below is my current index.js code:
module.exports = (on, config) => {
on('task', {
createFile() {
const fs = require('fs');
let data = fs.createReadStream('C:/Dev/SFTP_POC/cypress/fixtures/example.json');
let remote = 'sftp://myHost.com:2022/reports/';
let Client = require('ssh2-sftp-client');
let sftp = new Client();
const config = {
host: 'myHost.com',
port: '2022',
username: 'myUsername',
password: 'myPassword'
};
sftp.connect(config).then(() => {
sftp.put(data, remote);
})
.then(data => {
console.log('Success');
})
.then(() => {
sftp.end();
})
.catch(err => {
console.log(err);
})
return null;
}
})
}
My remote variable is a folder that exists on the server.
However, after the test, a new file isn't being added.
Can someone please tell me what I'm doing wrong here, and how to resolve it?

Heroku Nodejs Reddit bot works fine locally but not online

I have a reddit bot written in Nodejs with Snoowrap and Snoostorm and deployed to Heroku.
My logs keeps churning this error out:
2020-03-13T06:02:53.784219+00:00 app[web.1]: (node:4) UnhandledPromiseRejectionWarning: RequestError: Error: ESOCKETTIMEDOUT
2020-03-13T06:02:53.784229+00:00 app[web.1]: at new RequestError (/app/node_modules/request-promise-core/lib/errors.js:14:15)
2020-03-13T06:02:53.784230+00:00 app[web.1]: at Request.plumbing.callback (/app/node_modules/request-promise-core/lib/plumbing.js:87:29)
2020-03-13T06:02:53.784231+00:00 app[web.1]: at Request.RP$callback [as _callback] (/app/node_modules/request-promise-core/lib/plumbing.js:46:31)
2020-03-13T06:02:53.784231+00:00 app[web.1]: at self.callback (/app/node_modules/request/request.js:185:22)
2020-03-13T06:02:53.784234+00:00 app[web.1]: at Request.emit (events.js:311:20)
2020-03-13T06:02:53.784234+00:00 app[web.1]: at ClientRequest.<anonymous> (/app/node_modules/request/request.js:819:16)
2020-03-13T06:02:53.784235+00:00 app[web.1]: at Object.onceWrapper (events.js:417:28)
2020-03-13T06:02:53.784235+00:00 app[web.1]: at ClientRequest.emit (events.js:311:20)
2020-03-13T06:02:53.784236+00:00 app[web.1]: at TLSSocket.emitRequestTimeout (_http_client.js:714:9)
2020-03-13T06:02:53.784237+00:00 app[web.1]: at Object.onceWrapper (events.js:417:28)
2020-03-13T06:02:53.784237+00:00 app[web.1]: at TLSSocket.emit (events.js:311:20)
2020-03-13T06:02:53.784237+00:00 app[web.1]: at TLSSocket.Socket._onTimeout (net.js:478:8)
2020-03-13T06:02:53.784238+00:00 app[web.1]: at listOnTimeout (internal/timers.js:549:17)
2020-03-13T06:02:53.784238+00:00 app[web.1]: at processTimers (internal/timers.js:492:7)
2020-03-13T06:02:53.784281+00:00 app[web.1]: (node:4) UnhandledPromiseRejectionWarning: Unhandled promise rejection. This error originated either by throwing inside of an async function without a catch block, or by rejecting a promise which was not handled with .catch(). To terminate the node process on unhandled promise rejection, use the CLI flag `--unhandled-rejections=strict` (see https://nodejs.org/api/cli.html#cli_unhandled_rejections_mode). (rejection id: 3)
Here is my app.js file:
require('dotenv').config();
const MyUtil = require("./myutil.js")
const { CommentStream, SubmissionStream, ModMailStream, InboxStream } = require("snoostorm");
const Snoowrap = require('snoowrap');
const Snoostorm = require('snoostorm');
const WatchJS = require("melanke-watchjs")
// Authenticated snoowrap client; credentials come from the environment so
// they are never committed with the code.
const r = new Snoowrap({
userAgent: 'abcde',
clientId: process.env.CLIENT_ID,
clientSecret: process.env.CLIENT_SECRET,
refreshToken: process.env.REFRESH_TOKEN
});
// Epoch seconds at boot; used to ignore items created before the bot started.
const BOT_START = Date.now() / 1000;
// Set by initializeBot(); lets watchRateLimit() push updates to the web UI.
var webController;
// Wires up Reddit configuration and every stream, then remembers the web
// controller so rate-limit updates can be broadcast to connected clients.
function initializeBot(controller){
configReddit();
watchRateLimit();
initCommentStream();
initPostStream();
initInboxStream();
initModMailStream();
// Assigned after watchRateLimit(); safe because the watcher only reads
// webController when the watched property changes later.
webController = controller;
return module.exports
}
// Apply global snoowrap settings: keep the bot alive through rate-limit
// errors instead of rejecting the offending request.
function configReddit() {
  const options = { continueAfterRatelimitError: true };
  r.config(options);
  console.info("Finished Reddit configuration.");
}
// Observe snoowrap's remaining-request counter: warn in the logs when the
// budget runs low and push every change to the web UI (if attached).
function watchRateLimit() {
  const onRatelimitChange = () => {
    if (r.ratelimitRemaining < 50) {
      console.warn("Rate limit remaining:" + r.ratelimitRemaining);
    }
    if (webController) {
      webController.broadcast(r.ratelimitRemaining);
    }
  };
  WatchJS.watch(r, "ratelimitRemaining", onRatelimitChange);
}
// Start streaming subreddit comments and handle "!resolved" commands by
// flairing the submission and replying. Options are provided as JSON in
// COMMENT_STREAM_OPTION so the stream can be toggled without a redeploy.
function initCommentStream() {
  console.info("Trying to establish comment stream!");
  var streamOpts;
  try {
    streamOpts = JSON.parse(process.env.COMMENT_STREAM_OPTION);
    if (!streamOpts || !streamOpts.receiving) {
      console.info("Comment Stream was disabled, enable through the environment variable.");
      return;
    }
  } catch (error) {
    console.log(error);
    console.info("COMMENT_STREAM_OPTION unavailable/wrong format.");
    return;
  }
  const comments = new CommentStream(r, streamOpts);
  comments.on("item", comment => {
    if (comment.created_utc < BOT_START) return; // skip pre-boot backlog
    if (comment.body.toLowerCase().includes("!resolved")) {
      console.log("New resolved comment!: " + comment.link_id.substring(3));
      let flair = { flair_template_id: MyUtil.FLAIR_ID.RESOLVED };
      let sub = r.getSubmission(comment.link_id.toString().substring(3));
      let reply = ""; //TODO
      // FIX: the original did `.then(sub.reply())`, which *invoked* reply()
      // immediately (before the flair call finished) and passed its result
      // to then(). Also attach a .catch so a failed API call becomes a log
      // line instead of an UnhandledPromiseRejectionWarning — the crash
      // reported on Heroku (ESOCKETTIMEDOUT) came from exactly such an
      // unhandled rejection.
      sub.selectFlair(flair)
        .then(() => sub.reply(reply))
        .catch(err => console.log("Error handling !resolved comment:\n" + err));
    }
  });
  console.info("Comment Stream established.");
}
// Start streaming new submissions and notify the maintainer about each one.
// Options come as JSON in POST_STREAM_OPTION so the stream can be switched
// off without a redeploy.
function initPostStream() {
  console.info("Trying to establish post stream!");
  let streamOpts;
  try {
    streamOpts = JSON.parse(process.env.POST_STREAM_OPTION);
  } catch (error) {
    console.log(error);
    console.info("POST_STREAM_OPTION unavailable/wrong format.");
    return;
  }
  if (!streamOpts || !streamOpts.receiving) {
    console.info("Post Stream was disabled, enable through the environment variable.");
    return;
  }
  const posts = new Snoostorm.SubmissionStream(r, streamOpts);
  //*Listen for items (posts)
  posts.on("item", (post) => {
    // Ignore anything created before the bot booted.
    if (post.created_utc < BOT_START) return;
    console.log("New POST");
    console.log(post.body);
    notifyNewPost("/u/abcde", post);
    //TODO
  });
  console.info("Post Stream established!");
}
// Placeholder: inbox stream (InboxStream) not implemented yet.
function initInboxStream(){
//TODO
}
// Placeholder: modmail stream (ModMailStream) not implemented yet.
function initModMailStream(){
//TODO
}
// Sends a private message about `post` to one recipient (string) or a list
// of recipients. Fetches the full submission first if only a stub was
// streamed. All errors are logged, never thrown, so a bad recipient cannot
// crash the process with an unhandled rejection.
async function notifyNewPost(peopleList, post) {
  if (!post._hasFetched) {
    try {
      post = await post.fetch();
    } catch (error) {
      console.log("Error at notifyNewPost");
      console.log(error);
      return;
    }
  }
  const sendFunction = function (people, post) {
    r.composeMessage({
      to: people,
      subject: "New post in r/GoogleAppsScript",
      text: `There's a new post in r/GoogleAppsScript:
[${post.title}](${post.url}) posted by ${post.author.name}
`
    }).then(() => { console.log(`New post notification sent to ${people}.`); })
      // FIX: the original log hard-coded "abcde" as the recipient even
      // though the actual recipient varies; report the real one.
      .catch((err) => { console.log(`Error on sending noti to ${people}:\n` + err); });
  };
  if (typeof peopleList == "string") {
    sendFunction(peopleList, post);
  } else {
    for (const people of peopleList) {
      sendFunction(people, post);
    }
  }
}
// Public surface: the bootstrap function plus the shared snoowrap client.
module.exports={
initializeBot,
r
}
This is web.js:
// Mutable handle given to the bot so it can push data back to the web layer.
const controller = {};
var express = require('express');
var app = express();
const wakeDyno = require("woke-dyno");
// Boot the Reddit bot and hand it the controller for broadcasts.
const bot = require("./app").initializeBot(controller);
var http = require('http').createServer(app);
var io = require('socket.io')(http);
var port = process.env.PORT || 3000;
app.get('/', function(req, res){
res.sendFile(__dirname + '/html/index.html');
});
io.on('connection', function(socket){
console.log('a user connected');
});
http.listen(port, ()=>{
// Ping the app periodically so Heroku's free dyno does not idle.
wakeDyno("https://gas-lighter-bot.herokuapp.com/").start();
console.log('listening on *:'+port);
});
// Fan rate-limit updates out to every connected socket.io client.
controller.broadcast = (msg) => {
io.emit("ratelimitChanged",msg);
};
The weird thing is the log don't tell me where the error originated in my code, so I am a bit clueless.
I've been running it for a few hours on my machine, but when deployed to Heroku, it throw this error after like 1 minute....
Please help :(
EDIT: This is the error object, it doesn't help me, but maybe it will help you solve this :(
{
name: 'RequestError',
message: 'Error: ESOCKETTIMEDOUT',
cause: Error: ESOCKETTIMEDOUT
at ClientRequest.<anonymous> (/app/node_modules/request/request.js:816:19)
at Object.onceWrapper (events.js:417:28)
at ClientRequest.emit (events.js:311:20)
at TLSSocket.emitRequestTimeout (_http_client.js:714:9)
at Object.onceWrapper (events.js:417:28)
at TLSSocket.emit (events.js:311:20)
at TLSSocket.Socket._onTimeout (net.js:478:8)
at listOnTimeout (internal/timers.js:549:17)
at processTimers (internal/timers.js:492:7) {
code: 'ESOCKETTIMEDOUT',
connect: false
},
error: Error: ESOCKETTIMEDOUT
at ClientRequest.<anonymous> (/app/node_modules/request/request.js:816:19)
at Object.onceWrapper (events.js:417:28)
at ClientRequest.emit (events.js:311:20)
at TLSSocket.emitRequestTimeout (_http_client.js:714:9)
at Object.onceWrapper (events.js:417:28)
at TLSSocket.emit (events.js:311:20)
at TLSSocket.Socket._onTimeout (net.js:478:8)
at listOnTimeout (internal/timers.js:549:17)
at processTimers (internal/timers.js:492:7) {
code: 'ESOCKETTIMEDOUT',
connect: false
},
options: {
gzip: true,
json: true,
headers: { 'user-agent': 'myuseragent' },
baseUrl: 'https://oauth.reddit.com',
qs: { raw_json: 1 },
auth: { bearer: 'myrefreshtoken' },
resolveWithFullResponse: true,
timeout: 5000,
transform: [Function: transform],
uri: 'comments/fhxl4r',
method: 'GET',
callback: [Function: RP$callback],
simple: true,
transform2xxOnly: false
},
response: undefined
}

How to do testing with Jest and Knex in TypeScript?

I'm trying to test GraphQL server with Jest and Knex. I had a hard time figuring out how to use knexfile in typescript. But now everything is working fine for development and production envs, except for testing.
Here's my current knexfile.ts:
// knexfile.ts
// Shared base settings for the Postgres-backed environments.
// NOTE(review): DB_HOST / DB_USER / DB_PASSWORD / DB_DATABASE are assumed to
// be destructured from process.env above this snippet — confirm.
const defaults = {
client: 'pg',
connection: {
host: DB_HOST,
user: DB_USER,
password: DB_PASSWORD,
database: DB_DATABASE
},
pool: {
min: 2,
max: 10
},
// Migrations/seeds are authored in TypeScript (requires ts-node at runtime).
migrations: {
extension: 'ts',
directory: './migration',
tableName: 'knex_migrations'
},
seeds: {
extension: 'ts',
directory: './seed'
}
};
// One entry per NODE_ENV-style environment name.
interface KnexConfig {
[key: string]: object;
}
const knexConfig: KnexConfig = {
// Lightweight local option using an on-disk SQLite file.
local: {
client: 'sqlite3',
connection: {
filename: './dev.sqlite3'
}
},
development: {
...defaults,
debug: true,
useNullAsDefault: true
},
production: {
...defaults
}
};
/**
* `export default` does not work, causes `client` missing problem
* at database migration.
*/
export = knexConfig;
This is global setup for Jest:
// globalSetup.ts
// Jest global setup: start the HTTP server and bring the test database
// schema up to date before any test runs.
export = async () => {
  try {
    // Start http server
    await httpServer.listen(PORT);
    // FIX: the migration must be awaited — the original fired it as a
    // floating promise, so tests could start before the schema existed and
    // failures surfaced as unhandled rejections instead of being caught here.
    await knex.migrate.latest();
  } catch (err) {
    // Log the error
    logger.error('', err);
  }
};
This is global teardown:
// globalTeardown.ts
// Jest global teardown: roll back test migrations, then stop the server.
export = async () => {
try {
await knex.migrate.rollback();
// Shutdown server; the callback fires once it has fully closed.
httpServer.close(() => logger.info('Server closed'));
} catch (err) {
// Log the error rather than failing the teardown.
logger.error('', err);
}
};
It keeps giving me error:
Unhandled rejection SyntaxError: Unexpected token *
/home/my/knex-graphql/migration/20190821235716_create_user.ts:1
import * as Knex from 'knex';
^
SyntaxError: Unexpected token *
at Module._compile (internal/modules/cjs/loader.js:872:18)
at Object.Module._extensions..js (internal/modules/cjs/loader.js:947:10)
at Module.load (internal/modules/cjs/loader.js:790:32)
at Function.Module._load (internal/modules/cjs/loader.js:703:12)
at Function.<anonymous> (/home/my/knex-graphql/node_modules/#sentry/node/src/integrations/console.ts:37:43)
at Function._load (/home/my/knex-graphql/node_modules/#sentry/node/src/integrations/http.ts:73:43)
at Module.require (internal/modules/cjs/loader.js:830:19)
at require (internal/modules/cjs/helpers.js:68:18)
at FsMigrations.getMigration (/home/my/knex-graphql/node_modules/knex/lib/migrate/sources/fs-migrations.js:84:12)
at /home/my/knex-graphql/node_modules/knex/lib/migrate/Migrator.js:82:69
at arrayFilter (/home/my/knex-graphql/node_modules/lodash/lodash.js:582:11)
at filter (/home/my/knex-graphql/node_modules/lodash/lodash.js:9173:14)
at /home/my/knex-graphql/node_modules/knex/lib/migrate/Migrator.js:81:13
at tryCatcher (/home/my/knex-graphql/node_modules/bluebird/js/release/util.js:16:23)
at Promise._settlePromiseFromHandler (/home/my/knex-graphql/node_modules/bluebird/js/release/promise.js:517:31)
at Promise._settlePromise (/home/my/knex-graphql/node_modules/bluebird/js/release/promise.js:574:18)
From previous event:
at Migrator.latest (/home/my/knex-graphql/node_modules/knex/lib/migrate/Migrator.js:71:8)
at /home/my/knex-graphql/test/global/setup.ts:24:32
at Generator.next (<anonymous>)
at fulfilled (/home/my/knex-graphql/test/global/setup.ts:5:58)
at processTicksAndRejections (internal/process/task_queues.js:85:5)
Tech stack: Apollo-server-express, TypeScript, Knex.js, PostgreSQL, Jest
You need to add ts-jest, which will transpile your ts files for jest.
Install it
npm install --save-dev ts-jest
Add default ts-jest config
npx ts-jest config:init
we use something a bit like this in our knexfile.js
// Register ts-node so knex can load .ts migrations/seeds from a JS knexfile,
// then pull DB credentials from .env.
require('ts-node/register');
require('dotenv').config();
// NOTE(review): the snippet is truncated in the original answer; these env
// vars presumably feed the knex connection settings that followed.
const {
SERVER_HOST,
SERVER_USER,
SERVER_PASSWORD,
SERVER_DATABASE,
SERVER_DATABASE_TEST,
} = process.env;

How to copy files between pods or execute in a pod?

I'm trying to execute a terminal command in a pod to get the file content.
It works fine on local:
// kubernetes-client (godaddy client), local-development flavour: build the
// client from ~/.kube/config credentials.
const Client = require('kubernetes-client').Client;
const Config = require('kubernetes-client/backends/request').config;
const Request = require('kubernetes-client/backends/request');
const config = Config.fromKubeconfig();
const client = new Client({ config: config, version: '1.13' });
// Exec `cat README.md` inside the target container and capture its output.
// NOTE(review): `<pod name>` / `<container name>` are placeholders, and the
// top-level `await` implies this lives inside an async function in the real
// code.
const podResponse = await client.api.v1.namespaces(config.namespace).pods(<pod name>).exec.get({
qs: {
command: ['cat', 'README.md'],
container: <container name>,
stdout: true,
stderr: true,
},
});
console.log(podResponse.body);
When I run my Node.js app on Cluster with following changes:
// In-cluster flavour: authenticate with the service-account token mounted
// into the pod instead of a kubeconfig file.
const config = Request.config.getInCluster();
const backend = new Request(config);
const client = new Client({ backend });
it still works fine. I can get pods/services info (Node.js app run on same cluster/namespace)..
But .exec.get doesn't work. It fails with:
{ Error: Unexpected server response: 401
at ClientRequest.req.on (/usr/local/app/node_modules/ws/lib/websocket.js:579:7)
at ClientRequest.emit (events.js:182:13)
at HTTPParser.parserOnIncomingClient [as onIncoming] (_http_client.js:556:21)
at HTTPParser.parserOnHeadersComplete (_http_common.js:109:17)
at TLSSocket.socketOnData (_http_client.js:442:20)
at TLSSocket.emit (events.js:182:13)
at addChunk (_stream_readable.js:283:12)
at readableAddChunk (_stream_readable.js:264:11)
at TLSSocket.Readable.push (_stream_readable.js:219:10)
at TLSWrap.onStreamRead (internal/stream_base_commons.js:94:17) messages: [] }
Again, I don't need to pass any auth info.. It works fine to get pods/services details.
Seems the issue will be fixed in new release:
kubernetes-client repo
So wondering if there is other way to copy a file from a pod by JavaScript, i.e. analog of:
kubectl cp <file-spec-src> <file-spec-dest>
// Alternative using the official client (presumably
// `const k8s = require('@kubernetes/client-node')` — not shown), which
// supports kubectl-cp-style file copies.
const kc = new k8s.KubeConfig();
kc.loadFromDefault();
//Copy File from Local folder to POD
const namespaceName = "default";
const podName = "logging-poc-standalone-app-694db6684c-6dsw4";
const containerName = "logging-poc-standalone-app";
const cp = new k8s.Cp(kc);
// Copies local "log.log" into /var/log inside the container; errors are
// logged, not rethrown.
cp.cpToPod(namespaceName, podName, containerName, "log.log", "/var/log").catch(
(err) => {
console.log(err);
}
);

What is the easiest method to know the duration of a video stored in Firebase Storage in a Firebase Cloud Function?

I have a trigger-based cloud function that should find the duration of a video uploaded to Firebase Storage.
I tried using the following npm module: get-video-duration which takes url, the file itself, or stream.
Using the public url of my file doesn't work, my catch logs:
{ Error: spawn ffprobe ENOENT
at exports._errnoException (util.js:1020:11)
at Process.ChildProcess._handle.onexit (internal/child_process.js:197:32)
at onErrorNT (internal/child_process.js:376:16)
at _combinedTickCallback (internal/process/next_tick.js:80:11)
at process._tickDomainCallback (internal/process/next_tick.js:128:9)
code: 'ENOENT',
errno: 'ENOENT',
syscall: 'spawn ffprobe',
path: 'ffprobe',
spawnargs:
[ '-v',
'error',
'-show_format',
'-show_streams',
'https://storage.googleapis.com/adboard-dev.appspot.com/5HRuyysoMxe9Tb5vPLDbhEaHtkH2%2F-LAve5VogdAr4ZohU-DE%2FSampleVideo_1280x720_1mb.mp4?GoogleAccessId=firebase-adminsdk-3lthu#adboard-dev.iam.gserviceaccount.com&Expires=16447017600&Signature=cbhn%2BtY2%2FtvcRkvsFp1ywhHKiz%2FLfabfMk6HbD4TEGd%2Brf4njcMz1mQVf6H8nyulTBoRHIgC2uENFEPoEjtON6Um0Jb9P9jgikj6PdhS98m1sPDpTjMiFCTWk6ICjTI%2B%2BWuSVGgDX0tRuq3fADZABKaEcl3CEAI17DCVH98a40XttIDZqeqxIDu1iLi%2F8apQy44pAPJsmVR2dkYHk8Am8e7jIT1OnXG3adO34U3TNhsziPryIIpzo68QANENeieulvleic2BEi7KUhN1K8IxzJXxAfkt9RAFbdrwh%2FOpQ7zTGPRzTC3Vz2FnmKSXVtdKtmftg7BlEXrRr3D7ELJ53g%3D%3D' ],
stdout: '',
stderr: '',
failed: true,
signal: null,
cmd: 'ffprobe -v error -show_format -show_streams https://storage.googleapis.com/adboard-dev.appspot.com/5HRuyysoMxe9Tb5vPLDbhEaHtkH2%2F-LAve5VogdAr4ZohU-DE%2FSampleVideo_1280x720_1mb.mp4?GoogleAccessId=firebase-adminsdk-3lthu#adboard-dev.iam.gserviceaccount.com&Expires=16447017600&Signature=cbhn%2BtY2%2FtvcRkvsFp1ywhHKiz%2FLfabfMk6HbD4TEGd%2Brf4njcMz1mQVf6H8nyulTBoRHIgC2uENFEPoEjtON6Um0Jb9P9jgikj6PdhS98m1sPDpTjMiFCTWk6ICjTI%2B%2BWuSVGgDX0tRuq3fADZABKaEcl3CEAI17DCVH98a40XttIDZqeqxIDu1iLi%2F8apQy44pAPJsmVR2dkYHk8Am8e7jIT1OnXG3adO34U3TNhsziPryIIpzo68QANENeieulvleic2BEi7KUhN1K8IxzJXxAfkt9RAFbdrwh%2FOpQ7zTGPRzTC3Vz2FnmKSXVtdKtmftg7BlEXrRr3D7ELJ53g%3D%3D',
timedOut: false,
killed: false }
Downloading the file then passing it directly don't work too:
{ Error: spawn ffprobe ENOENT
at exports._errnoException (util.js:1020:11)
at Process.ChildProcess._handle.onexit (internal/child_process.js:197:32)
at onErrorNT (internal/child_process.js:376:16)
at _combinedTickCallback (internal/process/next_tick.js:80:11)
at process._tickDomainCallback (internal/process/next_tick.js:128:9)
code: 'ENOENT',
errno: 'ENOENT',
syscall: 'spawn ffprobe',
path: 'ffprobe',
spawnargs:
[ '-v',
'error',
'-show_format',
'-show_streams',
'/tmp/SampleVideo_1280x720_1mb.mp4' ],
stdout: '',
stderr: '',
failed: true,
signal: null,
cmd: 'ffprobe -v error -show_format -show_streams /tmp/SampleVideo_1280x720_1mb.mp4',
timedOut: false,
killed: false }
Finally, I created a stream using fs then I passed it, and it it gave me a Duration Not Found! error:
{ AssertionError: No duration found!
at ffprobe.then (/user_code/node_modules/get-video-duration/index.js:34:3)
at process._tickDomainCallback (internal/process/next_tick.js:135:7)
name: 'AssertionError',
actual: null,
expected: true,
operator: '==',
message: 'No duration found!',
generatedMessage: false }
My cloud function code:
exports.recordUploadedFile = functions.storage.object().onFinalize(object => {
let fileType = object.contentType;
if (fileType.startsWith("image/") || fileType.startsWith("video/")) {
let dir = object.name.split("/");
let name = dir.pop();
let fileID = dir.pop();
let uid = dir.pop();
return admin
.storage()
.bucket()
.file(object.name)
.getSignedUrl({
action: "read",
expires: "03-09-2491"
})
.then(urls => {
let file = {
name: name,
link: urls[0],
type: fileType,
duration: 0
}
if (fileType.startsWith("video/")) {
const tempFilePath = path.join(os.tmpdir(), name);
return admin.storage().bucket().file(object.name).download({
destination: tempFilePath
}).then(() => {
const stream = fs.createReadStream(tempFilePath);
return getDuration(stream).then(duration => {
console.log(duration);
file.duration = duration;
return setFile(file, uid, fileID);
}).catch(error => {
console.log(error);
});
});
} else {
return setFile(file, uid, fileID);
}
});
} else {
return admin.storage().bucket().file(object.name).delete();
}
});
I tried multiple video files of multiple sizes, and none of them work.
If there is a better solution to know the video duration, I would love to know it too.
Thank you.
Try using library called fluent-ffmpeg: https://github.com/fluent-ffmpeg/node-fluent-ffmpeg
// Probe a local media file with ffprobe via fluent-ffmpeg;
// metadata.format.duration is the duration in seconds.
var ffmpeg = require('fluent-ffmpeg');
ffmpeg.ffprobe(tempFilePath, function(err, metadata) {
//console.dir(metadata); // all metadata
console.log(metadata.format.duration);
});
I ended up using faruk's suggested library, fluent-ffmpeg, but to get it to work on Firebase you need to do the following:
You need to use bluebird to "promisify" fluent-ffmpeg like this: const ffprobe = Promise.promisify(require("fluent-ffmpeg").ffprobe);
You need to install the static binaries of both ffmpeg and ffprobe, so include them in your package: npm i --save @ffmpeg-installer/ffmpeg @ffprobe-installer/ffprobe
Lastly, set the paths: const ffmpegPath = require("@ffmpeg-installer/ffmpeg").path;
const ffprobePath = require("@ffprobe-installer/ffprobe").path;
ffmpeg.setFfmpegPath(ffmpegPath);
ffmpeg.setFfprobePath(ffprobePath);
Here's some tested and working code:
os = require('os');
path = require('path');
gcs = require('#google-cloud/storage')();
const filePath = object.name;
const const fileBucket = object.bucket;
var Promise = require("bluebird");
var ffmpeg = Promise.promisify(require("fluent-ffmpeg"));
var ffmpegPath = require("#ffmpeg-installer/ffmpeg").path;
var ffprobePath = require("#ffprobe-installer/ffprobe").path;
ffmpeg.setFfmpegPath(ffmpegPath);
ffmpeg.setFfprobePath(ffprobePath);
const fileName = filePath.split('/').pop();
const tempFilePath = path.join(os.tmpdir(), fileName);
const bucket = gcs.bucket(fileBucket);
bucket.file(filePath).download({
destination: tempFilePath,
validation: false
}).then(function() {
ffmpeg.ffprobe(tempFilePath, function(err, metadata) {
if (err) {
reject(err);
} else {
if (metadata) {
console.log(metadata.format.duration);
console.log(metadata.streams[0].width);
console.log(metadata.streams[0].height);
console.log(metadata);
resolve();
} else {
reject();
}
}
})
}).catch(function(error) {
console.error(error); reject();
})
From package.json:
"#ffmpeg-installer/ffmpeg": "^1.0.17",
"#ffprobe-installer/ffprobe": "^1.0.9",
"#google-cloud/storage": "^1.1.1",
"bluebird": "^3.5.3",
"fluent-ffmpeg": "^2.1.2"

Categories

Resources