NodeJS - Run separate functions depending on OS (Closed) - javascript

How do we run separate functions based on the user's operating system?
I have two functions that return the relative path to a given file by searching for it recursively within the subdirectories of a given working directory.
I have one for Windows, see below...
WINDOWS variant
console.log("Locating File...");
exec('set-location ' + folder + ';' + ' gci -path ' + folder + ' -recurse -filter ' +
    file + ' -file | resolve-path -relative', { 'shell': 'powershell.exe' }, (error, stdout, stderr) => {
    var filePath = stdout.substring(stdout.indexOf(".\\") + 2).trim("\n");
    if (error !== null) {
        console.log("Cannot Locate the file...");
        return;
    }
And one for Linux & macOS, see below...
LINUX/OSX variant
console.log("Locating File...")
console.log();
exec('find -name "' + file + '"', { cwd: folder }, (error, stdout, stderr) => {
    var filePath = stdout.substring(stdout.indexOf("./") + 2).trim("\n");
    if (error !== null) {
        console.log("Cannot Locate the file...");
        return;
    }
I want to execute these functions based on the user's operating system, if that's possible.
I've had a look at the Stack Overflow question...
How do I determine the current operating system with Node.js
which is how I found out how to detect the current user's OS, as well as the Stack Overflow question...
python: use different function depending on os.
Aside from the second question being about Python, I am basically trying to achieve the same thing, but in Node.js, using the solution from the first question.
My latest attempt is below, but for some reason it prints nothing but a blank console.
const fs = require("fs");
const platform = require("process");
const path = require("path");
var exec = require("child_process").exec

var folder = "just/some/folder/location"
var file = "file.txt"

// Windows process
if (platform === "win32") {
    console.log("Locating File...");
    exec('set-location ' + folder + ';' + ' gci -path ' + folder + ' -recurse -filter ' +
        file + ' -file | resolve-path -relative', { 'shell': 'powershell.exe' }, (error, stdout, stderr) => {
        // relative path output for Windows
        var filePath = stdout.substring(stdout.indexOf(".\\") + 2).trim("\n");
        if (error !== null) {
            console.log("Cannot Locate the file...");
            return;
        } else if (process.platform === "linux" + "darwin") {
            // Linux & Unix process
            console.log("Locating File...")
            console.log();
            exec('find -name "' + file + '"', { cwd: folder }, (error, stdout, stderr) => {
                // relative path output for Linux & macOS
                var filePath = stdout.substring(stdout.indexOf("./") + 2).trim("\n");
                if (error !== null) {
                    console.log("Cannot Locate file... \n\n" + error);
                    return;
                };
                console.log("found file: " + filePath);
                // Read the file and show its output to confirm everything ran correctly.
                fs.readFile(path.join(folder, file), (error, data) => {
                    if (error) {
                        console.log("error reading file \n\n" + error);
                        return;
                    }
                });
            });
        };
    });
};

Problem solved. After doing a bit of research, I realised I was using if/else if statements and process.platform wrong...
First, when importing process.platform I was using...
const platform = require("process");
when, according to the process.platform documentation, for CommonJS it should be...
const { platform } = require("node:process");
Second, I was using the if/else if statements wrong!
In my attempt above I was using if/else if with process.platform like so...
} else if (process.platform === "linux" || "darwin") {
// Linux & macOS code to execute
}
which was completely wrong!
I should have been using if/else if with process.platform like so...
if (process.platform === "win32") {
    // Windows code to be executed!
} else if (process.platform === "linux") {
    // Linux Code to be executed!
} else if (process.platform === "darwin") {
    // macOS Code to be executed!
}
I've managed to produce the following code, which works on Windows, Linux, and macOS the way I need it to.
Hopefully this helps anyone new to JS like me.
const fs = require("fs");
const { platform } = require("node:process");
const path = require("path");
var exec = require("child_process").exec
var folder = "just/some/folder/location"
var file = "file.txt"
// Windows Process
if (process.platform === "win32") {
    // executes PowerShell's "set-location" & "gci" commands...
    //...to locate a given file recursively...
    //...and returns its relative path from inside the given...
    //...working directory's subdirectories.
    exec('set-location "' + folder + '";' +
        ' gci -path "' + folder + '"' +
        ' -recurse -filter "' + file + '"' +
        ' -file | resolve-path -relative', { 'shell': 'powershell.exe' }, (error, stdout, stderr) => {
        // sets the relative file path from ".\this\way\to\the\file.txt",
        // to "this\way\to\the\file.txt".
        var filePath = stdout.substring(stdout.indexOf(".\\") + 2).trim("\n");
        // throw an error if the file isn't found at all!
        if (error !== null) {
            console.log("Cannot locate the given file... \n" + error);
            return;
        }
        // read the file after it's been found.
        fs.readFile(path.join(folder, filePath), 'utf8', (error, data) => {
            if (error) {
                console.log("There was a problem reading the given file... \n" + error);
                return;
            }
            // Then print the data from fs.readFile...
            //...to confirm everything ran correctly.
            console.log(data);
        });
    });
} else if (process.platform === "linux") {
    // executes findutils' "find" command...
    //...to locate a given file recursively...
    //...and return its relative path from inside the given...
    //...working directory's subdirectories.
    exec('find -name "' + file + '"', { cwd: folder }, (error, stdout, stderr) => {
        // sets the relative file path from "./this/way/to/the/file.txt",
        // to "this/way/to/the/file.txt".
        var filePath = stdout.substring(stdout.indexOf("./") + 2).trim("\n");
        // throw an error if the file isn't found at all!
        if (error !== null) {
            console.log("Cannot locate the given file... \n" + error);
            return;
        }
        // read the file after it's been found.
        fs.readFile(path.join(folder, filePath), 'utf8', (error, data) => {
            if (error) {
                console.log("There was a problem reading the given file... \n" + error);
                return;
            }
            // Then print the data from fs.readFile...
            //...to confirm everything ran correctly.
            console.log(data);
        });
    });
} else {
    // Uses Apple's built-in BSD "find" command to locate the file inside the folder and its subdirectories.
    exec('find "' + folder + '" -name "' + file + '"', (error, stdout, stderr) => {
        // sets the relative file path from "./this/way/to/the/file.txt",
        // to "this/way/to/the/file.txt".
        var filePath = stdout.substring(stdout.indexOf("./") + 2).trim("\n");
        // throw an error if the file isn't found at all!
        if (error !== null) {
            console.log("Cannot locate the given file... \n" + error);
            return;
        }
        // read the file after it's been found.
        fs.readFile(path.join(folder, filePath), 'utf8', (error, data) => {
            if (error) {
                console.log("There was a problem reading the given file... \n" + error);
                return;
            }
            // Then print the data from fs.readFile...
            //...to confirm everything ran correctly.
            console.log(data);
        });
    });
}
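As an aside (not part of the original answer), since the question title asks about running separate functions per OS, the three branches above can also be expressed as a small dispatch table keyed on the platform value. This is only a sketch: the two locate functions below are placeholders standing in for the exec-based searches shown above.
// Sketch only: dispatch to a per-platform function via a lookup table.
const { platform } = require("node:process");

function locateFileWindows(folder, file) {
    // the PowerShell gci-based search from the win32 branch would go here
    console.log("Windows search for " + file + " in " + folder);
}

function locateFilePosix(folder, file) {
    // the find-based search from the linux/darwin branches would go here
    console.log("POSIX search for " + file + " in " + folder);
}

const locators = {
    win32: locateFileWindows,
    linux: locateFilePosix,
    darwin: locateFilePosix // macOS can reuse the find-based search
};

const locate = locators[platform];
if (locate) {
    locate("just/some/folder/location", "file.txt");
} else {
    console.log("Unsupported platform: " + platform);
}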

Related

Change video resolution with JS

I'm developing a video storage service for users, and I need large videos (e.g. 4K) to be compressed to 1080p before saving them. Is there a JS library (browser or Node) that helps with this task? Maybe a web service?
I'm also open to suggestions for other languages.
When it comes to downscaling video, the most accessible option is ffmpeg.
There is a package that makes using ffmpeg in node.js easier: https://www.npmjs.com/package/fluent-ffmpeg
For example, downscaling a video to 1080p and 720p:
var ffmpeg = require('fluent-ffmpeg');

function baseName(str) {
    var base = new String(str).substring(str.lastIndexOf('/') + 1);
    if (base.lastIndexOf(".") != -1) {
        base = base.substring(0, base.lastIndexOf("."));
    }
    return base;
}

var args = process.argv.slice(2);
args.forEach(function (val, index, array) {
    var filename = val;
    var basename = baseName(filename);
    console.log(index + ': Input File ... ' + filename);
    ffmpeg(filename)
        .output(basename + '-1280x720.mp4')
        .videoCodec('libx264')
        .size('1280x720')
        .output(basename + '-1920x1080.mp4')
        .videoCodec('libx264')
        .size('1920x1080')
        .on('error', function(err) {
            console.log('An error occurred: ' + err.message);
        })
        .on('progress', function(progress) {
            console.log('... frames: ' + progress.frames);
        })
        .on('end', function() {
            console.log('Finished processing');
        })
        .run();
});
(source: https://gist.github.com/dkarchmer/635496ff9280011b3eef)
You don't need any Node packages to run ffmpeg; you could make use of the child_process API in Node.js.
The ffmpeg package has to be installed on the server that will be running your application.
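For example, here is a minimal sketch (not from the original answer; the file names are placeholders) of shelling out to ffmpeg with child_process to downscale a video to 1080p:
// Sketch only: downscale a video to 1080p by invoking ffmpeg directly.
// Assumes the ffmpeg binary is installed and on the PATH.
const { execFile } = require('child_process');

function downscaleTo1080p(inputPath, outputPath, callback) {
    // -vf scale=-2:1080 keeps the aspect ratio and forces an even width,
    // -c:v libx264 re-encodes the video with H.264.
    const args = ['-i', inputPath, '-vf', 'scale=-2:1080', '-c:v', 'libx264', outputPath];
    execFile('ffmpeg', args, (error, stdout, stderr) => {
        if (error) return callback(error);
        callback(null, outputPath);
    });
}

downscaleTo1080p('input-4k.mp4', 'output-1080p.mp4', (err) => {
    if (err) return console.error('ffmpeg failed: ' + err.message);
    console.log('Finished processing');
});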
.format("h264") √
.format("mp4") ×
or add
.outputOptions(['-movflags isml+frag_keyframe'])
.format("mp4")

NodeJS finish writing the file with pipe before continuing with the next iteration

Similar to this question,
I have a script that downloads a file from a given URL via http.get.
How can I make sure the pipe has finished before continuing to the next iteration, using just the http/https module?
//nodejs default libs
var fs = require("fs");
var http = require('https');

function dlFile(fullFilePath, dlUrl, fsize, fname){
    var file = fs.createWriteStream(fullFilePath); //fullFilePath will dictate where we will save the file + filename.
    var rsult ='';
    var downloadedFsize;
    var stats; //stats of the file will be included here

    var request = http.get( dlUrl, function(response) {
        let rsult = response.statusCode;
        //will respond with a 200 if the file is present
        //404 if file is missing
        response.pipe(file);
        /*pipe writes the file...
          how do we stop the iteration while it is not yet finished writing?
        */
        console.log(" \n FILE : " + fname);
        console.log("File analysis finished : statusCode: " + rsult + " || Saved on " + fullFilePath);
        console.log(' \n Downloaded from :' + dlUrl);
        console.log(' \n SQL File size is : ' + fsize);
        //identify filesize
        stats = fs.statSync(fullFilePath);
        downloadedFsize = stats["size"]; //0 because the pipe isn't finished yet...
        console.log(' actual file size is : ' + downloadedFsize);
    }).on('error', function(e) {
        console.error(e);
        //log that an error happened to the file
    }).on('end', function(e){
        //tried putting the above script here but nothing happens
    });
    return rsult;
}
Is there a cleaner approach similar to what I have in mind above, or should I approach this differently? I tried putting the code in .on('end') but it does nothing.
The end event is not triggered on the request; instead, it is triggered on the response (docs):
response.on("end", function() {
console.log("done");
});
As @Jonas Wilms says, the trigger was indeed on the response.
//nodejs default libs
var fs = require("fs");
var http = require('https');

function dlFile(fullFilePath, dlUrl, fsize, fname){
    var file = fs.createWriteStream(fullFilePath); //fullFilePath will dictate where we will save the file + filename.
    var rsult ='';
    var downloadedFsize;
    var stats; //stats of the file will be included here

    var request = http.get( dlUrl, function(response) {
        let rsult = response.statusCode;
        //will respond with a 200 if the file is present
        //404 if file is missing
        response.pipe(file).on('finish', function(e){
            console.log(" \n FILE : " + fname);
            console.log("File analysis finished : statusCode: " + rsult + " || Saved on " + fullFilePath);
            console.log(' \n Downloaded from :' + dlUrl);
            console.log(' \n SQL File size is : ' + fsize);
            //identify filesize
            stats = fs.statSync(fullFilePath);
            downloadedFsize = stats["size"];
            console.log(' actual file size is : ' + downloadedFsize);
        });
        /*pipe writes the file above, and output the results once it's done */
    }).on('error', function(e) {
        console.error(e);
        //log that an error happened to the file
    }).on('end', function(e){
        //tried putting the above script here but nothing happens
    });
    return rsult;
}
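If the larger goal is to wait for each download to finish before starting the next one, here is a minimal sketch (not from the original answer; it assumes Node.js 15 or later for require('stream/promises')) that wraps the download in a Promise so the caller can await each file in turn:
// Sketch only: stream.pipeline resolves after the write stream has flushed.
const fs = require("fs");
const https = require("https");
const { pipeline } = require("stream/promises");

function download(url, destPath) {
    return new Promise((resolve, reject) => {
        https.get(url, (response) => {
            if (response.statusCode !== 200) {
                response.resume(); // discard the body
                reject(new Error("Unexpected status code: " + response.statusCode));
                return;
            }
            pipeline(response, fs.createWriteStream(destPath)).then(resolve, reject);
        }).on("error", reject);
    });
}

// Usage: files are downloaded strictly one after another.
async function downloadAll(jobs) {
    for (const { url, dest } of jobs) {
        await download(url, dest);
        console.log("finished " + dest + " (" + fs.statSync(dest).size + " bytes)");
    }
}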

Javascript - accessing password in ssh2 connection

I have a class which is used to connect to a device. I have made an instance of the class in my application:
app.js
myConn = new myConnection();
myConnection.js
function myConnection(){
    this.settings = {
        host: '192.168.225.195',
        port: 22,
        username: 'sysadmin',
        password: 'pass'
    };
}
I have a function within said class that executes a command on the remote device, but the command requires a password. When the password prompt appears, stderr.on('data') is executed and I send the password followed by a newline character.
myConnection.prototype.installPatch = function(callback){
    this.conn.exec('sudo -S bash /tmp/update.sh', function(err, stream){
        var standardMsgs = '';
        var errorMsgs = '';
        if(err) throw err;
        stream.on('close', function(code, signal) {
            callback(standardMsgs, errorMsgs);
        }).on('data', function(data) {
            standardMsgs += "<br>" + data;
            console.log('STDOUT: ' + data);
        }).stderr.on('data', function(data) {
            errorMsgs += data;
            console.log('STDERR: ' + data);
            stream.write(myConn.conn.config.password + '\n');
        });
    });
}
While this works, I am not a fan of accessing the password with
stream.write(myConn.conn.config.password + '\n');
since a change to the name "myConn" in app.js would require the same change in the "installPatch" function.
I had intended to use
stream.write(this.settings.password + '\n');
Do I have any other options that will allow me to retrieve the password from within the myConnection class? I hope I am just overlooking the obvious.
OK, so I believe it was staring me in the face.
Change
stream.write(myConn.conn.config.password + '\n');
to
stream.write(stream._client.config.password + '\n');
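Another option, shown only as a sketch (it is not the accepted fix above): capture the settings in a local variable and use arrow functions, so the prototype method keeps referring to the instance's own this.settings.password instead of reaching through myConn or the undocumented stream._client property.
// Sketch only: arrow functions don't rebind `this`, and the captured
// `settings` variable avoids depending on the outer `myConn` name.
myConnection.prototype.installPatch = function (callback) {
    const settings = this.settings; // captured once for the nested callbacks
    this.conn.exec('sudo -S bash /tmp/update.sh', (err, stream) => {
        if (err) throw err;
        let standardMsgs = '';
        let errorMsgs = '';
        stream.on('close', () => {
            callback(standardMsgs, errorMsgs);
        }).on('data', (data) => {
            standardMsgs += "<br>" + data;
        }).stderr.on('data', (data) => {
            errorMsgs += data;
            stream.write(settings.password + '\n'); // answer the sudo prompt
        });
    });
};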

Uploaded images are not saving in targeted path in Node.js

I am just trying to upload and save images to my public/images folder. I get the details of the file from req.files, but after that I get this error: Error: EXDEV, rename 'c:\Users\abdul\AppData\Local\Temp\3348-qiy7kl.jpg'
Here is my code:
app.post('/upload', function(req, res){
    var tmp_path = req.files.file.path;
    var target_path = './public/images/' + req.files.file.originalFilename;
    fs.rename(tmp_path, target_path, function(err) {
        if (err) throw err;
        fs.unlink(tmp_path, function() {
            if (err) throw err;
            res.send('File uploaded to: ' + target_path + ' - ' + req.files.file.size + ' bytes');
        });
    });
});
Can anybody give me a suggestion or a reference so that I can handle this?
fs.rename cannot move a file across devices or filesystems, which is what the EXDEV error means: your temp directory and the public/images folder are on different volumes. Read the temp file and write it to the target path instead. Try this:
console.log("File Name " + req.files.file.name);
console.log("File Path " + req.files.file.path);
fs.readFile(req.files.file.path, function (err, data) {
var newPath = "public/images/" + req.files.file.originalFilename;
console.log(newPath);
/// write file to uploads/fullsize folder
fs.writeFile(newPath, data, function (err) {
/// let's see it
});
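As an alternative sketch (not from the answer above; it assumes Node.js 8.5+ for fs.copyFile and the same Express/req.files setup as the question), you can copy across devices and then remove the temp file, which also avoids buffering the whole image in memory:
// Sketch only: fs.copyFile works across devices, unlike fs.rename.
var fs = require("fs");
var path = require("path");

app.post('/upload', function (req, res) {
    var tmpPath = req.files.file.path;
    var targetPath = path.join('./public/images/', req.files.file.originalFilename);
    fs.copyFile(tmpPath, targetPath, function (err) {
        if (err) return res.status(500).send('Copy failed: ' + err);
        // Remove the temp file once the copy has succeeded.
        fs.unlink(tmpPath, function (unlinkErr) {
            if (unlinkErr) console.log('Could not remove temp file: ' + unlinkErr);
            res.send('File uploaded to: ' + targetPath);
        });
    });
});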

node js azure SDK getBlobToStream uses lots of memory

I am writing a backup script that simply downloads all the blobs in all the blob containers of a specific Azure account.
The script uses async.js to make sure only so many tasks run at the same time, so it doesn't overload the server. When I run this script it works fine, but when it hits large files it runs out of memory. I'm guessing the download runs faster than the disk can write, and it eventually fills up the in-memory buffer so badly that I run out of memory entirely, but debugging the exact cause has been impossible so far.
The specific function which appears to use a lot of memory is called as follows:
blobService.getBlobToStream(
    containerName,
    blob.name,
    fs.createWriteStream(fullPath),
    function(error) {
        if(error){ //Something went wrong, write it to the console but finish the queue item and continue.
            console.log("Failed writing " + blob.name + " (" + error + ")");
            callback();
        }
        else if(!error) { //Write the last modified date and finish the queue item silently
            fs.writeFile(fullPath + ".date", blobLastModified, function(err)
                { if(err) console.log("Couldn't write .date file: " + err); });
            callback();
        }
    });
Even a single 700MB download will easily fill up 1GB of memory on my side.
Is there any way around this? Am I missing a parameter which magically prevents the Azure SDK from buffering everything and the kitchen sink?
Full code:
#!/usr/bin/env node
//Requires
var azure = require('azure');
var fs = require('fs');
var mkdirp = require('mkdirp');
var path = require('path');
var async = require('async');

var maxconcurrency = 1; //Max amount of simultaneous running threads of getBlobsAndSaveThem() running through async.js.

var blobService = azure.createBlobService();

backupPrefix='/backups/azurebackup/' //Always end with a '/'!!

//Main flow of the script is near the bottom of the file.
var containerProcessingQueue = async.queue(
    function getBlobsAndSaveThem(containerName) {
        console.log(containerName); //DEBUG
        blobService.listBlobs(containerName,
            function(error, blobs) {
                if(!error){
                    var blobProcessingQueue =
                        async.queue(function(index,callback) {
                            var blob = blobs[index];
                            console.log(blob); //DEBUG
                            var fullPath = backupPrefix + containerName + '/' + blob.name;
                            var blobLastModified = new Date(blob.properties['last-modified']);
                            //Only create if the directory doesn't exist, since mkdirp fails if the directory exists.
                            if(!fs.existsSync(path.dirname(fullPath))){ //And do it sync, because otherwise it'll check 99999 times if the directory exists simultaneously, doesn't find it, then fails to create it 99998 times.
                                mkdirp.sync(path.dirname(fullPath), function(err) { console.log('Failed to create directory ' + path.dirname(fullPath) + " ("+ err + ")"); });
                            }
                            if(fs.existsSync(fullPath + ".date")){
                                if(blobLastModified == fs.readFileSync(fullPath + ".date").toString()) {
                                    callback();
                                    return; //If the file is unmodified, return. No this won't exit the program, because it's called within a function definition (async.queue(function ...))
                                }
                            }
                            blobService.getBlobToStream(
                                containerName,
                                blob.name,
                                fs.createWriteStream(fullPath),
                                function(error) {
                                    if(error){ //Something went wrong, write it to the console but finish the queue item and continue.
                                        console.log("Failed writing " + blob.name + " (" + error + ")");
                                        callback();
                                    }
                                    else if(!error) { //Write the last modified date and finish the queue item silently
                                        fs.writeFile(fullPath + ".date", blobLastModified, function(err)
                                            { if(err) console.log("Couldn't write .date file: " + err); });
                                        callback();
                                    }
                                });
                        },maxconcurrency);
                    for(var blobindex in blobs){
                        blobProcessingQueue.push(blobindex);
                    } //Push new items to the queue for processing
                }
                else {
                    console.log("An error occurred listing the blobs: " + error);
                }
            });
    },1);

blobService.listContainers(function(err, result){
    for(var i=0;i<result.length;i++) {
        containerProcessingQueue.push(result[i].name);
    }
});
For all those now curious: the option names for the start and end of the range have since changed; they are now just rangeStart and rangeEnd.
Here is the Azure Node documentation for more help:
http://dl.windowsazure.com/nodestoragedocs/BlobService.html
One thing that you could possibly do is read only a chunk of data into the stream instead of the whole blob, append that to the file, and then read the next chunk. The Blob Storage service supports that. If you look at the source code for getBlobToStream (https://github.com/WindowsAzure/azure-sdk-for-node/blob/master/lib/services/blob/blobservice.js), you can specify from/to bytes in the options - rangeStartHeader and rangeEndHeader. See if that helps.
I have hacked together some code which does just that (as you can see from my code, my knowledge about node.js is quite primitive :)). [Please use this code just to get an idea of how you can do a chunked download, as I think it still has some glitches.]
var azure = require('azure');
var fs = require('fs');

var blobService = azure.createBlobService("account", "accountkey");
var containerName = "container name";
var blobName = "blob name";
var blobSize;
var chunkSize = 1024 * 512; //chunk size -- we'll read 512 KB at a time.
var startPos = 0;
var fullPath = "D:\\node\\";

var blobProperties = blobService.getBlobProperties(containerName, blobName, null, function (error, blob) {
    if (error) {
        throw error;
    }
    else {
        blobSize = blob.contentLength;
        fullPath = fullPath + blobName;
        console.log(fullPath);
        doDownload();
    }
});

function doDownload() {
    var stream = fs.createWriteStream(fullPath, {flags: 'a'});
    var endPos = startPos + chunkSize;
    if (endPos > blobSize) {
        endPos = blobSize;
    }
    console.log("Downloading " + (endPos - startPos) + " bytes starting from " + startPos + " marker.");
    blobService.getBlobToStream(containerName, blobName, stream,
        { "rangeStartHeader": startPos, "rangeEndHeader": endPos - 1 }, function(error) {
            if (error) {
                throw error;
            }
            else if (!error) {
                startPos = endPos;
                if (startPos <= blobSize - 1) {
                    doDownload();
                }
            }
        });
}
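Following up on the note above about the renamed options, here is a hedged sketch (not verified against a specific SDK version) of how the same chunked call would presumably look with rangeStart and rangeEnd:
// Sketch only: the chunked call from doDownload() above, using the renamed
// rangeStart / rangeEnd options instead of rangeStartHeader / rangeEndHeader.
function downloadChunk(containerName, blobName, stream, startPos, endPos, done) {
    blobService.getBlobToStream(containerName, blobName, stream,
        { rangeStart: startPos, rangeEnd: endPos - 1 },
        done);
}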
