node.js changes made in files not reflecting until the second run - javascript

I am converting all .png files in a directory to .jpg files and then running some manipulations on them which can only be done to jpeg files. But Node.js doesn't seem to notice the converted files and the deleted png files until I run the same script again.
const fs = require('fs')
const pngToJpeg = require('png-to-jpeg');

let dirrCont = fs.readdirSync(dir);
files = dirrCont.filter((elm) => /.*\.(png|jpg)/gi.test(elm));

for (i in files)
{
    let file = files[i]
    let file_name_without_ext = file.replace(/\.[^/.]+$/, "")
    let extension = file.match(/\.[^/.]+$/)
    if (extension[0] == '.png')
    {
        console.log('found')
        let buffer = fs.readFileSync(dir + file);
        pngToJpeg({quality: 100})(buffer)
            .then(output => fs.writeFileSync(dir + file_name_without_ext + '.jpg', output));
        fs.unlinkSync(dir + file)
        extension = '.jpg'
    }
    let target_file = target + file_name_without_ext + '.' + suffix + extension
    // do some manipulations on dir+file_name_without_ext+extension
}
I always receive an error that the new jpg files are not found, so the manipulations fail, even though the png files do get converted to jpg files. When I run the same script again, the manipulations work, since by then all the files are jpegs.
EDIT
As suggested in one of the answers by @CertainPerformance,
I changed the code to do most of my work inside the then block, but I hit the same error again:
for (i in files)
{
    let file = files[i]
    let file_name_without_ext = file.replace(/\.[^/.]+$/, "")
    let extension = file.match(/\.[^/.]+$/)
    if (extension[0] == '.png')
    {
        console.log('found')
        let buffer = fs.readFileSync(dir + file);
        pngToJpeg({quality: 100})(buffer)
            .then(output => {
                fs.writeFileSync(dir + file_name_without_ext + '.jpg', output);
                extension = '.jpg'
                //
                let target_file = target + file_name_without_ext + '.' + suffix + extension
                // Do some manipulations
                // I am done with the manipulations and I now want to delete
                // the jpg file I just created
                fs.unlinkSync(dir + file_name_without_ext + '.jpg') // Gives me back the same error
            });
    }
}
NOTE: The edit changes things slightly: I am now deleting the jpg file instead of the png file (which I was deleting originally).

As you can see from the .then, pngToJpeg is asynchronous: if you want to do work on dir+file_name_without_ext, you have to wait for the initial promise to resolve and the writeFileSync to run first. Put everything that depends on the asynchronous operations inside the then. For example:
if (extension[0] == '.png') {
    console.log('found');
    const buffer = fs.readFileSync(dir + file);
    pngToJpeg({ quality: 100 })(buffer)
        .then(output => {
            fs.unlinkSync(dir + file);
            fs.writeFileSync(dir + file_name_without_ext + '.jpg', output);
            extension = '.jpg';
            const target_file = target + file_name_without_ext + '.' + suffix + extension;
            // do some manipulations on dir+file_name_without_ext+extension
        });
}
(You should also take care not to implicitly create global variables. For example, use for (const i in files) instead, or perhaps for (const file of files) to avoid having to fiddle with the unimportant indices.)
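
For completeness, here is a minimal sketch of the same fix using async/await instead of .then. This is a sketch, assuming (as in the question) that dir ends with a path separator and that the manipulations only need the finished .jpg:

const fs = require('fs');
const pngToJpeg = require('png-to-jpeg');

async function convertAll(dir) {
    const files = fs.readdirSync(dir).filter((f) => /\.(png|jpg)$/i.test(f));
    for (const file of files) {
        const base = file.replace(/\.[^/.]+$/, "");
        if (/\.png$/i.test(file)) {
            const buffer = fs.readFileSync(dir + file);
            // awaiting here guarantees the .jpg exists before the code below runs
            const output = await pngToJpeg({ quality: 100 })(buffer);
            fs.writeFileSync(dir + base + '.jpg', output);
            fs.unlinkSync(dir + file);
        }
        // dir + base + '.jpg' is now safe to manipulate
    }
}

Because each iteration awaits the conversion, the manipulations for a file only ever run after its .jpg actually exists on disk.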

Related

Downloading Image locally from GitHub Raw link using fs.writeFileSync() JS

I am currently trying to download an image from GitHub locally. Everything seems to work: the fetch goes through with a 200 OK response; however, I don't understand how to store the image itself:
const rawGitLink = "https://raw.githubusercontent.com/cardano-foundation/CIPs/master/CIP-0001/CIP_Flow.png"
const folder = "/Folder"
const imageName = "/Test"

const imageResponse = await axios.get(rawGitLink)

fs.writeFileSync(___dirname + folder + imageName, imageResponse, (err) => {
    // Error handling
})
Four problems had to be fixed:
- The image name must include the png extension in this case
- The response must be requested in the correct format for an image, as a buffer
- You must write the response data, not the response object itself
- __dirname only needs two underscores
const rawGitLink = "https://raw.githubusercontent.com/cardano-foundation/CIPs/master/CIP-0001/CIP_Flow.png"
const folder = "/Folder"
const imageName = "/Test.png"
const imageResponse = await axios.get(rawGitLink, { responseType: 'arraybuffer' });
fs.writeFileSync(__dirname + folder + imageName, imageResponse.data)
Axios returns a special object: https://github.com/axios/axios#response-schema
let {data} = await axios.get(...)
await fs.writeFile(filename, data) // you can use fs.promises instead of sync
As @Leau said, you should include the extension in the filename.
Another suggestion is to use the path module to create the filename:
filename = path.join(__dirname, "/Folder", "Test.png")
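
Putting both answers together, a runnable sketch might look like this (note that the Folder directory must already exist, since fs.writeFileSync does not create directories):

const axios = require('axios');
const fs = require('fs');
const path = require('path');

async function downloadImage() {
    const url = "https://raw.githubusercontent.com/cardano-foundation/CIPs/master/CIP-0001/CIP_Flow.png";
    // arraybuffer makes axios hand back binary data (a Buffer in Node)
    const { data } = await axios.get(url, { responseType: 'arraybuffer' });
    const filename = path.join(__dirname, "Folder", "Test.png");
    fs.writeFileSync(filename, data);
}

downloadImage().catch(console.error);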

Is there a way to unzip a file from a url request and parse its content in node?

I want to download a zip file from a URL and parse its contents in Node. I do not want to save the file on disk. The zip file is a directory of csv files that I want to process. How can I approach this? The only package that has an option for unzipping from a URL is unzipper, but it does not work for me. Every other package lets you unzip a file on disk by providing the path to the file, but not a URL.
I am downloading the file like so:
const res = await this.get(test)
But what can I do now? There are packages like AdmZip that can extract zip files but need a path, as a string, to a file on disk. Is there a way I can pass/stream my res object above to the below?
var AdmZip = require('adm-zip');

// reading archives
var zip = new AdmZip("./my_file.zip");
var zipEntries = zip.getEntries(); // an array of ZipEntry records

zipEntries.forEach(function(zipEntry) {
    console.log(zipEntry.toString()); // outputs zip entries information
    if (zipEntry.entryName == "my_file.txt") {
        console.log(zipEntry.getData().toString('utf8'));
    }
});
Here's a simple example of downloading a .zip file and unzipping it using adm-zip. As @anonymouze points out, you can pass a buffer to the AdmZip constructor.
const axios = require("axios");
const AdmZip = require('adm-zip');

async function get(url) {
    const options = {
        method: 'GET',
        url: url,
        responseType: "arraybuffer"
    };
    const { data } = await axios(options);
    return data;
}

async function getAndUnZip(url) {
    const zipFileBuffer = await get(url);
    const zip = new AdmZip(zipFileBuffer);
    const entries = zip.getEntries();
    for (let entry of entries) {
        const buffer = entry.getData();
        console.log("File: " + entry.entryName + ", length (bytes): " + buffer.length + ", contents: " + buffer.toString("utf-8"));
    }
}

getAndUnZip('https://www.learningcontainer.com/wp-content/uploads/2020/05/sample-zip-file.zip');
In this case I'm simply using axios to download a zip file buffer, then parsing it with AdmZip.
Each entry's data can be accessed with entry.getData(), which will return a buffer.
In this case we'll see an output something like this:
File: sample.txt, length (bytes): 282, contents: I would love to try or hear the sample audio your app can produce. I do...
Here's another example, this time using node-fetch:
const fetch = require('node-fetch');
const AdmZip = require('adm-zip');

async function get(url) {
    return fetch(url).then(res => res.buffer());
}

async function getAndUnZip(url) {
    const zipFileBuffer = await get(url);
    const zip = new AdmZip(zipFileBuffer);
    const entries = zip.getEntries();
    for (let entry of entries) {
        const buffer = entry.getData();
        console.log("File: " + entry.entryName + ", length (bytes): " + buffer.length + ", contents: " + buffer.toString("utf-8"));
    }
}

getAndUnZip('https://www.learningcontainer.com/wp-content/uploads/2020/05/sample-zip-file.zip');
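
Since the original question is about a zip full of csv files, the same axios + adm-zip approach can be narrowed to csv entries. This is just a sketch; parseCsv below is a hypothetical placeholder for whatever csv parser you actually use:

const axios = require('axios');
const AdmZip = require('adm-zip');

async function processCsvZip(url) {
    const { data } = await axios.get(url, { responseType: 'arraybuffer' });
    const zip = new AdmZip(data);
    for (const entry of zip.getEntries()) {
        if (entry.entryName.endsWith('.csv')) {
            const text = entry.getData().toString('utf-8');
            // parseCsv(text); // placeholder: plug in csv-parse, papaparse, etc.
            console.log('Read ' + entry.entryName + ' (' + text.length + ' chars)');
        }
    }
}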

unzipping downloaded zip repo from remote repository before saving in browser

I download a zip file from a remote repo. Now I want to unzip it before saving to the local system. How can I do that in the browser itself, without needing to unzip explicitly outside the browser?
You can use a library like JSZip or fflate to unzip in the browser. You download the file, unzip, and save. Since fflate is faster, I'll use it.
const downloadFilesFromZip = async url => {
    console.log('Downloading from ' + url + '...');
    const unzipper = new fflate.Unzip();
    unzipper.register(fflate.AsyncUnzipInflate);
    unzipper.onfile = file => {
        console.log("Got", file.name);
        if (file.originalSize) {
            console.log("Original size:", file.originalSize);
        }
        const rs = new ReadableStream({
            start(controller) {
                file.ondata = (err, dat, final) => {
                    controller.enqueue(dat);
                    if (final) controller.close();
                }
                file.start();
            }
        });
        streamSaver.createWriteStream(
            file.name,
            rs
        );
    }
    const res = await fetch(url);
    const reader = res.body.getReader();
    while (true) {
        const { value, done } = await reader.read();
        if (done) {
            unzipper.push(new Uint8Array(0), true);
            break;
        }
        unzipper.push(value);
    }
}

// Call downloadFilesFromZip to download all files from a URL
// downloadFilesFromZip('https://your-url.com/file.zip');
downloadFilesFromZip(
    'https://cors-anywhere.herokuapp.com/' +
    'https://github.com/101arrowz/fflate/archive/master.zip'
)
<script src="https://cdn.jsdelivr.net/npm/streamsaver@2.0.3/StreamSaver.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/fflate@0.6.4/umd/index.js"></script>
Stack Overflow doesn't allow downloads from answers, so the snippet doesn't work properly there, but the console will show the files that fflate found. Also, you need to visit the cors-anywhere page used in the snippet and request access before running it.
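
For comparison, here is a minimal sketch of the JSZip route mentioned at the start of this answer. It buffers the whole archive in memory rather than streaming (which is why fflate was preferred), and it assumes JSZip has been loaded from a CDN like the other libraries above:

async function downloadAndUnzip(url) {
    const res = await fetch(url);
    const buf = await res.arrayBuffer();
    const zip = await JSZip.loadAsync(buf);
    zip.forEach((relativePath, file) => {
        if (!file.dir) {
            // file.async('blob') resolves with the decompressed contents
            file.async('blob').then(blob => {
                console.log(relativePath, blob.size, 'bytes');
                // hand the blob to StreamSaver or an <a download> link as needed
            });
        }
    });
}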

NodeJs API sending blank response while converting larger files

I am currently working on a piece of code that uploads mp4 videos from my localhost onto a server. The basic idea is that if the video is a .mp4 it is uploaded directly; otherwise it is converted to .mp4 and then uploaded. I'm doing the video conversion with handbrake-js.
All works fine except for one thing. When the file isn't that huge, say less than 70-80 MB, it works like a charm. But the problem is with larger files. Even though I explicitly call res.end / res.send in the .on('end') callback, I receive a blank response in my Angular controller, even before the conversion has finished. I have noticed it happens at around 30 to 40% of the conversion. The response has a readyState equal to XMLHttpRequest.DONE and also status = 200.
Here is the Node side code:
try {
    if (fs.existsSync(uploadPath + filename.substring(0, filename.lastIndexOf('.')) + '.mp4')) {
        res.end('<b><i>' + filename + '</i></b> already exists in the directory.');
    }
    else {
        const fstream = fs.createWriteStream(path.join(cfg.tempStoragePath, filename));
        file.pipe(fstream);
        console.log("\nfile_type: " + file_type);
        filename = filename.substring(0, filename.lastIndexOf('.'));

        // On finish of the copying file to temp location
        fstream.on('close', () => {
            hbjs.spawn({
                input: cfg.tempStoragePath + filename + '.' + file_type,
                output: uploadPath + filename + '.mp4'
            })
            .on('error', err => {
                // invalid user input, no video found etc
                console.log('error! No input video found at: \n: ' + cfg.tempStoragePath + filename + '.' + file_type);
                res.send('Conversion of the file, <b><i>' + filename + '</i></b>, from <b>.' + file_type + '</b>' + ' to <b>.mp4</b> failed because no input video was found at: \n: ' + cfg.tempStoragePath + filename + '.' + file_type);
            })
            .on('progress', progress => {
                progress_percent = (Number(progress.percentComplete) * 2 <= 100) ? Number(progress.percentComplete) * 2 : 100;
                eta = progress.eta.split(/[a-zA-Z]/);
                minutes = ((+eta[0]) * 60 + (+eta[1])) / 2;
                console.log('Percent complete: %d, ETA: %d ///// %s ==> mp4', progress_percent, minutes, file_type);
            })
            .on('end', end => {
                console.log('Conversion from .' + file_type + ' to .mp4 complete.');
                // delete the temp file
                fs.unlink(cfg.tempStoragePath + filename + '.' + file_type);
                let new_path = uploadPath + filename + '.mp4';
                let stat = fs.statSync(new_path);
                console.log(`Upload of '${filename}' finished`);
                if (Number(progress_percent) === Number(100))
                    res.send('The file, <b><i>' + filename + '</i></b>, has been converted from <b>.' + file_type + '</b>' + ' to <b>.mp4</b> complete.');
            })
        });
    }
}
catch (err) {
    res.end(err);
}
The following is part of my Angular controller:
request = new XMLHttpRequest();
request.onreadystatechange = function () {
    if (request.readyState === XMLHttpRequest.DONE && request.status === 200) {
        showConversionModal('<p>' + request.responseText + '</p>', 'done');
    }
};

showSomeModal('something');
request.open("POST", client.clientHost + ":" + client.clientPort + "/uploadVideoService");

formData = new FormData();
formData.append("file", files[0], files[0].name);
request.send(formData);
NOTE: All the data I console.log() inside Node is as expected. Even res.end/res.send works fine for smaller files, which take less time; the problem arises only for files whose conversion takes longer.
Also, the if block which checks for the existing-file scenario doesn't work as expected for these larger files. I thought at least that should work, because it doesn't even reach the handbrake part. But that is not the case.
And in the browser I get this error:
Failed to load resource: net::ERR_CONNECTION_RESET
which points to the request.send(formData); line. I have also tried almost all the solutions from this SO article, with no good effect. Still, the conversion itself happens fine.
P.S.: Also note that the conversion and upload happen without a problem even for the larger files; it's just the response I'm receiving on the client side that has been my headache.
UPDATE: I tried using the debugger in VS Code and saw that the breakpoints correctly hit the res.end() inside the if block that checks for the existing-file scenario, but for some strange reason the Angular controller isn't reacting to it. And this happens only for the larger files.
I figured it out myself after a long time. Turns out there's something like a buffer in Busboy.
Previously, I linked busboy and express like this:
app.use(busboy());
Just setting the highWaterMark field to an arbitrary size greater than the file size did the trick.
app.use(busboy({
    highWaterMark: 2048 * 1024 * 1024, // buffer size in bytes (here, 2 GB)
}));
I don't know what caused this problem or what the new field does to solve it; all I know is that it works. It would be helpful if someone could elaborate on this a bit.
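
To elaborate a little, with the caveat that this is an educated guess rather than documented behavior: highWaterMark is a standard Node.js stream option that sets how many bytes a stream buffers internally before applying backpressure, and the busboy middleware appears to forward it to the underlying parser stream. The same option exists on ordinary file streams, for example (movie.mp4 is just a hypothetical file):

const fs = require('fs');

// read in 1 MB chunks instead of the 64 KB default for file streams
const stream = fs.createReadStream('movie.mp4', { highWaterMark: 1024 * 1024 });
stream.on('data', chunk => console.log('chunk of', chunk.length, 'bytes'));

So the fix above presumably works by making the internal buffer large enough that the upload never stalls while HandBrake is busy.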

Node.js transform stream - Find the source name in _transform function

I have installed Node.js on Windows 7, and I am using a basic example of a Transform stream to unzip log files in a directory and read them asynchronously using the _transform function.
This process is very fast, but now I have a problem:
I need to map the source filename to the filtered lines. I tried appending the filename to the filtered line in the on('readable') event, but that is incorrect: data comes out asynchronously from multiple files, so everything gets mapped to a single filename. I tried code like the line below, but it throws an error saying too many event listeners were created.
source.pipe(gunzip).pipe(liner).pipe(new Upper(filename).pipe(process.stdout));
Found the solution: basically, I needed to create a separate pipeline for each file. The improved code is below:
var gunzip = [];
var source = [];
var upper = [];
var outputfile = [];
var liner = [];
.
.
.
function file(i) {
    var filename = files[i];
    gunzip[i] = zlib.createGunzip();
    source[i] = fs.createReadStream(__dirname + '/' + filename);
    upper[i] = new Upper();
    liner[i] = new LineSplitter();
    outputfile[i] = fs.createWriteStream(__dirname + '/' + '_' + i + '_outputfile.txt');
    source[i].pipe(gunzip[i]).pipe(liner[i]).pipe(upper[i]).pipe(outputfile[i]);
}
As a further step, I am looking to use the async module to run these pipelines in parallel.
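
One possible sketch of that next step uses stream/promises (Node 15+) instead of the async module; it assumes the question's LineSplitter and Upper transform classes and a files array as above:

const fs = require('fs');
const zlib = require('zlib');
const { pipeline } = require('stream/promises');

async function processAll(files) {
    // one pipeline per file, all started at once and awaited together
    await Promise.all(files.map((filename, i) =>
        pipeline(
            fs.createReadStream(__dirname + '/' + filename),
            zlib.createGunzip(),
            new LineSplitter(), // the question's custom transforms
            new Upper(),
            fs.createWriteStream(__dirname + '/_' + i + '_outputfile.txt')
        )
    ));
}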
