Iterating over createReadStream in Node.js using WebDAV downloads empty files - javascript

I'm trying to download some files from a folder on Nextcloud using WebDAV.
I want to iterate over the folder and download all the files in it. Here is my code:
let dir = "/foo";
let folder = await WebDav.getDirectoryContents("bar");
// folder is returned as an array
for (let i = 0; i < folder.length; i++) {
  await WebDav.createReadStream(folder[i].filename).pipe(
    fs.createWriteStream(`${dir}/${folder[i].basename}`)
  );
}
The files are created with the correct names, but they have no content and their size is zero KB.
But when I put the pipe outside of the for loop, it works fine and downloads the file.

Use this code; it worked for me:
let fs = require('fs');
let { createClient } = require("webdav");

let start = async () => {
  let client = createClient(
    "https://*/on/demandware.servlet/webdav/Sites/Logs",
    {
      username: "*",
      password: "*"
    }
  );
  let directories = await client.getDirectoryContents("/");
  let files = await directories;
  //console.log(files);
  for (let i = 0; i < files.length; i++) {
    let fileName = files[i].filename;
    if (fileName.includes('.log')) {
      let readStream = client.createReadStream(fileName);
      // wait for 5 seconds, then pipe
      setTimeout(function() {
        let writeStream = readStream.pipe(fs.createWriteStream("C:/Users/*" + fileName));
        writeStream.on('finish', function() {
          console.log('all done!');
        });
        writeStream.on('error', function(err) {
          console.error(err);
        });
      }, 5000);
    }
  }
}

start();
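A more robust pattern than a fixed 5-second timeout is to await each stream's completion explicitly. This is a minimal sketch, not the answer's exact method: it assumes the same webdav client API used above and Node 15+ for the built-in stream/promises pipeline; the URL, credentials, and ./downloads target folder are placeholders.

const fs = require("fs");
const { pipeline } = require("stream/promises");
const { createClient } = require("webdav");

async function downloadAll() {
  // placeholder URL and credentials, as in the answer above
  const client = createClient("https://*/webdav", {
    username: "*",
    password: "*"
  });
  const files = await client.getDirectoryContents("/");
  for (const file of files) {
    if (file.type !== "file") continue; // skip subdirectories
    // pipeline resolves when the write stream finishes and rejects
    // on a read or write error, so each file is fully written
    // before the next download starts
    await pipeline(
      client.createReadStream(file.filename),
      fs.createWriteStream(`./downloads/${file.basename}`) // assumes ./downloads exists
    );
    console.log(`Downloaded ${file.basename}`);
  }
}

downloadAll().catch(console.error);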

Related

Can't download multiple images from array using axios

const Fs = require('fs')
const Path = require('path')
const Axios = require('axios')

var dir = './tmp';

async function downloadImage () {
  if (!Fs.existsSync(dir)) {
    Fs.mkdirSync(dir);
  }
  var arr = [
    'https://reaperscans.com/wp-content/uploads/WP-manga/data/manga_6295b8da2aa90/5461fc34b58cd174c806625056c6e0dc/01-copy.jpg',
    'https://reaperscans.com/wp-content/uploads/WP-manga/data/manga_6295b8da2aa90/5461fc34b58cd174c806625056c6e0dc/02-copy.jpg',
    'https://reaperscans.com/wp-content/uploads/WP-manga/data/manga_6295b8da2aa90/5461fc34b58cd174c806625056c6e0dc/03-copy.jpg'
  ]
  for (var i = 0; i < arr.length; i++) {
    var url = arr[i]
    var name = i + '.jpg'
    var path = Path.resolve(__dirname, dir, name)
    var writer = Fs.createWriteStream(path)
    var response = await Axios({
      url,
      method: 'GET',
      responseType: 'stream'
    })
    response.data.pipe(writer)
    return new Promise((resolve, reject) => {
      writer.on('finish', resolve)
      writer.on('error', reject)
    })
  }
}
downloadImage()
This is the code I am using to download images. When I try to download multiple images whose links are in the array, it only downloads the first one (arr[0]), and I can't figure out what the problem is. It doesn't give any error either, and I can download single images individually, just not in bulk.
So, I played around with this code snippet and was able to fix it by removing the promise callback at the bottom of your for loop. You were right: it would only complete one GET request and then terminate. It now runs through all three and saves them in your ./tmp directory.
const Fs = require("fs");
const Path = require("path");
const Axios = require("axios");

var dir = "./tmp";

async function downloadImage() {
  if (!Fs.existsSync(dir)) {
    Fs.mkdirSync(dir);
  }
  var arr = [
    "https://reaperscans.com/wp-content/uploads/WP-manga/data/manga_6295b8da2aa90/5461fc34b58cd174c806625056c6e0dc/01-copy.jpg",
    "https://reaperscans.com/wp-content/uploads/WP-manga/data/manga_6295b8da2aa90/5461fc34b58cd174c806625056c6e0dc/02-copy.jpg",
    "https://reaperscans.com/wp-content/uploads/WP-manga/data/manga_6295b8da2aa90/5461fc34b58cd174c806625056c6e0dc/03-copy.jpg"
  ];
  for (var i = 0; i < arr.length; i++) {
    var url = arr[i];
    // for debugging
    console.log(arr[i]);
    var name = i + ".jpg";
    var path = Path.resolve(__dirname, dir, name);
    var writer = Fs.createWriteStream(path);
    var response = await Axios({
      url,
      method: "GET",
      responseType: "stream"
    });
    // stream the response body into the file
    response.data.pipe(writer);
    // for debugging
    console.log(response);
  }
}
downloadImage();
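If you do want each download to finish writing before the next one starts, the promise just needs to be awaited inside the loop rather than returned from the function. A minimal sketch using Node's built-in stream/promises pipeline (Node 15+ assumed; urls and dir are stand-ins for the array and folder above):

const Fs = require("fs");
const Path = require("path");
const Axios = require("axios");
const { pipeline } = require("stream/promises");

async function downloadImages(urls, dir) {
  if (!Fs.existsSync(dir)) Fs.mkdirSync(dir);
  for (let i = 0; i < urls.length; i++) {
    const response = await Axios({
      url: urls[i],
      method: "GET",
      responseType: "stream"
    });
    // pipeline resolves once the file is fully written,
    // so the loop moves on only after each image is saved
    await pipeline(
      response.data,
      Fs.createWriteStream(Path.resolve(__dirname, dir, `${i}.jpg`))
    );
  }
}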

Memory leak in JSZip

I am using JSZip in my little React application. I need to fetch attachments from a SharePoint list. The attachments total about 3 GB, so I decided to download them in parts (200 MB each). But I am seeing large RAM consumption (more than 3500 MB) and I can't find the memory leak.
Source code (this function lives outside of the React component):
var JSZip = require("jszip");

async function testUploadAllAttachments() {
  const ceilSizebytes = 209715200;
  // running total of blob sizes
  let blobCounter = 0;
  const filterItemsDate = await sp.web.lists.getByTitle("Reports").items.getAll();
  console.log("filterItemsDate: ", filterItemsDate);
  let zip = new JSZip();
  for (const item of filterItemsDate) {
    let allItemAttachments = await sp.web.lists.getByTitle("Reports").items.getById(item.Id).attachmentFiles();
    let itemFolder = zip.folder(item.Id);
    console.log("itemFolder: ", itemFolder);
    console.log("blobCounter: ", blobCounter);
    for (const attach of allItemAttachments) {
      let urlToFetch = attach.ServerRelativePath.DecodedUrl;
      let blob = await fetch(urlToFetch).then(response => {
        return response.blob();
      });
      // Blob size control
      blobCounter += blob.size;
      if (blobCounter > ceilSizebytes) {
        blobCounter = 0;
        zip.remove(item.Id);
        await zip.generateAsync({
          type: "blob",
          compression: "DEFLATE",
          compressionOptions: {
            level: 6
          }
        })
          .then(function (content) {
            // saveAs(content, "examplePart.zip");
            // content = null;
          });
        zip = null;
        zip = new JSZip();
        // Recreate the item removed in the previous step
        let itemFolderReset = zip.folder(item.Id);
        for (let i = 0; i < allItemAttachments.length; i++) {
          let urlToFetchReset = allItemAttachments[i].ServerRelativePath.DecodedUrl;
          let blobReset = await fetch(urlToFetchReset).then(response => {
            return response.blob();
          });
          itemFolderReset.file(allItemAttachments[i].FileName, blobReset);
          blobCounter += blobReset.size;
        }
        continue;
      }
      else {
        itemFolder.file(attach.FileName, blob);
      }
    }
  }
  await zip.generateAsync({ type: "blob" })
    .then(function (content) {
      saveAs(content, "example.zip");
    });
}

How to read a file line by line in JavaScript and store it in an array

I have a file in which the data is in the form like
abc#email.com:name
ewdfgwed#gmail.com:nameother
wertgtr#gmsi.com:onemorename
I want to store the emails and names in arrays like
email = ["abc#email.com","ewdfgwed#gmail.com","wertgtr#gmsi.com"]
names = ["name","nameother","onemorename"]
Also, guys, the file is a little large, around 50 MB, so I also want to do this without using a lot of resources.
I have tried this, but can't get it to work:
const fs = require('fs');
const names = [];
let num = 0;
try {
  // read contents of the file
  const data = fs.readFileSync('file.txt', 'UTF-8');
  // split the contents by new line
  const lines = data.split(/\r?\n/);
  // store each line
  lines.forEach((line) => {
    names[num] = line;
    num++;
  });
} catch (err) {
  console.error(err);
}
Maybe this will help you.
Async Version:
const fs = require('fs')

const emails = [];
const names = [];

fs.readFile('file.txt', (err, file) => {
  if (err) throw err;
  file.toString().split('\n').forEach(line => {
    const splitedLine = line.split(':');
    emails.push(splitedLine[0]);
    names.push(splitedLine[1]);
  });
});
Sync Version:
const fs = require('fs')

const emails = [];
const names = [];

fs.readFileSync('file.txt').toString().split('\n').forEach(line => {
  const splitedLine = line.split(':');
  emails.push(splitedLine[0]);
  names.push(splitedLine[1]);
})

console.log(emails)
console.log(names)
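Since the file is around 50 MB, streaming it line by line keeps memory usage low, because the whole file never sits in memory at once. A sketch using Node's built-in readline module, assuming the same file.txt format as above:

const fs = require('fs');
const readline = require('readline');

const emails = [];
const names = [];

const rl = readline.createInterface({
  input: fs.createReadStream('file.txt'),
  crlfDelay: Infinity // treat \r\n as a single line break
});

// each line looks like "abc#email.com:name"
rl.on('line', line => {
  const [email, name] = line.split(':');
  emails.push(email);
  names.push(name);
});

rl.on('close', () => {
  console.log(emails);
  console.log(names);
});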
You can directly use line-reader:
fileData.js:
const lineReader = require('line-reader');

class FileData {
  constructor(filePath) {
    this.emails = [];
    this.names = [];
    // arrow function keeps "this" bound to the FileData instance
    lineReader.eachLine(filePath, (line) => {
      console.log(line);
      const splitedLine = line.split(':');
      this.emails.push(splitedLine[0]);
      this.names.push(splitedLine[1]);
    });
  }
  getEmails() {
    return this.emails;
  }
  getNames() {
    return this.names;
  }
}

module.exports = FileData;
Wherever you want:
const FileData = require('path to fileData.js');
const fileData = new FileData('test.txt');
console.log(fileData.getEmails());
Note that eachLine reads the file asynchronously, so the arrays fill in as lines are read; calling getEmails() immediately after constructing FileData may return an incomplete list.

module.exports is returning undefined

I am new to Node.js, and today I was trying to read data from a file, data.json.
Here is the JSON file:
{"username":"rahul_v7","password":"9673"} {"username":"7vik","password":"3248"} {"username":"pradypot_2","password":"6824"} {"username":"ad_1","password":"9284"} {"username":"premchand_4","password":"4346"}
I was using the code below, in a file GetData.js, to read the data present in data.json:
'use strict';
const fs = require('fs');

let res = '', resObjs = [];
let fin = fs.createReadStream('F:/RahulVerma/NodeJS/data.json', 'utf-8');
fin.on('data', data => {
  if (data.length > 0) res += data;
}).on('end', () => {
  if (res.length > 0) {
    let resArr = res.trim().split(' ');
    for (let i = 0; i < resArr.length; i++) {
      resObjs.push(JSON.parse(resArr[i]));
    }
    module.exports.objects = resObjs;
  }
});
As you can see, I am exporting the resObjs array, which is actually an array of objects, to another file named AppendData.js, which is given below:
'use strict';
const fs = require('fs');
const getObjs = require('./GetData');
console.log(getObjs.objects);
But when I run AppendData.js in Node.js 9.3.0 (ia32), the output is undefined.
You're trying to use the objects before they've been read. Remember that your code reading the stream runs asynchronously, and nothing in your code attempts to coordinate it with module loading. So AppendData.js isn't seeing the objects export because it doesn't exist yet as of when that code runs.
Instead, return a promise of the objects that AppendData.js can consume; see *** comments:
'use strict';
const fs = require('fs');

// *** Export the promise
module.exports.objectsPromise = new Promise((resolve, reject) => {
  let res = '', resObjs = [];
  let fin = fs.createReadStream('F:/RahulVerma/NodeJS/data.json', 'utf-8');
  fin.on('data', data => {
    if (data.length > 0) res += data;
  }).on('end', () => {
    if (res.length > 0) {
      let resArr = res.trim().split(' ');
      for (let i = 0; i < resArr.length; i++) {
        resObjs.push(JSON.parse(resArr[i]));
      }
      resolve(resObjs); // *** Resolve the promise
    }
  }).on('error', error => {
    reject(error); // *** Reject the promise
  });
});
Note I added a handler for errors.
And then:
'use strict';
const fs = require('fs');
const getObjs = require('./GetData');
getObjs.objectsPromise
  .then(console.log)
  .catch(error => {
    // Do something
  });
Again note the error handler.
The problem happens because you're trying to use the objects in AppendData.js before they are loaded in GetData.js, since fs.createReadStream is asynchronous. To fix this, make module.exports a function that expects a callback in GetData.js, like so:
'use strict';
const fs = require('fs');

module.exports = function(callback) {
  let res = '', resObjs = [];
  let fin = fs.createReadStream('F:/RahulVerma/NodeJS/data.json', 'utf-8');
  fin.on('data', data => {
    if (data.length > 0) res += data;
  }).on('end', () => {
    if (res.length > 0) {
      let resArr = res.trim().split(' ');
      for (let i = 0; i < resArr.length; i++) {
        resObjs.push(JSON.parse(resArr[i]));
      }
      callback(resObjs); // call the callback with the array of results
    }
  });
}
Which you can then use like this in AppendData.js:
'use strict';
const fs = require('fs');
const getObjs = require('./GetData'); // getObjs is now a function
getObjs(function(objects) {
  console.log(objects);
});
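On newer Node versions you could also consume the promise export from the first answer with async/await. A brief sketch, assuming the GetData.js that exports objectsPromise:

'use strict';
const getObjs = require('./GetData');

(async () => {
  try {
    const objects = await getObjs.objectsPromise;
    console.log(objects);
  } catch (error) {
    // handle the read/parse error
    console.error(error);
  }
})();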

NodeJS Waiting for asynchronous function to complete foreach

Hi, so I have been trying to make this work. I tried using the async module but didn't really know how to convert my code to use it, and I tried promisifying it, which didn't work well (I think I did it wrong), so I reverted the function to the way it was at first.
Basically I want to wait until the ReadJson() function is done reading all the JSON files in the array, and then run other functions like editjson etc.
Code:
App.js
const Reader = require('./Reader');
Reader.ReadJson();
Reader.js
const fsp = require('fs-promise');

const JsonFiles = ['json1.json', 'json2.json', 'json3.json', 'json4.json'];
const JsonContents = [];

class Reader {
  static ReadJson() {
    JsonFiles.forEach(name => {
      let FileDir = "D:\\Development\\Java\\" + name;
      fsp.readJson(FileDir).then(contents => {
        if (contents) {
          JsonContents.push(contents);
          console.log(`Loaded >> ${name} ${Reader.JsonContents.length}/${JsonFiles.length}`);
        }
      });
    });
    console.log('Done Reading Json Content!');
    //Other functions
  }
}

Reader.JsonContents = JsonContents;
module.exports = Reader;
So basically the output is:
Done Reading Json Content!
Loaded >> json1.json 1/4
Loaded >> json2.json 2/4
Loaded >> json3.json 3/4
Loaded >> json4.json 4/4
When i need it to be:
Loaded >> json1.json 1/4
Loaded >> json2.json 2/4
Loaded >> json3.json 3/4
Loaded >> json4.json 4/4
Done Reading Json Content!
Thank you :)
Return a promise, track your progress in the forEach, and resolve the promise only when JsonContents reaches the same length as JsonFiles.
const fsp = require('fs-promise');

const JsonFiles = ['json1.json', 'json2.json', 'json3.json', 'json4.json'];
const JsonContents = [];

class Reader {
  static ReadJson() {
    return new Promise((resolve, reject) => {
      JsonFiles.forEach(name => {
        let FileDir = "D:\\Development\\Java\\" + name;
        fsp.readJson(FileDir).then(contents => {
          if (contents) {
            JsonContents.push(contents);
            console.log(`Loaded >> ${name} ${Reader.JsonContents.length}/${JsonFiles.length}`);
          }
          if (JsonContents.length == JsonFiles.length) {
            return resolve(JsonContents);
          }
        }).catch(err => {
          return reject(err);
        });
      });
    });
  }
}

Reader.JsonContents = JsonContents;
module.exports = Reader;
And then use it in your app:
const Reader = require('./Reader');
Reader.ReadJson().then(() => { console.log('Done Reading Json Content!'); });
Another option is Promise.all, since you are already using fs-promise. Although this can be done with forEach, a regular for loop is better here.
const fsp = require('fs-promise');

const JsonFiles = ['json1.json', 'json2.json', 'json3.json', 'json4.json'];
const JsonContents = [];

class Reader {
  static ReadJson() {
    var promises = [];
    for (let i = 0; i < JsonFiles.length; i++) {
      let FileDir = "D:\\Development\\Java\\" + JsonFiles[i];
      promises.push(fsp.readJson(FileDir).then(contents => {
        if (contents) {
          JsonContents.push(contents);
          console.log(`Loaded >> ${JsonFiles[i]} ${Reader.JsonContents.length}/${JsonFiles.length}`);
        }
      }));
    }
    return Promise.all(promises);
  }
}

Reader.JsonContents = JsonContents;
module.exports = Reader;
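Since ReadJson() now returns the Promise.all promise, usage in App.js mirrors the first answer. A small sketch with async/await:

const Reader = require('./Reader');

(async () => {
  await Reader.ReadJson();
  console.log('Done Reading Json Content!');
  // functions that rely on Reader.JsonContents can run here
})();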
As an addendum to Ron Dadon's Promise.all method: the Bluebird promise library provides helper functions like Promise.map and Promise.filter that can remove a lot of the boilerplate from Promise array processing code.
const Promise = require('bluebird');
const fsp = require('fs-promise');
const path = require('path');

class Reader {
  static readFiles(jsonPath, jsonFiles) {
    return Promise.map(jsonFiles, name => {
      let filePath = path.join(jsonPath, name);
      return fsp.readJson(filePath);
    })
      .filter((content, index, length) => {
        if (!content) return false;
        console.log(`Loaded >> ${jsonFiles[index]} ${index + 1} / ${length}`);
        return true;
      });
  }
  static readJson() {
    return this.readFiles(this.jsonPath, this.jsonFiles).then(contents => {
      console.log('Done Reading Json Content!', contents);
      return this.jsonContents = contents;
    });
  }
}

Reader.jsonFiles = ['json1.json', 'json2.json', 'json3.json', 'json4.json'];
Reader.jsonPath = 'D:\\Development\\Java';
module.exports = Reader;
