Webpack add files before compiling through plugin - javascript

I am trying to build a webpack plugin and I want to add some files to be processed before the whole compilation step.
My intention is to add some files from a folder to be passed by the normal build process using the desired plugins and loaders.
I realize that if I create a new asset I can do this:
// Webpack plugin: on the "emit" hook, inject a synthetic asset into the
// compilation. An asset is any object exposing source() and size().
SomePlugin.prototype.apply = function (compiler) {
  compiler.plugin("emit", function (compilation, callback) {
    var body = 'content';
    compilation.assets['newAsset.js'] = {
      // Raw bytes webpack writes out for this asset.
      source: function () {
        return body;
      },
      // Byte length reported to webpack's stats.
      size: function () {
        return body.length;
      }
    };
    // Signal completion of this async hook.
    callback();
  });
};
But I don't know how to add for example a .scss on the list of files to be processed so webpack can handle the scss file based on the loaders.

It turns out that using what webpack uses internally can do the trick. Suppose you want to add a markdown file called test.md to be bundled with the same setup you are already using, basing it on the EntryOptionPlugin (what webpack uses to decide how to bundle depending on the entry options). You can do it like this:
// Webpack plugin: on "entry-option", append an extra module (test.md) to the
// configured entry and register the matching entry plugins, mirroring
// webpack's own EntryOptionPlugin behavior for each entry shape.
MyPlugin.prototype.apply = function (compiler) {
  compiler.plugin("entry-option", function (context, entry) {
    var extraFile = __dirname + '/test.md';

    // Choose the entry plugin that matches the shape of one entry item.
    function itemToPlugin(item, name) {
      return Array.isArray(item)
        ? new MultiEntryPlugin(context, item, name)
        : new SingleEntryPlugin(context, item, name);
    }

    // Fold the extra file into the entry option, whatever its shape:
    // string -> array, array -> push, object -> extra key.
    if (typeof entry === 'string') {
      entry = [entry, extraFile];
    } else if (Array.isArray(entry)) {
      entry.push(extraFile);
    } else {
      entry['SOME_KEY'] = extraFile;
    }

    // Register entry plugins exactly as EntryOptionPlugin would.
    if (Array.isArray(entry)) {
      compiler.apply(itemToPlugin(entry, "main"));
    } else if (typeof entry === "object") {
      Object.keys(entry).forEach(function (name) {
        compiler.apply(itemToPlugin(entry[name], name));
      });
    }
    // Returning true tells webpack the entry option was handled.
    return true;
  });
};
The resulting file will be shimmed on your pipeline and handled by the loaders and plugins that you already included.

Related

How Discord checks if a Javascript File is a folder on client side? [duplicate]

I haven't seen any examples that do this. Is this not allowed in the API spec?
I am searching for an easy drag-drop solution for uploading an entire folder tree of photos.
It's now possible, thanks to Chrome >= 21.
// Recursively walks a FileSystemEntry (file or directory) obtained from a
// drag-and-drop DataTransferItem, logging the path of every file found.
// `path` accumulates the directory prefix ("" at the root).
function traverseFileTree(item, path) {
  var prefix = path || "";
  if (item.isFile) {
    // Leaf: resolve the File object and log its full path.
    item.file(function (file) {
      console.log("File:", prefix + file.name);
    });
  } else if (item.isDirectory) {
    // Branch: read the directory's entries and recurse into each one.
    // NOTE(review): readEntries may return only a partial batch (~100
    // entries in Chrome); repeated calls are needed for a full listing.
    var reader = item.createReader();
    reader.readEntries(function (entries) {
      entries.forEach(function (entry) {
        traverseFileTree(entry, prefix + item.name + "/");
      });
    });
  }
}
// Drop handler: convert each dropped DataTransferItem to a FileSystemEntry
// (via the non-standard webkitGetAsEntry) and walk it recursively.
dropArea.addEventListener("drop", function (event) {
  event.preventDefault();
  var droppedItems = event.dataTransfer.items;
  for (var i = 0; i < droppedItems.length; i++) {
    // webkitGetAsEntry is where the magic happens; it may return null
    // for items that are not files/directories.
    var entry = droppedItems[i].webkitGetAsEntry();
    if (entry) {
      traverseFileTree(entry);
    }
  }
}, false);
More info: https://protonet.info/blog/html5-experiment-drag-drop-of-folders/
As a note (from the comments) this code is not complete if more than 100 entries are returned, some iteration is required, see https://stackoverflow.com/a/53058574/885922
Unfortunately none of the existing answers are completely correct because readEntries will not necessarily return ALL the (file or directory) entries for a given directory. This is part of the API specification (see Documentation section below).
To actually get all the files, we'll need to call readEntries repeatedly (for each directory we encounter) until it returns an empty array. If we don't, we will miss some files/sub-directories in a directory e.g. in Chrome, readEntries will only return at most 100 entries at a time.
Using Promises (await/ async) to more clearly demonstrate the correct usage of readEntries (since it's asynchronous), and breadth-first search (BFS) to traverse the directory structure:
// Drop handler function to get all files
// Drop handler helper: breadth-first traversal of the dropped
// directory/file structure, returning a flat array of all file entries.
async function getAllFileEntries(dataTransferItemList) {
  const collected = [];
  const pending = [];
  // dataTransferItemList is array-like but not iterable, so index manually.
  for (let i = 0; i < dataTransferItemList.length; i++) {
    // webkitGetAsEntry is a non-standard feature and may change;
    // its usage is necessary for handling directories.
    pending.push(dataTransferItemList[i].webkitGetAsEntry());
  }
  // BFS: files are collected, directories are expanded into the queue.
  while (pending.length > 0) {
    const entry = pending.shift();
    if (entry.isFile) {
      collected.push(entry);
    } else if (entry.isDirectory) {
      pending.push(...await readAllDirectoryEntries(entry.createReader()));
    }
  }
  return collected;
}
// Get all the entries (files or sub-directories) in a directory
// by calling readEntries until it returns empty array
// Drain a FileSystemDirectoryReader: readEntries returns results in
// batches (Chrome caps each batch at ~100 entries), so keep calling it
// until an empty batch signals the end of the directory.
async function readAllDirectoryEntries(directoryReader) {
  const allEntries = [];
  let batch = await readEntriesPromise(directoryReader);
  while (batch.length > 0) {
    allEntries.push(...batch);
    batch = await readEntriesPromise(directoryReader);
  }
  return allEntries;
}
// Wrap readEntries in a promise to make working with readEntries easier
// readEntries will return only some of the entries in a directory
// e.g. Chrome returns at most 100 entries at a time
// Promisified wrapper around FileSystemDirectoryReader.readEntries.
// Each call yields only a partial batch (Chrome returns at most ~100
// entries at a time), so callers must invoke it repeatedly.
// On failure the error is logged and the promise resolves to undefined
// (error-swallowing behavior kept from the original example).
async function readEntriesPromise(directoryReader) {
  try {
    const entries = await new Promise((resolve, reject) => {
      directoryReader.readEntries(resolve, reject);
    });
    return entries;
  } catch (err) {
    console.log(err);
  }
}
Complete working example on Codepen: https://codepen.io/pen/QWmvxwV
FWIW I only picked this up because I wasn't getting back all the files I expected in a directory containing 40,000 files (many directories containing well over 100 files/sub-directories) when using the accepted answer.
Documentation:
This behaviour is documented in FileSystemDirectoryReader. Excerpt with emphasis added:
readEntries()
Returns an array containing some number of the
directory's entries. Each item in the array is an object based on
FileSystemEntry—typically either FileSystemFileEntry or
FileSystemDirectoryEntry.
But to be fair, the MDN documentation could make this clearer in other sections. The readEntries() documentation simply notes:
readEntries() method retrieves the directory entries within the directory being read and delivers them in an array to the provided callback function
And the only mention/hint that multiple calls are needed is in the description of successCallback parameter:
If there are no files left, or you've already called readEntries() on
this FileSystemDirectoryReader, the array is empty.
Arguably the API could be more intuitive as well.
It's also worth noting that DataTransferItem.webkitGetAsEntry() is a non-standard feature and may change e.g. renamed getAsEntry(). Its usage is necessary to handle uploading files nested within directories.
Related:
johnozbay comments that on Chrome, readEntries will return at most 100 entries for a directory (verified as of Chrome 64).
Xan explains the correct usage of readEntries quite well in this answer (albeit without code).
Pablo Barría Urenda's answer correctly calls readEntries in an asynchronous manner without BFS. He also notes that Firefox returns all the entries in a directory (unlike Chrome), but we can't rely on this given the specification.
This function will give you a promise for array of all dropped files, like <input type="file"/>.files:
// Collects all dropped files (including those nested inside directories)
// from a DataTransfer items list, resolving to a flat array of File
// objects. Each File gains a `filepath` property holding its path
// relative to the drop root, like <input type="file"/>.files plus paths.
// NOTE(review): only the first readEntries batch per directory is read,
// so directories with >100 entries are truncated in Chrome.
function getFilesWebkitDataTransferItems(dataTransferItems) {
  const files = [];

  // Resolve one entry: files are appended to `files`; directories fan
  // out into one promise per child entry.
  function traverseFileTreePromise(item, path = '') {
    return new Promise((resolve) => {
      if (item.isFile) {
        item.file((file) => {
          file.filepath = path + file.name; // save full path
          files.push(file);
          resolve(file);
        });
      } else if (item.isDirectory) {
        const dirReader = item.createReader();
        dirReader.readEntries((entries) => {
          const childPromises = entries.map((entry) =>
            traverseFileTreePromise(entry, path + item.name + "/")
          );
          resolve(Promise.all(childPromises));
        });
      }
    });
  }

  return new Promise((resolve) => {
    const rootPromises = [];
    for (const item of dataTransferItems) {
      rootPromises.push(traverseFileTreePromise(item.webkitGetAsEntry()));
    }
    Promise.all(rootPromises).then(() => resolve(files));
  });
}
Usage:
// Usage of the helper defined above.
// FIX: the original snippet called a misspelled
// getFilesFromWebkitDataTransferItems — the function defined above is
// getFilesWebkitDataTransferItems, so the original threw a ReferenceError.
// The bare `...` placeholder is also turned into a comment so the snippet
// is syntactically valid.
dropArea.addEventListener("drop", function(event) {
  event.preventDefault();
  var items = event.dataTransfer.items;
  getFilesWebkitDataTransferItems(items)
    .then(files => {
      // ... process the resolved files here
    })
}, false);
NPM package:
https://www.npmjs.com/package/datatransfer-files-promise
Usage example:
https://github.com/grabantot/datatransfer-files-promise/blob/master/index.html
In this message to the HTML 5 mailing list Ian Hickson says:
HTML5 now has to upload many files at
once. Browsers could allow users to
pick multiple files at once, including
across multiple directories; that's a
bit out of scope of the spec.
(Also see the original feature proposal.)
So it's safe to assume he considers uploading folders using drag-and-drop also out of scope. Apparently it's up to the browser to serve individual files.
Uploading folders would also have some other difficulties, as described by Lars Gunther:
This […] proposal must have two
checks (if it is doable at all):
Max size, to stop someone from uploading a full directory of several
hundred uncompressed raw images...
Filtering even if the accept attribute is omitted. Mac OS metadata
and Windows thumbnails, etc should be
omitted. All hidden files and
directories should default to be
excluded.
Now you can upload directories with both drag and drop and input.
<input type='file' webkitdirectory >
and for drag and drop(For webkit browsers).
Handling drag and drop folders.
<!-- Drop target; directory entries are inspected in the ondrop handler below. -->
<div id="dropzone"></div>
<script>
var dropzone = document.getElementById('dropzone');
// Handle a drop: convert each DataTransferItem to a FileSystemEntry and
// branch on whether it is a file or a directory. The `...` lines are
// placeholders for application code.
dropzone.ondrop = function(e) {
var length = e.dataTransfer.items.length;
for (var i = 0; i < length; i++) {
// webkitGetAsEntry is non-standard (WebKit/Blink) but required for folders.
var entry = e.dataTransfer.items[i].webkitGetAsEntry();
if (entry.isFile) {
... // do whatever you want
} else if (entry.isDirectory) {
... // do whatever you want
}
}
};
</script>
Resources:
http://updates.html5rocks.com/2012/07/Drag-and-drop-a-folder-onto-Chrome-now-available
Firefox now supports folder upload, as of November 15, 2016, in v50.0: https://developer.mozilla.org/en-US/Firefox/Releases/50#Files_and_directories
You can drag and drop folders into Firefox or you can browse and select a local folder to upload. It also supports folders nested in subfolders.
That means you can now use either Chrome, Firefox, Edge or Opera to upload folders. You can't use Safari or Internet Explorer at present.
Here's a complete example of how to use the file and directory entries API:
// Grab the drop target and the <ul> that will receive the directory listing.
var dropzone = document.getElementById("dropzone");
var listing = document.getElementById("listing");
// Recursively renders a FileSystemEntry into a nested <ul>/<li> listing.
// Files become a single <li>; directories get an <li> plus a nested <ul>
// that is filled asynchronously from readEntries.
// NOTE(review): readEntries is called only once per directory, so listings
// with more than one batch (~100 entries in Chrome) will be truncated.
function scanAndLogFiles(item, container) {
  var row = document.createElement("li");
  row.innerHTML = item.name;
  container.appendChild(row);

  if (item.isDirectory) {
    var reader = item.createReader();
    var childList = document.createElement("ul");
    container.appendChild(childList);
    reader.readEntries(function (entries) {
      entries.forEach(function (entry) {
        scanAndLogFiles(entry, childList);
      });
    });
  }
}
// Wire up the dropzone: suppress default dragover handling so the drop
// event fires, then on drop convert every DataTransferItem to an entry
// and render it into the listing.
dropzone.addEventListener("dragover", function (event) {
  event.preventDefault();
}, false);

dropzone.addEventListener("drop", function (event) {
  var items = event.dataTransfer.items;
  event.preventDefault();
  listing.innerHTML = "";
  for (var i = 0; i < items.length; i++) {
    var entry = items[i].webkitGetAsEntry();
    if (entry) {
      scanAndLogFiles(entry, listing);
    }
  }
}, false);
/* Page-wide base font. */
body {
font: 14px "Arial", sans-serif;
}
/* Visual drop target for dragged files/folders. */
#dropzone {
text-align: center;
width: 300px;
height: 100px;
margin: 10px;
padding: 10px;
border: 4px dashed red;
border-radius: 10px;
}
/* Centered caption inside the dropzone. */
#boxtitle {
display: table-cell;
vertical-align: middle;
text-align: center;
color: black;
font: bold 2em "Arial", sans-serif;
width: 300px;
height: 100px;
}
<p>Drag files and/or directories to the box below!</p>
<!-- Drop target; styled by the #dropzone rule and handled by the drop listener. -->
<div id="dropzone">
<div id="boxtitle">
Drop Files Here
</div>
</div>
<h2>Directory tree:</h2>
<!-- Populated by scanAndLogFiles with a nested ul/li listing. -->
<ul id="listing"></ul>
webkitGetAsEntry is supported by Chrome 13+, Firefox 50+ and Edge.
Source: https://developer.mozilla.org/en-US/docs/Web/API/DataTransferItem/webkitGetAsEntry
Does HTML5 allow drag-drop upload of folders or a folder tree?
Only Chrome supports this feature. It has failed to have any traction and is likely to be removed.
Ref : https://developer.mozilla.org/en/docs/Web/API/DirectoryReader#readEntries
UPDATE: Since 2012 a lot has changed, see answers above instead. I leave this answer here for the sake of archeology.
The HTML5 spec does NOT say that when selecting a folder for upload, the browser should upload all contained files recursively.
Actually, in Chrome/Chromium, you can upload a folder, but when you do it, it just uploads a meaningless 4KB file, which represents the directory. Some servers-side applications like Alfresco can detect this, and warn the user that folders can not be uploaded:
Recently stumbled upon the need to implement this in two of my projects so I created a bunch of utility functions to help with this.
One creates a data-structure representing all the folders, files and relationship between them, like so 👇
{
folders: [
{
name: string,
folders: Array,
files: Array
},
/* ... */
],
files: Array
}
While the other just returns an Array of all the files (in all folders and sub-folders).
Here's the link to the package: https://www.npmjs.com/package/file-system-utils
I had been happy copy/pasting #grabantot 's solution until I met the 100 file limit issue.
#xlm 's solution overcomes the 100-file-limit, and it returns an array of FileEntry objects.
However in my project I need to extract the file paths from fileEntry objects.
This works if you have access to the ChromeFileSystem api:
// Resolves every dropped file (recursing into directories) to its display
// path via the Chrome Apps fileSystem API. Requires chrome.fileSystem.
const getAllPaths = async (dataTransferItems) => {
  // BFS over the dropped entries; returns a flat list of file entries.
  async function getAllFileEntries(dataTransferItemList) {
    const found = [];
    const queue = [];
    for (let i = 0; i < dataTransferItemList.length; i++) {
      queue.push(dataTransferItemList[i].webkitGetAsEntry());
    }
    while (queue.length > 0) {
      const entry = queue.shift();
      if (entry.isFile) {
        found.push(entry);
      } else if (entry.isDirectory) {
        queue.push(...await readAllDirectoryEntries(entry.createReader()));
      }
    }
    return found;
  }

  // Drain a directory reader: readEntries returns batches (Chrome caps
  // them at ~100 entries), so call until an empty batch comes back.
  async function readAllDirectoryEntries(directoryReader) {
    const entries = [];
    let batch = await readEntriesPromise(directoryReader);
    while (batch.length > 0) {
      entries.push(...batch);
      batch = await readEntriesPromise(directoryReader);
    }
    return entries;
  }

  // Promisified readEntries; errors are logged and yield undefined
  // (error-swallowing behavior kept from the original snippet).
  async function readEntriesPromise(directoryReader) {
    try {
      return await new Promise((resolve, reject) => {
        directoryReader.readEntries(resolve, reject);
      });
    } catch (err) {
      console.log(err);
    }
  }

  // Ask Chrome for the display path of a single entry.
  const getDisplayPath = (entry) =>
    new Promise((resolve, reject) => {
      chrome.fileSystem.getDisplayPath(entry, (displayPath) => {
        if (chrome.runtime.lastError) {
          reject(chrome.runtime.lastError);
        } else {
          resolve(displayPath);
        }
      });
    });

  const fileEntries = await getAllFileEntries(dataTransferItems);
  return Promise.all(fileEntries.map((entry) => getDisplayPath(entry)));
};

Recursively read directory and create object in NodeJS

I've been struggling with trying to automate and clean up how I utilize sprite generation and loading in my HTML5 game using a NodeJS socket.io server to send an object containing the data needed to generate the sprites.
What I want to do to achieve this is to read through a directory /img and all its subdirectories (/assets1, /assets2, /assets3, etc) and create an object based on the data and structure of them. The problem I came across was that I couldn't find a nice way to handle the sub directories of, say, /assets3. Here's how my assets are setup as an example:
And here's the object example that I want to achieve but haven't without just using endless if/elses which honestly doesn't seem appealing to me and there has got to be a better way with the usage of a library.
// Desired output shape: one key per asset directory, mapping image names
// to their paths; nested directories (assets4) become nested objects.
var outputWeWant = {
assets1: {
img1: '/img/assets1/img1.png',
img2: '/img/assets1/img2.png',
},
assets2: {
img1: '/img/assets2/img1.png',
img2: '/img/assets2/img2.png',
},
assets3: {
img1: '/img/assets3/img1.png',
img2: '/img/assets3/img2.png',
assets4: {
img1: '/img/assets3/assets4/img1.png'
}
}
}
Below is just a little bit of brainstorming I did, but this isn't as effective as I want down the road and it looks disgusting having all the is a directory check as we add a new directory into assets4
// Brainstorm (deliberately incomplete): manually walking /img only two
// levels deep. This cannot handle arbitrarily nested directories — the
// recursive parseDirectory answers below solve that properly.
fs.readdirSync('/img/').map(dirName => {
fs.readdirSync('/img/' + dirName).map(fileName => {
if (fs.statSync('/img/' + dirName + '/' + fileName).isDirectory()) {
// Read the new directory and add the files to our object
} else {
// It's not a directory, just add it to our object
}
});
});
This kind of potentially infinite operation calls for a recursive function. I’m going to assume this function is to be written for Node, and I’ll leave the filesystem details to the OP. This snippet should be treated as pseudo-code.
// Pseudo-code sketch: fold a directory's items into an object, mapping
// file names to their paths and recursing into sub-directories.
// Items whose type is neither 'file' nor 'directory' are skipped.
function parseDirectory(directory) {
  const result = {};
  for (const item of directory.getItems()) {
    if (item.type === 'file') {
      result[item.name] = item.path;
    } else if (item.type === 'directory') {
      result[item.name] = parseDirectory(item.path);
    }
  }
  return result;
}
With the added fs code in the OP, here’s a (theoretically) working function:
// Recursively builds an object describing a directory tree: file names map
// to their full paths, sub-directory names map to nested objects.
// Uses synchronous fs calls, so best suited to startup-time asset scans.
function parseDirectory(directory) {
  const tree = {};
  for (const entry of fs.readdirSync(directory)) {
    const entryPath = `${directory}/${entry}`;
    tree[entry] = fs.statSync(entryPath).isDirectory()
      ? parseDirectory(entryPath)
      : entryPath;
  }
  return tree;
}
Of if the syntax of reduce() is too contrived for your liking, try this:
// Same recursive directory-to-object builder, written with an explicit
// index loop over the listing instead of reduce()/forEach().
function parseDirectory(directory) {
  const names = fs.readdirSync(directory);
  const tree = {};
  for (let i = 0; i < names.length; i++) {
    const entryPath = `${directory}/${names[i]}`;
    tree[names[i]] = fs.statSync(entryPath).isDirectory()
      ? parseDirectory(entryPath)
      : entryPath;
  }
  return tree;
}

Uncaught (in promise) Error: Unexpected token < - Implementing systemJS

I'm trying to integrate systemJS in my current project which is using angular 1.5 syntax. I want to load sum.js file using system js , which in turn calls reduce.js and add.js using commonJS syntax .
currently all the build files are being generated though gulp.
I'm not using any transpiler because as per my understanding they are needed for ES6.
My Systemjs.config.js file looks like this:
console.log("in systemjs config");
// FIX: the original snippet contained an extra closing brace before `});`,
// a syntax error that prevented this entire script from running — which is
// why sum.js was never requested and no console messages appeared.
SystemJS.config({
  map: {
    // Map the bare specifier 'wire' onto the app's Test/js folder.
    'wire': 'app/Test/js'
  },
  packages: {
    'wire': {
      // Module loaded when 'wire' itself is imported.
      main: './sum.js'
    }
  }
});
SystemJS.import('wire').then(function (wire) {
  console.log('Module', wire);
});
console.log("end of systemjs config");
sum.js file has this content:
console.log("inside sum file first line");
// CommonJS dependencies, resolved by the loader relative to this module.
var reduce = require("./reduce");
var add = require("./add");
// Sums a list of numbers by folding it with the add() helper, starting at 0.
function sum(list) {
  const total = reduce(list, add, 0);
  return total;
}
// Expose sum as this module's single export.
module.exports = sum;
console.log("inside sum file");
reduce.js has this content:
// Generic left fold: walks `list`, threading the accumulator through
// iteratee(item, accumulator), and returns the final accumulated value.
function reduce(list, iteratee, memo) {
  var accumulator = memo;
  list.forEach(function (item) {
    accumulator = iteratee(item, accumulator);
  });
  return accumulator;
}
// Expose reduce as this module's single export.
module.exports = reduce;
add.js looks like this:
// Adds two values; used as the iteratee for reduce() in sum.js.
function add(a, b) {
  var total = a + b;
  return total;
}
// Expose add as this module's single export.
module.exports = add;
I can see the SystemJS file being loaded in the debugger (network tab), but not the sum.js file, nor can I see any of the console messages which I added in sum.js for debugging.
I think I'm missing some basic configuration steps. Please help

Yui Test - Organize files

i'm looking to implement 'Yui Test' in my website to use the TDD methodology. I've also installed the yuitest CLI (npm install -g yuitest).
I don't know how organize my files, i thought to leave the js code in my 'www/js/functions.js' file and create a new file 'www/js/tests.js' where to put my tests.
The issue is that I don't know how to connect the different files. I'll try to explain.
In my 'www/js/tests.js' file i've this code (by example on website):
// YUI test case for sortArray. The _should.error entry marks
// testSortArray as a test that is EXPECTED to throw, because sortArray
// is deliberately called with a non-array argument.
var testCase = new Y.Test.Case({
  name: "TestCase Name",

  // Special instructions
  _should: {
    error: {
      testSortArray: true // this test should throw an error
    }
  },

  // Tests
  testSortArray: function () {
    sortArray(12); // this should throw an error
  }
});
and in my 'www/js/functions.js' file i've this function:
// Sorts an array in place using the default (lexicographic) order.
// Throws a TypeError for any non-array input.
function sortArray(array) {
  if (!(array instanceof Array)) {
    throw new TypeError("Expected an array");
  }
  array.sort();
}
Obviously it doesn't work, because when I run the test with 'yuitest www/js/tests.js' it doesn't see my function in the 'www/js/functions.js' file.
Obviously if i move the function from 'www/js/functions.js' file to 'www/js/tests.js' file, it works.
But I need to keep these files separate. Any suggestions?
Thanks!
I use a tests.html page to bring in all necessary resources and to execute the tests.
so tests.html will:
include YUI
include functions.js
include tests.js
create a Y.Test.Suite which includes all my tests
call Y.Test.Runner.add(mySuite)
create a Y.Test.Console to render my results
call Y.Test.Runner.run();
You could also probably use the YUI loader to load up your functions.js too, but I've not tried that (largely because my code under test is all YUI modules, so the loader can load and work with them just fine).
One alternative is to use modules. Add a module to YUI and use it in your test. If you use namespaces, it could look like this:
In www/js/functions.js:
// Registers 'my-module' with YUI, exposing Y.MyNamespace.SortArray:
// an in-place default sort that rejects non-array input with a TypeError.
YUI.add('my-module', function (Y) {
  Y.namespace('MyNamespace');
  Y.MyNamespace.SortArray = function (array) {
    if (!(array instanceof Array)) {
      throw new TypeError("Expected an array");
    }
    array.sort();
  };
});
In www/js/tests.js:
// Loads the 'test' and 'my-module' modules, then defines a test case that
// exercises SortArray with invalid input (expected to throw).
YUI().use('test', 'my-module', function (Y) {
  var testCase = new Y.Test.Case({
    name: "TestCase Name",

    // Tests
    testSortArray: function () {
      Y.MyNamespace.SortArray(12); // this should throw an error
    }
  });
});
See Creating YUI Modules.
To make the Loader aware of the my-module, look at this example, from the same page:
// FIX: the original snippet used `YUI.GlobalConfig: { ... };`, which is a
// syntax error — GlobalConfig must be assigned with `=`.
// Registers a module group so the YUI Loader can fetch 'my-module' from
// /www/js/functions.js on demand.
YUI.GlobalConfig = {
  groups: {
    mymodules: {
      base: '/www/js/',
      modules: {
        'my-module': {
          path: 'functions.js'
        }
      }
    }
  }
};
For Loader config options, take a look at the docs.

node.js server reading files order?

I have 2 js files in my node.js app
File a defines global object foo
In file b I can refer to foo.
But if file b is loaded before file a, then an error occurs.
I'm using node's fs module to read the files. I use readdir, and then forEach require on every file. On my system the files are always read alphabetically, so there is never any problem.
Can I depend on those files being read alphabetically?
You can sort your array of files by name instead to be sure.
// Sort the directory listing by name before processing, so the load order
// is deterministic regardless of what order readdir returns.
// FIXES over the original snippet:
//  - the comparator returned 1 even when a === b, violating the sort
//    comparator contract (equal elements must compare as 0), which can
//    yield inconsistent ordering across engines;
//  - readdir errors are now surfaced instead of crashing on
//    `undefined.sort` with a confusing message.
fs.readdir(path, function(err, files) {
  if (err) throw err;
  files.sort(function(a, b) {
    return a < b ? -1 : a > b ? 1 : 0;
  }).forEach(function(file, key) {
    // stuff
  });
});
Because you're working with an array of files, you best bet is to sort the array then work through it; Only starting a new file read when the previous one has completed.
Ignoring for the moment the fact that global variables are usually a bad idea...:-)
If you already know the filepaths you need to load, you might wish to use a structure like:
var fs=require('fs'),
vm=require('vm');
/**
* Loads and evals two initialization files in order.
* @param {callback} onComplete called on init completion
* @param {callback} onError called on error (optional)
* @return null
*/
// Reads and evaluates ./file1.js then ./file2.js in order, ensuring the
// global `foo` defined by the first file exists before the second
// (dependent) file is evaluated.
//
// FIXES over the original snippet:
//  - the "foo is undefined" branch built its message in `msg` but then
//    reported the never-assigned `err_msg` (i.e. undefined);
//  - `err_msg` was an implicit global; failure reporting is now local;
//  - `foo == undefined` throws a ReferenceError when foo was never
//    declared at all — `typeof` is used instead;
//  - files are read as utf8 strings so vm receives a string, not a Buffer;
//  - eval errors report e.message (Error objects have no `.type`).
//
// @param {callback} onComplete called on init completion
// @param {callback} onError   called on error (optional); when omitted,
//                             failures are thrown instead
// @return null
function init_app(onComplete, onError) {
  var first_filepath = './file1.js',
      second_filepath = './file2.js';

  // Report a failure: prefer the onError callback, otherwise throw.
  function fail(message, thrown) {
    if (onError) {
      onError(message);
    } else {
      throw thrown;
    }
  }

  // read/eval first file
  fs.readFile(first_filepath, 'utf8', function (first_err, first_result) {
    if (first_err) {
      return fail(first_err.message, first_err);
    }
    // evaluate first result
    try {
      vm.createScript(first_result).runInThisContext();
    } catch (e) {
      var first_eval_msg = 'failed to eval source from first file:' + e.message;
      return fail(first_eval_msg, first_eval_msg);
    }
    // check for existence of the foo global; typeof avoids a
    // ReferenceError when file1.js never declared it
    if (typeof foo === 'undefined') {
      var missing_msg = 'foo is undefined after first file read';
      return fail(missing_msg, missing_msg);
    }
    // read/eval second (dependent) file
    fs.readFile(second_filepath, 'utf8', function (second_err, second_result) {
      if (second_err) {
        return fail(second_err.message, second_err);
      }
      // evaluate second, dependent result
      try {
        vm.createScript(second_result).runInThisContext();
      } catch (e) {
        var second_eval_msg = 'failed to eval source from second file:' + e.message;
        return fail(second_eval_msg, second_eval_msg);
      }
      // trigger callback
      if (onComplete) onComplete();
    }); // end fs.readFile(second_filepath,...
  }); // end fs.readFile(first_filepath,...
} // end init_app()
Used as:
// Example wiring: log success to stdout, report failure messages to stderr.
var onComplete = function(){ console.log('init complete'); },
onError = function(msg){ console.error(msg); };
init_app(onComplete,onError);
Then again, if you only load these files once at the start of your application, you should use require().

Categories

Resources