flow like the river

This commit is contained in:
root 2025-11-07 00:06:12 +01:00
commit 013fe673f3
42435 changed files with 5764238 additions and 0 deletions

9
VISUALIZACION/node_modules/parcel-bundler/src/.babelrc generated vendored Executable file
View file

@ -0,0 +1,9 @@
{
"presets": [["env", {
"targets": {
"node": "6"
}
}]],
"plugins": ["transform-async-super"],
"ignore": ["builtins"]
}

View file

@ -0,0 +1,6 @@
{
"extends": "../.eslintrc.json",
"rules": {
"no-console": 0
}
}

216
VISUALIZACION/node_modules/parcel-bundler/src/Asset.js generated vendored Executable file
View file

@ -0,0 +1,216 @@
const URL = require('url');
const path = require('path');
const fs = require('./utils/fs');
const objectHash = require('./utils/objectHash');
const md5 = require('./utils/md5');
const isURL = require('./utils/is-url');
const config = require('./utils/config');
let ASSET_ID = 1;
/**
* An Asset represents a file in the dependency tree. Assets can have multiple
* parents that depend on it, and can be added to multiple output bundles.
* The base Asset class doesn't do much by itself, but sets up an interface
* for subclasses to implement.
*/
class Asset {
  /**
   * @param {string} name - Absolute path of the source file.
   * @param {Object} pkg - Nearest package.json contents (may be falsy).
   * @param {Object} options - Bundler options; `rootDir` and `rendition` are read here.
   */
  constructor(name, pkg, options) {
    this.id = ASSET_ID++; // Process-wide unique id.
    this.name = name;
    this.basename = path.basename(this.name);
    this.relativeName = path.relative(options.rootDir, this.name);
    this.package = pkg || {};
    this.options = options;
    this.encoding = 'utf8';
    // Asset type is the file extension without the leading dot.
    this.type = path.extname(this.name).slice(1);
    this.processed = false;
    // A rendition may pre-supply contents (e.g. a sub-asset carved out of another file).
    this.contents = options.rendition ? options.rendition.value : null;
    this.ast = null;
    this.generated = null;
    this.hash = null;
    this.parentDeps = new Set();
    this.dependencies = new Map();
    this.depAssets = new Map();
    this.parentBundle = null;
    this.bundles = new Set();
    this.cacheData = {};
    this.buildTime = 0;
    this.bundledSize = 0;
  }

  // Subclasses may override to force a cache miss; the caller passes the
  // cached `cacheData` as an argument.
  shouldInvalidate() {
    return false;
  }

  // Read the file from disk unless contents were already supplied.
  async loadIfNeeded() {
    if (this.contents == null) {
      this.contents = await this.load();
    }
  }

  // Ensure contents are loaded and parsed into an AST, doing each at most once.
  async parseIfNeeded() {
    await this.loadIfNeeded();
    if (!this.ast) {
      this.ast = await this.parse(this.contents);
    }
  }

  // Parse the file and let the subclass record its dependencies, unless a
  // rendition explicitly declares that it has none.
  async getDependencies() {
    if (
      this.options.rendition &&
      this.options.rendition.hasDependencies === false
    ) {
      return;
    }
    await this.loadIfNeeded();
    if (this.contents && this.mightHaveDependencies()) {
      await this.parseIfNeeded();
      await this.collectDependencies();
    }
  }

  // Record a dependency by name; `opts` are merged into the stored descriptor.
  addDependency(name, opts) {
    this.dependencies.set(name, Object.assign({name}, opts));
  }

  /**
   * Register a dependency referenced by URL and return the URL rewritten to
   * point at the dependency's generated bundle name. May be called as
   * (url, opts): when `from` is an object it is treated as the options.
   * External/absolute URLs and empty values are returned unchanged.
   */
  addURLDependency(url, from = this.name, opts) {
    if (!url || isURL(url)) {
      return url;
    }
    if (typeof from === 'object') {
      opts = from;
      from = this.name;
    }
    const parsed = URL.parse(url);
    const resolved = path.resolve(
      path.dirname(from),
      decodeURIComponent(parsed.pathname)
    );
    // URL dependencies are dynamic, i.e. they become separate bundles.
    this.addDependency(
      './' + path.relative(path.dirname(this.name), resolved),
      Object.assign({dynamic: true}, opts)
    );
    parsed.pathname = this.options.parser
      .getAsset(resolved, this.package, this.options)
      .generateBundleName();
    return URL.format(parsed);
  }

  /**
   * Find and load the first matching config file (e.g. .babelrc) for this
   * asset. Returns the loaded config, the config path when
   * `opts.load === false`, or null when no config was found.
   */
  async getConfig(filenames, opts = {}) {
    // Resolve the config file
    let conf = await config.resolve(opts.path || this.name, filenames);
    if (conf) {
      // Add as a dependency so it is added to the watcher and invalidates
      // this asset when the config changes.
      this.addDependency(conf, {includedInParent: true});
      if (opts.load === false) {
        return conf;
      }
      return await config.load(opts.path || this.name, filenames);
    }
    return null;
  }

  // Subclasses may override to skip parsing files that cannot have deps.
  mightHaveDependencies() {
    return true;
  }

  async load() {
    return await fs.readFile(this.name, this.encoding);
  }

  parse() {
    // do nothing by default
  }

  collectDependencies() {
    // do nothing by default
  }

  async pretransform() {
    // do nothing by default
  }

  async transform() {
    // do nothing by default
  }

  // Default output: the (possibly transformed) contents keyed by asset type.
  async generate() {
    return {
      [this.type]: this.contents
    };
  }

  // Full pipeline: load -> pretransform -> collect deps -> transform ->
  // generate -> hash. The result is memoized in `this.generated`.
  async process() {
    if (!this.generated) {
      await this.loadIfNeeded();
      await this.pretransform();
      await this.getDependencies();
      await this.transform();
      this.generated = await this.generate();
      this.hash = await this.generateHash();
    }
    return this.generated;
  }

  // Hook for subclasses to post-process the generated output.
  async postProcess(generated) {
    return generated;
  }

  generateHash() {
    return objectHash(this.generated);
  }

  // Reset all derived state so the asset can be fully re-processed.
  invalidate() {
    this.processed = false;
    this.contents = null;
    this.ast = null;
    this.generated = null;
    this.hash = null;
    this.dependencies.clear();
    this.depAssets.clear();
  }

  // Reset only bundle placement (used between bundle-tree rebuilds).
  invalidateBundle() {
    this.parentBundle = null;
    this.bundles.clear();
    this.parentDeps.clear();
  }

  generateBundleName() {
    // Generate a unique name. This will be replaced with a nicer
    // name later as part of content hashing.
    return md5(this.name) + '.' + this.type;
  }

  // Rewrite temporary bundle names inside generated string outputs.
  replaceBundleNames(bundleNameMap) {
    for (let key in this.generated) {
      let value = this.generated[key];
      if (typeof value === 'string') {
        // Replace temporary bundle names in the output with the final content-hashed names.
        for (let [name, map] of bundleNameMap) {
          value = value.split(name).join(map);
        }
        this.generated[key] = value;
      }
    }
  }

  // Subclasses can decorate errors (e.g. with code frames).
  generateErrorMessage(err) {
    return err;
  }
}
module.exports = Asset;

274
VISUALIZACION/node_modules/parcel-bundler/src/Bundle.js generated vendored Executable file
View file

@ -0,0 +1,274 @@
const Path = require('path');
const crypto = require('crypto');
/**
* A Bundle represents an output file, containing multiple assets. Bundles can have
* child bundles, which are bundles that are loaded dynamically from this bundle.
* Child bundles are also produced when importing an asset of a different type from
* the bundle, e.g. importing a CSS file from JS.
*/
class Bundle {
  /**
   * @param {string} type - Output type (extension without dot), e.g. 'js'.
   * @param {string} name - Absolute output path for this bundle.
   * @param {Bundle} [parent] - Parent bundle this one was split off from.
   */
  constructor(type, name, parent) {
    this.type = type;
    this.name = name;
    this.parentBundle = parent;
    this.entryAsset = null;
    this.assets = new Set();
    this.childBundles = new Set();
    this.siblingBundles = new Set();
    this.siblingBundlesMap = new Map();
    this.offsets = new Map();
    this.totalSize = 0;
    this.bundleTime = 0;
  }

  // Create a bundle whose type and name derive from `asset`, with that asset
  // as the entry asset.
  static createWithAsset(asset, parentBundle) {
    let bundle = new Bundle(
      asset.type,
      Path.join(asset.options.outDir, asset.generateBundleName()),
      parentBundle
    );
    bundle.entryAsset = asset;
    bundle.addAsset(asset);
    return bundle;
  }

  addAsset(asset) {
    asset.bundles.add(this);
    this.assets.add(asset);
  }

  removeAsset(asset) {
    asset.bundles.delete(this);
    this.assets.delete(asset);
  }

  // Record the line offset of an asset inside the packaged output.
  addOffset(asset, line) {
    this.offsets.set(asset, line);
  }

  getOffset(asset) {
    return this.offsets.get(asset) || 0;
  }

  /**
   * Get (or lazily create) the parallel bundle for `type`, e.g. the CSS
   * bundle next to a JS bundle. Returns `this` when the type is empty or
   * matches this bundle's own type.
   */
  getSiblingBundle(type) {
    if (!type || type === this.type) {
      return this;
    }
    if (!this.siblingBundlesMap.has(type)) {
      let bundle = new Bundle(
        type,
        Path.join(
          Path.dirname(this.name),
          Path.basename(this.name, Path.extname(this.name)) + '.' + type
        ),
        this
      );
      this.childBundles.add(bundle);
      this.siblingBundles.add(bundle);
      this.siblingBundlesMap.set(type, bundle);
    }
    return this.siblingBundlesMap.get(type);
  }

  // Create a child bundle (e.g. for a dynamic import) rooted at `entryAsset`.
  createChildBundle(entryAsset) {
    let bundle = Bundle.createWithAsset(entryAsset, this);
    this.childBundles.add(bundle);
    return bundle;
  }

  // Like createChildBundle, but also tracked as a sibling of this bundle.
  createSiblingBundle(entryAsset) {
    let bundle = this.createChildBundle(entryAsset);
    this.siblingBundles.add(bundle);
    return bundle;
  }

  get isEmpty() {
    return this.assets.size === 0;
  }

  /**
   * Recursively rename this bundle tree to its final (possibly hashed)
   * names, returning a Map of old basename -> new basename used to patch
   * references inside generated asset output.
   */
  getBundleNameMap(contentHash, hashes = new Map()) {
    if (this.name) {
      let hashedName = this.getHashedBundleName(contentHash);
      hashes.set(Path.basename(this.name), hashedName);
      this.name = Path.join(Path.dirname(this.name), hashedName);
    }
    for (let child of this.childBundles.values()) {
      child.getBundleNameMap(contentHash, hashes);
    }
    return hashes;
  }

  getHashedBundleName(contentHash) {
    // If content hashing is enabled, generate a hash from all assets in the bundle.
    // Otherwise, use a hash of the filename so it remains consistent across builds.
    let ext = Path.extname(this.name);
    let hash = (contentHash
      ? this.getHash()
      : Path.basename(this.name, ext)
    ).slice(-8);
    let entryAsset = this.entryAsset || this.parentBundle.entryAsset;
    let name = Path.basename(entryAsset.name, Path.extname(entryAsset.name));
    let isMainEntry = entryAsset.options.entryFiles[0] === entryAsset.name;
    let isEntry =
      entryAsset.options.entryFiles.includes(entryAsset.name) ||
      Array.from(entryAsset.parentDeps).some(dep => dep.entry);
    // If this is the main entry file, use the output file option as the name if provided.
    if (isMainEntry && entryAsset.options.outFile) {
      let extname = Path.extname(entryAsset.options.outFile);
      if (extname) {
        ext = this.entryAsset ? extname : ext;
        name = Path.basename(entryAsset.options.outFile, extname);
      } else {
        name = entryAsset.options.outFile;
      }
    }
    // If this is an entry asset, don't hash. Return a relative path
    // from the main file so we keep the original file paths.
    if (isEntry) {
      return Path.join(
        Path.relative(
          entryAsset.options.rootDir,
          Path.dirname(entryAsset.name)
        ),
        name + ext
      );
    }
    // If this is an index file, use the parent directory name instead
    // which is probably more descriptive.
    if (name === 'index') {
      name = Path.basename(Path.dirname(entryAsset.name));
    }
    // Add the content hash and extension.
    return name + '.' + hash + ext;
  }

  /**
   * Package this bundle tree to disk, skipping bundles whose content hash is
   * unchanged from `oldHashes`. Source-map ('map') bundles are packaged
   * after all other bundles have completed. Returns the name -> hash map of
   * this build.
   */
  async package(bundler, oldHashes, newHashes = new Map()) {
    let promises = [];
    let mappings = [];
    if (!this.isEmpty) {
      let hash = this.getHash();
      newHashes.set(this.name, hash);
      if (!oldHashes || oldHashes.get(this.name) !== hash) {
        promises.push(this._package(bundler));
      }
    }
    for (let bundle of this.childBundles.values()) {
      if (bundle.type === 'map') {
        mappings.push(bundle);
      } else {
        promises.push(bundle.package(bundler, oldHashes, newHashes));
      }
    }
    await Promise.all(promises);
    for (let bundle of mappings) {
      await bundle.package(bundler, oldHashes, newHashes);
    }
    return newHashes;
  }

  // Run the packager for this bundle's type: setup/start, add every asset
  // (dependencies first), then end. Also accumulates `bundleTime`.
  async _package(bundler) {
    let Packager = bundler.packagers.get(this.type);
    let packager = new Packager(this, bundler);
    let startTime = Date.now();
    await packager.setup();
    await packager.start();
    let included = new Set();
    for (let asset of this.assets) {
      await this._addDeps(asset, packager, included);
    }
    await packager.end();
    this.bundleTime = Date.now() - startTime;
    for (let asset of this.assets) {
      this.bundleTime += asset.buildTime;
    }
  }

  // Depth-first add of `asset` and its in-bundle dependencies to the packager.
  async _addDeps(asset, packager, included) {
    if (!this.assets.has(asset) || included.has(asset)) {
      return;
    }
    included.add(asset);
    for (let depAsset of asset.depAssets.values()) {
      await this._addDeps(depAsset, packager, included);
    }
    await packager.addAsset(asset);
    // Attribute the growth of the packager output to this asset.
    const assetSize = packager.getSize() - this.totalSize;
    if (assetSize > 0) {
      this.addAssetSize(asset, assetSize);
    }
  }

  addAssetSize(asset, size) {
    asset.bundledSize = size;
    this.totalSize += size;
  }

  // List of bundles from this one up to the root, inclusive.
  getParents() {
    let parents = [];
    let bundle = this;
    while (bundle) {
      parents.push(bundle);
      bundle = bundle.parentBundle;
    }
    return parents;
  }

  // Find the deepest bundle that is an ancestor of both this and `bundle`.
  findCommonAncestor(bundle) {
    // Get a list of parent bundles going up to the root
    let ourParents = this.getParents();
    let theirParents = bundle.getParents();
    // Start from the root bundle, and find the first bundle that's different
    let a = ourParents.pop();
    let b = theirParents.pop();
    let last;
    while (a === b && ourParents.length > 0 && theirParents.length > 0) {
      last = a;
      a = ourParents.pop();
      b = theirParents.pop();
    }
    if (a === b) {
      // One bundle descended from the other
      return a;
    }
    return last;
  }

  // MD5 over the hashes of all assets in this bundle, in insertion order.
  getHash() {
    let hash = crypto.createHash('md5');
    for (let asset of this.assets) {
      hash.update(asset.hash);
    }
    return hash.digest('hex');
  }
}
module.exports = Bundle;

701
VISUALIZACION/node_modules/parcel-bundler/src/Bundler.js generated vendored Executable file
View file

@ -0,0 +1,701 @@
const fs = require('./utils/fs');
const Resolver = require('./Resolver');
const Parser = require('./Parser');
const WorkerFarm = require('./workerfarm/WorkerFarm');
const Path = require('path');
const Bundle = require('./Bundle');
const Watcher = require('./Watcher');
const FSCache = require('./FSCache');
const HMRServer = require('./HMRServer');
const Server = require('./Server');
const {EventEmitter} = require('events');
const logger = require('./Logger');
const PackagerRegistry = require('./packagers');
const localRequire = require('./utils/localRequire');
const config = require('./utils/config');
const emoji = require('./utils/emoji');
const loadEnv = require('./utils/env');
const PromiseQueue = require('./utils/PromiseQueue');
const installPackage = require('./utils/installPackage');
const bundleReport = require('./utils/bundleReport');
const prettifyTime = require('./utils/prettifyTime');
const getRootDir = require('./utils/getRootDir');
const glob = require('glob');
/**
* The Bundler is the main entry point. It resolves and loads assets,
* creates the bundle tree, and manages the worker farm, cache, and file watcher.
*/
class Bundler extends EventEmitter {
  /**
   * @param {string|string[]} entryFiles - Entry file paths or globs.
   * @param {Object} [options] - Raw user options; see normalizeOptions().
   */
  constructor(entryFiles, options = {}) {
    super();
    this.entryFiles = this.normalizeEntries(entryFiles);
    this.options = this.normalizeOptions(options);
    this.resolver = new Resolver(this.options);
    this.parser = new Parser(this.options);
    this.packagers = new PackagerRegistry();
    this.cache = this.options.cache ? new FSCache(this.options) : null;
    this.delegate = options.delegate || {};
    this.bundleLoaders = {};
    // Built-in runtime loaders for dynamically imported bundle types.
    this.addBundleLoader('wasm', {
      browser: require.resolve('./builtins/loaders/browser/wasm-loader'),
      node: require.resolve('./builtins/loaders/node/wasm-loader')
    });
    this.addBundleLoader('css', {
      browser: require.resolve('./builtins/loaders/browser/css-loader'),
      node: require.resolve('./builtins/loaders/node/css-loader')
    });
    this.addBundleLoader('js', {
      browser: require.resolve('./builtins/loaders/browser/js-loader'),
      node: require.resolve('./builtins/loaders/node/js-loader')
    });
    this.pending = false;
    this.loadedAssets = new Map();
    this.watchedAssets = new Map();
    this.farm = null;
    this.watcher = null;
    this.hmr = null;
    this.bundleHashes = null;
    this.errored = false;
    // Queue that processes assets via processAsset(); run in bundle().
    this.buildQueue = new PromiseQueue(this.processAsset.bind(this));
    this.rebuildTimeout = null;
    logger.setOptions(this.options);
  }

  // Normalize entry input to an array of absolute, glob-expanded paths.
  normalizeEntries(entryFiles) {
    // Support passing a single file
    if (entryFiles && !Array.isArray(entryFiles)) {
      entryFiles = [entryFiles];
    }
    // If no entry files provided, resolve the entry point from the current directory.
    if (!entryFiles || entryFiles.length === 0) {
      entryFiles = [process.cwd()];
    }
    // Match files as globs
    return entryFiles
      .reduce((p, m) => p.concat(glob.sync(m, {nonull: true})), [])
      .map(f => Path.resolve(f));
  }

  // Fill in defaults for all user options. Many defaults depend on whether
  // this is a production build (minify, watch, hmr, contentHash, autoinstall).
  normalizeOptions(options) {
    const isProduction =
      options.production || process.env.NODE_ENV === 'production';
    const publicURL = options.publicUrl || options.publicURL || '/';
    const watch =
      typeof options.watch === 'boolean' ? options.watch : !isProduction;
    const target = options.target || 'browser';
    return {
      production: isProduction,
      outDir: Path.resolve(options.outDir || 'dist'),
      outFile: options.outFile || '',
      publicURL: publicURL,
      watch: watch,
      cache: typeof options.cache === 'boolean' ? options.cache : true,
      cacheDir: Path.resolve(options.cacheDir || '.cache'),
      killWorkers:
        typeof options.killWorkers === 'boolean' ? options.killWorkers : true,
      minify:
        typeof options.minify === 'boolean' ? options.minify : isProduction,
      target: target,
      // HMR is never enabled for node targets; otherwise it follows `watch`.
      hmr:
        target === 'node'
          ? false
          : typeof options.hmr === 'boolean'
            ? options.hmr
            : watch,
      https: options.https || false,
      logLevel: isNaN(options.logLevel) ? 3 : options.logLevel,
      entryFiles: this.entryFiles,
      hmrPort: options.hmrPort || 0,
      rootDir: getRootDir(this.entryFiles),
      sourceMaps:
        typeof options.sourceMaps === 'boolean' ? options.sourceMaps : true,
      hmrHostname:
        options.hmrHostname ||
        (options.target === 'electron' ? 'localhost' : ''),
      detailedReport: options.detailedReport || false,
      global: options.global,
      autoinstall:
        typeof options.autoinstall === 'boolean'
          ? options.autoinstall
          : !isProduction,
      contentHash:
        typeof options.contentHash === 'boolean'
          ? options.contentHash
          : isProduction
    };
  }

  // Register an Asset subclass (by module path) for a file extension.
  // Must be called before the worker farm starts.
  addAssetType(extension, path) {
    if (typeof path !== 'string') {
      throw new Error('Asset type should be a module path.');
    }
    if (this.farm) {
      throw new Error('Asset types must be added before bundling.');
    }
    this.parser.registerExtension(extension, path);
  }

  // Register a packager for a bundle type. Must be called before bundling.
  addPackager(type, packager) {
    if (this.farm) {
      throw new Error('Packagers must be added before bundling.');
    }
    this.packagers.add(type, packager);
  }

  /**
   * Register a runtime loader module for a bundle type.
   * `paths` is either a single module path (used for both targets) or an
   * object with `node` and/or `browser` keys.
   */
  addBundleLoader(type, paths) {
    if (typeof paths === 'string') {
      paths = {node: paths, browser: paths};
    } else if (typeof paths !== 'object') {
      throw new Error('Bundle loaders should be an object.');
    }
    for (const target in paths) {
      if (target !== 'node' && target !== 'browser') {
        throw new Error(`Unknown bundle loader target "${target}".`);
      }
      if (typeof paths[target] !== 'string') {
        throw new Error('Bundle loader should be a string.');
      }
    }
    if (this.farm) {
      throw new Error('Bundle loaders must be added before bundling.');
    }
    this.bundleLoaders[type] = paths;
  }

  // Load all parcel-plugin-* packages listed in the project's package.json
  // (dependencies and devDependencies) and invoke them with this bundler.
  // Plugin load failures only warn; they do not abort the build.
  async loadPlugins() {
    let relative = Path.join(this.options.rootDir, 'index');
    let pkg = await config.load(relative, ['package.json']);
    if (!pkg) {
      return;
    }
    try {
      let deps = Object.assign({}, pkg.dependencies, pkg.devDependencies);
      for (let dep in deps) {
        const pattern = /^(@.*\/)?parcel-plugin-.+/;
        if (pattern.test(dep)) {
          let plugin = await localRequire(dep, relative);
          await plugin(this);
        }
      }
    } catch (err) {
      logger.warn(err);
    }
  }

  /**
   * Run a full build: process all queued assets, (re)construct the bundle
   * tree, name the bundles, and package them to disk.
   * Returns the main Bundle. Emits 'bundled' on success and 'buildEnd' in
   * all cases. Errors are logged (and re-thrown only under NODE_ENV=test
   * without HMR).
   */
  async bundle() {
    // If another bundle is already pending, wait for that one to finish and retry.
    if (this.pending) {
      return new Promise((resolve, reject) => {
        this.once('buildEnd', () => {
          this.bundle().then(resolve, reject);
        });
      });
    }
    let isInitialBundle = !this.entryAssets;
    let startTime = Date.now();
    this.pending = true;
    this.errored = false;
    logger.clear();
    logger.status(emoji.progress, 'Building...');
    try {
      // Start worker farm, watcher, etc. if needed
      await this.start();
      // If this is the initial bundle, ensure the output directory exists, and resolve the main asset.
      if (isInitialBundle) {
        await fs.mkdirp(this.options.outDir);
        this.entryAssets = new Set();
        for (let entry of this.entryFiles) {
          let asset = await this.resolveAsset(entry);
          this.buildQueue.add(asset);
          this.entryAssets.add(asset);
        }
      }
      // Build the queued assets.
      let loadedAssets = await this.buildQueue.run();
      // The changed assets are any that don't have a parent bundle yet
      // plus the ones that were in the build queue.
      let changedAssets = [...this.findOrphanAssets(), ...loadedAssets];
      // Invalidate bundles
      for (let asset of this.loadedAssets.values()) {
        asset.invalidateBundle();
      }
      // Create a root bundle to hold all of the entry assets, and add them to the tree.
      this.mainBundle = new Bundle();
      for (let asset of this.entryAssets) {
        this.createBundleTree(asset, this.mainBundle);
      }
      // If there is only one child bundle, replace the root with that bundle.
      if (this.mainBundle.childBundles.size === 1) {
        this.mainBundle = Array.from(this.mainBundle.childBundles)[0];
      }
      // Generate the final bundle names, and replace references in the built assets.
      this.bundleNameMap = this.mainBundle.getBundleNameMap(
        this.options.contentHash
      );
      for (let asset of changedAssets) {
        asset.replaceBundleNames(this.bundleNameMap);
      }
      // Emit an HMR update if this is not the initial bundle.
      if (this.hmr && !isInitialBundle) {
        this.hmr.emitUpdate(changedAssets);
      }
      // Package everything up
      this.bundleHashes = await this.mainBundle.package(
        this,
        this.bundleHashes
      );
      // Unload any orphaned assets
      this.unloadOrphanedAssets();
      let buildTime = Date.now() - startTime;
      let time = prettifyTime(buildTime);
      logger.status(emoji.success, `Built in ${time}.`, 'green');
      if (!this.watcher) {
        bundleReport(this.mainBundle, this.options.detailedReport);
      }
      this.emit('bundled', this.mainBundle);
      return this.mainBundle;
    } catch (err) {
      this.errored = true;
      logger.error(err);
      if (this.hmr) {
        this.hmr.emitError(err);
      }
      if (process.env.NODE_ENV === 'production') {
        process.exitCode = 1;
      } else if (process.env.NODE_ENV === 'test' && !this.hmr) {
        throw err;
      }
    } finally {
      this.pending = false;
      this.emit('buildEnd');
      // If not in watch mode, stop the worker farm so we don't keep the process running.
      if (!this.watcher && this.options.killWorkers) {
        this.stop();
      }
    }
  }

  // One-time startup: load plugins and .env, then start the watcher, the HMR
  // server, and the worker farm. Idempotent (guarded by `this.farm`).
  async start() {
    if (this.farm) {
      return;
    }
    await this.loadPlugins();
    await loadEnv(Path.join(this.options.rootDir, 'index'));
    // Snapshot options that must be passed to the worker processes.
    this.options.extensions = Object.assign({}, this.parser.extensions);
    this.options.bundleLoaders = this.bundleLoaders;
    this.options.env = process.env;
    if (this.options.watch) {
      this.watcher = new Watcher();
      this.watcher.on('change', this.onChange.bind(this));
    }
    if (this.options.hmr) {
      this.hmr = new HMRServer();
      this.options.hmrPort = await this.hmr.start(this.options);
    }
    this.farm = WorkerFarm.getShared(this.options);
  }

  // Shut down the worker farm, file watcher, and HMR server if running.
  stop() {
    if (this.farm) {
      this.farm.end();
    }
    if (this.watcher) {
      this.watcher.stop();
    }
    if (this.hmr) {
      this.hmr.stop();
    }
  }

  // Resolve, queue, and fully build a single asset; returns the Asset.
  async getAsset(name, parent) {
    let asset = await this.resolveAsset(name, parent);
    this.buildQueue.add(asset);
    await this.buildQueue.run();
    return asset;
  }

  // Resolve `name` relative to `parent` to an Asset, reusing a cached
  // instance when the same path was already loaded.
  async resolveAsset(name, parent) {
    let {path, pkg} = await this.resolver.resolve(name, parent);
    if (this.loadedAssets.has(path)) {
      return this.loadedAssets.get(path);
    }
    let asset = this.parser.getAsset(path, pkg, this.options);
    this.loadedAssets.set(path, asset);
    this.watch(path, asset);
    return asset;
  }

  // Watch `path` and associate it with `asset` (several assets may map to
  // one watched file, e.g. via includedInParent deps). No-op without watcher.
  watch(path, asset) {
    if (!this.watcher) {
      return;
    }
    if (!this.watchedAssets.has(path)) {
      this.watcher.watch(path);
      this.watchedAssets.set(path, new Set());
    }
    this.watchedAssets.get(path).add(asset);
  }

  // Remove the path->asset association; stop watching once no assets remain.
  unwatch(path, asset) {
    if (!this.watchedAssets.has(path)) {
      return;
    }
    let watched = this.watchedAssets.get(path);
    watched.delete(asset);
    if (watched.size === 0) {
      this.watchedAssets.delete(path);
      this.watcher.unwatch(path);
    }
  }

  /**
   * Resolve a dependency of `asset` to an Asset. On MODULE_NOT_FOUND,
   * optionally autoinstalls missing node modules (when `install` is true),
   * and otherwise rethrows with a friendlier message and code frame.
   * Optional dependencies resolve to undefined instead of throwing.
   */
  async resolveDep(asset, dep, install = true) {
    try {
      return await this.resolveAsset(dep.name, asset.name);
    } catch (err) {
      // If the dep is optional, return before we throw
      if (dep.optional) {
        return;
      }
      if (err.code === 'MODULE_NOT_FOUND') {
        let isLocalFile = /^[/~.]/.test(dep.name);
        let fromNodeModules = asset.name.includes(
          `${Path.sep}node_modules${Path.sep}`
        );
        if (
          !isLocalFile &&
          !fromNodeModules &&
          this.options.autoinstall &&
          install
        ) {
          return await this.installDep(asset, dep);
        }
        err.message = `Cannot resolve dependency '${dep.name}'`;
        if (isLocalFile) {
          const absPath = Path.resolve(Path.dirname(asset.name), dep.name);
          err.message += ` at '${absPath}'`;
        }
        await this.throwDepError(asset, dep, err);
      }
      throw err;
    }
  }

  // Install a missing module, then retry resolution once (install=false
  // prevents an infinite install loop).
  async installDep(asset, dep) {
    // Check if module exists, prevents useless installs
    let resolved = await this.resolver.resolveModule(dep.name, asset.name);
    // If the module resolved (i.e. wasn't a local file), but the module directory wasn't found, install it.
    if (resolved.moduleName && !resolved.moduleDir) {
      try {
        await installPackage([resolved.moduleName], asset.name, {
          saveDev: false
        });
      } catch (err) {
        await this.throwDepError(asset, dep, err);
      }
    }
    return await this.resolveDep(asset, dep, false);
  }

  // Attach a code frame (when the dep has a source location) and the file
  // name to the error, then throw it.
  async throwDepError(asset, dep, err) {
    // Generate a code frame where the dependency was used
    if (dep.loc) {
      await asset.loadIfNeeded();
      err.loc = dep.loc;
      err = asset.generateErrorMessage(err);
    }
    err.fileName = asset.name;
    throw err;
  }

  // PromiseQueue callback: (re)build one asset. On rebuild, derived state
  // and the cache entry are invalidated first.
  async processAsset(asset, isRebuild) {
    if (isRebuild) {
      asset.invalidate();
      if (this.cache) {
        this.cache.invalidate(asset.name);
      }
    }
    await this.loadAsset(asset);
  }

  /**
   * Compile one asset (from cache or via the worker farm), then recursively
   * resolve and load its dependencies. Dependencies marked
   * `includedInParent` are only watched, not loaded as separate assets.
   */
  async loadAsset(asset) {
    if (asset.processed) {
      return;
    }
    if (!this.errored) {
      logger.status(emoji.progress, `Building ${asset.basename}...`);
    }
    // Mark the asset processed so we don't load it twice
    asset.processed = true;
    // First try the cache, otherwise load and compile in the background
    let startTime = Date.now();
    let processed = this.cache && (await this.cache.read(asset.name));
    if (!processed || asset.shouldInvalidate(processed.cacheData)) {
      processed = await this.farm.run(asset.name, asset.package, this.options);
      if (this.cache) {
        this.cache.write(asset.name, processed);
      }
    }
    asset.buildTime = Date.now() - startTime;
    asset.generated = processed.generated;
    asset.hash = processed.hash;
    // Call the delegate to get implicit dependencies
    let dependencies = processed.dependencies;
    if (this.delegate.getImplicitDependencies) {
      let implicitDeps = await this.delegate.getImplicitDependencies(asset);
      if (implicitDeps) {
        dependencies = dependencies.concat(implicitDeps);
      }
    }
    // Resolve and load asset dependencies
    let assetDeps = await Promise.all(
      dependencies.map(async dep => {
        if (dep.includedInParent) {
          // This dependency is already included in the parent's generated output,
          // so no need to load it. We map the name back to the parent asset so
          // that changing it triggers a recompile of the parent.
          this.watch(dep.name, asset);
        } else {
          let assetDep = await this.resolveDep(asset, dep);
          if (assetDep) {
            await this.loadAsset(assetDep);
          }
          return assetDep;
        }
      })
    );
    // Store resolved assets in their original order
    dependencies.forEach((dep, i) => {
      asset.dependencies.set(dep.name, dep);
      let assetDep = assetDeps[i];
      if (assetDep) {
        asset.depAssets.set(dep, assetDep);
      }
    });
  }

  /**
   * Recursively place `asset` (and its dependency graph) into bundles.
   * Shared assets are hoisted to the lowest common ancestor bundle; dynamic
   * deps get child bundles; unknown packager types get sibling bundles.
   * `parentBundles` tracks the current path to detect circular bundles.
   */
  createBundleTree(asset, bundle, dep, parentBundles = new Set()) {
    if (dep) {
      asset.parentDeps.add(dep);
    }
    if (asset.parentBundle) {
      // If the asset is already in a bundle, it is shared. Move it to the lowest common ancestor.
      if (asset.parentBundle !== bundle) {
        let commonBundle = bundle.findCommonAncestor(asset.parentBundle);
        if (
          asset.parentBundle !== commonBundle &&
          asset.parentBundle.type === commonBundle.type
        ) {
          this.moveAssetToBundle(asset, commonBundle);
          return;
        }
      } else {
        return;
      }
      // Detect circular bundles
      if (parentBundles.has(asset.parentBundle)) {
        return;
      }
    }
    let isEntryAsset =
      asset.parentBundle && asset.parentBundle.entryAsset === asset;
    if ((dep && dep.dynamic) || !bundle.type) {
      // If the asset is already the entry asset of a bundle, don't create a duplicate.
      if (isEntryAsset) {
        return;
      }
      // Create a new bundle for dynamic imports
      bundle = bundle.createChildBundle(asset);
    } else if (asset.type && !this.packagers.has(asset.type)) {
      // If the asset is already the entry asset of a bundle, don't create a duplicate.
      if (isEntryAsset) {
        return;
      }
      // No packager is available for this asset type. Create a new bundle with only this asset.
      bundle.createSiblingBundle(asset);
    } else {
      // Add the asset to the common bundle of the asset's type
      bundle.getSiblingBundle(asset.type).addAsset(asset);
    }
    // If the asset generated a representation for the parent bundle type, also add it there
    if (asset.generated[bundle.type] != null) {
      bundle.addAsset(asset);
    }
    // Add the asset to sibling bundles for each generated type
    if (asset.type && asset.generated[asset.type]) {
      for (let t in asset.generated) {
        if (asset.generated[t]) {
          bundle.getSiblingBundle(t).addAsset(asset);
        }
      }
    }
    asset.parentBundle = bundle;
    parentBundles.add(bundle);
    for (let [dep, assetDep] of asset.depAssets) {
      this.createBundleTree(assetDep, bundle, dep, parentBundles);
    }
    parentBundles.delete(bundle);
    return bundle;
  }

  // Hoist a shared asset (and its same-bundle dependencies) into
  // `commonBundle`. Entry assets are never moved.
  moveAssetToBundle(asset, commonBundle) {
    // Never move the entry asset of a bundle, as it was explicitly requested to be placed in a separate bundle.
    if (asset.parentBundle.entryAsset === asset) {
      return;
    }
    for (let bundle of Array.from(asset.bundles)) {
      bundle.removeAsset(asset);
      commonBundle.getSiblingBundle(bundle.type).addAsset(asset);
    }
    let oldBundle = asset.parentBundle;
    asset.parentBundle = commonBundle;
    // Move all dependencies as well
    for (let child of asset.depAssets.values()) {
      if (child.parentBundle === oldBundle) {
        this.moveAssetToBundle(child, commonBundle);
      }
    }
  }

  // Yield loaded assets that ended up in no bundle after the last tree build.
  *findOrphanAssets() {
    for (let asset of this.loadedAssets.values()) {
      if (!asset.parentBundle) {
        yield asset;
      }
    }
  }

  unloadOrphanedAssets() {
    for (let asset of this.findOrphanAssets()) {
      this.unloadAsset(asset);
    }
  }

  // Forget an asset and stop watching it and its included dependencies.
  unloadAsset(asset) {
    this.loadedAssets.delete(asset.name);
    if (this.watcher) {
      this.unwatch(asset.name, asset);
      // Unwatch all included dependencies that map to this asset
      for (let dep of asset.dependencies.values()) {
        if (dep.includedInParent) {
          this.unwatch(dep.name, asset);
        }
      }
    }
  }

  // Watcher callback: queue all assets mapped to the changed path for
  // rebuild, debounced by 100ms so rapid saves trigger one build.
  async onChange(path) {
    let assets = this.watchedAssets.get(path);
    if (!assets || !assets.size) {
      return;
    }
    logger.clear();
    logger.status(emoji.progress, `Building ${Path.basename(path)}...`);
    // Add the asset to the rebuild queue, and reset the timeout.
    for (let asset of assets) {
      this.buildQueue.add(asset, true);
    }
    clearTimeout(this.rebuildTimeout);
    this.rebuildTimeout = setTimeout(async () => {
      await this.bundle();
    }, 100);
  }

  // Express-style middleware that serves built bundles; kicks off a build.
  middleware() {
    this.bundle();
    return Server.middleware(this);
  }

  // Start the dev server and kick off the first build.
  async serve(port = 1234, https = false) {
    this.server = await Server.serve(this, port, https);
    this.bundle();
    return this.server;
  }
}
module.exports = Bundler;
Bundler.Asset = require('./Asset');
Bundler.Packager = require('./packagers/Packager');

110
VISUALIZACION/node_modules/parcel-bundler/src/FSCache.js generated vendored Executable file
View file

@ -0,0 +1,110 @@
const fs = require('./utils/fs');
const path = require('path');
const md5 = require('./utils/md5');
const objectHash = require('./utils/objectHash');
const pkg = require('../package.json');
const logger = require('./Logger');
// These keys can affect the output, so if they differ, the cache should not match
const OPTION_KEYS = ['publicURL', 'minify', 'hmr', 'target'];
class FSCache {
  /**
   * On-disk cache of processed asset output, keyed by source file name and
   * the output-affecting bundler options.
   * @param {Object} options - Bundler options; reads `cacheDir` and OPTION_KEYS.
   */
  constructor(options) {
    this.dir = path.resolve(options.cacheDir || '.cache');
    this.dirExists = false;
    this.invalidated = new Set();
    // Hash of the output-affecting options (plus the parcel version), mixed
    // into every cache file name so entries from other configs never match.
    const relevantOptions = {version: pkg.version};
    for (const key of OPTION_KEYS) {
      relevantOptions[key] = options[key];
    }
    this.optionsHash = objectHash(relevantOptions);
  }

  // Create the cache directory if it does not exist yet.
  async ensureDirExists() {
    await fs.mkdirp(this.dir);
    this.dirExists = true;
  }

  // Absolute path of the cache entry for a given source file.
  getCacheFile(filename) {
    const entryHash = md5(this.optionsHash + filename);
    return path.join(this.dir, `${entryHash}.json`);
  }

  // Stamp each dependency that is compiled into this asset with its current
  // mtime, so checkDepMtimes() can detect changes later.
  async writeDepMtimes(data) {
    for (const dep of data.dependencies) {
      if (!dep.includedInParent) {
        continue;
      }
      const stats = await fs.stat(dep.name);
      dep.mtime = stats.mtime.getTime();
    }
  }

  // Persist a processed asset. Failures are logged, never thrown.
  async write(filename, data) {
    try {
      await this.ensureDirExists();
      await this.writeDepMtimes(data);
      await fs.writeFile(this.getCacheFile(filename), JSON.stringify(data));
      this.invalidated.delete(filename);
    } catch (err) {
      logger.error('Error writing to cache', err);
    }
  }

  // Returns false when any dependency compiled into this asset has been
  // modified since the entry was written.
  async checkDepMtimes(data) {
    for (const dep of data.dependencies) {
      if (!dep.includedInParent) {
        continue;
      }
      const stats = await fs.stat(dep.name);
      if (stats.mtime > dep.mtime) {
        return false;
      }
    }
    return true;
  }

  // Read a cache entry; null when missing, invalidated, or out of date.
  async read(filename) {
    if (this.invalidated.has(filename)) {
      return null;
    }
    const cacheFile = this.getCacheFile(filename);
    try {
      const sourceStats = await fs.stat(filename);
      const cacheStats = await fs.stat(cacheFile);
      if (sourceStats.mtime > cacheStats.mtime) {
        // Source file changed after the entry was written.
        return null;
      }
      const data = JSON.parse(await fs.readFile(cacheFile));
      return (await this.checkDepMtimes(data)) ? data : null;
    } catch (err) {
      return null;
    }
  }

  // Mark a file so that the next read() misses (used on file change).
  invalidate(filename) {
    this.invalidated.add(filename);
  }

  // Remove a cache entry; errors (e.g. missing file) are ignored.
  async delete(filename) {
    try {
      await fs.unlink(this.getCacheFile(filename));
      this.invalidated.delete(filename);
    } catch (err) {
      // Fail silently
    }
  }
}
module.exports = FSCache;

105
VISUALIZACION/node_modules/parcel-bundler/src/HMRServer.js generated vendored Executable file
View file

@ -0,0 +1,105 @@
const http = require('http');
const https = require('https');
const WebSocket = require('ws');
const prettyError = require('./utils/prettyError');
const generateCertificate = require('./utils/generateCertificate');
const getCertificate = require('./utils/getCertificate');
const logger = require('./Logger');
class HMRServer {
  /**
   * Start the HMR WebSocket server (over HTTP or HTTPS depending on
   * `options.https`) and return the port it is listening on.
   * New connections immediately receive the last unresolved error, if any.
   */
  async start(options = {}) {
    await new Promise(async resolve => {
      if (!options.https) {
        this.server = http.createServer();
      } else if (typeof options.https === 'boolean') {
        this.server = https.createServer(generateCertificate(options));
      } else {
        this.server = https.createServer(await getCertificate(options.https));
      }
      this.wss = new WebSocket.Server({server: this.server});
      this.server.listen(options.hmrPort, resolve);
    });
    this.wss.on('connection', ws => {
      ws.onerror = this.handleSocketError;
      if (this.unresolvedError) {
        ws.send(JSON.stringify(this.unresolvedError));
      }
    });
    this.wss.on('error', this.handleSocketError);
    return this.wss._server.address().port;
  }

  stop() {
    this.wss.close();
    this.server.close();
  }

  // Broadcast a build error to all clients.
  emitError(err) {
    let {message, stack} = prettyError(err);
    // store the most recent error so we can notify new connections
    // and so we can broadcast when the error is resolved
    this.unresolvedError = {
      type: 'error',
      error: {
        message,
        stack
      }
    };
    this.broadcast(this.unresolvedError);
  }

  // Broadcast updated assets. A full page reload is requested when any HTML
  // asset changed; otherwise clients hot-swap the generated modules.
  emitUpdate(assets) {
    if (this.unresolvedError) {
      this.unresolvedError = null;
      this.broadcast({
        type: 'error-resolved'
      });
    }
    const containsHtmlAsset = assets.some(asset => asset.type === 'html');
    if (containsHtmlAsset) {
      this.broadcast({
        type: 'reload'
      });
    } else {
      this.broadcast({
        type: 'update',
        assets: assets.map(asset => {
          let deps = {};
          for (let [dep, depAsset] of asset.depAssets) {
            deps[dep.name] = depAsset.id;
          }
          return {
            id: asset.id,
            generated: asset.generated,
            deps: deps
          };
        })
      });
    }
  }

  // Shared error handler. `ws` passes an event object ({error, ...}) to
  // `ws.onerror`, but a plain Error to the server's 'error' listener, so
  // `err.error` may be undefined; read the code from whichever shape we got.
  // (Previously this dereferenced `err.error.code` unconditionally, which
  // threw a TypeError for server-level errors.)
  handleSocketError(err) {
    const code = err.error ? err.error.code : err.code;
    if (code === 'ECONNRESET') {
      // This gets triggered on page refresh, ignore this
      return;
    }
    logger.warn(err);
  }

  // Send a message to every connected client.
  broadcast(msg) {
    const json = JSON.stringify(msg);
    for (let ws of this.wss.clients) {
      ws.send(json);
    }
  }
}
module.exports = HMRServer;

217
VISUALIZACION/node_modules/parcel-bundler/src/Logger.js generated vendored Executable file
View file

@ -0,0 +1,217 @@
const chalk = require('chalk');
const readline = require('readline');
const prettyError = require('./utils/prettyError');
const emoji = require('./utils/emoji');
const {countBreaks} = require('grapheme-breaker');
const stripAnsi = require('strip-ansi');
class Logger {
  constructor(options) {
    // Number of terminal lines written since the last clear().
    this.lines = 0;
    // Line index reserved for the in-place status message, or null.
    this.statusLine = null;
    this.setOptions(options);
  }
  // Configure log level (0 silent, 1 errors, 2 +warnings, 3 everything),
  // color output, and test mode. Missing options fall back to defaults.
  setOptions(options) {
    this.logLevel =
      options && isNaN(options.logLevel) === false
        ? Number(options.logLevel)
        : 3;
    this.color =
      options && typeof options.color === 'boolean'
        ? options.color
        : chalk.supportsColor;
    this.chalk = new chalk.constructor({enabled: this.color});
    this.isTest =
      options && typeof options.isTest === 'boolean'
        ? options.isTest
        : process.env.NODE_ENV === 'test';
  }
  // Count how many terminal rows `message` occupies, accounting for long
  // lines wrapping at the terminal width when it is known.
  countLines(message) {
    return message.split('\n').reduce((p, line) => {
      if (process.stdout.columns) {
        return p + Math.ceil((line.length || 1) / process.stdout.columns);
      }
      return p + 1;
    }, 0);
  }
  // Write without a trailing newline; the final line is not counted so a
  // following write can continue on it.
  writeRaw(message) {
    this.lines += this.countLines(message) - 1;
    process.stdout.write(message);
  }
  // Write a message. `persistent` messages are excluded from the line count
  // so clear() leaves them on screen.
  write(message, persistent = false) {
    if (!persistent) {
      this.lines += this.countLines(message);
    }
    this._log(message);
  }
  // Informational output (log level 3 only).
  log(message) {
    if (this.logLevel < 3) {
      return;
    }
    this.write(message);
  }
  // Bold output that survives clear() (log level 3 only).
  persistent(message) {
    if (this.logLevel < 3) {
      return;
    }
    this.write(this.chalk.bold(message), true);
  }
  // Print a warning (level >= 2) with an optional stack trace.
  warn(err) {
    if (this.logLevel < 2) {
      return;
    }
    let {message, stack} = prettyError(err, {color: this.color});
    this.write(this.chalk.yellow(`${emoji.warning} ${message}`));
    if (stack) {
      this.write(stack);
    }
  }
  // Print an error (level >= 1) on the status line, plus its stack if any.
  error(err) {
    if (this.logLevel < 1) {
      return;
    }
    let {message, stack} = prettyError(err, {color: this.color});
    this.status(emoji.error, message, 'red');
    if (stack) {
      this.write(stack);
    }
  }
  // Erase everything written since the last clear() by moving the cursor up
  // and clearing each counted line. No-op without color or under tests.
  clear() {
    if (!this.color || this.isTest) {
      return;
    }
    while (this.lines > 0) {
      readline.clearLine(process.stdout, 0);
      readline.moveCursor(process.stdout, 0, -1);
      this.lines--;
    }
    readline.cursorTo(process.stdout, 0);
    this.statusLine = null;
  }
  // Overwrite a previously written line in place (identified by its index
  // from the top of our output). Falls back to a plain log without color.
  writeLine(line, msg) {
    if (!this.color) {
      return this.log(msg);
    }
    let n = this.lines - line;
    let stdout = process.stdout;
    readline.cursorTo(stdout, 0);
    readline.moveCursor(stdout, 0, -n);
    stdout.write(msg);
    readline.clearLine(stdout, 1);
    readline.cursorTo(stdout, 0);
    // Restore the cursor to where it was before the in-place update.
    readline.moveCursor(stdout, 0, n);
  }
  // Show (or update) a single status line like "⏳ Building...". The first
  // call reserves a line; later calls rewrite that same line.
  status(emoji, message, color = 'gray') {
    if (this.logLevel < 3) {
      return;
    }
    let hasStatusLine = this.statusLine != null;
    if (!hasStatusLine) {
      this.statusLine = this.lines;
    }
    this.writeLine(
      this.statusLine,
      this.chalk[color].bold(`${emoji} ${message}`)
    );
    if (!hasStatusLine) {
      process.stdout.write('\n');
      this.lines++;
    }
  }
  // Dispatch an IPC message from a worker: {method, args} (see LoggerProxy).
  handleMessage(options) {
    this[options.method](...options.args);
  }
  // Single raw output point; kept separate so it can be stubbed.
  _log(message) {
    console.log(message);
  }
  // Render rows as aligned columns. `columns` is [{align}, ...]; widths are
  // measured from the visible (ANSI-stripped) content of every cell.
  table(columns, table) {
    // Measure column widths
    let colWidths = [];
    for (let row of table) {
      let i = 0;
      for (let item of row) {
        colWidths[i] = Math.max(colWidths[i] || 0, stringWidth(item));
        i++;
      }
    }
    // Render rows
    for (let row of table) {
      let items = row.map((item, i) => {
        // Add padding between columns unless the alignment is the opposite to the
        // next column and pad to the column width.
        let padding =
          !columns[i + 1] || columns[i + 1].align === columns[i].align ? 4 : 0;
        return pad(item, colWidths[i] + padding, columns[i].align);
      });
      this.log(items.join(''));
    }
  }
}
// Pad a string with spaces on either side so it fills `length` visible
// columns. `align` is 'left' (pad on the right) or 'right' (pad on the
// left). Text already wider than `length` is returned unpadded.
function pad(text, length, align = 'left') {
  // Clamp at zero: String.prototype.repeat throws a RangeError on negative
  // counts, which happened whenever `text` exceeded the column width.
  let pad = ' '.repeat(Math.max(0, length - stringWidth(text)));
  if (align === 'right') {
    return pad + text;
  }
  return text + pad;
}
// Count visible characters in a string: ANSI color/escape sequences are
// stripped first, and grapheme clusters (e.g. emoji) count as one character.
// The input is coerced to a string to tolerate non-string cells.
function stringWidth(string) {
  return countBreaks(stripAnsi('' + string));
}
// If we are in a worker, make a proxy class which will
// send the logger calls to the main process via IPC.
// These are handled in WorkerFarm and directed to handleMessage above.
if (process.send && process.env.WORKER_TYPE === 'parcel-worker') {
  const worker = require('./worker');
  class LoggerProxy {}
  // Mirror every Logger method with a stub forwarding {method, args} to the
  // master process, which replays the call on its own Logger instance.
  for (let method of Object.getOwnPropertyNames(Logger.prototype)) {
    LoggerProxy.prototype[method] = (...args) => {
      worker.addCall(
        {
          location: require.resolve('./Logger'),
          method,
          args
        },
        false // fire-and-forget: no response awaited from the master
      );
    };
  }
  module.exports = new LoggerProxy();
} else {
  // Main process: export a singleton Logger used across the codebase.
  module.exports = new Logger();
}

85
VISUALIZACION/node_modules/parcel-bundler/src/Parser.js generated vendored Executable file
View file

@ -0,0 +1,85 @@
const path = require('path');
const RawAsset = require('./assets/RawAsset');
const GlobAsset = require('./assets/GlobAsset');
const glob = require('glob');
class Parser {
  /**
   * Maps file extensions to asset types. Built-in parsers are registered as
   * module paths and only require()d the first time they are needed.
   */
  constructor(options = {}) {
    this.extensions = {};
    // Built-in extension -> asset module table. Registered in bulk below.
    const builtinParsers = {
      js: './assets/JSAsset',
      jsx: './assets/JSAsset',
      es6: './assets/JSAsset',
      jsm: './assets/JSAsset',
      mjs: './assets/JSAsset',
      ml: './assets/ReasonAsset',
      re: './assets/ReasonAsset',
      ts: './assets/TypeScriptAsset',
      tsx: './assets/TypeScriptAsset',
      coffee: './assets/CoffeeScriptAsset',
      vue: './assets/VueAsset',
      json: './assets/JSONAsset',
      json5: './assets/JSONAsset',
      yaml: './assets/YAMLAsset',
      yml: './assets/YAMLAsset',
      toml: './assets/TOMLAsset',
      gql: './assets/GraphqlAsset',
      graphql: './assets/GraphqlAsset',
      css: './assets/CSSAsset',
      pcss: './assets/CSSAsset',
      styl: './assets/StylusAsset',
      stylus: './assets/StylusAsset',
      less: './assets/LESSAsset',
      sass: './assets/SASSAsset',
      scss: './assets/SASSAsset',
      html: './assets/HTMLAsset',
      htm: './assets/HTMLAsset',
      rs: './assets/RustAsset',
      webmanifest: './assets/WebManifestAsset',
      glsl: './assets/GLSLAsset',
      vert: './assets/GLSLAsset',
      frag: './assets/GLSLAsset',
      jade: './assets/PugAsset',
      pug: './assets/PugAsset'
    };
    for (const builtinExt of Object.keys(builtinParsers)) {
      this.registerExtension(builtinExt, builtinParsers[builtinExt]);
    }
    // User-supplied parsers override or extend the built-ins.
    let extensions = options.extensions || {};
    for (let ext in extensions) {
      this.registerExtension(ext, extensions[ext]);
    }
  }
  // Register a parser (asset class or module path) for an extension.
  // Keys are normalized to a lowercase ".ext" form.
  registerExtension(ext, parser) {
    const key = (ext.startsWith('.') ? ext : '.' + ext).toLowerCase();
    this.extensions[key] = parser;
  }
  // Pick the asset type for a filename: globs get GlobAsset, registered
  // extensions get their parser (lazily require()d), anything else RawAsset.
  findParser(filename) {
    if (/[*+{}]/.test(filename) && glob.hasMagic(filename)) {
      return GlobAsset;
    }
    const extension = path.extname(filename).toLowerCase();
    let parser = this.extensions[extension];
    if (!parser) {
      return RawAsset;
    }
    if (typeof parser === 'string') {
      // Lazy-load and memoize the asset class in place of its module path.
      parser = require(parser);
      this.extensions[extension] = parser;
    }
    return parser;
  }
  // Instantiate the right asset type for `filename`, recording this parser
  // on the options so assets can create sub-assets.
  getAsset(filename, pkg, options = {}) {
    options.parser = this;
    const AssetType = this.findParser(filename);
    return new AssetType(filename, pkg, options);
  }
}
module.exports = Parser;

102
VISUALIZACION/node_modules/parcel-bundler/src/Pipeline.js generated vendored Executable file
View file

@ -0,0 +1,102 @@
const Parser = require('./Parser');
const path = require('path');
const md5 = require('./utils/md5');
/**
 * A Pipeline composes multiple Asset types together.
 */
class Pipeline {
  constructor(options) {
    this.options = options;
    this.parser = new Parser(options);
  }
  // Process the file at `path` and return a serializable result: its
  // dependencies, generated output keyed by rendition type, content hash,
  // and per-asset cache data.
  async process(path, pkg, options) {
    let asset = this.parser.getAsset(path, pkg, options);
    let generated = await this.processAsset(asset);
    let generatedMap = {};
    for (let rendition of generated) {
      generatedMap[rendition.type] = rendition.value;
    }
    return {
      dependencies: Array.from(asset.dependencies.values()),
      generated: generatedMap,
      hash: asset.hash,
      cacheData: asset.cacheData
    };
  }
  // Run an asset's transform, then recursively re-process any non-final
  // string rendition whose type maps to a different asset type (e.g. a .vue
  // file yielding js and css renditions). Sub-asset cache data and hashes
  // are folded back into the parent asset.
  async processAsset(asset) {
    try {
      await asset.process();
    } catch (err) {
      throw asset.generateErrorMessage(err);
    }
    let inputType = path.extname(asset.name).slice(1);
    let generated = [];
    for (let rendition of this.iterateRenditions(asset)) {
      let {type, value} = rendition;
      if (typeof value !== 'string' || rendition.final) {
        generated.push(rendition);
        continue;
      }
      // Find an asset type for the rendition type.
      // If the asset is not already an instance of this asset type, process it.
      let AssetType = this.parser.findParser(
        asset.name.slice(0, -inputType.length) + type
      );
      if (!(asset instanceof AssetType)) {
        let opts = Object.assign({rendition}, asset.options);
        let subAsset = new AssetType(asset.name, asset.package, opts);
        subAsset.contents = value;
        // Share the dependency map so sub-asset deps surface on the parent.
        subAsset.dependencies = asset.dependencies;
        let processed = await this.processAsset(subAsset);
        generated = generated.concat(processed);
        Object.assign(asset.cacheData, subAsset.cacheData);
        asset.hash = md5(asset.hash + subAsset.hash);
      } else {
        generated.push(rendition);
      }
    }
    // Post process. This allows assets a chance to modify the output produced by sub-asset types.
    asset.generated = generated;
    try {
      generated = await asset.postProcess(generated);
    } catch (err) {
      throw asset.generateErrorMessage(err);
    }
    return generated;
  }
  // Yield {type, value[, final]} renditions, normalizing the three shapes an
  // asset may produce: an array of renditions, a single string, or (legacy
  // API) an object map of type -> value.
  *iterateRenditions(asset) {
    if (Array.isArray(asset.generated)) {
      return yield* asset.generated;
    }
    if (typeof asset.generated === 'string') {
      return yield {
        type: asset.type,
        value: asset.generated
      };
    }
    // Backward compatibility support for the old API.
    // Assume all renditions are final - don't compose asset types together.
    for (let type in asset.generated) {
      yield {
        type,
        value: asset.generated[type],
        final: true
      };
    }
  }
}
module.exports = Pipeline;

433
VISUALIZACION/node_modules/parcel-bundler/src/Resolver.js generated vendored Executable file
View file

@ -0,0 +1,433 @@
const builtins = require('./builtins');
const path = require('path');
const glob = require('glob');
const fs = require('./utils/fs');
const micromatch = require('micromatch');
const EMPTY_SHIM = require.resolve('./builtins/_empty');
const GLOB_RE = /[*+{}]/;
/**
* This resolver implements a modified version of the node_modules resolution algorithm:
* https://nodejs.org/api/modules.html#modules_all_together
*
* In addition to the standard algorithm, Parcel supports:
* - All file extensions supported by Parcel.
* - Glob file paths
* - Absolute paths (e.g. /foo) resolved relative to the project root.
* - Tilde paths (e.g. ~/foo) resolved relative to the nearest module root in node_modules.
* - The package.json module, jsnext:main, and browser field as replacements for package.main.
* - The package.json browser and alias fields as an alias map within a local module.
* - The package.json alias field in the root package for global aliases across all modules.
*/
class Resolver {
  constructor(options = {}) {
    this.options = options;
    // Resolution results keyed by "parentDir:filename"; parsed package.json
    // objects keyed by their absolute file path.
    this.cache = new Map();
    this.packageCache = new Map();
    // package.json of the project root, loaded lazily by loadAlias().
    this.rootPackage = null;
  }
  // Resolve `input` as required from `parent` and return {path, pkg}.
  // Throws an error with code MODULE_NOT_FOUND when nothing matches.
  async resolve(input, parent) {
    let filename = input;
    // Check the cache first
    let key = this.getCacheKey(filename, parent);
    if (this.cache.has(key)) {
      return this.cache.get(key);
    }
    // Check if this is a glob
    if (GLOB_RE.test(filename) && glob.hasMagic(filename)) {
      return {path: path.resolve(path.dirname(parent), filename)};
    }
    // Get file extensions to search
    let extensions = Array.isArray(this.options.extensions)
      ? this.options.extensions.slice()
      : Object.keys(this.options.extensions);
    if (parent) {
      // parent's extension given high priority
      const parentExt = path.extname(parent);
      extensions = [parentExt, ...extensions.filter(ext => ext !== parentExt)];
    }
    // The empty extension tries the filename exactly as written first.
    extensions.unshift('');
    // Resolve the module directory or local file path
    let module = await this.resolveModule(filename, parent);
    let resolved;
    if (module.moduleDir) {
      resolved = await this.loadNodeModules(module, extensions);
    } else if (module.filePath) {
      resolved = await this.loadRelative(module.filePath, extensions);
    }
    if (!resolved) {
      let dir = parent ? path.dirname(parent) : process.cwd();
      let err = new Error(`Cannot find module '${input}' from '${dir}'`);
      err.code = 'MODULE_NOT_FOUND';
      throw err;
    }
    this.cache.set(key, resolved);
    return resolved;
  }
  // Classify `filename`: either a local file ({filePath}) or a node_modules
  // module ({moduleName, subPath[, moduleDir, filePath]}).
  async resolveModule(filename, parent) {
    let dir = parent ? path.dirname(parent) : process.cwd();
    // If this isn't the entrypoint, resolve the input file to an absolute path
    if (parent) {
      filename = this.resolveFilename(filename, dir);
    }
    // Resolve aliases in the parent module for this file.
    filename = await this.loadAlias(filename, dir);
    // Return just the file path if this is a file, not in node_modules
    if (path.isAbsolute(filename)) {
      return {
        filePath: filename
      };
    }
    // Resolve the module in node_modules
    let resolved;
    try {
      resolved = await this.findNodeModulePath(filename, dir);
    } catch (err) {
      // ignore
    }
    // If we couldn't resolve the node_modules path, just return the module name info
    if (!resolved) {
      let parts = this.getModuleParts(filename);
      resolved = {
        moduleName: parts[0],
        subPath: parts[1]
      };
    }
    return resolved;
  }
  // Cache key combines the requiring directory and the raw specifier.
  getCacheKey(filename, parent) {
    return (parent ? path.dirname(parent) : '') + ':' + filename;
  }
  // Turn a specifier into an absolute path (or leave it as a module name),
  // honoring Parcel's '/', '~', and relative path conventions.
  resolveFilename(filename, dir) {
    switch (filename[0]) {
      case '/':
        // Absolute path. Resolve relative to project root.
        return path.resolve(this.options.rootDir, filename.slice(1));
      case '~':
        // Tilde path. Resolve relative to nearest node_modules directory,
        // or the project root - whichever comes first.
        while (
          dir !== this.options.rootDir &&
          path.basename(path.dirname(dir)) !== 'node_modules'
        ) {
          dir = path.dirname(dir);
        }
        return path.join(dir, filename.slice(1));
      case '.':
        // Relative path.
        return path.resolve(dir, filename);
      default:
        // Module
        return path.normalize(filename);
    }
  }
  // Resolve an absolute path as a file first, then as a directory.
  async loadRelative(filename, extensions) {
    // Find a package.json file in the current package.
    let pkg = await this.findPackage(path.dirname(filename));
    // First try as a file, then as a directory.
    return (
      (await this.loadAsFile(filename, extensions, pkg)) ||
      (await this.loadDirectory(filename, extensions, pkg))
    );
  }
  // Walk up from `dir` looking for node_modules/<module>. Built-in node
  // modules resolve to Parcel's browser shims. Returns undefined if absent.
  async findNodeModulePath(filename, dir) {
    if (builtins[filename]) {
      return {filePath: builtins[filename]};
    }
    let parts = this.getModuleParts(filename);
    let root = path.parse(dir).root;
    while (dir !== root) {
      // Skip node_modules directories
      if (path.basename(dir) === 'node_modules') {
        dir = path.dirname(dir);
      }
      try {
        // First, check if the module directory exists. This prevents a lot of unnecessary checks later.
        let moduleDir = path.join(dir, 'node_modules', parts[0]);
        let stats = await fs.stat(moduleDir);
        if (stats.isDirectory()) {
          return {
            moduleName: parts[0],
            subPath: parts[1],
            moduleDir: moduleDir,
            filePath: path.join(dir, 'node_modules', filename)
          };
        }
      } catch (err) {
        // ignore
      }
      // Move up a directory
      dir = path.dirname(dir);
    }
  }
  // Resolve within a located node_modules package.
  async loadNodeModules(module, extensions) {
    try {
      // If a module was specified as a module sub-path (e.g. some-module/some/path),
      // it is likely a file. Try loading it as a file first.
      if (module.subPath) {
        let pkg = await this.readPackage(module.moduleDir);
        let res = await this.loadAsFile(module.filePath, extensions, pkg);
        if (res) {
          return res;
        }
      }
      // Otherwise, load as a directory.
      return await this.loadDirectory(module.filePath, extensions);
    } catch (e) {
      // ignore
    }
  }
  // True if `file` exists and is a regular file (or FIFO).
  async isFile(file) {
    try {
      let stat = await fs.stat(file);
      return stat.isFile() || stat.isFIFO();
    } catch (err) {
      return false;
    }
  }
  // Resolve a directory: its package.json main entry first, falling back to
  // an index file. `pkg` is reassigned to the directory's own package.json
  // when one exists.
  async loadDirectory(dir, extensions, pkg) {
    try {
      pkg = await this.readPackage(dir);
      // First try loading package.main as a file, then try as a directory.
      let main = this.getPackageMain(pkg);
      let res =
        (await this.loadAsFile(main, extensions, pkg)) ||
        (await this.loadDirectory(main, extensions, pkg));
      if (res) {
        return res;
      }
    } catch (err) {
      // ignore
    }
    // Fall back to an index file inside the directory.
    return await this.loadAsFile(path.join(dir, 'index'), extensions, pkg);
  }
  // Read and cache `dir`/package.json, annotated with pkgfile/pkgdir.
  async readPackage(dir) {
    let file = path.join(dir, 'package.json');
    if (this.packageCache.has(file)) {
      return this.packageCache.get(file);
    }
    let json = await fs.readFile(file, 'utf8');
    let pkg = JSON.parse(json);
    pkg.pkgfile = file;
    pkg.pkgdir = dir;
    // If the package has a `source` field, check if it is behind a symlink.
    // If so, we treat the module as source code rather than a pre-compiled module.
    if (pkg.source) {
      let realpath = await fs.realpath(file);
      if (realpath === file) {
        delete pkg.source;
      }
    }
    this.packageCache.set(file, pkg);
    return pkg;
  }
  getPackageMain(pkg) {
    // libraries like d3.js specifies node.js specific files in the "main" which breaks the build
    // we use the "module" or "browser" field to get the full dependency tree if available.
    // If this is a linked module with a `source` field, use that as the entry point.
    let main = [pkg.source, pkg.module, pkg.browser, pkg.main].find(
      entry => typeof entry === 'string'
    );
    // Default to an index file if no main field is found
    if (!main || main === '.' || main === './') {
      main = 'index';
    }
    return path.resolve(pkg.pkgdir, main);
  }
  // Try `file` with each candidate extension (and alias expansions);
  // return {path, pkg} for the first that exists.
  async loadAsFile(file, extensions, pkg) {
    // Try all supported extensions
    for (let f of this.expandFile(file, extensions, pkg)) {
      if (await this.isFile(f)) {
        return {path: f, pkg};
      }
    }
  }
  // Produce candidate paths for `file`: each extension, with any alias
  // target expanded once (expandAliases=false prevents infinite recursion).
  expandFile(file, extensions, pkg, expandAliases = true) {
    // Expand extensions and aliases
    let res = [];
    for (let ext of extensions) {
      let f = file + ext;
      if (expandAliases) {
        let alias = this.resolveAliases(file + ext, pkg);
        if (alias !== f) {
          res = res.concat(this.expandFile(alias, extensions, pkg, false));
        }
      }
      res.push(f);
    }
    return res;
  }
  resolveAliases(filename, pkg) {
    // First resolve local package aliases, then project global ones.
    return this.resolvePackageAliases(
      this.resolvePackageAliases(filename, pkg),
      this.rootPackage
    );
  }
  resolvePackageAliases(filename, pkg) {
    if (!pkg) {
      return filename;
    }
    // Resolve aliases in the package.source, package.alias, and package.browser fields.
    return (
      this.getAlias(filename, pkg.pkgdir, pkg.source) ||
      this.getAlias(filename, pkg.pkgdir, pkg.alias) ||
      this.getAlias(filename, pkg.pkgdir, pkg.browser) ||
      filename
    );
  }
  // Look up `filename` in an alias map from a package.json field. Returns
  // the resolved target, the empty shim for `false` aliases, or null.
  getAlias(filename, dir, aliases) {
    if (!filename || !aliases || typeof aliases !== 'object') {
      return null;
    }
    let alias;
    // If filename is an absolute path, get one relative to the package.json directory.
    if (path.isAbsolute(filename)) {
      filename = path.relative(dir, filename);
      if (filename[0] !== '.') {
        filename = './' + filename;
      }
      alias = this.lookupAlias(aliases, filename);
    } else {
      // It is a node_module. First try the entire filename as a key.
      alias = aliases[filename];
      if (alias == null) {
        // If it didn't match, try only the module name.
        let parts = this.getModuleParts(filename);
        alias = aliases[parts[0]];
        if (typeof alias === 'string') {
          // Append the filename back onto the aliased module.
          alias = path.join(alias, ...parts.slice(1));
        }
      }
    }
    // If the alias is set to `false`, return an empty file.
    if (alias === false) {
      return EMPTY_SHIM;
    }
    // If the alias is a relative path, then resolve
    // relative to the package.json directory.
    if (alias && alias[0] === '.') {
      return path.resolve(dir, alias);
    }
    // Otherwise, assume the alias is a module
    return alias;
  }
  // Exact key lookup first, then glob-pattern keys (via micromatch).
  lookupAlias(aliases, filename) {
    // First, try looking up the exact filename
    let alias = aliases[filename];
    if (alias != null) {
      return alias;
    }
    // Otherwise, try replacing glob keys
    for (let key in aliases) {
      if (GLOB_RE.test(key)) {
        let re = micromatch.makeRe(key, {capture: true});
        if (re.test(filename)) {
          return filename.replace(re, aliases[key]);
        }
      }
    }
  }
  // Walk up from `dir` to the nearest package.json, stopping at the
  // filesystem root or a node_modules boundary.
  async findPackage(dir) {
    // Find the nearest package.json file within the current node_modules folder
    let root = path.parse(dir).root;
    while (dir !== root && path.basename(dir) !== 'node_modules') {
      try {
        return await this.readPackage(dir);
      } catch (err) {
        // ignore
      }
      dir = path.dirname(dir);
    }
  }
  // Apply alias maps from the nearest package.json (and the project root's)
  // to a specifier before resolution.
  async loadAlias(filename, dir) {
    // Load the root project's package.json file if we haven't already
    if (!this.rootPackage) {
      this.rootPackage = await this.findPackage(this.options.rootDir);
    }
    // Load the local package, and resolve aliases
    let pkg = await this.findPackage(dir);
    return this.resolveAliases(filename, pkg);
  }
  // Split a module specifier into [moduleName, ...subPathParts], keeping
  // scoped names (@scope/pkg) together as a single part.
  getModuleParts(name) {
    let parts = path.normalize(name).split(path.sep);
    if (parts[0].charAt(0) === '@') {
      // Scoped module (e.g. @scope/module). Merge the first two parts back together.
      parts.splice(0, 2, `${parts[0]}/${parts[1]}`);
    }
    return parts;
  }
}
module.exports = Resolver;

131
VISUALIZACION/node_modules/parcel-bundler/src/Server.js generated vendored Executable file
View file

@ -0,0 +1,131 @@
const http = require('http');
const https = require('https');
const serveStatic = require('serve-static');
const getPort = require('get-port');
const serverErrors = require('./utils/customErrors').serverErrors;
const generateCertificate = require('./utils/generateCertificate');
const getCertificate = require('./utils/getCertificate');
const logger = require('./Logger');
const path = require('path');
const url = require('url');
// Register the WebAssembly MIME type so .wasm bundles are served with the
// correct Content-Type (required for streaming compilation in browsers).
serveStatic.mime.define({
  'application/wasm': ['wasm']
});
// serve-static hook applied to every response before it is sent.
function setHeaders(res) {
  enableCors(res);
}
// Add permissive CORS headers to a response so bundles can be fetched
// cross-origin (e.g. from another app server during development).
function enableCors(res) {
  res.setHeader('Access-Control-Allow-Origin', '*');
  res.setHeader(
    'Access-Control-Allow-Methods',
    'GET, HEAD, PUT, PATCH, POST, DELETE'
  );
  // 'Content-Type' was previously listed twice; the duplicate added no
  // meaning, so the header list is deduplicated here.
  res.setHeader(
    'Access-Control-Allow-Headers',
    'Origin, X-Requested-With, Content-Type, Accept'
  );
}
// Create a connect-style request handler that serves the bundler's output
// directory, waiting for in-flight builds and falling back to the main HTML
// bundle for extension-less URLs (SPA-style routing).
function middleware(bundler) {
  const serve = serveStatic(bundler.options.outDir, {
    index: false,
    setHeaders: setHeaders
  });
  return function(req, res, next) {
    // Wait for the bundler to finish bundling if needed
    if (bundler.pending) {
      bundler.once('bundled', respond);
    } else {
      respond();
    }
    // Route the request once the current build (if any) has finished.
    function respond() {
      let {pathname} = url.parse(req.url);
      if (bundler.errored) {
        return send500();
      } else if (
        !pathname.startsWith(bundler.options.publicURL) ||
        path.extname(pathname) === ''
      ) {
        // If the URL doesn't start with the public path, or the URL doesn't
        // have a file extension, send the main HTML bundle.
        return sendIndex();
      } else {
        // Otherwise, serve the file from the dist folder
        req.url = pathname.slice(bundler.options.publicURL.length);
        return serve(req, res, send404);
      }
    }
    function sendIndex() {
      // If the main asset is an HTML file, serve it
      if (bundler.mainBundle.type === 'html') {
        req.url = `/${path.basename(bundler.mainBundle.name)}`;
        serve(req, res, send404);
      } else {
        send404();
      }
    }
    // The last build failed: tell the browser to check the terminal.
    function send500() {
      res.setHeader('Content-Type', 'text/plain; charset=utf-8');
      res.writeHead(500);
      res.end('🚨 Build error, check the console for details.');
    }
    // Delegate to the next middleware when embedded (e.g. in express),
    // otherwise answer 404 directly.
    function send404() {
      if (next) {
        return next();
      }
      res.writeHead(404);
      res.end();
    }
  };
}
// Start a standalone dev server for `bundler` on `port` (or the nearest free
// port if taken). `useHTTPS` may be `true` (generated certificate) or a
// certificate config object. Resolves with the listening server.
async function serve(bundler, port, useHTTPS = false) {
  let handler = middleware(bundler);
  let server;
  if (!useHTTPS) {
    server = http.createServer(handler);
  } else if (typeof useHTTPS === 'boolean') {
    server = https.createServer(generateCertificate(bundler.options), handler);
  } else {
    server = https.createServer(await getCertificate(useHTTPS), handler);
  }
  // Fall back to another free port when the requested one is occupied.
  let freePort = await getPort({port});
  server.listen(freePort);
  return new Promise((resolve, reject) => {
    server.on('error', err => {
      logger.error(new Error(serverErrors(err, server.address().port)));
      reject(err);
    });
    server.once('listening', () => {
      // Warn when we had to deviate from the requested port.
      let addon =
        server.address().port !== port
          ? `- ${logger.chalk.yellow(
              `configured port ${port} could not be used.`
            )}`
          : '';
      logger.persistent(
        `Server running at ${logger.chalk.cyan(
          `${useHTTPS ? 'https' : 'http'}://localhost:${server.address().port}`
        )} ${addon}`
      );
      resolve(server);
    });
  });
}
// Public API: `middleware` for embedding in an existing server (e.g. express),
// `serve` for running a standalone dev server.
exports.middleware = middleware;
exports.serve = serve;

293
VISUALIZACION/node_modules/parcel-bundler/src/SourceMap.js generated vendored Executable file
View file

@ -0,0 +1,293 @@
const {SourceMapConsumer, SourceMapGenerator} = require('source-map');
const lineCounter = require('./utils/lineCounter');
class SourceMap {
  // `mappings` is an array of {source, name?, original: {line, column},
  // generated: {line, column}}; `sources` maps source names to content.
  constructor(mappings, sources) {
    this.mappings = this.purifyMappings(mappings);
    this.sources = sources || {};
    this.lineCount = null;
  }
  // Drop malformed entries: every kept mapping has a source, 1-based
  // original/generated lines, and numeric columns. Non-arrays yield [].
  purifyMappings(mappings) {
    if (Array.isArray(mappings)) {
      return mappings.filter(mapping => {
        return (
          mapping &&
          mapping.source &&
          mapping.original &&
          typeof mapping.original.line === 'number' &&
          mapping.original.line > 0 &&
          typeof mapping.original.column === 'number' &&
          mapping.generated &&
          typeof mapping.generated.line === 'number' &&
          mapping.generated.line > 0 &&
          typeof mapping.generated.column === 'number'
        );
      });
    }
    return [];
  }
  // Normalize a raw map (object, JSON string, or consumer) to a
  // SourceMapConsumer instance.
  async getConsumer(map) {
    if (map instanceof SourceMapConsumer) {
      return map;
    }
    map = typeof map === 'string' ? JSON.parse(map) : map;
    return await new SourceMapConsumer(map);
  }
  // Merge another map into this one, shifting its generated positions by
  // the given offsets. Accepts a raw/consumer map (has `version`) or another
  // SourceMap-like object; source contents are copied when absent here.
  async addMap(map, lineOffset = 0, columnOffset = 0) {
    if (!(map instanceof SourceMap) && map.version) {
      let consumer = await this.getConsumer(map);
      consumer.eachMapping(mapping => {
        this.addConsumerMapping(mapping, lineOffset, columnOffset);
        if (!this.sources[mapping.source]) {
          this.sources[mapping.source] = consumer.sourceContentFor(
            mapping.source,
            true
          );
        }
      });
      if (consumer.destroy) {
        // Only needs to happen in source-map 0.7
        consumer.destroy();
      }
    } else {
      if (!map.eachMapping) {
        map = new SourceMap(map.mappings, map.sources);
      }
      if (lineOffset === 0 && columnOffset === 0) {
        // Fast path: no offset means the mapping arrays can be concatenated.
        this.mappings = this.mappings.concat(map.mappings);
      } else {
        map.eachMapping(mapping => {
          this.addMapping(mapping, lineOffset, columnOffset);
        });
      }
      Object.keys(map.sources).forEach(sourceName => {
        if (!this.sources[sourceName]) {
          this.sources[sourceName] = map.sources[sourceName];
        }
      });
    }
    return this;
  }
  // Append a mapping, offsetting its generated position in place.
  addMapping(mapping, lineOffset = 0, columnOffset = 0) {
    mapping.generated = {
      line: mapping.generated.line + lineOffset,
      column: mapping.generated.column + columnOffset
    };
    this.mappings.push(mapping);
  }
  // Append a mapping in the flat SourceMapConsumer shape (originalLine,
  // generatedColumn, ...), skipping entries without an original position.
  addConsumerMapping(mapping, lineOffset = 0, columnOffset = 0) {
    if (
      !mapping.source ||
      !mapping.originalLine ||
      (!mapping.originalColumn && mapping.originalColumn !== 0)
    ) {
      return;
    }
    this.mappings.push({
      source: mapping.source,
      original: {
        line: mapping.originalLine,
        column: mapping.originalColumn
      },
      generated: {
        line: mapping.generatedLine + lineOffset,
        column: mapping.generatedColumn + columnOffset
      },
      name: mapping.name
    });
  }
  eachMapping(callback) {
    this.mappings.forEach(callback);
  }
  // Create a 1:1 line mapping for `sourceContent` — used for assets that
  // were emitted without a transform-provided map.
  generateEmptyMap(sourceName, sourceContent) {
    this.sources[sourceName] = sourceContent;
    this.lineCount = lineCounter(sourceContent);
    for (let line = 1; line < this.lineCount + 1; line++) {
      this.addMapping({
        source: sourceName,
        original: {
          line: line,
          column: 0
        },
        generated: {
          line: line,
          column: 0
        }
      });
    }
    return this;
  }
  // Compose two maps: for each mapping in `extension` (new -> intermediate),
  // look up the intermediate position in `original` (intermediate -> source)
  // and record the transitive source -> new mapping on this instance.
  async extendSourceMap(original, extension) {
    if (!(extension instanceof SourceMap)) {
      extension = await new SourceMap().addMap(extension);
    }
    if (!(original instanceof SourceMap)) {
      original = await this.getConsumer(original);
    }
    extension.eachMapping(mapping => {
      let originalMapping = original.originalPositionFor({
        line: mapping.original.line,
        column: mapping.original.column
      });
      // Skip positions the original map cannot trace back to a source.
      if (!originalMapping.line) {
        return false;
      }
      this.addMapping({
        source: originalMapping.source,
        name: originalMapping.name,
        original: {
          line: originalMapping.line,
          column: originalMapping.column
        },
        generated: {
          line: mapping.generated.line,
          column: mapping.generated.column
        }
      });
      if (!this.sources[originalMapping.source]) {
        this.sources[originalMapping.source] = original.sourceContentFor(
          originalMapping.source,
          true
        );
      }
    });
    if (original.destroy) {
      // Only needs to happen in source-map 0.7
      original.destroy();
    }
    return this;
  }
  // Binary-search `mappings` (assumed sorted by line on the `key` side) for
  // the index of the mapping closest to (line, column). When no mapping on
  // that line exists, falls back to the last mapping's index.
  findClosest(line, column, key = 'original') {
    if (line < 1) {
      throw new Error('Line numbers must be >= 1');
    }
    if (column < 0) {
      throw new Error('Column numbers must be >= 0');
    }
    if (this.mappings.length < 1) {
      return undefined;
    }
    let startIndex = 0;
    let stopIndex = this.mappings.length - 1;
    let middleIndex = Math.floor((stopIndex + startIndex) / 2);
    while (
      startIndex < stopIndex &&
      this.mappings[middleIndex][key].line !== line
    ) {
      if (line < this.mappings[middleIndex][key].line) {
        stopIndex = middleIndex - 1;
      } else if (line > this.mappings[middleIndex][key].line) {
        startIndex = middleIndex + 1;
      }
      middleIndex = Math.floor((stopIndex + startIndex) / 2);
    }
    let mapping = this.mappings[middleIndex];
    if (!mapping || mapping[key].line !== line) {
      return this.mappings.length - 1;
    }
    // Rewind to the first mapping on this line...
    while (
      middleIndex >= 1 &&
      this.mappings[middleIndex - 1][key].line === line
    ) {
      middleIndex--;
    }
    // ...then advance along the line toward the requested column.
    while (
      middleIndex < this.mappings.length - 1 &&
      this.mappings[middleIndex + 1][key].line === line &&
      column > this.mappings[middleIndex][key].column
    ) {
      middleIndex++;
    }
    return middleIndex;
  }
  // Map a generated {line, column} back to its original position.
  originalPositionFor(generatedPosition) {
    let index = this.findClosest(
      generatedPosition.line,
      generatedPosition.column,
      'generated'
    );
    return {
      source: this.mappings[index].source,
      name: this.mappings[index].name,
      line: this.mappings[index].original.line,
      column: this.mappings[index].original.column
    };
  }
  // Map an original {line, column} forward to its generated position.
  generatedPositionFor(originalPosition) {
    let index = this.findClosest(
      originalPosition.line,
      originalPosition.column,
      'original'
    );
    return {
      source: this.mappings[index].source,
      name: this.mappings[index].name,
      line: this.mappings[index].generated.line,
      column: this.mappings[index].generated.column
    };
  }
  sourceContentFor(fileName) {
    return this.sources[fileName];
  }
  // Shift every generated position in place (used when a bundle prepends
  // code before this asset's output).
  offset(lineOffset = 0, columnOffset = 0) {
    this.mappings.map(mapping => {
      mapping.generated.line = mapping.generated.line + lineOffset;
      mapping.generated.column = mapping.generated.column + columnOffset;
      return mapping;
    });
    if (this.lineCount != null) {
      this.lineCount += lineOffset;
    }
  }
  // Serialize to a standard source map JSON string.
  stringify(file, sourceRoot) {
    let generator = new SourceMapGenerator({file, sourceRoot});
    this.eachMapping(mapping => generator.addMapping(mapping));
    Object.keys(this.sources).forEach(sourceName =>
      generator.setSourceContent(sourceName, this.sources[sourceName])
    );
    return generator.toString();
  }
}
module.exports = SourceMap;

138
VISUALIZACION/node_modules/parcel-bundler/src/Watcher.js generated vendored Executable file
View file

@ -0,0 +1,138 @@
const {FSWatcher} = require('chokidar');
const Path = require('path');
/**
* This watcher wraps chokidar so that we watch directories rather than individual files.
* This prevents us from hitting EMFILE errors when running out of file descriptors.
*/
class Watcher {
  constructor() {
    // FS events on macOS are flakey in the tests, which write lots of files very quickly
    // See https://github.com/paulmillr/chokidar/issues/612
    this.shouldWatchDirs = process.env.NODE_ENV !== 'test';
    this.watcher = new FSWatcher({
      useFsEvents: this.shouldWatchDirs,
      ignoreInitial: true,
      ignored: /\.cache|\.git/
    });
    // Reference counts per watched directory: how many watched files each
    // directory watch is standing in for.
    this.watchedDirectories = new Map();
    // Only close the watcher after the ready event is emitted
    this.ready = false;
    this.stopped = false;
    this.watcher.once('ready', () => {
      this.ready = true;
      // stop() was called before chokidar was ready; close now that it is.
      if (this.stopped) {
        this.watcher.close();
      }
    });
  }
  /**
   * Find a parent directory of `path` which is already watched
   */
  getWatchedParent(path) {
    path = Path.dirname(path);
    let root = Path.parse(path).root;
    while (path !== root) {
      if (this.watchedDirectories.has(path)) {
        return path;
      }
      path = Path.dirname(path);
    }
    return null;
  }
  /**
   * Find a list of child directories of `path` which are already watched
   */
  getWatchedChildren(path) {
    path = Path.dirname(path) + Path.sep;
    let res = [];
    for (let dir of this.watchedDirectories.keys()) {
      if (dir.startsWith(path)) {
        res.push(dir);
      }
    }
    return res;
  }
  /**
   * Add a path to the watcher
   */
  watch(path) {
    if (this.shouldWatchDirs) {
      // If there is no parent directory already watching this path, add a new watcher.
      let parent = this.getWatchedParent(path);
      if (!parent) {
        // Find watchers on child directories, and remove them. They will be handled by the new parent watcher.
        let children = this.getWatchedChildren(path);
        let count = 1;
        for (let dir of children) {
          // Fold each child's reference count into the new parent watcher.
          count += this.watchedDirectories.get(dir);
          this.watcher._closePath(dir);
          this.watchedDirectories.delete(dir);
        }
        let dir = Path.dirname(path);
        this.watcher.add(dir);
        this.watchedDirectories.set(dir, count);
      } else {
        // Otherwise, increment the reference count of the parent watcher.
        this.watchedDirectories.set(
          parent,
          this.watchedDirectories.get(parent) + 1
        );
      }
    } else {
      // Test mode: watch the individual file directly.
      this.watcher.add(path);
    }
  }
  /**
   * Remove a path from the watcher
   */
  unwatch(path) {
    if (this.shouldWatchDirs) {
      let dir = this.getWatchedParent(path);
      if (dir) {
        // When the count of files watching a directory reaches zero, unwatch it.
        let count = this.watchedDirectories.get(dir) - 1;
        if (count === 0) {
          this.watchedDirectories.delete(dir);
          this.watcher.unwatch(dir);
        } else {
          this.watchedDirectories.set(dir, count);
        }
      }
    } else {
      this.watcher.unwatch(path);
    }
  }
  /**
   * Add an event handler
   */
  on(event, callback) {
    this.watcher.on(event, callback);
  }
  /**
   * Stop watching all paths
   */
  stop() {
    this.stopped = true;
    // Closing before chokidar's 'ready' event can hang; if not ready yet,
    // the constructor's ready handler performs the close instead.
    if (this.ready) {
      this.watcher.close();
    }
  }
}
module.exports = Watcher;

View file

@ -0,0 +1,176 @@
const Asset = require('../Asset');
const postcss = require('postcss');
const valueParser = require('postcss-value-parser');
const postcssTransform = require('../transforms/postcss');
const CssSyntaxError = require('postcss/lib/css-syntax-error');
const URL_RE = /url\s*\("?(?![a-z]+:)/;
const IMPORT_RE = /@import/;
const PROTOCOL_RE = /^[a-z]+:/;
/**
 * CSS asset type. Parses stylesheets with postcss, turning `@import` rules
 * and `url()` references into parcel dependencies, and generates both the
 * final CSS and a small companion JS module (HMR hook / CSS module exports).
 */
class CSSAsset extends Asset {
  constructor(name, pkg, options) {
    super(name, pkg, options);
    this.type = 'css';
  }

  /** Cheap pre-check so files that cannot reference anything skip parsing. */
  mightHaveDependencies() {
    return (
      !/\.css$/.test(this.name) ||
      IMPORT_RE.test(this.contents) ||
      URL_RE.test(this.contents)
    );
  }

  parse(code) {
    let root = postcss.parse(code, {from: this.name, to: this.name});
    return new CSSAst(code, root);
  }

  collectDependencies() {
    // @import rules become dependencies and are removed from the output; the
    // imported stylesheet is bundled instead.
    this.ast.root.walkAtRules('import', rule => {
      let params = valueParser(rule.params).nodes;
      let [name, ...media] = params;
      let dep;
      if (name.type === 'string') {
        dep = name.value;
      } else if (
        name.type === 'function' &&
        name.value === 'url' &&
        name.nodes.length
      ) {
        dep = name.nodes[0].value;
      }

      if (!dep) {
        throw new Error('Could not find import name for ' + rule);
      }

      // Leave protocol (e.g. http:) imports untouched.
      if (PROTOCOL_RE.test(dep)) {
        return;
      }

      media = valueParser.stringify(media).trim();
      this.addDependency(dep, {media, loc: rule.source.start});

      rule.remove();
      this.ast.dirty = true;
    });

    // Rewrite url() references in declarations to the bundled asset URLs.
    this.ast.root.walkDecls(decl => {
      if (URL_RE.test(decl.value)) {
        let parsed = valueParser(decl.value);
        let dirty = false;

        parsed.walk(node => {
          if (
            node.type === 'function' &&
            node.value === 'url' &&
            node.nodes.length
          ) {
            let url = this.addURLDependency(node.nodes[0].value, {
              loc: decl.source.start
            });
            dirty = node.nodes[0].value !== url;
            node.nodes[0].value = url;
          }
        });

        if (dirty) {
          decl.value = parsed.toString();
          this.ast.dirty = true;
        }
      }
    });
  }

  async transform() {
    await postcssTransform(this);
  }

  getCSSAst() {
    // Converts the ast to a CSS ast if needed, so we can apply postcss transforms.
    if (!(this.ast instanceof CSSAst)) {
      this.ast = CSSAsset.prototype.parse.call(this, this.ast.render());
    }

    return this.ast.root;
  }

  generate() {
    let css = this.ast ? this.ast.render() : this.contents;

    let js = '';
    if (this.options.hmr) {
      this.addDependency('_css_loader');

      js = `
        var reloadCSS = require('_css_loader');
        module.hot.dispose(reloadCSS);
        module.hot.accept(reloadCSS);
      `;
    }

    if (this.cssModules) {
      // Fix: JSON.stringify's replacer must be a function, an array, or null.
      // The previous `false` only worked because invalid replacers are ignored.
      js +=
        'module.exports = ' + JSON.stringify(this.cssModules, null, 2) + ';';
    }

    return [
      {
        type: 'css',
        value: css,
        cssModules: this.cssModules
      },
      {
        type: 'js',
        value: js,
        final: true
      }
    ];
  }

  generateErrorMessage(err) {
    // Wrap the error in a CssSyntaxError if needed so we can generate a code frame
    if (err.loc && !err.showSourceCode) {
      err = new CssSyntaxError(
        err.message,
        err.loc.line,
        err.loc.column,
        this.contents
      );
    }

    err.message = err.reason || err.message;
    err.loc = {
      line: err.line,
      column: err.column
    };

    if (err.showSourceCode) {
      err.codeFrame = err.showSourceCode();
      err.highlightedCodeFrame = err.showSourceCode(true);
    }

    return err;
  }
}
/**
 * Lightweight pairing of a postcss root node with its rendered CSS text.
 * The cached text is regenerated from the tree only after a transform has
 * marked the AST dirty.
 */
class CSSAst {
  constructor(css, root) {
    this.css = css;
    this.root = root;
    this.dirty = false;
  }

  /** Return the CSS text, re-stringifying the tree when it was modified. */
  render() {
    if (!this.dirty) {
      return this.css;
    }

    this.css = '';
    postcss.stringify(this.root, chunk => {
      this.css += chunk;
    });
    return this.css;
  }
}
module.exports = CSSAsset;

View file

@ -0,0 +1,36 @@
const Asset = require('../Asset');
const localRequire = require('../utils/localRequire');
/**
 * Compiles CoffeeScript sources to JavaScript using the `coffeescript`
 * package installed in the user's project.
 */
class CoffeeScriptAsset extends Asset {
  constructor(name, pkg, options) {
    super(name, pkg, options);
    this.type = 'js';
  }

  async generate() {
    // The compiler must come from the project being bundled.
    const coffee = await localRequire('coffeescript', this.name);

    const transpiled = coffee.compile(this.contents, {
      sourceMap: this.options.sourceMaps
    });

    // With `sourceMap: true`, compile() returns {js, sourceMap, v3SourceMap};
    // otherwise it returns the JavaScript source directly.
    let sourceMap;
    if (transpiled.sourceMap) {
      sourceMap = transpiled.sourceMap.generate();
      sourceMap.sources = [this.relativeName];
      sourceMap.sourcesContent = [this.contents];
    }

    const value = this.options.sourceMaps ? transpiled.js : transpiled;
    return [
      {
        type: 'js',
        value,
        sourceMap
      }
    ];
  }
}
module.exports = CoffeeScriptAsset;

View file

@ -0,0 +1,62 @@
const Asset = require('../Asset');
const localRequire = require('../utils/localRequire');
const path = require('path');
const promisify = require('../utils/promisify');
const Resolver = require('../Resolver');
/**
 * Compiles GLSL shaders (with glslify support) into a JS module that
 * exports the bundled shader source as a string.
 */
class GLSLAsset extends Asset {
  constructor(name, pkg, options) {
    super(name, pkg, options);
    this.type = 'js';
  }

  async parse() {
    const glslifyDeps = await localRequire('glslify-deps', this.name);

    // Use the Parcel resolver rather than the default glslify one.
    // This adds support for parcel features like aliases, and tilde paths.
    const resolver = new Resolver({
      extensions: ['.glsl', '.vert', '.frag'],
      rootDir: this.options.rootDir
    });

    // Parse and collect dependencies with glslify-deps.
    const cwd = path.dirname(this.name);
    const depper = glslifyDeps({
      cwd,
      resolve: async (target, opts, next) => {
        try {
          const res = await resolver.resolve(
            target,
            path.join(opts.basedir, 'index')
          );
          next(null, res.path);
        } catch (err) {
          next(err);
        }
      }
    });

    const inline = promisify(depper.inline.bind(depper));
    return await inline(this.contents, cwd);
  }

  collectDependencies() {
    // The entry is the asset itself; everything else is an imported shader.
    for (const dep of this.ast) {
      if (dep.entry) {
        continue;
      }
      this.addDependency(dep.file, {includedInParent: true});
    }
  }

  async generate() {
    // Generate the bundled glsl file.
    const glslifyBundle = await localRequire('glslify-bundle', this.name);
    const glsl = glslifyBundle(this.ast);

    return {
      js: `module.exports=${JSON.stringify(glsl)};`
    };
  }
}
module.exports = GLSLAsset;

View file

@ -0,0 +1,84 @@
const Asset = require('../Asset');
const glob = require('glob');
const micromatch = require('micromatch');
const path = require('path');
/**
 * Expands a glob pattern (e.g. `require('./files/*.js')`) into a nested
 * object of `require()` calls, one per matched file.
 */
class GlobAsset extends Asset {
  constructor(name, pkg, options) {
    super(name, pkg, options);
    this.type = null; // allows this asset to be included in any type bundle
  }

  async load() {
    // glob patterns use forward slashes, so normalize Windows separators.
    let regularExpressionSafeName = this.name;
    if (process.platform === 'win32')
      regularExpressionSafeName = regularExpressionSafeName.replace(/\\/g, '/');

    let files = glob.sync(regularExpressionSafeName, {
      strict: true,
      nodir: true
    });

    // Build a capturing regex from the glob so each wildcard segment of a
    // match becomes part of the key path in the result object.
    let re = micromatch.makeRe(regularExpressionSafeName, {capture: true});
    let matches = {};

    for (let file of files) {
      let match = file.match(re);
      // Flatten capture groups into individual path segments.
      let parts = match
        .slice(1)
        .filter(Boolean)
        .reduce((a, p) => a.concat(p.split('/')), []);
      let relative =
        './' + path.relative(path.dirname(this.name), file.normalize('NFC'));
      set(matches, parts, relative);
      this.addDependency(relative);
    }

    return matches;
  }

  generate() {
    return {
      js: 'module.exports = ' + generate(this.contents) + ';'
    };
  }
}
/**
 * Serialize a nested match object into JavaScript source: string leaves
 * become `require()` calls, objects become indented object literals.
 */
function generate(matches, indent = '') {
  if (typeof matches === 'string') {
    return `require(${JSON.stringify(matches)})`;
  }

  const entries = Object.keys(matches).map(
    key =>
      `\n${indent}  ${JSON.stringify(key)}: ${generate(
        matches[key],
        indent + '  '
      )}`
  );

  return indent + '{' + entries.join(',') + '\n' + indent + '}';
}
/**
 * Assign `value` at the nested key path `path` inside `obj`, creating any
 * missing intermediate objects along the way.
 */
function set(obj, path, value) {
  const target = path.slice(0, -1).reduce((node, part) => {
    if (node[part] == null) {
      node[part] = {};
    }
    return node[part];
  }, obj);

  target[path[path.length - 1]] = value;
}
module.exports = GlobAsset;

View file

@ -0,0 +1,22 @@
const Asset = require('../Asset');
const localRequire = require('../utils/localRequire');
/**
 * Turns a .graphql/.gql file into a JS module that exports the parsed
 * graphql-tag document.
 */
class GraphqlAsset extends Asset {
  constructor(name, pkg, options) {
    super(name, pkg, options);
    this.type = 'js';
  }

  async parse(code) {
    // graphql-tag must be installed locally in the user's project.
    let gql = await localRequire('graphql-tag', this.name);
    return gql(code);
  }

  generate() {
    // Fix: JSON.stringify's replacer must be a function, an array, or null.
    // The previous `false` only worked because invalid replacers are ignored.
    return {
      js: `module.exports=${JSON.stringify(this.ast, null, 2)};`
    };
  }
}
module.exports = GraphqlAsset;

View file

@ -0,0 +1,168 @@
const Asset = require('../Asset');
const parse = require('posthtml-parser');
const api = require('posthtml/lib/api');
const urlJoin = require('../utils/urlJoin');
const render = require('posthtml-render');
const posthtmlTransform = require('../transforms/posthtml');
const htmlnanoTransform = require('../transforms/htmlnano');
const isURL = require('../utils/is-url');
// A list of all attributes that may produce a dependency
// Based on https://developer.mozilla.org/en-US/docs/Web/HTML/Attributes
const ATTRS = {
src: [
'script',
'img',
'audio',
'video',
'source',
'track',
'iframe',
'embed'
],
href: ['link', 'a', 'use'],
srcset: ['img', 'source'],
poster: ['video'],
'xlink:href': ['use'],
content: ['meta'],
data: ['object']
};
// A list of metadata that should produce a dependency
// Based on:
// - http://schema.org/
// - http://ogp.me
// - https://developer.twitter.com/en/docs/tweets/optimize-with-cards/overview/markup
// - https://msdn.microsoft.com/en-us/library/dn255024.aspx
const META = {
property: [
'og:image',
'og:image:url',
'og:image:secure_url',
'og:audio',
'og:audio:secure_url',
'og:video',
'og:video:secure_url'
],
name: [
'twitter:image',
'msapplication-square150x150logo',
'msapplication-square310x310logo',
'msapplication-square70x70logo',
'msapplication-wide310x150logo',
'msapplication-TileImage'
],
itemprop: [
'image',
'logo',
'screenshot',
'thumbnailUrl',
'contentUrl',
'downloadUrl'
]
};
// Options to be passed to `addURLDependency` for certain tags + attributes
const OPTIONS = {
a: {
href: {entry: true}
},
iframe: {
src: {entry: true}
}
};
/**
 * HTML asset type. Parses markup with posthtml, registers a dependency for
 * every attribute that can reference another asset (src, href, srcset,
 * known meta images, etc.) and rewrites those references to bundled URLs.
 */
class HTMLAsset extends Asset {
  constructor(name, pkg, options) {
    super(name, pkg, options);
    this.type = 'html';
    this.isAstDirty = false;
  }

  parse(code) {
    let res = parse(code, {lowerCaseAttributeNames: true});
    // posthtml-parser returns a plain tree; borrow walk/match from posthtml.
    res.walk = api.walk;
    res.match = api.match;
    return res;
  }

  /** Register one URL as a dependency and return its public path. */
  processSingleDependency(path, opts) {
    let assetPath = this.addURLDependency(path, opts);
    if (!isURL(assetPath)) {
      assetPath = urlJoin(this.options.publicURL, assetPath);
    }
    return assetPath;
  }

  /**
   * Process a `srcset` attribute: a comma-separated list of
   * "url [descriptor]" candidates. Each URL becomes a dependency.
   */
  collectSrcSetDependencies(srcset, opts) {
    const newSources = [];
    for (const source of srcset.split(',')) {
      const pair = source.trim().split(' ');
      // Fix: skip empty candidates (e.g. from a trailing comma). The previous
      // `pair.length === 0` guard could never trigger because String#split
      // always returns at least one element, so '' was registered as a
      // dependency.
      if (!pair[0]) continue;
      pair[0] = this.processSingleDependency(pair[0], opts);
      newSources.push(pair.join(' '));
    }
    return newSources.join(',');
  }

  getAttrDepHandler(attr) {
    if (attr === 'srcset') {
      return this.collectSrcSetDependencies;
    }

    return this.processSingleDependency;
  }

  collectDependencies() {
    this.ast.walk(node => {
      if (node.attrs) {
        // <meta> only produces a dependency for known image/url metadata.
        if (node.tag === 'meta') {
          if (
            !Object.keys(node.attrs).some(attr => {
              let values = META[attr];
              return values && values.includes(node.attrs[attr]);
            })
          ) {
            return node;
          }
        }

        for (let attr in node.attrs) {
          let elements = ATTRS[attr];
          // Check for virtual paths
          if (node.tag === 'a' && node.attrs[attr].lastIndexOf('.') < 1) {
            continue;
          }
          if (elements && elements.includes(node.tag)) {
            let depHandler = this.getAttrDepHandler(attr);
            let options = OPTIONS[node.tag];
            node.attrs[attr] = depHandler.call(
              this,
              node.attrs[attr],
              options && options[attr]
            );
            this.isAstDirty = true;
          }
        }
      }

      return node;
    });
  }

  async pretransform() {
    await posthtmlTransform(this);
  }

  async transform() {
    if (this.options.minify) {
      await htmlnanoTransform(this);
    }
  }

  generate() {
    // Only re-render when the tree was actually modified.
    return this.isAstDirty ? render(this.ast) : this.contents;
  }
}
module.exports = HTMLAsset;

View file

@ -0,0 +1,200 @@
const {File: BabelFile} = require('babel-core');
const traverse = require('babel-traverse').default;
const codeFrame = require('babel-code-frame');
const collectDependencies = require('../visitors/dependencies');
const walk = require('babylon-walk');
const Asset = require('../Asset');
const babylon = require('babylon');
const insertGlobals = require('../visitors/globals');
const fsVisitor = require('../visitors/fs');
const envVisitor = require('../visitors/env');
const babel = require('../transforms/babel');
const generate = require('babel-generator').default;
const uglify = require('../transforms/uglify');
const SourceMap = require('../SourceMap');
const IMPORT_RE = /\b(?:import\b|export\b|require\s*\()/;
const ENV_RE = /\b(?:process\.env)\b/;
const GLOBAL_RE = /\b(?:process|__dirname|__filename|global|Buffer|define)\b/;
const FS_RE = /\breadFileSync\b/;
const SW_RE = /\bnavigator\s*\.\s*serviceWorker\s*\.\s*register\s*\(/;
const WORKER_RE = /\bnew\s*Worker\s*\(/;
/**
 * The core JavaScript asset type. Parses sources with babylon, collects
 * dependencies (require/import/worker/service-worker registrations), inlines
 * environment variables, shims node globals for browser targets, and
 * generates output (optionally minified) with source maps.
 */
class JSAsset extends Asset {
  constructor(name, pkg, options) {
    super(name, pkg, options);
    this.type = 'js';
    this.globals = new Map();
    this.isAstDirty = false;
    this.isES6Module = false;
    this.outputCode = null;
    this.cacheData.env = {};
    // A previous compiler in the pipeline (e.g. TypeScript) may hand us a map.
    this.sourceMap = options.rendition ? options.rendition.sourceMap : null;
  }

  /** Invalidate the cache when any environment variable we inlined changed. */
  shouldInvalidate(cacheData) {
    for (let key in cacheData.env) {
      if (cacheData.env[key] !== process.env[key]) {
        return true;
      }
    }

    return false;
  }

  /**
   * Cheap regex pre-check so files that cannot contain dependencies skip a
   * full parse.
   */
  mightHaveDependencies() {
    return (
      this.isAstDirty ||
      // Fix: was /.js$/ — the unescaped dot matched any name ending in "js"
      // (e.g. ".mjs"); escape it to test the ".js" extension, matching the
      // /\.css$/ pattern used by CSSAsset.
      !/\.js$/.test(this.name) ||
      IMPORT_RE.test(this.contents) ||
      GLOBAL_RE.test(this.contents) ||
      SW_RE.test(this.contents) ||
      WORKER_RE.test(this.contents)
    );
  }

  async getParserOptions() {
    // Babylon options. We enable a few plugins by default.
    const options = {
      filename: this.name,
      allowReturnOutsideFunction: true,
      allowHashBang: true,
      ecmaVersion: Infinity,
      strictMode: false,
      sourceType: 'module',
      locations: true,
      plugins: ['exportExtensions', 'dynamicImport']
    };

    // Check if there is a babel config file. If so, determine which parser plugins to enable
    this.babelConfig = await babel.getConfig(this);
    if (this.babelConfig) {
      const file = new BabelFile(this.babelConfig);
      options.plugins.push(...file.parserOpts.plugins);
    }

    return options;
  }

  async parse(code) {
    const options = await this.getParserOptions();
    return babylon.parse(code, options);
  }

  traverse(visitor) {
    return traverse(this.ast, visitor, null, this);
  }

  traverseFast(visitor) {
    return walk.simple(this.ast, visitor, this);
  }

  collectDependencies() {
    walk.ancestor(this.ast, collectDependencies, this);
  }

  async pretransform() {
    await babel(this);

    // Inline environment variables
    if (ENV_RE.test(this.contents)) {
      await this.parseIfNeeded();
      this.traverseFast(envVisitor);
    }
  }

  async transform() {
    if (this.options.target === 'browser') {
      // Shim fs.readFileSync calls and node globals for browser builds.
      if (this.dependencies.has('fs') && FS_RE.test(this.contents)) {
        await this.parseIfNeeded();
        this.traverse(fsVisitor);
      }

      if (GLOBAL_RE.test(this.contents)) {
        await this.parseIfNeeded();
        walk.ancestor(this.ast, insertGlobals, this);
      }
    }

    // ES6 modules detected during dependency collection must be compiled.
    if (this.isES6Module) {
      await babel(this);
    }

    if (this.options.minify) {
      await uglify(this);
    }
  }

  async generate() {
    let code;
    if (this.isAstDirty) {
      let opts = {
        sourceMaps: this.options.sourceMaps,
        sourceFileName: this.relativeName
      };

      let generated = generate(this.ast, opts, this.contents);

      if (this.options.sourceMaps && generated.rawMappings) {
        let rawMap = new SourceMap(generated.rawMappings, {
          [this.relativeName]: this.contents
        });

        // Check if we already have a source map (e.g. from TypeScript or CoffeeScript)
        // In that case, we need to map the original source map to the babel generated one.
        if (this.sourceMap) {
          this.sourceMap = await new SourceMap().extendSourceMap(
            this.sourceMap,
            rawMap
          );
        } else {
          this.sourceMap = rawMap;
        }
      }

      code = generated.code;
    } else {
      code = this.outputCode || this.contents;
    }

    if (this.options.sourceMaps && !this.sourceMap) {
      this.sourceMap = new SourceMap().generateEmptyMap(
        this.relativeName,
        this.contents
      );
    }

    if (this.globals.size > 0) {
      // Global shims are prepended, so shift the source map down to match.
      code = Array.from(this.globals.values()).join('\n') + '\n' + code;
      if (this.options.sourceMaps) {
        if (!(this.sourceMap instanceof SourceMap)) {
          this.sourceMap = await new SourceMap().addMap(this.sourceMap);
        }

        this.sourceMap.offset(this.globals.size);
      }
    }

    return {
      js: code,
      map: this.sourceMap
    };
  }

  generateErrorMessage(err) {
    const loc = err.loc;
    if (loc) {
      // Babel columns are 0-based; code frames expect 1-based columns.
      err.codeFrame = codeFrame(this.contents, loc.line, loc.column + 1);
      err.highlightedCodeFrame = codeFrame(
        this.contents,
        loc.line,
        loc.column + 1,
        {highlightCode: true}
      );
    }

    return err;
  }
}
module.exports = JSAsset;

View file

@ -0,0 +1,36 @@
const Asset = require('../Asset');
const path = require('path');
const json5 = require('json5');
const {minify} = require('uglify-es');
/**
 * Wraps JSON/JSON5 files as CommonJS modules. Plain JSON is valid JS and is
 * inlined verbatim; JSON5 is parsed and re-serialized as standard JSON.
 */
class JSONAsset extends Asset {
  constructor(name, pkg, options) {
    super(name, pkg, options);
    this.type = 'js';
  }

  parse(code) {
    // Only JSON5 needs parsing; regular JSON is emitted as-is.
    if (path.extname(this.name) === '.json5') {
      return json5.parse(code);
    }
    return null;
  }

  generate() {
    const body = this.ast ? JSON.stringify(this.ast, null, 2) : this.contents;
    let code = `module.exports = ${body};`;

    if (this.options.minify) {
      const minified = minify(code);
      if (minified.error) {
        throw minified.error;
      }
      code = minified.code;
    }

    return {
      js: code
    };
  }
}
module.exports = JSONAsset;

View file

@ -0,0 +1,62 @@
const Asset = require('../Asset');
const localRequire = require('../utils/localRequire');
const promisify = require('../utils/promisify');
/**
 * Compiles LESS stylesheets using the `less` package installed in the user's
 * project. A custom plugin rewrites url() references to parcel dependencies.
 */
class LESSAsset extends Asset {
  constructor(name, pkg, options) {
    super(name, pkg, options);
    this.type = 'css';
  }

  async parse(code) {
    // less should be installed locally in the module that's being required
    let less = await localRequire('less', this.name);
    let render = promisify(less.render.bind(less));

    // Options come from package.json ("less") or a .lessrc / .lessrc.js file.
    let opts = Object.assign(
      {},
      this.package.less || (await this.getConfig(['.lessrc', '.lessrc.js']))
    );
    opts.filename = this.name;
    opts.plugins = (opts.plugins || []).concat(urlPlugin(this));

    // The render result object (carrying `css` and `imports`) is stored as
    // this asset's "ast".
    return await render(code, opts);
  }

  collectDependencies() {
    // Imported .less files are compiled into this asset's output, so they
    // are tracked as part of the parent rather than as separate bundles.
    for (let dep of this.ast.imports) {
      this.addDependency(dep, {includedInParent: true});
    }
  }

  generate() {
    return [
      {
        type: 'css',
        value: this.ast ? this.ast.css : '',
        // Output is fully resolved by less; skip further dependency scanning.
        hasDependencies: false
      }
    ];
  }
}
/**
 * Build a less plugin that rewrites every url() in the stylesheet into a
 * parcel asset dependency of `asset`.
 */
function urlPlugin(asset) {
  const install = (less, pluginManager) => {
    const visitor = new less.visitors.Visitor({
      visitUrl: node => {
        node.value.value = asset.addURLDependency(
          node.value.value,
          node.currentFileInfo.filename
        );
        return node;
      }
    });

    // less invokes `run`; delegate to the base visitor's visit method.
    visitor.run = visitor.visit;
    pluginManager.addVisitor(visitor);
  };

  return {install};
}
module.exports = LESSAsset;

View file

@ -0,0 +1,39 @@
const path = require('path');
const Asset = require('../Asset');
const localRequire = require('../utils/localRequire');
/**
 * Renders Pug templates to HTML at build time, honoring an optional
 * .pugrc / .pugrc.js / pug.config.js for filter configuration.
 */
class PugAsset extends Asset {
  constructor(name, pkg, options) {
    super(name, pkg, options);
    this.type = 'html';
  }

  async generate() {
    const pug = await localRequire('pug', this.name);
    const config =
      (await this.getConfig(['.pugrc', '.pugrc.js', 'pug.config.js'])) || {};

    const template = pug.compile(this.contents, {
      compileDebug: false,
      filename: this.name,
      basedir: path.dirname(this.name),
      pretty: !this.options.minify,
      templateName: path.basename(this.basename, path.extname(this.basename)),
      filters: config.filters,
      filterOptions: config.filterOptions,
      filterAliases: config.filterAliases
    });

    // Included/extended templates must be watched so edits trigger rebuilds.
    for (const item of template.dependencies || []) {
      this.addDependency(item, {
        includedInParent: true
      });
    }

    return template();
  }
}
module.exports = PugAsset;

View file

@ -0,0 +1,31 @@
const Asset = require('../Asset');
const urlJoin = require('../utils/urlJoin');
const md5 = require('../utils/md5');
/**
 * Passthrough asset type (images, fonts, ...). The file itself is copied to
 * the output by the RawPackager; the JS output only exports its public URL.
 */
class RawAsset extends Asset {
  // Don't load raw assets. They will be copied by the RawPackager directly.
  load() {}

  generate() {
    // If a bundle loader is configured for this type, emit nothing so the
    // real asset is preloaded before the JS bundle runs.
    const hasLoader = Boolean(this.options.bundleLoaders[this.type]);
    if (hasLoader) {
      return {};
    }

    const publicPath = urlJoin(
      this.options.publicURL,
      this.generateBundleName()
    );
    return {
      js: `module.exports=${JSON.stringify(publicPath)};`
    };
  }

  async generateHash() {
    // Hash the file on disk since its contents are never read into memory.
    return await md5.file(this.name);
  }
}
module.exports = RawAsset;

View file

@ -0,0 +1,30 @@
const Asset = require('../Asset');
const fs = require('../utils/fs');
const localRequire = require('../utils/localRequire');
/**
 * Compiles Reason/OCaml sources to JavaScript via BuckleScript (bsb-js).
 */
class ReasonAsset extends Asset {
  constructor(name, pkg, options) {
    super(name, pkg, options);
    this.type = 'js';
  }

  async generate() {
    const bsb = await localRequire('bsb-js', this.name);

    // bsb-js runs BuckleScript (the Reason -> JS compiler) and handles its
    // own localRequire and error reporting internally. The build is skipped
    // under test to keep the test suite fast.
    if (process.env.NODE_ENV !== 'test') {
      await bsb.runBuild();
    }

    // BuckleScript writes the compiled output next to the source file with a
    // `.bs.js` suffix; read it back and use it as this asset's JS output.
    const outputFile = this.name.replace(/\.(re|ml)$/, '.bs.js');
    const buffer = await fs.readFile(outputFile);
    return buffer.toString();
  }
}
module.exports = ReasonAsset;

View file

@ -0,0 +1,215 @@
const path = require('path');
const commandExists = require('command-exists');
const childProcess = require('child_process');
const promisify = require('../utils/promisify');
const exec = promisify(childProcess.execFile);
const tomlify = require('tomlify-j0.4');
const fs = require('../utils/fs');
const Asset = require('../Asset');
const config = require('../utils/config');
const pipeSpawn = require('../utils/pipeSpawn');
const md5 = require('../utils/md5');
const RUST_TARGET = 'wasm32-unknown-unknown';
const MAIN_FILES = ['src/lib.rs', 'src/main.rs'];
// Track installation status so we don't need to check more than once
let rustInstalled = false;
let wasmGCInstalled = false;
/**
 * Compiles Rust sources to WebAssembly. Uses `cargo` when the file is the
 * main entry of a Cargo project, and plain `rustc` otherwise. The nightly
 * toolchain and the wasm32-unknown-unknown target are installed on demand.
 */
class RustAsset extends Asset {
  constructor(name, pkg, options) {
    super(name, pkg, options);
    this.type = 'wasm';
  }

  process() {
    // We don't want to process this asset if the worker is in a warm up phase
    // since the asset will also be processed by the main process, which
    // may cause errors since rust writes to the filesystem.
    if (this.options.isWarmUp) {
      return;
    }

    return super.process();
  }

  async parse() {
    // Install rust toolchain and target if needed
    await this.installRust();

    // See if there is a Cargo config in the project
    let cargoConfig = await this.getConfig(['Cargo.toml']);
    let cargoDir;
    let isMainFile = false;
    if (cargoConfig) {
      const mainFiles = MAIN_FILES.slice();
      if (cargoConfig.lib && cargoConfig.lib.path) {
        mainFiles.push(cargoConfig.lib.path);
      }

      cargoDir = path.dirname(await config.resolve(this.name, ['Cargo.toml']));
      isMainFile = mainFiles.some(
        file => path.join(cargoDir, file) === this.name
      );
    }

    // If this is the main file of a Cargo build, use the cargo command to compile.
    // Otherwise, use rustc directly.
    if (isMainFile) {
      await this.cargoBuild(cargoConfig, cargoDir);
    } else {
      await this.rustcBuild();
    }

    // If this is a prod build, use wasm-gc to remove unused code
    if (this.options.minify) {
      await this.installWasmGC();
      await exec('wasm-gc', [this.wasmPath, this.wasmPath]);
    }
  }

  /** Ensure rustup, the nightly toolchain, and the wasm target are present. */
  async installRust() {
    if (rustInstalled) {
      return;
    }

    // Check for rustup
    try {
      await commandExists('rustup');
    } catch (e) {
      throw new Error(
        "Rust isn't installed. Visit https://www.rustup.rs/ for more info"
      );
    }

    // Ensure nightly toolchain is installed
    let [stdout] = await exec('rustup', ['show']);
    if (!stdout.includes('nightly')) {
      await pipeSpawn('rustup', ['update']);
      await pipeSpawn('rustup', ['toolchain', 'install', 'nightly']);
    }

    // Ensure wasm target is installed
    [stdout] = await exec('rustup', [
      'target',
      'list',
      '--toolchain',
      'nightly'
    ]);
    if (!stdout.includes(RUST_TARGET + ' (installed)')) {
      await pipeSpawn('rustup', [
        'target',
        'add',
        RUST_TARGET,
        '--toolchain',
        'nightly'
      ]);
    }

    // Module-level flag: only check once per process.
    rustInstalled = true;
  }

  /** Install wasm-gc from git if it is not already on the PATH. */
  async installWasmGC() {
    if (wasmGCInstalled) {
      return;
    }

    try {
      await commandExists('wasm-gc');
    } catch (e) {
      await pipeSpawn('cargo', [
        'install',
        '--git',
        'https://github.com/alexcrichton/wasm-gc'
      ]);
    }

    wasmGCInstalled = true;
  }

  async cargoBuild(cargoConfig, cargoDir) {
    // Ensure the cargo config has cdylib as the crate-type
    if (!cargoConfig.lib) {
      cargoConfig.lib = {};
    }

    if (!Array.isArray(cargoConfig.lib['crate-type'])) {
      cargoConfig.lib['crate-type'] = [];
    }

    if (!cargoConfig.lib['crate-type'].includes('cdylib')) {
      cargoConfig.lib['crate-type'].push('cdylib');
      // Persist the modified Cargo.toml so cargo picks up the crate-type.
      await fs.writeFile(
        path.join(cargoDir, 'Cargo.toml'),
        tomlify.toToml(cargoConfig)
      );
    }

    // Run cargo
    let args = ['+nightly', 'build', '--target', RUST_TARGET, '--release'];
    await exec('cargo', args, {cwd: cargoDir});

    // Get output file paths
    let outDir = path.join(cargoDir, 'target', RUST_TARGET, 'release');

    // Rust converts '-' to '_' when outputting files.
    let rustName = cargoConfig.package.name.replace(/-/g, '_');
    this.wasmPath = path.join(outDir, rustName + '.wasm');
    this.depsPath = path.join(outDir, rustName + '.d');
  }

  async rustcBuild() {
    // Get output filename
    await fs.mkdirp(this.options.cacheDir);
    let name = md5(this.name);
    this.wasmPath = path.join(this.options.cacheDir, name + '.wasm');

    // Run rustc to compile the code
    const args = [
      '+nightly',
      '--target',
      RUST_TARGET,
      '-O',
      '--crate-type=cdylib',
      this.name,
      '-o',
      this.wasmPath
    ];

    await exec('rustc', args);

    // Run again to collect dependencies
    this.depsPath = path.join(this.options.cacheDir, name + '.d');
    await exec('rustc', [this.name, '--emit=dep-info', '-o', this.depsPath]);
  }

  async collectDependencies() {
    // Read deps file
    let contents = await fs.readFile(this.depsPath, 'utf8');
    let dir = path.dirname(this.name);

    // Skip the first line (the output target) and register every remaining
    // source file so the watcher rebuilds when any of them change.
    let deps = contents
      .split('\n')
      .filter(Boolean)
      .slice(1);

    for (let dep of deps) {
      dep = path.resolve(dir, dep.slice(0, dep.indexOf(':')));
      if (dep !== this.name) {
        this.addDependency(dep, {includedInParent: true});
      }
    }
  }

  async generate() {
    return {
      wasm: {
        path: this.wasmPath, // pass output path to RawPackager
        mtime: Date.now() // force re-bundling since otherwise the hash would never change
      }
    };
  }
}
module.exports = RustAsset;

View file

@ -0,0 +1,78 @@
const Asset = require('../Asset');
const localRequire = require('../utils/localRequire');
const promisify = require('../utils/promisify');
const path = require('path');
const os = require('os');
const Resolver = require('../Resolver');
const syncPromise = require('../utils/syncPromise');
/**
 * Compiles Sass/SCSS using the locally installed `node-sass` package. Uses
 * the Parcel resolver for @import so node_modules paths and aliases work.
 */
class SASSAsset extends Asset {
  constructor(name, pkg, options) {
    super(name, pkg, options);
    this.type = 'css';
  }

  async parse(code) {
    // node-sass should be installed locally in the module that's being required
    let sass = await localRequire('node-sass', this.name);
    let render = promisify(sass.render.bind(sass));

    const resolver = new Resolver({
      extensions: ['.scss', '.sass'],
      rootDir: this.options.rootDir
    });

    // Options from package.json ("sass") or a .sassrc / .sassrc.js file.
    let opts = Object.assign(
      {},
      this.package.sass || (await this.getConfig(['.sassrc', '.sassrc.js']))
    );
    opts.includePaths = (opts.includePaths || []).concat(
      path.dirname(this.name)
    );
    opts.data = opts.data ? opts.data + os.EOL + code : code;
    // Indented syntax defaults to on for .sass files unless configured.
    opts.indentedSyntax =
      typeof opts.indentedSyntax === 'boolean'
        ? opts.indentedSyntax
        : path.extname(this.name).toLowerCase() === '.sass';

    // Rewrite url() references to parcel asset dependencies.
    opts.functions = Object.assign({}, opts.functions, {
      url: node => {
        let filename = this.addURLDependency(node.getValue());
        return new sass.types.String(`url(${JSON.stringify(filename)})`);
      }
    });

    // Resolve @import through the Parcel resolver first; on failure fall back
    // to the raw url and let node-sass resolve it itself.
    opts.importer = (url, prev, done) => {
      let resolved;
      try {
        resolved = syncPromise(
          resolver.resolve(url, prev === 'stdin' ? this.name : prev)
        ).path;
      } catch (e) {
        resolved = url;
      }
      return done({
        file: resolved
      });
    };

    return await render(opts);
  }

  collectDependencies() {
    // Every file node-sass touched was compiled into this asset's output.
    for (let dep of this.ast.stats.includedFiles) {
      this.addDependency(dep, {includedInParent: true});
    }
  }

  generate() {
    return [
      {
        type: 'css',
        value: this.ast ? this.ast.css.toString() : '',
        // Output is fully resolved by node-sass; skip dependency scanning.
        hasDependencies: false
      }
    ];
  }
}
module.exports = SASSAsset;

View file

@ -0,0 +1,116 @@
// const CSSAsset = require('./CSSAsset');
const Asset = require('../Asset');
const localRequire = require('../utils/localRequire');
const Resolver = require('../Resolver');
const syncPromise = require('../utils/syncPromise');
const URL_RE = /^(?:url\s*\(\s*)?['"]?(?:[#/]|(?:https?:)?\/\/)/i;
/**
 * Compiles Stylus using the locally installed `stylus` package, with a
 * custom evaluator that resolves imports through the Parcel resolver.
 */
class StylusAsset extends Asset {
  constructor(name, pkg, options) {
    super(name, pkg, options);
    this.type = 'css';
  }

  async parse(code) {
    // stylus should be installed locally in the module that's being required
    let stylus = await localRequire('stylus', this.name);
    // Options from package.json ("stylus") or a .stylusrc / .stylusrc.js file.
    let opts =
      this.package.stylus ||
      (await this.getConfig(['.stylusrc', '.stylusrc.js']));
    let style = stylus(code, opts);
    style.set('filename', this.name);
    style.set('include css', true);
    style.set('Evaluator', await createEvaluator(this));

    // Setup a handler for the URL function so we add dependencies for linked assets.
    style.define('url', node => {
      let filename = this.addURLDependency(node.val, node.filename);
      return new stylus.nodes.Literal(`url(${JSON.stringify(filename)})`);
    });

    // The configured stylus renderer is stored as this asset's "ast".
    return style;
  }

  generate() {
    return [
      {
        type: 'css',
        value: this.ast.render(),
        hasDependencies: false
      }
    ];
  }

  generateErrorMessage(err) {
    // Stylus errors place a code frame after the first line of the message;
    // split it out so parcel can display the pieces separately.
    let index = err.message.indexOf('\n');
    err.codeFrame = err.message.slice(index + 1);
    err.message = err.message.slice(0, index);
    return err;
  }
}
/**
 * Build a custom stylus Evaluator class bound to `asset` that extends
 * @import resolution with the node require algorithm, and records every
 * imported file as a dependency so the file watcher triggers rebuilds.
 */
async function createEvaluator(asset) {
  const Evaluator = await localRequire(
    'stylus/lib/visitor/evaluator',
    asset.name
  );
  const utils = await localRequire('stylus/lib/utils', asset.name);
  const resolver = new Resolver(
    Object.assign({}, asset.options, {
      extensions: ['.styl', '.css']
    })
  );

  // This is a custom stylus evaluator that extends stylus with support for the node
  // require resolution algorithm. It also adds all dependencies to the parcel asset
  // tree so the file watcher works correctly, etc.
  class CustomEvaluator extends Evaluator {
    visitImport(imported) {
      let node = this.visit(imported.path).first;
      let path = node.string;
      // Skip url() imports and absolute/protocol-relative URLs.
      if (node.name !== 'url' && path && !URL_RE.test(path)) {
        try {
          // First try resolving using the node require resolution algorithm.
          // This allows stylus files in node_modules to be resolved properly.
          // If we find something, update the AST so stylus gets the absolute path to load later.
          node.string = syncPromise(
            resolver.resolve(path, imported.filename)
          ).path;

          asset.addDependency(node.string, {includedInParent: true});
        } catch (err) {
          // If we couldn't resolve, try the normal stylus resolver.
          // We just need to do this to keep track of the dependencies - stylus does the real work.

          // support optional .styl
          if (!/\.styl$/i.test(path)) {
            path += '.styl';
          }

          let found = utils.find(path, this.paths, this.filename);
          if (!found) {
            found = utils.lookupIndex(node.string, this.paths, this.filename);
          }

          if (!found) {
            let nodeName = imported.once ? 'require' : 'import';
            throw new Error(
              'failed to locate @' + nodeName + ' file ' + node.string
            );
          }

          // Register every matched file so edits to any of them rebuild us.
          for (let file of found) {
            asset.addDependency(file, {includedInParent: true});
          }
        }
      }

      // Done. Let stylus do its thing.
      return super.visitImport(imported);
    }
  }

  return CustomEvaluator;
}
module.exports = StylusAsset;

View file

@ -0,0 +1,22 @@
const Asset = require('../Asset');
const toml = require('toml');
const serializeObject = require('../utils/serializeObject');
// Transforms a .toml file into a JS module that exports the parsed document.
class TOMLAsset extends Asset {
  constructor(name, pkg, options) {
    super(name, pkg, options);
    this.type = 'js';
  }

  parse(code) {
    // The "AST" for a TOML asset is simply the parsed object.
    return toml.parse(code);
  }

  generate() {
    const js = serializeObject(this.ast, this.options.minify);
    return {js};
  }
}
module.exports = TOMLAsset;

View file

@ -0,0 +1,67 @@
const Asset = require('../Asset');
const localRequire = require('../utils/localRequire');
// Transpiles .ts/.tsx sources to CommonJS JavaScript using the project's own
// TypeScript installation, honouring a local tsconfig.json when present.
class TypeScriptAsset extends Asset {
  constructor(name, pkg, options) {
    super(name, pkg, options);
    this.type = 'js';
  }

  async generate() {
    // require typescript, installed locally in the app
    let typescript = await localRequire('typescript', this.name);
    let transpilerOptions = {
      compilerOptions: {
        module: typescript.ModuleKind.CommonJS,
        jsx: typescript.JsxEmit.Preserve,
        // it brings the generated output from TypeScript closer to that generated by Babel
        // see https://www.typescriptlang.org/docs/handbook/release-notes/typescript-2-7.html
        esModuleInterop: true
      },
      fileName: this.relativeName
    };

    let tsconfig = await this.getConfig(['tsconfig.json']);

    // Overwrite default if config is found
    if (tsconfig) {
      transpilerOptions.compilerOptions = Object.assign(
        transpilerOptions.compilerOptions,
        tsconfig.compilerOptions
      );
    }
    // These two must win over user config: parcel needs emitted output and
    // controls source maps itself.
    transpilerOptions.compilerOptions.noEmit = false;
    transpilerOptions.compilerOptions.sourceMap = this.options.sourceMaps;

    // Transpile Module using TypeScript and parse result as ast format through babylon
    let transpiled = typescript.transpileModule(
      this.contents,
      transpilerOptions
    );
    let sourceMap = transpiled.sourceMapText;

    if (sourceMap) {
      sourceMap = JSON.parse(sourceMap);
      // Point the map at the original TS source so stack traces/devtools
      // show the pre-transpile code.
      sourceMap.sources = [this.relativeName];
      sourceMap.sourcesContent = [this.contents];

      // Remove the source map URL
      let content = transpiled.outputText;
      transpiled.outputText = content.substring(
        0,
        content.lastIndexOf('//# sourceMappingURL')
      );
    }

    return [
      {
        type: 'js',
        value: transpiled.outputText,
        sourceMap
      }
    ];
  }
}
module.exports = TypeScriptAsset;

View file

@ -0,0 +1,262 @@
const Asset = require('../Asset');
const localRequire = require('../utils/localRequire');
const md5 = require('../utils/md5');
const {minify} = require('uglify-es');
// Compiles .vue single-file components: splits them into script/template/style
// renditions, then stitches the compiled pieces back together (template render
// functions, CSS modules, scoped CSS and HMR glue) in postProcess.
class VueAsset extends Asset {
  constructor(name, pkg, options) {
    super(name, pkg, options);
    this.type = 'js';
  }

  async parse(code) {
    // Is being used in component-compiler-utils, errors if not installed...
    this.vueTemplateCompiler = await localRequire(
      'vue-template-compiler',
      this.name
    );
    this.vue = await localRequire('@vue/component-compiler-utils', this.name);

    // SFC descriptor with script/template/styles blocks.
    return this.vue.parse({
      source: code,
      needMap: this.options.sourceMaps,
      filename: this.relativeName, // Used for sourcemaps
      sourceRoot: '' // Used for sourcemaps. Override so it doesn't use cwd
    });
  }

  async generate() {
    // Emit one rendition per SFC block; each is processed by the asset
    // pipeline for its own language before postProcess reassembles them.
    let descriptor = this.ast;
    let parts = [];

    if (descriptor.script) {
      parts.push({
        type: descriptor.script.lang || 'js',
        value: descriptor.script.content,
        sourceMap: descriptor.script.map
      });
    }

    if (descriptor.template) {
      parts.push({
        type: descriptor.template.lang || 'html',
        value: descriptor.template.content.trim()
      });
    }

    if (descriptor.styles) {
      for (let style of descriptor.styles) {
        parts.push({
          type: style.lang || 'css',
          value: style.content.trim(),
          modules: !!style.module
        });
      }
    }

    return parts;
  }

  async postProcess(generated) {
    let result = [];

    let hasScoped = this.ast.styles.some(s => s.scoped);
    // Short content hash keys both the scoped-CSS attribute and the
    // generated options variable name.
    let id = md5(this.name).slice(-6);
    let scopeId = hasScoped ? `data-v-${id}` : null;
    let optsVar = '$' + id;

    // Generate JS output.
    // NOTE(review): assumes that when a <script> block exists its compiled
    // rendition is generated[0] - confirm ordering matches generate().
    let js = this.ast.script ? generated[0].value : '';
    let supplemental = `
var ${optsVar} = exports.default || module.exports;
if (typeof ${optsVar} === 'function') {
${optsVar} = ${optsVar}.options;
}
`;

    supplemental += this.compileTemplate(generated, scopeId, optsVar);
    supplemental += this.compileCSSModules(generated, optsVar);
    supplemental += this.compileHMR(generated, optsVar);

    // Minify only the glue code; the user's script was already handled by
    // the JS pipeline.
    if (this.options.minify && supplemental) {
      let {code, error} = minify(supplemental, {toplevel: true});
      if (error) {
        throw error;
      }

      supplemental = code;
    }

    js += supplemental;

    if (js) {
      result.push({
        type: 'js',
        value: js
      });
    }

    let map = generated.find(r => r.type === 'map');
    if (map) {
      result.push(map);
    }

    let css = this.compileStyle(generated, scopeId);
    if (css) {
      result.push({
        type: 'css',
        value: css
      });
    }

    return result;
  }

  // Compiles the <template> block into render/staticRenderFns and assigns
  // them onto the component options object.
  compileTemplate(generated, scopeId, optsVar) {
    let html = generated.find(r => r.type === 'html');
    if (html) {
      let isFunctional = this.ast.template.attrs.functional;
      let template = this.vue.compileTemplate({
        source: html.value,
        filename: this.relativeName,
        compiler: this.vueTemplateCompiler,
        isProduction: this.options.production,
        isFunctional,
        compilerOptions: {
          scopeId
        }
      });

      if (Array.isArray(template.errors) && template.errors.length >= 1) {
        throw new Error(template.errors[0]);
      }

      return `
/* template */
Object.assign(${optsVar}, (function () {
${template.code}
return {
render: render,
staticRenderFns: staticRenderFns,
_compiled: true,
_scopeId: ${JSON.stringify(scopeId)},
functional: ${JSON.stringify(isFunctional)}
};
})());
`;
    }

    return '';
  }

  // Injects compiled CSS-module class maps (this.$style etc.) into the
  // component via beforeCreate (or _injectStyles for functional components).
  compileCSSModules(generated, optsVar) {
    let cssRenditions = generated.filter(r => r.type === 'css');
    let cssModulesCode = '';
    // NOTE(review): assumes css renditions appear in the same order as
    // this.ast.styles - confirm against generate().
    this.ast.styles.forEach((style, index) => {
      if (style.module) {
        let cssModules = JSON.stringify(cssRenditions[index].cssModules);
        let name = style.module === true ? '$style' : style.module;
        cssModulesCode += `\nthis[${JSON.stringify(name)}] = ${cssModules};`;
      }
    });

    if (cssModulesCode) {
      cssModulesCode = `function hook(){${cssModulesCode}\n}`;

      let isFunctional =
        this.ast.template && this.ast.template.attrs.functional;
      if (isFunctional) {
        return `
/* css modules */
(function () {
${cssModulesCode}
${optsVar}._injectStyles = hook;
var originalRender = ${optsVar}.render;
${optsVar}.render = function (h, context) {
hook.call(context);
return originalRender(h, context);
};
})();
`;
      } else {
        return `
/* css modules */
(function () {
${cssModulesCode}
${optsVar}.beforeCreate = ${optsVar}.beforeCreate ? ${optsVar}.beforeCreate.concat(hook) : [hook];
})();
`;
      }
    }

    return '';
  }

  // Concatenates all style blocks, rewriting scoped ones so selectors only
  // match elements carrying the data-v-* attribute.
  compileStyle(generated, scopeId) {
    return generated.filter(r => r.type === 'css').reduce((p, r, i) => {
      let css = r.value;
      let scoped = this.ast.styles[i].scoped;

      // Process scoped styles if needed.
      if (scoped) {
        let {code, errors} = this.vue.compileStyle({
          source: css,
          filename: this.relativeName,
          id: scopeId,
          scoped
        });

        if (errors.length) {
          throw errors[0];
        }

        css = code;
      }

      return p + css;
    }, '');
  }

  // Emits vue-hot-reload-api wiring so edits rerender/reload the component
  // in place during development.
  compileHMR(generated, optsVar) {
    if (!this.options.hmr) {
      return '';
    }

    this.addDependency('vue-hot-reload-api');
    this.addDependency('vue');

    let cssHMR = '';
    if (this.ast.styles.length) {
      cssHMR = `
var reloadCSS = require('_css_loader');
module.hot.dispose(reloadCSS);
module.hot.accept(reloadCSS);
`;
    }

    let isFunctional = this.ast.template && this.ast.template.attrs.functional;

    return `
/* hot reload */
(function () {
if (module.hot) {
var api = require('vue-hot-reload-api');
api.install(require('vue'));
if (api.compatible) {
module.hot.accept();
if (!module.hot.data) {
api.createRecord('${optsVar}', ${optsVar});
} else {
api.${
      isFunctional ? 'rerender' : 'reload'
    }('${optsVar}', ${optsVar});
}
}
${cssHMR}
}
})();`;
  }
}
module.exports = VueAsset;

View file

@ -0,0 +1,38 @@
const Asset = require('../Asset');
// Processes web app manifests: every referenced asset URL (icons,
// screenshots, serviceworker) is registered as a dependency and rewritten to
// its bundled location.
class WebManifestAsset extends Asset {
  constructor(name, pkg, options) {
    super(name, pkg, options);
    this.type = 'webmanifest';
  }

  parse(content) {
    return JSON.parse(content);
  }

  collectDependencies() {
    const manifest = this.ast;

    // Both members are arrays of {src, ...} entries.
    for (const key of ['icons', 'screenshots']) {
      const entries = manifest[key];
      if (!Array.isArray(entries)) {
        continue;
      }
      for (const entry of entries) {
        entry.src = this.addURLDependency(entry.src);
      }
    }

    const serviceworker = manifest.serviceworker;
    if (serviceworker && serviceworker.src) {
      serviceworker.src = this.addURLDependency(serviceworker.src);
    }
  }

  generate() {
    return JSON.stringify(this.ast);
  }
}
module.exports = WebManifestAsset;

View file

@ -0,0 +1,22 @@
const Asset = require('../Asset');
const yaml = require('js-yaml');
const serializeObject = require('../utils/serializeObject');
// Transforms a .yaml/.yml file into a JS module that exports the parsed
// document.
class YAMLAsset extends Asset {
  constructor(name, pkg, options) {
    super(name, pkg, options);
    this.type = 'js';
  }

  parse(code) {
    // safeLoad rejects YAML types that would instantiate arbitrary JS.
    return yaml.safeLoad(code);
  }

  generate() {
    const serialized = serializeObject(this.ast, this.options.minify);
    return {js: serialized};
  }
}
module.exports = YAMLAsset;

View file

@ -0,0 +1,12 @@
{
"extends": "../.eslintrc.json",
"parserOptions": {
"ecmaVersion": 5
},
"env": {
"browser": true
},
"rules": {
"no-global-assign": 1
}
}

View file

View file

@ -0,0 +1,83 @@
var getBundleURL = require('./bundle-url').getBundleURL;
// Entry point used for dynamic import(): resolve immediately when the module
// is already registered in this bundle, otherwise defer fetching the child
// bundles until the returned promise is first awaited.
function loadBundlesLazy(bundles) {
  if (!Array.isArray(bundles)) {
    bundles = [bundles]
  }

  // The last entry is always the module id; the rest are bundle file names.
  var id = bundles[bundles.length - 1];

  try {
    return Promise.resolve(require(id));
  } catch (err) {
    if (err.code === 'MODULE_NOT_FOUND') {
      // Lazy so that merely creating the import promise doesn't trigger
      // network requests until something actually calls then().
      return new LazyPromise(function (resolve, reject) {
        loadBundles(bundles)
          .then(resolve, reject);
      });
    }

    throw err;
  }
}
// Fetches all child bundles (every entry except the trailing module id) in
// parallel, then requires and returns the module itself.
function loadBundles(bundles) {
  var id = bundles[bundles.length - 1];

  return Promise.all(bundles.slice(0, -1).map(loadBundle))
    .then(function () {
      return require(id);
    });
}
// Loader functions keyed by bundle type (file extension); the packager's
// generated code registers one per type via exports.register.
var bundleLoaders = {};
function registerBundleLoader(type, loader) {
  bundleLoaders[type] = loader;
}

module.exports = exports = loadBundlesLazy;
exports.load = loadBundles;
exports.register = registerBundleLoader;

// Cache of in-flight/completed bundle loads keyed by bundle file name.
var bundles = {};
// Loads a single child bundle at most once. `bundle` is either a file name or
// a [fileName, entryModuleId] pair (used by non-JS bundles whose resolved
// value must be registered as a module).
function loadBundle(bundle) {
  var id;
  if (Array.isArray(bundle)) {
    id = bundle[1];
    bundle = bundle[0];
  }

  if (bundles[bundle]) {
    return bundles[bundle];
  }

  // Pick a loader by file extension (fall back to the whole name when there
  // is no dot).
  var type = (bundle.substring(bundle.lastIndexOf('.') + 1, bundle.length) || bundle).toLowerCase();
  var bundleLoader = bundleLoaders[type];
  if (bundleLoader) {
    return bundles[bundle] = bundleLoader(getBundleURL() + bundle)
      .then(function (resolved) {
        if (resolved) {
          // Expose the loader's resolved value (e.g. wasm exports) as a
          // module so require(id) returns it.
          module.bundle.modules[id] = [function (require, module) {
            module.exports = resolved;
          }, {}];
        }

        return resolved;
      });
  }
}
// A thenable that defers running its executor until the first then()/catch(),
// so constructing it has no side effects.
function LazyPromise(executor) {
  this.executor = executor;
  this.promise = null;
}

// Creates the underlying Promise on first use and reuses it afterwards.
function materialize(lazy) {
  if (lazy.promise === null) {
    lazy.promise = new Promise(lazy.executor);
  }
  return lazy.promise;
}

LazyPromise.prototype.then = function (onSuccess, onError) {
  return materialize(this).then(onSuccess, onError);
};

LazyPromise.prototype.catch = function (onError) {
  return materialize(this).catch(onError);
};

View file

@ -0,0 +1,29 @@
// Memoised bundle URL - computing it requires constructing an Error for its
// stack trace, so do that work only once.
var bundleURL = null;
function getBundleURLCached() {
  if (!bundleURL) {
    bundleURL = getBundleURL();
  }

  return bundleURL;
}
// Derives the base URL the current bundle was served from by throwing an
// Error and scraping the first URL out of its stack trace (the topmost frame
// belongs to this script). Falls back to '/' when no URL is found.
function getBundleURL() {
  // Attempt to find the URL of the current script and use that as the base URL
  try {
    throw new Error;
  } catch (err) {
    // NOTE(review): relies on the engine including script URLs in stack
    // traces; matches are assumed to appear in call order.
    var matches = ('' + err.stack).match(/(https?|file|ftp):\/\/[^)\n]+/g);
    if (matches) {
      return getBaseURL(matches[0]);
    }
  }

  return '/';
}
// Strips the final path segment (the file name) from a URL and appends a
// trailing slash; non-URL input is returned with just a slash appended.
function getBaseURL(url) {
  var text = '' + url;
  var withoutFile = text.replace(/^((?:https?|file|ftp):\/\/.+)\/[^/]+$/, '$1');
  return withoutFile + '/';
}
exports.getBundleURL = getBundleURLCached;
exports.getBaseURL = getBaseURL;

View file

@ -0,0 +1,30 @@
var bundle = require('./bundle-url');
// Replaces a stylesheet <link> with a cache-busted clone. The old node is
// removed only after the clone finishes loading, avoiding a flash of
// unstyled content.
function updateLink(link) {
  var newLink = link.cloneNode();
  newLink.onload = function () {
    link.remove();
  };
  // Drop any previous cache-busting query and add a fresh timestamp.
  newLink.href = link.href.split('?')[0] + '?' + Date.now();
  link.parentNode.insertBefore(newLink, link.nextSibling);
}
var cssTimeout = null;
// Debounced stylesheet refresh: many CSS assets can change in one HMR update,
// so reload each matching <link> at most once per 50ms window.
function reloadCSS() {
  if (cssTimeout) {
    return;
  }

  cssTimeout = setTimeout(function () {
    var links = document.querySelectorAll('link[rel="stylesheet"]');
    for (var i = 0; i < links.length; i++) {
      // Only touch stylesheets emitted by this bundle's own origin/path.
      if (bundle.getBaseURL(links[i].href) === bundle.getBundleURL()) {
        updateLink(links[i]);
      }
    }

    cssTimeout = null;
  }, 50);
}
module.exports = reloadCSS;

View file

@ -0,0 +1,176 @@
var OVERLAY_ID = '__parcel__error__overlay__';

var OldModule = module.bundle.Module;

// Wraps the prelude's Module constructor to attach the `module.hot` API:
// accept/dispose callback registries plus `data` handed over from the
// previous incarnation of the module (set by hmrAccept before re-execution).
function Module(moduleName) {
  OldModule.call(this, moduleName);
  this.hot = {
    data: module.bundle.hotData,
    _acceptCallbacks: [],
    _disposeCallbacks: [],
    accept: function (fn) {
      this._acceptCallbacks.push(fn || function () {});
    },
    dispose: function (fn) {
      this._disposeCallbacks.push(fn);
    }
  };

  // Reset so only the module currently being re-executed sees the hot data.
  module.bundle.hotData = null;
}

module.bundle.Module = Module;
var parent = module.bundle.parent;
// Only the root bundle on the page opens the HMR websocket; nested bundles
// reuse the parent's connection.
if ((!parent || !parent.isParcelRequire) && typeof WebSocket !== 'undefined') {
  var hostname = process.env.HMR_HOSTNAME || location.hostname;
  var protocol = location.protocol === 'https:' ? 'wss' : 'ws';
  var ws = new WebSocket(protocol + '://' + hostname + ':' + process.env.HMR_PORT + '/');
  ws.onmessage = function(event) {
    var data = JSON.parse(event.data);

    if (data.type === 'update') {
      // Install all new module factories first, then run accept handlers,
      // so updated modules can require each other's fresh versions.
      data.assets.forEach(function (asset) {
        hmrApply(global.parcelRequire, asset);
      });

      data.assets.forEach(function (asset) {
        if (!asset.isNew) {
          hmrAccept(global.parcelRequire, asset.id);
        }
      });

      // Clear the console after HMR
      console.clear();
    }

    if (data.type === 'reload') {
      // Full reload requested: close the socket first so no further updates
      // are applied mid-navigation.
      ws.close();
      ws.onclose = function () {
        location.reload();
      }
    }

    if (data.type === 'error-resolved') {
      console.log('[parcel] ✨ Error resolved');
      removeErrorOverlay();
    }

    if (data.type === 'error') {
      console.error('[parcel] 🚨  ' + data.error.message + '\n' + data.error.stack);
      removeErrorOverlay();

      var overlay = createErrorOverlay(data);
      document.body.appendChild(overlay);
    }
  };
}
// Removes the build-error overlay from the page, if one is showing.
function removeErrorOverlay() {
  var existing = document.getElementById(OVERLAY_ID);
  if (existing !== null) {
    existing.remove();
  }
}
// Builds the full-screen error overlay element for a build error message.
function createErrorOverlay(data) {
  var overlay = document.createElement('div');
  overlay.id = OVERLAY_ID;

  // html encode message and stack trace
  // (innerText assignment escapes the untrusted text; reading innerHTML back
  // yields the escaped markup that is safe to interpolate below)
  var message = document.createElement('div');
  var stackTrace = document.createElement('pre');
  message.innerText = data.error.message;
  stackTrace.innerText = data.error.stack;

  overlay.innerHTML = (
    '<div style="background: black; font-size: 16px; color: white; position: fixed; height: 100%; width: 100%; top: 0px; left: 0px; padding: 30px; opacity: 0.85; font-family: Menlo, Consolas, monospace; z-index: 9999;">' +
      '<span style="background: red; padding: 2px 4px; border-radius: 2px;">ERROR</span>' +
      '<span style="top: 2px; margin-left: 5px; position: relative;">🚨</span>' +
      '<div style="font-size: 18px; font-weight: bold; margin-top: 20px;">' + message.innerHTML + '</div>' +
      '<pre>' + stackTrace.innerHTML + '</pre>' +
    '</div>'
  );

  return overlay;
}
// Returns the numeric ids of every module (in this bundle and its ancestors)
// whose dependency map references module `id`. Dependency entries are either
// a plain id or an array whose last element is the id.
function getParents(bundle, id) {
  var modules = bundle.modules;
  if (!modules) {
    return [];
  }

  var result = [];
  Object.keys(modules).forEach(function (key) {
    var depMap = modules[key][1];
    for (var name in depMap) {
      var dep = depMap[name];
      var refersToId =
        dep === id || (Array.isArray(dep) && dep[dep.length - 1] === id);
      if (refersToId) {
        result.push(+key);
      }
    }
  });

  if (bundle.parent) {
    return result.concat(getParents(bundle.parent, id));
  }
  return result;
}
// Installs an updated asset's factory into whichever bundle in the parent
// chain defines it (or the root bundle when the module is brand new).
function hmrApply(bundle, asset) {
  var modules = bundle.modules;
  if (!modules) {
    return;
  }

  if (modules[asset.id] || !bundle.parent) {
    // Compile the shipped source into a factory matching the prelude's
    // [factory, depMap] module tuple shape.
    var fn = new Function('require', 'module', 'exports', asset.generated.js);
    asset.isNew = !modules[asset.id];
    modules[asset.id] = [fn, asset.deps];
  } else if (bundle.parent) {
    hmrApply(bundle.parent, asset);
  }
}
// Re-executes module `id` after an update: runs its dispose callbacks, drops
// it from the cache, re-requires it, then runs accept callbacks. If the
// module doesn't accept the update itself, the update bubbles to its parents.
function hmrAccept(bundle, id) {
  var modules = bundle.modules;
  if (!modules) {
    return;
  }

  // Delegate to the bundle that actually defines the module.
  if (!modules[id] && bundle.parent) {
    return hmrAccept(bundle.parent, id);
  }

  var cached = bundle.cache[id];
  // Fresh hot-data object; dispose callbacks write into it and the new
  // module instance reads it back via module.hot.data.
  bundle.hotData = {};
  if (cached) {
    cached.hot.data = bundle.hotData;
  }

  if (cached && cached.hot && cached.hot._disposeCallbacks.length) {
    cached.hot._disposeCallbacks.forEach(function (cb) {
      cb(bundle.hotData);
    });
  }

  delete bundle.cache[id];
  // `bundle` is the prelude's require function: this re-executes the module.
  bundle(id);

  cached = bundle.cache[id];
  if (cached && cached.hot && cached.hot._acceptCallbacks.length) {
    cached.hot._acceptCallbacks.forEach(function (cb) {
      cb();
    });
    return true;
  }

  // Not self-accepting: propagate to every module that depends on it.
  return getParents(global.parcelRequire, id).some(function (id) {
    return hmrAccept(global.parcelRequire, id)
  });
}

View file

@ -0,0 +1,12 @@
// Map of node builtin module names to browser-compatible implementations,
// plus parcel's own loader entry points.
var builtins = require('node-libs-browser');

// node-libs-browser marks builtins with no browser equivalent as null;
// substitute an empty stub so requires of them still resolve.
for (var key in builtins) {
  if (builtins[key] == null) {
    builtins[key] = require.resolve('./_empty.js');
  }
}

builtins['_bundle_loader'] = require.resolve('./bundle-loader.js');
builtins['_css_loader'] = require.resolve('./css-loader.js');

module.exports = builtins;

View file

@ -0,0 +1,18 @@
// Browser CSS bundle loader: injects a <link> tag and resolves once the
// stylesheet has loaded (rejects on network/parse failure).
module.exports = function loadCSSBundle(bundle) {
  return new Promise(function (resolve, reject) {
    var link = document.createElement('link');
    link.rel = 'stylesheet';
    link.href = bundle;

    link.onerror = function (e) {
      // Detach handlers so neither can fire twice.
      link.onerror = link.onload = null;
      reject(e);
    };

    link.onload = function () {
      link.onerror = link.onload = null;
      resolve();
    };

    document.getElementsByTagName('head')[0].appendChild(link);
  });
};

View file

@ -0,0 +1,20 @@
// Browser JS bundle loader: injects an async <script> tag and resolves once
// it has executed (rejects on load failure).
module.exports = function loadJSBundle(bundle) {
  return new Promise(function (resolve, reject) {
    var script = document.createElement('script');
    script.async = true;
    script.type = 'text/javascript';
    script.charset = 'utf-8';
    script.src = bundle;
    script.onerror = function (e) {
      // Detach handlers so neither can fire twice.
      script.onerror = script.onload = null;
      reject(e);
    };

    script.onload = function () {
      script.onerror = script.onload = null;
      resolve();
    };

    document.getElementsByTagName('head')[0].appendChild(script);
  });
};

View file

@ -0,0 +1,16 @@
module.exports = function loadWASMBundle(bundle) {
return fetch(bundle)
.then(function (res) {
if (WebAssembly.instantiateStreaming) {
return WebAssembly.instantiateStreaming(res);
} else {
return res.arrayBuffer()
.then(function (data) {
return WebAssembly.instantiate(data);
});
}
})
.then(function (wasmModule) {
return wasmModule.instance.exports;
});
};

View file

@ -0,0 +1,4 @@
// loading a CSS style is a no-op in Node.js
// (kept as a loader so dynamic imports with CSS siblings still resolve)
module.exports = function loadCSSBundle() {
  return Promise.resolve();
};

View file

@ -0,0 +1,20 @@
var fs = require('fs');

// Node JS bundle loader: reads the child bundle from disk next to this file
// and executes it in the current process.
// NOTE(review): `__dirname + bundle` assumes `bundle` starts with a path
// separator (it is a public URL path) - confirm behaviour on Windows.
module.exports = function loadJSBundle(bundle) {
  return new Promise(function(resolve, reject) {
    fs.readFile(__dirname + bundle, 'utf8', function(err, data) {
      if (err) {
        reject(err);
      } else {
        // wait for the next event loop iteration, so we are sure
        // the current module is fully loaded
        setImmediate(function() {
          resolve(data);
        });
      }
    });
  })
  .then(function(code) {
    // Execute the bundle; it registers its modules as a side effect.
    new Function('', code)();
  });
};

View file

@ -0,0 +1,19 @@
var fs = require('fs');

// Node WASM bundle loader: reads the module from disk next to this file,
// instantiates it, and resolves to its exports object.
// NOTE(review): like the JS loader, assumes `bundle` begins with a path
// separator - confirm.
module.exports = function loadWASMBundle(bundle) {
  return new Promise(function(resolve, reject) {
    fs.readFile(__dirname + bundle, function(err, data) {
      if (err) {
        reject(err);
      } else {
        // Pass the underlying ArrayBuffer to WebAssembly.instantiate.
        resolve(data.buffer);
      }
    });
  })
  .then(function(data) {
    return WebAssembly.instantiate(data);
  })
  .then(function(wasmModule) {
    return wasmModule.instance.exports;
  });
};

View file

@ -0,0 +1,101 @@
// modules are defined as an array
// [ module function, map of requires ]
//
// map of requires is short require name -> numeric require
//
// anything defined in a previous bundle is accessed via the
// orig method which is the require for previous bundles

// eslint-disable-next-line no-global-assign
parcelRequire = (function (modules, cache, entry, globalName) {
  // Save the require from previous bundle to this closure if any
  var previousRequire = typeof parcelRequire === 'function' && parcelRequire;
  var nodeRequire = typeof require === 'function' && require;

  // Resolves and executes module `name`, caching the instance. `jumped` is
  // set when the lookup has already fallen through to the global require, to
  // prevent infinite delegation loops between bundles.
  function newRequire(name, jumped) {
    if (!cache[name]) {
      if (!modules[name]) {
        // if we cannot find the module within our internal map or
        // cache jump to the current global require ie. the last bundle
        // that was added to the page.
        var currentRequire = typeof parcelRequire === 'function' && parcelRequire;
        if (!jumped && currentRequire) {
          return currentRequire(name, true);
        }

        // If there are other bundles on this page the require from the
        // previous one is saved to 'previousRequire'. Repeat this as
        // many times as there are bundles until the module is found or
        // we exhaust the require chain.
        if (previousRequire) {
          return previousRequire(name, true);
        }

        // Try the node require function if it exists.
        if (nodeRequire && typeof name === 'string') {
          return nodeRequire(name);
        }

        var err = new Error('Cannot find module \'' + name + '\'');
        err.code = 'MODULE_NOT_FOUND';
        throw err;
      }

      // localRequire/resolve are hoisted function declarations (below), so
      // this assignment is safe before their textual definition.
      localRequire.resolve = resolve;

      var module = cache[name] = new newRequire.Module(name);

      // Execute the factory with exports as `this`, giving it a require
      // scoped to its own dependency map.
      modules[name][0].call(module.exports, localRequire, module, module.exports, this);
    }

    return cache[name].exports;

    // Maps the module's short dependency names onto global ids.
    function localRequire(x){
      return newRequire(localRequire.resolve(x));
    }

    function resolve(x){
      return modules[name][1][x] || x;
    }
  }

  function Module(moduleName) {
    this.id = moduleName;
    this.bundle = newRequire;
    this.exports = {};
  }

  newRequire.isParcelRequire = true;
  newRequire.Module = Module;
  newRequire.modules = modules;
  newRequire.cache = cache;
  newRequire.parent = previousRequire;

  for (var i = 0; i < entry.length; i++) {
    newRequire(entry[i]);
  }

  if (entry.length) {
    // Expose entry point to Node, AMD or browser globals
    // Based on https://github.com/ForbesLindesay/umd/blob/master/template.js
    var mainExports = newRequire(entry[entry.length - 1]);

    // CommonJS
    if (typeof exports === "object" && typeof module !== "undefined") {
      module.exports = mainExports;

    // RequireJS
    } else if (typeof define === "function" && define.amd) {
     define(function () {
       return mainExports;
     });

    // <script>
    } else if (globalName) {
      this[globalName] = mainExports;
    }
  }

  // Override the current require with this new one
  return newRequire;
})

View file

@ -0,0 +1 @@
parcelRequire=function(e,r,n,t){function i(n,t){function o(e){return i(o.resolve(e))}function c(r){return e[n][1][r]||r}if(!r[n]){if(!e[n]){var l="function"==typeof parcelRequire&&parcelRequire;if(!t&&l)return l(n,!0);if(u)return u(n,!0);if(f&&"string"==typeof n)return f(n);var p=new Error("Cannot find module '"+n+"'");throw p.code="MODULE_NOT_FOUND",p}o.resolve=c;var a=r[n]=new i.Module(n);e[n][0].call(a.exports,o,a,a.exports,this)}return r[n].exports}function o(e){this.id=e,this.bundle=i,this.exports={}}var u="function"==typeof parcelRequire&&parcelRequire,f="function"==typeof require&&require;i.isParcelRequire=!0,i.Module=o,i.modules=e,i.cache=r,i.parent=u;for(var c=0;c<n.length;c++)i(n[c]);if(n.length){var l=i(n[n.length-1]);"object"==typeof exports&&"undefined"!=typeof module?module.exports=l:"function"==typeof define&&define.amd?define(function(){return l}):t&&(this[t]=l)}return i};

198
VISUALIZACION/node_modules/parcel-bundler/src/cli.js generated vendored Executable file
View file

@ -0,0 +1,198 @@
require('v8-compile-cache');
const chalk = require('chalk');
const program = require('commander');
const version = require('../package.json').version;
program.version(version);
program
.command('serve [input...]')
.description('starts a development server')
.option(
'-p, --port <port>',
'set the port to serve on. defaults to 1234',
parseInt
)
.option(
'--hmr-port <port>',
'set the port to serve HMR websockets, defaults to random',
parseInt
)
.option(
'--hmr-hostname <hostname>',
'set the hostname of HMR websockets, defaults to location.hostname of current window'
)
.option('--https', 'serves files over HTTPS')
.option('--cert <path>', 'path to certificate to use with HTTPS')
.option('--key <path>', 'path to private key to use with HTTPS')
.option(
'--open [browser]',
'automatically open in specified browser, defaults to default browser'
)
.option(
'-d, --out-dir <path>',
'set the output directory. defaults to "dist"'
)
.option(
'-o, --out-file <filename>',
'set the output filename for the application entry point.'
)
.option(
'--public-url <url>',
'set the public URL to serve on. defaults to the same as the --out-dir option'
)
.option('--global <variable>', 'expose your module through a global variable')
.option('--no-hmr', 'disable hot module replacement')
.option('--no-cache', 'disable the filesystem cache')
.option('--no-source-maps', 'disable sourcemaps')
.option('--no-autoinstall', 'disable autoinstall')
.option(
'-t, --target [target]',
'set the runtime environment, either "node", "browser" or "electron". defaults to "browser"',
/^(node|browser|electron)$/
)
.option('-V, --version', 'output the version number')
.option(
'--log-level <level>',
'set the log level, either "0" (no output), "1" (errors), "2" (warnings + errors) or "3" (all).',
/^([0-3])$/
)
.action(bundle);
program
.command('watch [input...]')
.description('starts the bundler in watch mode')
.option(
'-d, --out-dir <path>',
'set the output directory. defaults to "dist"'
)
.option(
'-o, --out-file <filename>',
'set the output filename for the application entry point.'
)
.option(
'--public-url <url>',
'set the public URL to serve on. defaults to the same as the --out-dir option'
)
.option('--global <variable>', 'expose your module through a global variable')
.option(
'--hmr-port <port>',
'set the port to serve HMR websockets, defaults to random',
parseInt
)
.option(
'--hmr-hostname <hostname>',
'set the hostname of HMR websockets, defaults to location.hostname of current window'
)
.option('--no-hmr', 'disable hot module replacement')
.option('--no-cache', 'disable the filesystem cache')
.option('--no-source-maps', 'disable sourcemaps')
.option('--no-autoinstall', 'disable autoinstall')
.option(
'-t, --target [target]',
'set the runtime environment, either "node", "browser" or "electron". defaults to "browser"',
/^(node|browser|electron)$/
)
.option(
'--log-level <level>',
'set the log level, either "0" (no output), "1" (errors), "2" (warnings + errors) or "3" (all).',
/^([0-3])$/
)
.action(bundle);
program
.command('build [input...]')
.description('bundles for production')
.option(
'-d, --out-dir <path>',
'set the output directory. defaults to "dist"'
)
.option(
'-o, --out-file <filename>',
'set the output filename for the application entry point.'
)
.option(
'--public-url <url>',
'set the public URL to serve on. defaults to the same as the --out-dir option'
)
.option('--global <variable>', 'expose your module through a global variable')
.option('--no-minify', 'disable minification')
.option('--no-cache', 'disable the filesystem cache')
.option('--no-source-maps', 'disable sourcemaps')
.option(
'-t, --target <target>',
'set the runtime environment, either "node", "browser" or "electron". defaults to "browser"',
/^(node|browser|electron)$/
)
.option(
'--detailed-report',
'print a detailed build report after a completed build'
)
.option(
'--log-level <level>',
'set the log level, either "0" (no output), "1" (errors), "2" (warnings + errors) or "3" (all).',
/^([0-3])$/
)
.action(bundle);
program
.command('help [command]')
.description('display help information for a command')
.action(function(command) {
let cmd = program.commands.find(c => c.name() === command) || program;
cmd.help();
});
program.on('--help', function() {
console.log('');
console.log(
' Run `' +
chalk.bold('parcel help <command>') +
'` for more information on specific commands'
);
console.log('');
});
// Make serve the default command except for --help
var args = process.argv;
// Normalise `parcel --help` / `-h` into the help command so commander prints
// the top-level usage instead of treating the flag as serve's.
if (args[2] === '--help' || args[2] === '-h') args[2] = 'help';

// If the first positional isn't a known command, assume it is an entry file
// and implicitly run `parcel serve <entries>`.
if (!args[2] || !program.commands.some(c => c.name() === args[2])) {
  args.splice(2, 0, 'serve');
}

program.parse(args);
// Shared commander action handler for the serve/watch/build commands.
// `main` is the list of entry files; `command` carries the parsed options.
async function bundle(main, command) {
  // Require bundler here so the help command is fast
  const Bundler = require('../');

  if (command.name() === 'build') {
    process.env.NODE_ENV = 'production';
  } else {
    process.env.NODE_ENV = process.env.NODE_ENV || 'development';
  }

  // Collapse the separate --cert/--key flags into the https option shape the
  // dev server expects.
  if (command.cert && command.key) {
    command.https = {
      cert: command.cert,
      key: command.key
    };
  }

  // The commander command object doubles as the Bundler options bag.
  const bundler = new Bundler(main, command);
  command.target = command.target || 'browser';
  if (command.name() === 'serve' && command.target === 'browser') {
    const server = await bundler.serve(command.port || 1234, command.https);
    if (server && command.open) {
      await require('./utils/openInBrowser')(
        `${command.https ? 'https' : 'http'}://localhost:${
          server.address().port
        }`,
        command.open
      );
    }
  } else {
    // NOTE(review): this promise is not awaited, so rejections surface as
    // unhandled - presumably the Bundler reports its own errors. Confirm.
    bundler.bundle();
  }
}

View file

@ -0,0 +1,29 @@
const Packager = require('./Packager');
// Concatenates CSS assets into one bundle, wrapping an asset in an @media
// block when every import of it specified a media query.
class CSSPackager extends Packager {
  async addAsset(asset) {
    let css = asset.generated.css || '';

    // Collect the media queries this asset was imported under. If any parent
    // imported it without one, the asset must apply unconditionally, so emit
    // it unwrapped (still only once).
    const mediaTypes = [];
    let wrap = true;
    for (const dep of asset.parentDeps) {
      if (!dep.media) {
        wrap = false;
        break;
      }
      mediaTypes.push(dep.media);
    }

    if (wrap && mediaTypes.length > 0) {
      css = `@media ${mediaTypes.join(', ')} {\n${css.trim()}\n}\n`;
    }

    await this.write(css);
  }
}
module.exports = CSSPackager;

View file

@ -0,0 +1,99 @@
const Packager = require('./Packager');
const posthtml = require('posthtml');
const path = require('path');
const urlJoin = require('../utils/urlJoin');
// https://www.w3.org/TR/html5/dom.html#metadata-content-2
// Tags considered "metadata content": injected bundle tags are inserted
// before the first node that is NOT one of these.
const metadataContent = new Set([
  'base',
  'link',
  'meta',
  'noscript',
  'script',
  'style',
  'template',
  'title'
]);
// Writes HTML assets, injecting <link>/<script> tags for sibling CSS/JS
// bundles so they load immediately with the page.
class HTMLPackager extends Packager {
  async addAsset(asset) {
    let html = asset.generated.html || '';

    // Find child bundles that have JS or CSS sibling bundles,
    // add them to the head so they are loaded immediately.
    let siblingBundles = Array.from(this.bundle.childBundles)
      .reduce((p, b) => p.concat([...b.siblingBundles.values()]), [])
      .filter(b => b.type === 'css' || b.type === 'js');

    if (siblingBundles.length > 0) {
      html = posthtml(
        this.insertSiblingBundles.bind(this, siblingBundles)
      ).process(html, {sync: true}).html;
    }

    await this.write(html);
  }

  // Appends the prepared tag nodes into <head> when present, otherwise into
  // <html> (or the root), just before the first non-metadata node.
  addBundlesToTree(bundles, tree) {
    const head = find(tree, 'head');
    if (head) {
      const content = head.content || (head.content = []);
      content.push(...bundles);
      return;
    }

    const html = find(tree, 'html');
    const content = html ? html.content || (html.content = []) : tree;
    const index = findBundleInsertIndex(content);

    content.splice(index, 0, ...bundles);
  }

  // posthtml plugin: builds a <link>/<script> node per sibling bundle and
  // mutates the tree in place.
  insertSiblingBundles(siblingBundles, tree) {
    const bundles = [];

    for (let bundle of siblingBundles) {
      if (bundle.type === 'css') {
        bundles.push({
          tag: 'link',
          attrs: {
            rel: 'stylesheet',
            href: urlJoin(this.options.publicURL, path.basename(bundle.name))
          }
        });
      } else if (bundle.type === 'js') {
        bundles.push({
          tag: 'script',
          attrs: {
            src: urlJoin(this.options.publicURL, path.basename(bundle.name))
          }
        });
      }
    }

    this.addBundlesToTree(bundles, tree);
  }
}
// Returns a node with the given tag from the posthtml tree (the callback
// must return the node so tree.match leaves the tree unmodified).
// NOTE(review): each match overwrites `res`, so this keeps the LAST matching
// node - fine for singleton tags like head/html, which is how it is used.
function find(tree, tag) {
  let res;
  tree.match({tag}, node => {
    res = node;
    return node;
  });

  return res;
}
// Finds the index of the first node that is not metadata content, so injected
// bundle tags land in the head-like region. Falls back to 0 when every node
// is metadata (or the list is empty).
function findBundleInsertIndex(content) {
  let index = 0;
  for (const node of content) {
    if (node && node.tag && !metadataContent.has(node.tag)) {
      return index;
    }
    index += 1;
  }
  return 0;
}
module.exports = HTMLPackager;

View file

@ -0,0 +1,229 @@
const fs = require('fs');
const path = require('path');
const Packager = require('./Packager');
const urlJoin = require('../utils/urlJoin');
const lineCounter = require('../utils/lineCounter');
// Prelude source (the parcelRequire module-registry runtime) prepended to
// every JS bundle. The minified variant has its trailing semicolon stripped
// because the packager immediately appends `({` to it.
const prelude = {
  source: fs
    .readFileSync(path.join(__dirname, '../builtins/prelude.js'), 'utf8')
    .trim(),
  minified: fs
    .readFileSync(path.join(__dirname, '../builtins/prelude.min.js'), 'utf8')
    .trim()
    .replace(/;$/, '')
};
class JSPackager extends Packager {
/**
 * Initialises per-bundle packaging state and writes the prelude.
 */
async start() {
  // Controls comma placement between emitted module records.
  this.first = true;
  // Maps generated code -> asset id so identical modules are emitted once.
  this.dedupe = new Map();
  // Types of bundles that need a runtime loader (js, css, wasm, ...).
  this.bundleLoaders = new Set();
  // Assets outside this bundle that must be preloaded before the entry runs.
  this.externalModules = new Set();

  let preludeCode = this.options.minify ? prelude.minified : prelude.source;
  if (this.options.target === 'electron') {
    // Electron has no window to read HMR settings from; bake them in.
    preludeCode =
      `process.env.HMR_PORT=${
        this.options.hmrPort
      };process.env.HMR_HOSTNAME=${JSON.stringify(
        this.options.hmrHostname
      )};` + preludeCode;
  }
  await this.write(preludeCode + '({');
  // Track how many lines precede the first module, for source-map offsets.
  this.lineOffset = lineCounter(preludeCode);
}
/**
 * Emits one asset as a module record, building its dependency map and
 * recording which bundle loaders / external modules the bundle needs.
 */
async addAsset(asset) {
  // Identical generated code was already emitted under another id.
  if (this.dedupe.has(asset.generated.js)) {
    return;
  }

  // Don't dedupe when HMR is turned on since it messes with the asset ids
  if (!this.options.hmr) {
    this.dedupe.set(asset.generated.js, asset.id);
  }

  let deps = {};
  for (let [dep, mod] of asset.depAssets) {
    // For dynamic dependencies, list the child bundles to load along with the module id
    if (dep.dynamic && this.bundle.childBundles.has(mod.parentBundle)) {
      let bundles = [this.getBundleSpecifier(mod.parentBundle)];
      for (let child of mod.parentBundle.siblingBundles) {
        if (!child.isEmpty) {
          bundles.push(this.getBundleSpecifier(child));
          this.bundleLoaders.add(child.type);
        }
      }

      // The module id always comes last; the runtime loader relies on this.
      bundles.push(mod.id);
      deps[dep.name] = bundles;
      this.bundleLoaders.add(mod.type);
    } else {
      deps[dep.name] = this.dedupe.get(mod.generated.js) || mod.id;

      // If the dep isn't in this bundle, add it to the list of external modules to preload.
      // Only do this if this is the root JS bundle, otherwise they will have already been
      // loaded in parallel with this bundle as part of a dynamic import.
      if (
        !this.bundle.assets.has(mod) &&
        (!this.bundle.parentBundle || this.bundle.parentBundle.type !== 'js')
      ) {
        this.externalModules.add(mod);
        this.bundleLoaders.add(mod.type);
      }
    }
  }

  this.bundle.addOffset(asset, this.lineOffset);
  await this.writeModule(
    asset.id,
    asset.generated.js,
    deps,
    asset.generated.map
  );
}
getBundleSpecifier(bundle) {
let name = path.basename(bundle.name);
if (bundle.entryAsset) {
return [name, bundle.entryAsset.id];
}
return name;
}
async writeModule(id, code, deps = {}, map) {
let wrapped = this.first ? '' : ',';
wrapped +=
id + ':[function(require,module,exports) {\n' + (code || '') + '\n},';
wrapped += JSON.stringify(deps);
wrapped += ']';
this.first = false;
await this.write(wrapped);
// Use the pre-computed line count from the source map if possible
let lineCount = map && map.lineCount ? map.lineCount : lineCounter(code);
this.lineOffset += 1 + lineCount;
}
async addAssetToBundle(asset) {
if (this.bundle.assets.has(asset)) {
return;
}
this.bundle.addAsset(asset);
if (!asset.parentBundle) {
asset.parentBundle = this.bundle;
}
// Add all dependencies as well
for (let child of asset.depAssets.values()) {
await this.addAssetToBundle(child, this.bundle);
}
await this.addAsset(asset);
}
async writeBundleLoaders() {
if (this.bundleLoaders.size === 0) {
return false;
}
let bundleLoader = this.bundler.loadedAssets.get(
require.resolve('../builtins/bundle-loader')
);
if (this.externalModules.size > 0 && !bundleLoader) {
bundleLoader = await this.bundler.getAsset('_bundle_loader');
}
if (bundleLoader) {
await this.addAssetToBundle(bundleLoader);
} else {
return;
}
// Generate a module to register the bundle loaders that are needed
let loads = 'var b=require(' + bundleLoader.id + ');';
for (let bundleType of this.bundleLoaders) {
let loader = this.options.bundleLoaders[bundleType];
if (loader) {
let target = this.options.target === 'node' ? 'node' : 'browser';
let asset = await this.bundler.getAsset(loader[target]);
await this.addAssetToBundle(asset);
loads +=
'b.register(' +
JSON.stringify(bundleType) +
',require(' +
asset.id +
'));';
}
}
// Preload external modules before running entry point if needed
if (this.externalModules.size > 0) {
let preload = [];
for (let mod of this.externalModules) {
// Find the bundle that has the module as its entry point
let bundle = Array.from(mod.bundles).find(b => b.entryAsset === mod);
if (bundle) {
preload.push([path.basename(bundle.name), mod.id]);
}
}
if (this.bundle.entryAsset) {
preload.push(this.bundle.entryAsset.id);
}
loads += 'b.load(' + JSON.stringify(preload) + ');';
}
// Asset ids normally start at 1, so this should be safe.
await this.writeModule(0, loads, {});
return true;
}
async end() {
let entry = [];
// Add the HMR runtime if needed.
if (this.options.hmr) {
let asset = await this.bundler.getAsset(
require.resolve('../builtins/hmr-runtime')
);
await this.addAssetToBundle(asset);
entry.push(asset.id);
}
if (await this.writeBundleLoaders()) {
entry.push(0);
}
if (this.bundle.entryAsset && this.externalModules.size === 0) {
entry.push(this.bundle.entryAsset.id);
}
await this.dest.write(
'},{},' +
JSON.stringify(entry) +
', ' +
JSON.stringify(this.options.global || null) +
')'
);
if (this.options.sourceMaps) {
// Add source map url if a map bundle exists
let mapBundle = this.bundle.siblingBundlesMap.get('map');
if (mapBundle) {
await this.write(
`\n//# sourceMappingURL=${urlJoin(
this.options.publicURL,
path.basename(mapBundle.name)
)}`
);
}
}
await this.dest.end();
}
}
module.exports = JSPackager;

View file

@ -0,0 +1,44 @@
const fs = require('fs');
const promisify = require('../utils/promisify');
const path = require('path');
const {mkdirp} = require('../utils/fs');
/**
 * Base class for bundle packagers. A packager streams the contents of a
 * bundle's assets into the bundle's output file. Subclasses implement
 * addAsset() and may override setup()/start()/end().
 */
class Packager {
  constructor(bundle, bundler) {
    this.bundle = bundle;
    this.bundler = bundler;
    this.options = bundler.options;
  }

  async setup() {
    // Create sub-directories if needed
    if (this.bundle.name.includes(path.sep)) {
      await mkdirp(path.dirname(this.bundle.name));
    }

    // Promisify write/end so subclasses can simply `await this.write(...)`.
    this.dest = fs.createWriteStream(this.bundle.name);
    this.dest.write = promisify(this.dest.write.bind(this.dest));
    this.dest.end = promisify(this.dest.end.bind(this.dest));
  }

  async write(string) {
    await this.dest.write(string);
  }

  // Hook called once before any assets are added; no-op by default.
  async start() {}

  // eslint-disable-next-line no-unused-vars
  async addAsset(asset) {
    throw new Error('Must be implemented by subclasses');
  }

  getSize() {
    return this.dest.bytesWritten;
  }

  async end() {
    await this.dest.end();
  }
}
module.exports = Packager;

View file

@ -0,0 +1,32 @@
const Packager = require('./Packager');
const path = require('path');
const fs = require('../utils/fs');
/**
 * Fallback packager for asset types without a dedicated packager.
 * Each asset is copied to disk verbatim as its own output file.
 */
class RawPackager extends Packager {
  // Override so we don't create a file for this bundle.
  // Each asset will be emitted as a separate file instead.
  setup() {}

  async addAsset(asset) {
    let contents = asset.generated[asset.type];
    // No generated value, or a {path} reference: read the bytes from disk.
    if (!contents || (contents && contents.path)) {
      contents = await fs.readFile(contents ? contents.path : asset.name);
    }

    // Create sub-directories if needed
    if (this.bundle.name.includes(path.sep)) {
      await fs.mkdirp(path.dirname(this.bundle.name));
    }

    this.size = contents.length;
    await fs.writeFile(this.bundle.name, contents);
  }

  // There is no write stream, so the size is tracked manually in addAsset.
  getSize() {
    return this.size || 0;
  }

  // Nothing to close; setup() never opened a stream.
  end() {}
}
module.exports = RawPackager;

View file

@ -0,0 +1,29 @@
const path = require('path');
const Packager = require('./Packager');
const SourceMap = require('../SourceMap');
/**
 * Packages the combined source map for a JS bundle. Each asset's map is
 * merged in at the line offset it was written to in the parent bundle.
 */
class SourceMapPackager extends Packager {
  async start() {
    this.sourceMap = new SourceMap();
  }

  async addAsset(asset) {
    // The parent (JS) bundle recorded this asset's line offset during packaging.
    await this.sourceMap.addMap(
      asset.generated.map,
      this.bundle.parentBundle.getOffset(asset)
    );
  }

  async end() {
    let file = path.basename(this.bundle.name);
    // The source root is relative so the map stays valid wherever outDir is served from.
    await this.write(
      this.sourceMap.stringify(
        file,
        path.relative(this.options.outDir, this.options.rootDir)
      )
    );
    await super.end();
  }
}
module.exports = SourceMapPackager;

View file

@ -0,0 +1,34 @@
const JSPackager = require('./JSPackager');
const CSSPackager = require('./CSSPackager');
const HTMLPackager = require('./HTMLPackager');
const SourceMapPackager = require('./SourceMapPackager');
const RawPackager = require('./RawPackager');
/**
 * Maps output file types to their Packager implementations.
 * Unknown types fall back to RawPackager, which copies files through.
 */
class PackagerRegistry {
  constructor() {
    this.packagers = new Map();

    // Register the built-in packagers.
    const builtins = {
      js: JSPackager,
      css: CSSPackager,
      html: HTMLPackager,
      map: SourceMapPackager
    };
    for (const type of Object.keys(builtins)) {
      this.add(type, builtins[type]);
    }
  }

  // Register a packager for an output type. Accepts either a constructor
  // or a module path to require().
  add(type, packager) {
    const ctor = typeof packager === 'string' ? require(packager) : packager;
    this.packagers.set(type, ctor);
  }

  has(type) {
    return this.packagers.has(type);
  }

  get(type) {
    return this.packagers.get(type) || RawPackager;
  }
}
module.exports = PackagerRegistry;

View file

@ -0,0 +1,301 @@
const presetEnv = require('babel-preset-env');
const getTargetEngines = require('../utils/getTargetEngines');
const localRequire = require('../utils/localRequire');
const path = require('path');
const {util: babelUtils} = require('babel-core');
const NODE_MODULES = `${path.sep}node_modules${path.sep}`;
const ENV_PLUGINS = require('babel-preset-env/data/plugins');
const ENV_PRESETS = {
es2015: true,
es2016: true,
es2017: true,
latest: true,
env: true
};
const JSX_EXTENSIONS = {
'.jsx': true,
'.tsx': true
};
const JSX_PRAGMA = {
react: 'React.createElement',
preact: 'h',
nervjs: 'Nerv.createElement',
hyperapp: 'h'
};
/**
 * Runs babel over an asset's AST when a babel config applies to it.
 * Mutates asset.ast in place and marks it dirty; does nothing when no
 * config is found.
 */
async function babelTransform(asset) {
  let config = await getConfig(asset);
  if (!config) {
    return;
  }

  await asset.parseIfNeeded();

  // If this is an internally generated config, use our internal babel-core,
  // otherwise require a local version from the package we're compiling.
  let babel = config.internal
    ? require('babel-core')
    : await localRequire('babel-core', asset.name);

  // TODO: support other versions of babel
  if (parseInt(babel.version, 10) !== 6) {
    throw new Error(`Unsupported babel version: ${babel.version}`);
  }

  let res = babel.transformFromAst(asset.ast, asset.contents, config);
  // A config's ignore/only filters may cause babel to skip the file entirely.
  if (!res.ignored) {
    asset.ast = res.ast;
    asset.isAstDirty = true;
  }
}
module.exports = babelTransform;
/**
 * Builds the final babel options object for an asset: resolves the merged
 * babel config and fills in per-asset fields (filename, code/babelrc flags).
 * Returns null/undefined when babel should not run.
 */
async function getConfig(asset) {
  const config = await getBabelConfig(asset);
  if (!config) {
    return config;
  }

  config.code = false;
  config.filename = asset.name;
  config.babelrc = false;

  // Hide the internal property from babel by re-attaching it as a
  // non-enumerable property after deleting the plain field.
  const internal = config.internal;
  delete config.internal;
  Object.defineProperty(config, 'internal', {
    value: internal,
    configurable: true
  });

  return config;
}
babelTransform.getConfig = getConfig;
// Determines the effective babel config for an asset by combining the
// project .babelrc, a generated babel-preset-env config, and JSX support.
// Returns null when babel doesn't need to run at all.
async function getBabelConfig(asset) {
  // If asset is marked as an ES6 modules, this is a second pass after dependencies are extracted.
  // Just compile modules to CommonJS.
  if (asset.isES6Module) {
    return {
      internal: true,
      plugins: [require('babel-plugin-transform-es2015-modules-commonjs')]
    };
  }

  // A config already attached to the asset wins outright.
  if (asset.babelConfig) {
    return asset.babelConfig;
  }

  // Consider the module source code rather than precompiled if the resolver
  // used the `source` field, or it is not in node_modules.
  let isSource =
    !!(asset.package && asset.package.source) ||
    !asset.name.includes(NODE_MODULES);

  // Try to resolve a .babelrc file. If one is found, consider the module source code.
  let babelrc = await getBabelRc(asset, isSource);
  isSource = isSource || !!babelrc;

  let envConfig = await getEnvConfig(asset, isSource);
  let jsxConfig = getJSXConfig(asset, isSource);

  // Merge the babel-preset-env config and the babelrc if needed
  if (babelrc && !shouldIgnoreBabelrc(asset.name, babelrc)) {
    if (envConfig) {
      // Filter out presets that are already applied by babel-preset-env
      if (Array.isArray(babelrc.presets)) {
        babelrc.presets = babelrc.presets.filter(preset => {
          return !ENV_PRESETS[getPluginName(preset)];
        });
      }

      // Filter out plugins that are already applied by babel-preset-env
      if (Array.isArray(babelrc.plugins)) {
        babelrc.plugins = babelrc.plugins.filter(plugin => {
          return !ENV_PLUGINS[getPluginName(plugin)];
        });
      }

      // Add plugins generated by babel-preset-env to get to the app's target engines.
      mergeConfigs(babelrc, envConfig);
    }

    // Add JSX config if it isn't already specified in the babelrc
    let hasReact =
      hasPlugin(babelrc.presets, 'react') ||
      hasPlugin(babelrc.plugins, 'transform-react-jsx');

    if (!hasReact) {
      mergeConfigs(babelrc, jsxConfig);
    }

    return babelrc;
  }

  // If there is a babel-preset-env config, and it isn't empty use that
  if (envConfig && (envConfig.plugins.length > 0 || jsxConfig)) {
    mergeConfigs(envConfig, jsxConfig);
    return envConfig;
  }

  // If there is a JSX config, return that
  if (jsxConfig) {
    return jsxConfig;
  }

  // Otherwise, don't run babel at all
  return null;
}
// Append b's presets and plugins onto a (mutating and returning a).
// No-op when b is falsy.
function mergeConfigs(a, b) {
  if (!b) {
    return a;
  }
  a.presets = [...(a.presets || []), ...(b.presets || [])];
  a.plugins = [...(a.plugins || []), ...(b.plugins || [])];
  return a;
}
// Whether a babel presets/plugins list contains an entry with this name.
// Non-array inputs (missing config fields) simply report false.
function hasPlugin(arr, plugin) {
  if (!Array.isArray(arr)) {
    return false;
  }
  return arr.some(p => getPluginName(p) === plugin);
}

// Name of a babel plugin/preset entry, which may be `name` or `[name, opts]`.
function getPluginName(p) {
  if (Array.isArray(p)) {
    return p[0];
  }
  return p;
}
/**
* Finds a .babelrc for an asset. By default, .babelrc files inside node_modules are not used.
* However, there are some exceptions:
* - if `browserify.transforms` includes "babelify" in package.json (for legacy module compat)
* - the `source` field in package.json is used by the resolver
*/
async function getBabelRc(asset, isSource) {
  // Legacy browserify packages may declare babel via the "babelify" transform.
  const browserify = asset.package && asset.package.browserify;
  if (browserify && Array.isArray(browserify.transform)) {
    const babelify = browserify.transform.find(t => {
      const name = Array.isArray(t) ? t[0] : t;
      return name === 'babelify';
    });

    // An array entry may carry inline babel options: use those directly.
    if (Array.isArray(babelify) && babelify[1]) {
      return babelify[1];
    }

    // A plain "babelify" entry: fall back to the package's own .babelrc.
    return babelify ? await findBabelRc(asset) : null;
  }

  // Only source modules get their .babelrc applied; plain node_modules don't.
  // See https://github.com/parcel-bundler/parcel/issues/13.
  return isSource ? await findBabelRc(asset) : null;
}
// Resolve babel options for an asset: the package.json "babel" field wins,
// otherwise look up a .babelrc/.babelrc.js file near the asset.
async function findBabelRc(asset) {
  const pkgBabel = asset.package && asset.package.babel;
  if (pkgBabel) {
    return pkgBabel;
  }
  return asset.getConfig(['.babelrc', '.babelrc.js']);
}
// Apply the babelrc's own ignore/only filters to `filename` using babel's
// matching helpers. We do this here instead of letting babel-core handle it
// because this config might later be merged with our autogenerated one,
// which must not be ignored.
function shouldIgnoreBabelrc(filename, babelrc) {
  const ignore = babelUtils.arrayify(babelrc.ignore, babelUtils.regexify);
  const only = babelrc.only
    ? babelUtils.arrayify(babelrc.only, babelUtils.regexify)
    : babelrc.only;
  return babelUtils.shouldIgnore(filename, ignore, only);
}
/**
* Generates a babel-preset-env config for an asset.
* This is done by finding the source module's target engines, and the app's
* target engines, and doing a diff to include only the necessary plugins.
*/
/**
 * Generates a babel-preset-env plugin config for an asset by diffing the
 * app's target engines against the engines the module was authored for,
 * so only the plugins still needed for the app target are applied.
 */
async function getEnvConfig(asset, isSourceModule) {
  // Load the target engines for the app and generate a babel-preset-env config
  const appEngines = await getTargetEngines(asset, true);
  let plugins = await getEnvPlugins(appEngines, true);
  if (!plugins) {
    return null;
  }

  // For precompiled node_modules, subtract the plugins already covered by
  // the module's own (source) target, leaving just the gap to the app target.
  // For the app itself, source and target are the same: compile everything.
  if (!isSourceModule) {
    const moduleEngines = await getTargetEngines(asset, false);
    const moduleEnv = (await getEnvPlugins(moduleEngines, false)) || plugins;
    const applied = new Set(moduleEnv.map(entry => entry[0]));
    plugins = plugins.filter(entry => !applied.has(entry[0]));
  }

  return {plugins, internal: true};
}
// Memoizes babel-preset-env plugin lists, keyed by the JSON of the targets.
const envCache = new Map();

// Ask babel-preset-env which plugins are needed for `targets`.
// Returns null when no targets are given.
async function getEnvPlugins(targets, useBuiltIns = false) {
  if (!targets) {
    return null;
  }

  const key = JSON.stringify(targets);
  if (!envCache.has(key)) {
    const {plugins} = presetEnv.default(
      {},
      {targets, modules: false, useBuiltIns: useBuiltIns ? 'entry' : false}
    );
    envCache.set(key, plugins);
  }

  return envCache.get(key);
}
/**
* Generates a babel config for JSX. Attempts to detect react or react-like libraries
* and changes the pragma accordingly.
*/
/**
 * Builds a babel config enabling JSX when the asset appears to use it.
 * The pragma is inferred from react-like entries in the package's
 * dependencies/devDependencies; otherwise JSX is still enabled for
 * .jsx/.tsx files with a null pragma.
 */
function getJSXConfig(asset, isSourceModule) {
  // JSX is never enabled for plain node_modules.
  if (!isSourceModule) {
    return null;
  }

  const pkg = asset.package;
  let pragma = null;
  if (pkg) {
    for (const dep of Object.keys(JSX_PRAGMA)) {
      const inDeps = pkg.dependencies && pkg.dependencies[dep];
      const inDevDeps = pkg.devDependencies && pkg.devDependencies[dep];
      if (inDeps || inDevDeps) {
        pragma = JSX_PRAGMA[dep];
        break;
      }
    }
  }

  if (pragma || JSX_EXTENSIONS[path.extname(asset.name)]) {
    return {
      plugins: [[require('babel-plugin-transform-react-jsx'), {pragma}]],
      internal: true
    };
  }
}

View file

@ -0,0 +1,18 @@
const posthtml = require('posthtml');
const htmlnano = require('htmlnano');
// Minifies an asset's HTML AST with htmlnano. The config comes from the
// package.json "htmlnano" field or an .htmlnanorc(.js) file; defaults to {}.
module.exports = async function(asset) {
  await asset.parseIfNeeded();

  const htmlNanoConfig =
    asset.package.htmlnano ||
    (await asset.getConfig(['.htmlnanorc', '.htmlnanorc.js'])) ||
    {};

  // The AST is already parsed, so posthtml must not re-parse the source.
  let res = await posthtml([htmlnano(htmlNanoConfig)]).process(asset.ast, {
    skipParse: true
  });

  asset.ast = res.tree;
  asset.isAstDirty = true;
};

View file

@ -0,0 +1,72 @@
const localRequire = require('../utils/localRequire');
const loadPlugins = require('../utils/loadPlugins');
const postcss = require('postcss');
const cssnano = require('cssnano');
// Runs postcss over a CSS asset when a config applies (user config,
// CSS modules, or minification). Updates the asset's cached CSS in place.
module.exports = async function(asset) {
  let config = await getConfig(asset);
  if (!config) {
    return;
  }

  await asset.parseIfNeeded();
  let res = await postcss(config.plugins).process(asset.getCSSAst(), config);

  // Store the processed CSS and mark the AST as clean (no regeneration needed).
  asset.ast.css = res.css;
  asset.ast.dirty = false;
};
// Assembles the postcss options for an asset. Returns undefined when there
// is no user config, minification is off, and CSS modules are disabled.
async function getConfig(asset) {
  let config =
    asset.package.postcss ||
    (await asset.getConfig([
      '.postcssrc',
      '.postcssrc.js',
      'postcss.config.js'
    ]));

  let enableModules =
    asset.options.rendition && asset.options.rendition.modules;
  if (!config && !asset.options.minify && !enableModules) {
    return;
  }

  // Copy so the cached user config object is never mutated.
  config = Object.assign({}, config);

  // Collect the class-name JSON emitted by postcss-modules onto the asset.
  let postcssModulesConfig = {
    getJSON: (filename, json) => (asset.cssModules = json)
  };

  // Merge user-supplied postcss-modules options with ours, then remove the
  // entry — the plugin is re-added below so our getJSON hook is attached.
  if (config.plugins && config.plugins['postcss-modules']) {
    postcssModulesConfig = Object.assign(
      config.plugins['postcss-modules'],
      postcssModulesConfig
    );
    delete config.plugins['postcss-modules'];
  }

  config.plugins = await loadPlugins(config.plugins, asset.name);

  if (config.modules || enableModules) {
    let postcssModules = await localRequire('postcss-modules', asset.name);
    config.plugins.push(postcssModules(postcssModulesConfig));
  }

  if (asset.options.minify) {
    config.plugins.push(
      cssnano(
        (await asset.getConfig(['cssnano.config.js'])) || {
          // Only enable safe css transforms by default.
          // See: https://github.com/parcel-bundler/parcel/issues/698
          // Note: Remove when upgrading cssnano to v4
          // See: https://github.com/ben-eb/cssnano/releases/tag/v4.0.0-rc.0
          safe: true
        }
      )
    );
  }

  config.from = asset.name;
  config.to = asset.name;
  return config;
}

View file

@ -0,0 +1,33 @@
const loadPlugins = require('../utils/loadPlugins');
const posthtml = require('posthtml');
// Runs user-configured posthtml plugins over an HTML asset's AST.
// No-op when there is no config and minification is off.
module.exports = async function(asset) {
  let config = await getConfig(asset);
  if (!config) {
    return;
  }

  await asset.parseIfNeeded();
  let res = await posthtml(config.plugins).process(asset.ast, config);

  asset.ast = res.tree;
  asset.isAstDirty = true;
};
// Loads posthtml configuration for an asset from the package.json
// "posthtml" field or rc files. Returns undefined when there is nothing
// to do (no config and not minifying).
async function getConfig(asset) {
  const userConfig =
    asset.package.posthtml ||
    (await asset.getConfig([
      '.posthtmlrc',
      '.posthtmlrc.js',
      'posthtml.config.js'
    ]));

  if (!userConfig && !asset.options.minify) {
    return;
  }

  // Copy so the cached user config object is never mutated.
  const config = Object.assign({}, userConfig);
  config.plugins = await loadPlugins(config.plugins, asset.name);
  // The asset's AST is passed in pre-parsed, so posthtml must not re-parse.
  config.skipParse = true;
  return config;
}

View file

@ -0,0 +1,66 @@
const {minify} = require('uglify-es');
const SourceMap = require('../SourceMap');
// Minifies a JS asset with uglify-es, optionally capturing mapping data
// into asset.sourceMap. Stores the result on asset.outputCode and clears
// the AST, since uglify produced final code from the generated source.
module.exports = async function(asset) {
  await asset.parseIfNeeded();

  // Convert AST into JS
  let source = (await asset.generate()).js;

  let customConfig = await asset.getConfig(['.uglifyrc']);
  let options = {
    warnings: true,
    mangle: {
      toplevel: true
    }
  };

  let sourceMap;
  // NOTE(review): other files in this codebase read `options.sourceMaps`
  // (plural, e.g. the JS packager) — confirm `sourceMap` is the intended key.
  if (asset.options.sourceMap) {
    sourceMap = new SourceMap();
    // Feed every mapping uglify generates into our SourceMap via the
    // custom source_map.add hook.
    options.output = {
      source_map: {
        add(source, gen_line, gen_col, orig_line, orig_col, name) {
          sourceMap.addMapping({
            source,
            name,
            original: {
              line: orig_line,
              column: orig_col
            },
            generated: {
              line: gen_line,
              column: gen_col
            }
          });
        }
      }
    };
  }

  // User config (.uglifyrc) overrides the defaults above.
  if (customConfig) {
    options = Object.assign(options, customConfig);
  }

  let result = minify(source, options);
  if (result.error) {
    throw result.error;
  }

  if (sourceMap) {
    // Chain onto the existing map so positions still point at the original source.
    if (asset.sourceMap) {
      asset.sourceMap = await new SourceMap().extendSourceMap(
        asset.sourceMap,
        sourceMap
      );
    } else {
      asset.sourceMap = sourceMap;
    }
  }

  // babel-generator did our code generation for us, so remove the old AST
  asset.ast = null;
  asset.outputCode = result.code;
  asset.isAstDirty = false;
};

View file

@ -0,0 +1,92 @@
/**
 * Runs an async callback over a set of jobs with optional concurrency
 * limiting. run() resolves with the set of successfully processed jobs
 * once the queue drains, or rejects on the first error. With `retry`
 * (the default), a failed job is re-queued so a later run() retries it.
 */
class PromiseQueue {
  constructor(callback, options = {}) {
    this.process = callback;
    this.maxConcurrent = options.maxConcurrent || Infinity;
    this.retry = options.retry !== false;
    this.queue = []; // pending [job, args] pairs
    this.processing = new Set(); // jobs queued or in flight (dedupes add())
    this.processed = new Set(); // jobs completed successfully this run
    this.numRunning = 0;
    this.runPromise = null;
    this.resolve = null;
    this.reject = null;
  }

  // Enqueue a job; duplicates of a queued/in-flight job are ignored.
  // During an active run, jobs under the concurrency cap start immediately.
  add(job, ...args) {
    if (this.processing.has(job)) {
      return;
    }

    if (this.runPromise && this.numRunning < this.maxConcurrent) {
      this._runJob(job, args);
    } else {
      this.queue.push([job, args]);
    }

    this.processing.add(job);
  }

  // Start draining the queue. Idempotent while a run is in progress.
  run() {
    if (this.runPromise) {
      return this.runPromise;
    }

    const runPromise = new Promise((resolve, reject) => {
      this.resolve = resolve;
      this.reject = reject;
    });

    this.runPromise = runPromise;
    this._next();
    return runPromise;
  }

  async _runJob(job, args) {
    try {
      this.numRunning++;
      await this.process(job, ...args);
      this.processing.delete(job);
      this.processed.add(job);
      this.numRunning--;
      this._next();
    } catch (err) {
      this.numRunning--;
      if (this.retry) {
        // Keep the job queued (and in `processing`) for the next run().
        this.queue.push([job, args]);
      } else {
        this.processing.delete(job);
      }

      // The current run fails immediately; retried jobs wait for a new run().
      if (this.reject) {
        this.reject(err);
      }

      this._reset();
    }
  }

  // Schedule more jobs, or settle the run promise once everything finished.
  _next() {
    if (!this.runPromise) {
      return;
    }

    if (this.queue.length > 0) {
      while (this.queue.length > 0 && this.numRunning < this.maxConcurrent) {
        this._runJob(...this.queue.shift());
      }
    } else if (this.processing.size === 0) {
      this.resolve(this.processed);
      this._reset();
    }
  }

  _reset() {
    this.processed = new Set();
    this.runPromise = null;
    this.resolve = null;
    this.reject = null;
  }
}
module.exports = PromiseQueue;

View file

@ -0,0 +1,97 @@
const path = require('path');
const prettifyTime = require('./prettifyTime');
const logger = require('../Logger');
const emoji = require('./emoji');
const filesize = require('filesize');
const LARGE_BUNDLE_SIZE = 1024 * 1024;
const NUM_LARGE_ASSETS = 10;
const COLUMNS = [
{align: 'left'}, // name
{align: 'right'}, // size
{align: 'right'} // time
];
/**
 * Logs a table of all generated bundles sorted by total size, showing each
 * bundle's size and build time. With `detailed`, also lists the largest
 * assets inside each bundle.
 */
function bundleReport(mainBundle, detailed = false) {
  // Get a list of bundles sorted by size
  let bundles = Array.from(iterateBundles(mainBundle)).sort(
    (a, b) => b.totalSize - a.totalSize
  );

  let rows = [];
  for (let bundle of bundles) {
    // Add a row for the bundle
    rows.push([
      formatFilename(bundle.name, logger.chalk.cyan.bold),
      logger.chalk.bold(
        prettifySize(bundle.totalSize, bundle.totalSize > LARGE_BUNDLE_SIZE)
      ),
      logger.chalk.green.bold(prettifyTime(bundle.bundleTime))
    ]);

    // If detailed, generate a list of the top 10 largest assets in the bundle
    if (detailed && bundle.assets.size > 1) {
      // Only assets of the bundle's own type are shown in the breakdown.
      let assets = Array.from(bundle.assets)
        .filter(a => a.type === bundle.type)
        .sort((a, b) => b.bundledSize - a.bundledSize);

      let largestAssets = assets.slice(0, NUM_LARGE_ASSETS);
      for (let asset of largestAssets) {
        // Add a row for the asset.
        rows.push([
          (asset == assets[assets.length - 1] ? '└── ' : '├── ') +
            formatFilename(asset.name, logger.chalk.reset),
          logger.chalk.dim(prettifySize(asset.bundledSize)),
          logger.chalk.dim(logger.chalk.green(prettifyTime(asset.buildTime)))
        ]);
      }

      // Show how many more assets there are
      if (assets.length > largestAssets.length) {
        rows.push([
          '└── ' +
            logger.chalk.dim(
              `+ ${assets.length - largestAssets.length} more assets`
            )
        ]);
      }

      // If this isn't the last bundle, add an empty row before the next one
      if (bundle !== bundles[bundles.length - 1]) {
        rows.push([]);
      }
    }
  }

  // Render table
  logger.log('');
  logger.table(COLUMNS, rows);
}
module.exports = bundleReport;
// Yields `root` (unless empty) followed by every descendant bundle,
// depth-first in child order.
function* iterateBundles(root) {
  if (!root.isEmpty) {
    yield root;
  }
  for (const childBundle of root.childBundles) {
    yield* iterateBundles(childBundle);
  }
}
// Renders a byte count for the report. Large bundles get a warning emoji
// and yellow text; everything else is magenta.
function prettifySize(size, isLarge) {
  const pretty = filesize(size);
  return isLarge
    ? logger.chalk.yellow(emoji.warning + ' ' + pretty)
    : logger.chalk.magenta(pretty);
}
// Prints a path with its cwd-relative directory dimmed and the basename
// rendered in the given chalk color.
function formatFilename(filename, color = logger.chalk.reset) {
  const dir = path.relative(process.cwd(), path.dirname(filename));
  const prefix = dir ? dir + path.sep : '';
  return logger.chalk.dim(prefix) + color(path.basename(filename));
}

View file

@ -0,0 +1,59 @@
const fs = require('./fs');
const path = require('path');
// Parsers for non-JS config files, keyed by file extension.
// JSON5 is the default so extension-less rc files may contain comments.
const PARSERS = {
  json: require('json5').parse,
  toml: require('toml').parse
};

// Caches positive existence checks for resolved config files.
const existsCache = new Map();
// Walk upward from `filepath`'s directory looking for the first existing
// file among `filenames`. Stops (returning null) at the filesystem root or
// at a node_modules boundary.
async function resolve(filepath, filenames, root = path.parse(filepath).root) {
  filepath = path.dirname(filepath);

  // Don't traverse above the module root
  if (filepath === root || path.basename(filepath) === 'node_modules') {
    return null;
  }

  for (const filename of filenames) {
    const file = path.join(filepath, filename);
    // Positive results are cached in `existsCache`; misses hit the disk.
    const exists = existsCache.has(file)
      ? existsCache.get(file)
      : await fs.exists(file);
    if (exists) {
      existsCache.set(file, true);
      return file;
    }
  }

  // Recurse into the parent directory.
  return resolve(filepath, filenames, root);
}
// Resolve and parse the nearest matching config file. `.js` configs are
// require()d; everything else goes through a PARSERS entry (JSON5 default).
// Returns null when no config exists, it is empty, or it vanished from disk.
async function load(filepath, filenames, root = path.parse(filepath).root) {
  const configFile = await resolve(filepath, filenames, root);
  if (!configFile) {
    return null;
  }

  try {
    const extname = path.extname(configFile).slice(1);
    if (extname === 'js') {
      return require(configFile);
    }

    const configContent = (await fs.readFile(configFile)).toString();
    if (!configContent) {
      return null;
    }
    const parse = PARSERS[extname] || PARSERS.json;
    return parse(configContent);
  } catch (err) {
    // The file disappeared (or a .js config required a missing module):
    // drop it from the cache so the next lookup re-checks the disk.
    if (err.code === 'MODULE_NOT_FOUND' || err.code === 'ENOENT') {
      existsCache.delete(configFile);
      return null;
    }

    throw err;
  }
}
exports.resolve = resolve;
exports.load = load;

View file

@ -0,0 +1,18 @@
// Human-friendly messages for common server startup failures, keyed by
// Node error code. `{port}` is substituted with the actual port number.
const serverErrorList = {
  EACCES: "You don't have access to bind the server to port {port}.",
  EADDRINUSE: 'There is already a process listening on port {port}.'
};

// Translate a server startup error into a readable description, falling
// back to a generic message for unknown error codes.
function serverErrors(err, port) {
  const known = serverErrorList[err.code];
  if (known) {
    return known.replace(/{port}/g, port);
  }
  return `Error: ${err.code} occurred while setting up server on port ${port}.`;
}
module.exports.serverErrors = serverErrors;

View file

@ -0,0 +1,8 @@
// Emoji used in log output. Windows consoles generally can't render them
// (unless TERM reports xterm-256color), so fall back to similar symbols
// from code page 437: https://en.wikipedia.org/wiki/Code_page_437
const supportsEmoji =
  process.platform !== 'win32' || process.env.TERM === 'xterm-256color';

const SYMBOLS = {
  progress: ['⏳', '∞'],
  success: ['✨', '√'],
  error: ['🚨', '×'],
  warning: ['⚠️', '‼']
};

for (const name of Object.keys(SYMBOLS)) {
  exports[name] = SYMBOLS[name][supportsEmoji ? 0 : 1];
}

26
VISUALIZACION/node_modules/parcel-bundler/src/utils/env.js generated vendored Executable file
View file

@ -0,0 +1,26 @@
const config = require('./config');
const dotenv = require('dotenv');
/**
 * Loads environment variables from .env files near `filepath` into
 * process.env, following the create-react-app convention.
 *
 * Files are applied in priority order: `.env.<NODE_ENV>.local` first,
 * plain `.env` last. dotenv never overwrites variables that are already
 * set, so the files MUST be loaded sequentially — the previous
 * Promise.all made the effective precedence depend on which file's
 * config.resolve call happened to finish first.
 */
async function loadEnv(filepath) {
  const NODE_ENV = process.env.NODE_ENV || 'development';
  const dotenvFiles = [
    `.env.${NODE_ENV}.local`,
    `.env.${NODE_ENV}`,
    // Don't include `.env.local` for `test` environment
    // since normally you expect tests to produce the same
    // results for everyone
    NODE_ENV !== 'test' && '.env.local',
    '.env'
  ].filter(Boolean);

  // Sequential on purpose: earlier (more specific) files must win, and
  // dotenv.config skips variables that were already set by a prior file.
  for (const dotenvFile of dotenvFiles) {
    const envPath = await config.resolve(filepath, [dotenvFile]);
    if (envPath) {
      dotenv.config({path: envPath});
    }
  }
}
module.exports = loadEnv;

18
VISUALIZACION/node_modules/parcel-bundler/src/utils/fs.js generated vendored Executable file
View file

@ -0,0 +1,18 @@
const promisify = require('./promisify');
const fs = require('fs');
const mkdirp = require('mkdirp');
exports.readFile = promisify(fs.readFile);
exports.writeFile = promisify(fs.writeFile);
exports.stat = promisify(fs.stat);
exports.readdir = promisify(fs.readdir);
exports.unlink = promisify(fs.unlink);
exports.realpath = promisify(fs.realpath);
exports.exists = function(filename) {
return new Promise(resolve => {
fs.exists(filename, resolve);
});
};
exports.mkdirp = promisify(mkdirp);

View file

@ -0,0 +1,130 @@
const forge = require('node-forge');
const fs = require('fs');
const mkdirp = require('mkdirp');
const path = require('path');
const logger = require('../Logger');
function generateCertificate(options = {}) {
const privateKeyPath = path.join(options.cacheDir, 'private.pem');
const certPath = path.join(options.cacheDir, 'primary.crt');
if (options.cache) {
const cachedKey =
fs.existsSync(privateKeyPath) && fs.readFileSync(privateKeyPath);
const cachedCert = fs.existsSync(certPath) && fs.readFileSync(certPath);
if (cachedKey && cachedCert) {
return {
key: cachedKey,
cert: cachedCert
};
}
}
logger.log('Generating SSL Certificate...');
const pki = forge.pki;
const keys = pki.rsa.generateKeyPair(2048);
const cert = pki.createCertificate();
cert.publicKey = keys.publicKey;
cert.serialNumber = '01';
cert.validity.notBefore = new Date();
cert.validity.notAfter = new Date();
cert.validity.notAfter.setFullYear(cert.validity.notBefore.getFullYear() + 1);
const attrs = [
{
name: 'commonName',
value: 'parceljs.org'
},
{
name: 'countryName',
value: 'US'
},
{
shortName: 'ST',
value: 'Virginia'
},
{
name: 'localityName',
value: 'Blacksburg'
},
{
name: 'organizationName',
value: 'parcelBundler'
},
{
shortName: 'OU',
value: 'Test'
}
];
cert.setSubject(attrs);
cert.setIssuer(attrs);
cert.setExtensions([
{
name: 'basicConstraints',
cA: true
},
{
name: 'keyUsage',
keyCertSign: true,
digitalSignature: true,
nonRepudiation: true,
keyEncipherment: true,
dataEncipherment: true
},
{
name: 'extKeyUsage',
serverAuth: true,
clientAuth: true,
codeSigning: true,
emailProtection: true,
timeStamping: true
},
{
name: 'nsCertType',
client: true,
server: true,
email: true,
objsign: true,
sslCA: true,
emailCA: true,
objCA: true
},
{
name: 'subjectAltName',
altNames: [
{
type: 6, // URI
value: 'http://example.org/webid#me'
},
{
type: 7, // IP
ip: '127.0.0.1'
}
]
},
{
name: 'subjectKeyIdentifier'
}
]);
cert.sign(keys.privateKey, forge.md.sha256.create());
const privPem = pki.privateKeyToPem(keys.privateKey);
const certPem = pki.certificateToPem(cert);
if (options.cache) {
mkdirp.sync(options.cacheDir);
fs.writeFileSync(privateKeyPath, privPem);
fs.writeFileSync(certPath, certPem);
}
return {
key: privPem,
cert: certPem
};
}
module.exports = generateCertificate;

View file

@ -0,0 +1,13 @@
const fs = require('./fs');
// Reads a user-supplied HTTPS key/cert pair from disk, throwing a single
// friendly error when either file can't be read.
async function getCertificate(options) {
  try {
    const cert = await fs.readFile(options.cert);
    const key = await fs.readFile(options.key);
    return {key, cert};
  } catch (readError) {
    throw new Error('Certificate and/or key not found');
  }
}

View file

@ -0,0 +1,31 @@
const path = require('path');
/**
 * Computes the deepest directory common to all given file paths.
 * Falls back to process.cwd() when the list is empty or the paths live on
 * different roots (e.g. C:\foo\bar vs D:\foo\bar on Windows).
 */
function getRootDir(files) {
  let common = null;
  for (const file of files) {
    const parsed = path.parse(file);
    if (!common) {
      common = parsed;
      continue;
    }

    if (parsed.root !== common.root) {
      // No shared root is possible; bail out to the working directory.
      return process.cwd();
    }

    // Trim the running common dir down to the shared leading segments.
    const commonParts = common.dir.split(path.sep);
    const nextParts = parsed.dir.split(path.sep);
    const limit = Math.min(commonParts.length, nextParts.length);
    let shared = 0;
    while (shared < limit && commonParts[shared] === nextParts[shared]) {
      shared++;
    }
    common.dir =
      shared > 1 ? commonParts.slice(0, shared).join(path.sep) : common.root;
  }
  return common ? common.dir : process.cwd();
}
module.exports = getRootDir;

View file

@ -0,0 +1,130 @@
const browserslist = require('browserslist');
const semver = require('semver');
const Path = require('path');
// Compilation targets assumed when the app specifies none:
// browsers with > 0.25% market share, or Node 6.
const DEFAULT_ENGINES = {
  browsers: ['> 0.25%'],
  node: '6'
};
/**
* Loads target node and browser versions from the following locations:
* - package.json engines field
* - package.json browserslist field
* - browserslist or .browserslistrc files
* - .babelrc or .babelrc.js files with babel-preset-env
*/
async function getTargetEngines(asset, isTargetApp) {
  let targets = {};
  // For the app itself, resolve config relative to the project root;
  // for a dependency, resolve relative to the module's own file.
  let path = isTargetApp
    ? Path.join(asset.options.rootDir, 'index')
    : asset.name;
  let compileTarget =
    asset.options.target === 'browser' ? 'browsers' : asset.options.target;
  let pkg = await asset.getConfig(['package.json'], {path});
  let engines = pkg && pkg.engines;
  let nodeVersion = engines && getMinSemver(engines.node);

  if (compileTarget === 'node') {
    // Use package.engines.node by default if we are compiling for node.
    if (typeof nodeVersion === 'string') {
      targets.node = nodeVersion;
    }
  } else {
    // Browser target precedence: engines.browsers, then package.json
    // browserslist, then browserslist config files, then .babelrc targets.
    if (
      engines &&
      (typeof engines.browsers === 'string' || Array.isArray(engines.browsers))
    ) {
      targets.browsers = engines.browsers;
    } else if (pkg && pkg.browserslist) {
      targets.browsers = pkg.browserslist;
    } else {
      let browserslist = await loadBrowserslist(asset, path);
      if (browserslist) {
        targets.browsers = browserslist;
      } else {
        let babelTargets = await loadBabelrc(asset, path);
        if (babelTargets && babelTargets.browsers) {
          targets.browsers = babelTargets.browsers;
        } else if (babelTargets && babelTargets.node && !nodeVersion) {
          nodeVersion = babelTargets.node;
        }
      }
    }

    // Fall back to package.engines.node for node_modules without any browser target info.
    if (!isTargetApp && !targets.browsers && typeof nodeVersion === 'string') {
      targets.node = nodeVersion;
    }
  }

  // If we didn't find any targets, set some default engines for the target app.
  if (
    isTargetApp &&
    !targets[compileTarget] &&
    DEFAULT_ENGINES[compileTarget]
  ) {
    targets[compileTarget] = DEFAULT_ENGINES[compileTarget];
  }

  // Parse browser targets
  if (targets.browsers) {
    // browserslist configs may be keyed by environment name.
    if (
      typeof targets.browsers === 'object' &&
      !Array.isArray(targets.browsers)
    ) {
      let env = asset.options.production
        ? 'production'
        : process.env.NODE_ENV || 'development';
      targets.browsers = targets.browsers[env] || targets.browsers.defaults;
    }

    // Normalize to a concrete, sorted resolved browser list.
    if (targets.browsers) {
      targets.browsers = browserslist(targets.browsers).sort();
    }
  }

  // Dont compile if we couldn't find any targets
  if (Object.keys(targets).length === 0) {
    return null;
  }

  return targets;
}
// Returns the lowest concrete version allowed by a semver range string
// (e.g. '>=6 <8' -> '6.0.0'), or null when the range cannot be parsed.
function getMinSemver(version) {
  let minimum = null;
  try {
    const comparatorSets = new semver.Range(version).set.slice();
    comparatorSets.sort((a, b) => a[0].semver.compare(b[0].semver));
    minimum = comparatorSets[0][0].semver.version;
  } catch (err) {
    // Not a valid semver range — report no version.
  }
  return minimum;
}
// Finds a browserslist / .browserslistrc config file near `path` and
// parses it into a list of browser targets (undefined when absent).
async function loadBrowserslist(asset, path) {
  const configPath = await asset.getConfig(['browserslist', '.browserslistrc'], {
    path,
    load: false
  });
  if (!configPath) {
    return undefined;
  }
  return browserslist.readConfig(configPath);
}
// Reads the `targets` option of a babel-preset-env entry from a
// .babelrc / .babelrc.js near `path`, if one exists.
async function loadBabelrc(asset, path) {
  const config = await asset.getConfig(['.babelrc', '.babelrc.js'], {path});
  if (!config || !config.presets) {
    return undefined;
  }
  const envPreset = config.presets.find(
    preset =>
      Array.isArray(preset) &&
      (preset[0] === 'env' || preset[0] === '@babel/env')
  );
  if (envPreset && envPreset[1] && envPreset[1].targets) {
    return envPreset[1].targets;
  }
}
module.exports = getTargetEngines;

View file

@ -0,0 +1,106 @@
const config = require('./config');
const promisify = require('./promisify');
const resolve = promisify(require('resolve'));
const commandExists = require('command-exists');
const logger = require('../Logger');
const emoji = require('./emoji');
const pipeSpawn = require('./pipeSpawn');
const PromiseQueue = require('./PromiseQueue');
const path = require('path');
const fs = require('./fs');
/**
 * Installs the given npm modules with yarn or npm, streaming progress to
 * the logger, and then installs their peer dependencies.
 *
 * @param {string[]} modules - package specs to install
 * @param {string} filepath - file whose nearest package.json determines cwd
 * @param {Object} options - {installPeers, saveDev, packageManager}
 */
async function install(modules, filepath, options = {}) {
  const {installPeers = true, saveDev = true} = options;
  let packageManager = options.packageManager;

  logger.status(emoji.progress, `Installing ${modules.join(', ')}...`);

  const packageLocation = await config.resolve(filepath, ['package.json']);
  const cwd = packageLocation ? path.dirname(packageLocation) : process.cwd();

  if (!packageManager) {
    packageManager = await determinePackageManager(filepath);
  }

  const installCommand = packageManager === 'npm' ? 'install' : 'add';
  const args = [installCommand, ...modules];
  if (saveDev) {
    args.push('-D');
  } else if (packageManager === 'npm') {
    args.push('--save');
  }

  // npm doesn't auto-create a package.json when installing,
  // so create an empty one if needed.
  if (packageManager === 'npm' && !packageLocation) {
    await fs.writeFile(path.join(cwd, 'package.json'), '{}');
  }

  try {
    await pipeSpawn(packageManager, args, {cwd});
  } catch (err) {
    throw new Error(`Failed to install ${modules.join(', ')}.`);
  }

  if (installPeers) {
    await Promise.all(
      modules.map(m => installPeerDependencies(filepath, m, options))
    );
  }
}
// Installs the peerDependencies declared by a freshly installed module,
// without recursing into *their* peers.
async function installPeerDependencies(filepath, name, options) {
  const basedir = path.dirname(filepath);
  const [resolved] = await resolve(name, {basedir});
  const pkg = await config.load(resolved, ['package.json']);
  const peers = pkg.peerDependencies || {};

  const modules = [];
  for (const peer of Object.keys(peers)) {
    modules.push(`${peer}@${peers[peer]}`);
  }

  if (modules.length) {
    await install(
      modules,
      filepath,
      Object.assign({}, options, {installPeers: false})
    );
  }
}
// Picks yarn when the yarn binary exists and the project isn't locked to
// npm via package-lock.json; otherwise npm.
async function determinePackageManager(filepath) {
  const configFile = await config.resolve(filepath, [
    'yarn.lock',
    'package-lock.json'
  ]);
  const yarnAvailable = await checkForYarnCommand();

  // If Yarn isn't available, or there is a package-lock.json file, use npm.
  const configName = configFile && path.basename(configFile);
  return !yarnAvailable || configName === 'package-lock.json' ? 'npm' : 'yarn';
}
// Caches whether the `yarn` binary exists on the PATH (null = unknown).
let hasYarn = null;
async function checkForYarnCommand() {
  if (hasYarn == null) {
    try {
      hasYarn = await commandExists('yarn');
    } catch (err) {
      hasYarn = false;
    }
  }
  return hasYarn;
}
// Serialize installs through a single-concurrency queue so concurrent
// resolver failures don't spawn overlapping npm/yarn processes.
let queue = new PromiseQueue(install, {maxConcurrent: 1, retry: false});
module.exports = function(...args) {
  // NOTE(review): the value returned by `add` is ignored and callers get
  // the queue's `run()` promise instead — presumably it settles when the
  // whole queue drains. Confirm against PromiseQueue's contract.
  queue.add(...args);
  return queue.run();
};

View file

@ -0,0 +1,11 @@
const isURL = require('is-url');
// Matches anchor (ie: #raptors)
const ANCHOR_REGEXP = /^#/;
// Matches scheme (ie: tel:, mailto:, data:, itms-apps:)
const SCHEME_REGEXP = /^[a-z][a-z0-9\-+.]*:/i;
module.exports = function(url) {
return isURL(url) || ANCHOR_REGEXP.test(url) || SCHEME_REGEXP.test(url);
};

View file

@ -0,0 +1,12 @@
// Returns the 1-based number of lines in `string`; a trailing newline
// counts as starting a new (empty) line.
function lineCounter(string) {
  let count = 1;
  let index = string.indexOf('\n');
  while (index !== -1) {
    count++;
    index = string.indexOf('\n', index + 1);
  }
  return count;
}
module.exports = lineCounter;

View file

@ -0,0 +1,37 @@
const localRequire = require('./localRequire');
module.exports = async function loadPlugins(plugins, relative) {
if (Array.isArray(plugins)) {
return await Promise.all(
plugins.map(async p => await loadPlugin(p, relative)).filter(Boolean)
);
} else if (typeof plugins === 'object') {
let mapPlugins = await Promise.all(
Object.keys(plugins).map(
async p => await loadPlugin(p, relative, plugins[p])
)
);
return mapPlugins.filter(Boolean);
} else {
return [];
}
};
// Resolves a single plugin. Strings are require()d relative to `relative`
// (unwrapping ES default exports); non-strings pass through unchanged.
async function loadPlugin(plugin, relative, options) {
  if (typeof plugin === 'string') {
    plugin = await localRequire(plugin, relative);
    plugin = plugin.default || plugin;
    if (typeof options !== 'object') {
      options = {};
    }
    // Only invoke the plugin as a factory when options were provided.
    // NOTE(review): a factory-style plugin given an empty options object is
    // used as-is rather than invoked — confirm this is intended upstream.
    if (Object.keys(options).length > 0) {
      plugin = plugin(options);
    }
    // Unwrap a default export from the factory result as well.
    plugin = plugin.default || plugin;
  }
  return plugin;
}

View file

@ -0,0 +1,30 @@
const {dirname} = require('path');
const resolve = require('resolve');
const worker = require('../worker');
const cache = new Map();
// Requires `name` as seen from the directory containing `path`, caching
// resolution per (directory, name) pair and installing the package on
// demand (at most once) when it cannot be resolved.
async function localRequire(name, path, triedInstall = false) {
  const basedir = dirname(path);
  const key = basedir + ':' + name;
  let resolved = cache.get(key);

  if (!resolved) {
    try {
      resolved = resolve.sync(name, {basedir});
    } catch (e) {
      if (e.code !== 'MODULE_NOT_FOUND' || triedInstall) {
        throw e;
      }
      // Ask the master process to install the missing package, then retry.
      await worker.addCall({
        location: require.resolve('./installPackage.js'),
        args: [[name], path]
      });
      return localRequire(name, path, true);
    }
    cache.set(key, resolved);
  }

  return require(resolved);
}
module.exports = localRequire;

23
VISUALIZACION/node_modules/parcel-bundler/src/utils/md5.js generated vendored Executable file
View file

@ -0,0 +1,23 @@
const crypto = require('crypto');
const fs = require('fs');
// Hex-encoded MD5 digest of a string (or Buffer).
function md5(string) {
  const hash = crypto.createHash('md5');
  hash.update(string);
  return hash.digest('hex');
}

// Hex-encoded MD5 digest of a file's contents, computed by streaming.
md5.file = function(filename) {
  return new Promise((resolve, reject) => {
    const hash = crypto.createHash('md5').setEncoding('hex');
    fs.createReadStream(filename)
      .pipe(hash)
      .on('finish', function() {
        resolve(this.read());
      })
      .on('error', reject);
  });
};
module.exports = md5;

View file

@ -0,0 +1,17 @@
const crypto = require('crypto');
// Deterministic MD5 hash of a plain object: keys are folded in sorted
// order, and nested objects are hashed recursively, so key insertion
// order never affects the result.
function objectHash(object) {
  const hash = crypto.createHash('md5');
  const sortedKeys = Object.keys(object).sort();
  for (const key of sortedKeys) {
    const val = object[key];
    if (val && typeof val === 'object') {
      hash.update(key + objectHash(val));
    } else {
      hash.update(key + val);
    }
  }
  return hash.digest('hex');
}
module.exports = objectHash;

View file

@ -0,0 +1,14 @@
const opn = require('opn');
// Opens `url` in the given browser app (or the system default). Launch
// failures are logged rather than thrown, so a missing browser never
// aborts a build.
const openInBrowser = async (url, browser) => {
  const options = typeof browser === 'string' ? {app: browser} : undefined;
  try {
    await opn(url, options);
  } catch (err) {
    console.error(`Unexpected error while opening in browser: ${browser}`);
    console.error(err);
  }
};
module.exports = openInBrowser;

View file

@ -0,0 +1,29 @@
const spawn = require('cross-spawn');
const logger = require('../Logger');
// Spawns `cmd` with the logger's color settings forced into its env,
// pipes its stdout/stderr through the logger, and resolves on a zero
// exit code (rejecting otherwise).
function pipeSpawn(cmd, params, opts) {
  const env = Object.assign({
    FORCE_COLOR: logger.color,
    npm_config_color: logger.color ? 'always': '',
    npm_config_progress: true
  }, process.env);

  const cp = spawn(cmd, params, Object.assign({env}, opts));

  cp.stdout.setEncoding('utf8').on('data', d => logger.writeRaw(d));
  cp.stderr.setEncoding('utf8').on('data', d => logger.writeRaw(d));

  return new Promise((resolve, reject) => {
    cp.on('error', reject);
    cp.on('close', code => {
      if (code !== 0) {
        reject(new Error(cmd + ' failed.'));
      } else {
        logger.clear();
        resolve();
      }
    });
  });
}
module.exports = pipeSpawn;

View file

@ -0,0 +1,3 @@
module.exports = function(time) {
return time < 1000 ? `${time}ms` : `${(time / 1000).toFixed(2)}s`;
};

View file

@ -0,0 +1,24 @@
module.exports = function(err, opts = {}) {
let message = typeof err === 'string' ? err : err.message;
if (!message) {
message = 'Unknown error';
}
if (err.fileName) {
let fileName = err.fileName;
if (err.loc) {
fileName += `:${err.loc.line}:${err.loc.column}`;
}
message = `${fileName}: ${message}`;
}
let stack;
if (err.codeFrame) {
stack = (opts.color && err.highlightedCodeFrame) || err.codeFrame;
} else if (err.stack) {
stack = err.stack.slice(err.stack.indexOf('\n') + 1);
}
return {message, stack};
};

View file

@ -0,0 +1,13 @@
module.exports = function(fn) {
return function(...args) {
return new Promise(function(resolve, reject) {
fn(...args, function(err, ...res) {
if (err) return reject(err);
if (res.length === 1) return resolve(res[0]);
resolve(res);
});
});
};
};

View file

@ -0,0 +1,19 @@
const {minify} = require('uglify-es');
const {serialize} = require('serialize-to-js');
// Serializes `obj` into the source of a CommonJS module that exports it,
// optionally minified with uglify-es.
function serializeObject(obj, shouldMinify = false) {
  const code = `module.exports = ${serialize(obj)};`;

  if (!shouldMinify) {
    return code;
  }

  const minified = minify(code);
  if (minified.error) {
    throw minified.error;
  }
  return minified.code;
}
module.exports = serializeObject;

View file

@ -0,0 +1,31 @@
const deasync = require('deasync');
/**
 * Synchronously waits for a promise to settle by spinning the node event
 * loop via deasync, then returns its value or rethrows its rejection.
 */
function syncPromise(promise) {
  let settled = false;
  let result;
  let failure;

  promise.then(
    value => {
      result = value;
      settled = true;
    },
    error => {
      failure = error;
      settled = true;
    }
  );

  deasync.loopWhile(() => !settled);

  if (failure) {
    throw failure;
  }
  return result;
}
module.exports = syncPromise;

View file

@ -0,0 +1,15 @@
const URL = require('url');
const path = require('path');
/**
* Joins a path onto a URL, and normalizes Windows paths
* e.g. from \path\to\res.js to /path/to/res.js.
*/
module.exports = function(publicURL, assetPath) {
const url = URL.parse(publicURL, false, true);
const assetUrl = URL.parse(assetPath);
url.pathname = path.posix.join(url.pathname, assetUrl.pathname);
url.search = assetUrl.search;
url.hash = assetUrl.hash;
return URL.format(url);
};

View file

@ -0,0 +1,171 @@
const types = require('babel-types');
const template = require('babel-template');
const traverse = require('babel-traverse').default;
const urlJoin = require('../utils/urlJoin');
const isURL = require('../utils/is-url');
const matchesPattern = require('./matches-pattern');
const requireTemplate = template('require("_bundle_loader")');
const argTemplate = template('require.resolve(MODULE)');
const serviceWorkerPattern = ['navigator', 'serviceWorker', 'register'];
// AST visitors that collect the dependencies of a JS asset. Each visitor
// receives (node, asset, ancestors) from the AST walker.
module.exports = {
  ImportDeclaration(node, asset) {
    asset.isES6Module = true;
    addDependency(asset, node.source);
  },
  ExportNamedDeclaration(node, asset) {
    asset.isES6Module = true;
    if (node.source) {
      addDependency(asset, node.source);
    }
  },
  ExportAllDeclaration(node, asset) {
    asset.isES6Module = true;
    addDependency(asset, node.source);
  },
  ExportDefaultDeclaration(node, asset) {
    asset.isES6Module = true;
  },
  CallExpression(node, asset, ancestors) {
    let {callee, arguments: args} = node;
    // CommonJS: require('x') with a single literal argument, not shadowed
    // by a local `require` binding and not inside a statically-false branch.
    let isRequire =
      types.isIdentifier(callee) &&
      callee.name === 'require' &&
      args.length === 1 &&
      types.isStringLiteral(args[0]) &&
      !hasBinding(ancestors, 'require') &&
      !isInFalsyBranch(ancestors);
    if (isRequire) {
      // requires inside a try...catch are treated as optional dependencies.
      let optional = ancestors.some(a => types.isTryStatement(a)) || undefined;
      addDependency(asset, args[0], {optional});
      return;
    }
    // Dynamic import(): rewritten to go through the bundle loader runtime.
    let isDynamicImport =
      callee.type === 'Import' &&
      args.length === 1 &&
      types.isStringLiteral(args[0]);
    if (isDynamicImport) {
      asset.addDependency('_bundle_loader');
      addDependency(asset, args[0], {dynamic: true});
      node.callee = requireTemplate().expression;
      node.arguments[0] = argTemplate({MODULE: args[0]}).expression;
      asset.isAstDirty = true;
      return;
    }
    const isRegisterServiceWorker =
      types.isStringLiteral(args[0]) &&
      matchesPattern(callee, serviceWorkerPattern);
    if (isRegisterServiceWorker) {
      // Treat service workers as an entry point so filenames remain consistent across builds.
      // https://developers.google.com/web/fundamentals/primers/service-workers/lifecycle#avoid_changing_the_url_of_your_service_worker_script
      addURLDependency(asset, args[0], {entry: true});
      return;
    }
  },
  NewExpression(node, asset) {
    const {callee, arguments: args} = node;
    // new Worker('script.js') with a literal URL becomes a URL dependency.
    const isWebWorker =
      callee.type === 'Identifier' &&
      callee.name === 'Worker' &&
      args.length === 1 &&
      types.isStringLiteral(args[0]);
    if (isWebWorker) {
      addURLDependency(asset, args[0]);
      return;
    }
  }
};
// Determines whether `name` is bound (shadowed) by any of the given
// ancestor nodes — e.g. a local `require` function or parameter — so that
// calls to it should not be treated as module requires.
function hasBinding(node, name) {
  if (Array.isArray(node)) {
    return node.some(ancestor => hasBinding(ancestor, name));
  } else if (
    types.isProgram(node) ||
    types.isBlockStatement(node) ||
    types.isBlock(node)
  ) {
    return node.body.some(statement => hasBinding(statement, name));
  } else if (
    types.isFunctionDeclaration(node) ||
    types.isFunctionExpression(node) ||
    types.isArrowFunctionExpression(node)
  ) {
    // Fix: `node.id` is undefined (not null) on arrow functions and may be
    // null on anonymous function expressions; the previous strict
    // `node.id !== null` check crashed with a TypeError on arrows.
    return (
      (node.id != null && node.id.name === name) ||
      node.params.some(
        param => types.isIdentifier(param) && param.name === name
      )
    );
  } else if (types.isVariableDeclaration(node)) {
    return node.declarations.some(declaration => declaration.id.name === name);
  }
  return false;
}
// Returns true when the dependency sits inside a branch of an if statement
// whose test is statically known, and that branch is dead code (so the
// require should be skipped). Non-if ancestors fall through the callback
// without a return, yielding undefined (falsy), which is intended.
function isInFalsyBranch(ancestors) {
  // Check if any ancestors are if statements
  return ancestors.some((node, index) => {
    if (types.isIfStatement(node)) {
      let res = evaluateExpression(node.test);
      if (res && res.confident) {
        // If the test is truthy, exclude the dep if it is in the alternate branch.
        // If the test is falsy, exclude the dep if it is in the consequent branch.
        let child = ancestors[index + 1];
        return res.value ? child === node.alternate : child === node.consequent;
      }
    }
  });
}
// Statically evaluates an expression node with babel-traverse's evaluator.
// Returns the evaluation result object ({confident, value}) or null.
function evaluateExpression(node) {
  // Wrap the node in a standalone program so we can traverse it
  node = types.file(types.program([types.expressionStatement(node)]));
  // Find the first expression and evaluate it.
  let res = null;
  traverse(node, {
    Expression(path) {
      res = path.evaluate();
      path.stop();
    }
  });
  return res;
}
// Records a module dependency on `asset`. For non-browser targets, only
// relative/absolute/tilde specifiers are bundled; bare module names are
// left to node's resolver at runtime.
function addDependency(asset, node, opts = {}) {
  const isBrowser = asset.options.target === 'browser';
  if (!isBrowser && !/^[/~.]/.test(node.value)) {
    return;
  }
  opts.loc = node.loc && node.loc.start;
  asset.addDependency(node.value, opts);
}
// Records a URL dependency (worker / service-worker scripts) and rewrites
// the string literal in place to the bundled asset's public URL.
function addURLDependency(asset, node, opts = {}) {
  opts.loc = node.loc && node.loc.start;
  const resolved = asset.addURLDependency(node.value, opts);
  node.value = isURL(resolved)
    ? resolved
    : urlJoin(asset.options.publicURL, resolved);
  asset.isAstDirty = true;
}

View file

@ -0,0 +1,28 @@
const types = require('babel-types');
const matchesPattern = require('./matches-pattern');
module.exports = {
MemberExpression(node, asset) {
// Inline environment variables accessed on process.env
if (matchesPattern(node.object, 'process.env')) {
let key = types.toComputedKey(node);
if (types.isStringLiteral(key)) {
let val = types.valueToNode(process.env[key.value]);
morph(node, val);
asset.isAstDirty = true;
asset.cacheData.env[key.value] = process.env[key.value];
}
}
}
};
// Replaces every own property of `object` with those of `newProperties`,
// mutating `object` in place (used to swap one AST node for another while
// keeping the parent's reference intact).
function morph(object, newProperties) {
  for (const key of Object.keys(object)) {
    delete object[key];
  }
  for (const key in newProperties) {
    object[key] = newProperties[key];
  }
}

201
VISUALIZACION/node_modules/parcel-bundler/src/visitors/fs.js generated vendored Executable file
View file

@ -0,0 +1,201 @@
const t = require('babel-types');
const Path = require('path');
const fs = require('fs');
const template = require('babel-template');
const logger = require('../Logger');
const bufferTemplate = template('Buffer(CONTENT, ENC)');
// Babel visitors that inline `fs.readFileSync(...)` calls whose arguments
// can be statically evaluated, replacing the call with the file contents.
module.exports = {
  AssignmentExpression(path) {
    if (!isRequire(path.node.right, 'fs', 'readFileSync')) {
      return;
    }
    // Remember that these bindings alias require('fs').readFileSync so
    // later CallExpressions can be traced back to it.
    for (let name in path.getBindingIdentifiers()) {
      const binding = path.scope.getBinding(name);
      if (!binding) continue;
      binding.path.setData('__require', path.node);
    }
  },
  CallExpression(path, asset) {
    // See https://github.com/defunctzombie/node-browser-resolve#skip
    let ignore =
      asset.package &&
      asset.package.browser &&
      asset.package.browser.fs === false;
    if (!ignore && referencesImport(path, 'fs', 'readFileSync')) {
      // Variables inlined before evaluating the call's arguments.
      let vars = {
        __dirname: Path.dirname(asset.name),
        __filename: asset.basename
      };
      let filename, args, res;
      try {
        [filename, ...args] = path
          .get('arguments')
          .map(arg => evaluate(arg, vars));
        filename = Path.resolve(filename);
        res = fs.readFileSync(filename, ...args);
      } catch (err) {
        if (err instanceof NodeNotEvaluatedError) {
          // Warn using a code frame
          err.fileName = asset.name;
          asset.generateErrorMessage(err);
          logger.warn(err);
          return;
        }
        // Add location info so we log a code frame with the error
        err.loc =
          path.node.arguments.length > 0
            ? path.node.arguments[0].loc.start
            : path.node.loc.start;
        throw err;
      }
      let replacementNode;
      if (Buffer.isBuffer(res)) {
        // Binary reads are inlined as base64-encoded Buffers.
        replacementNode = bufferTemplate({
          CONTENT: t.stringLiteral(res.toString('base64')),
          ENC: t.stringLiteral('base64')
        });
      } else {
        replacementNode = t.stringLiteral(res);
      }
      // Watch the inlined file so edits to it rebuild this asset.
      asset.addDependency(filename, {includedInParent: true});
      path.replaceWith(replacementNode);
      asset.isAstDirty = true;
    }
  }
};
// True when `node` is `require('<name>')` or `require('<name>').<method>`
// with a single string-literal argument matching `name` (when given).
function isRequire(node, name, method) {
  // e.g. require('fs').readFileSync
  if (t.isMemberExpression(node) && node.property.name === method) {
    node = node.object;
  }

  if (!t.isCallExpression(node)) {
    return false;
  }

  const {callee, arguments: args} = node;
  const isRequireCall =
    t.isIdentifier(callee) &&
    callee.name === 'require' &&
    args.length === 1 &&
    t.isStringLiteral(args[0]);

  if (!isRequireCall) {
    return false;
  }
  return !name || args[0].value === name;
}
// Determines whether the call at `path` ultimately invokes `<name>.<method>`
// (e.g. fs.readFileSync), tracing identifiers back through import and
// require bindings, including destructured and reassigned aliases.
// NOTE(review): returns undefined (not false) when the callee has no
// binding — callers only use the result in boolean context.
function referencesImport(path, name, method) {
  let callee = path.node.callee;
  let bindingPath;
  // e.g. readFileSync()
  if (t.isIdentifier(callee)) {
    bindingPath = getBindingPath(path, callee.name);
  } else if (t.isMemberExpression(callee)) {
    if (callee.property.name !== method) {
      return false;
    }
    // e.g. fs.readFileSync()
    if (t.isIdentifier(callee.object)) {
      bindingPath = getBindingPath(path, callee.object.name);
      // require('fs').readFileSync()
    } else if (isRequire(callee.object, name)) {
      return true;
    }
  } else {
    return false;
  }
  if (!bindingPath) {
    return;
  }
  // Prefer an aliasing assignment recorded by the AssignmentExpression
  // visitor over the binding's own declaration node.
  let bindingNode = bindingPath.getData('__require') || bindingPath.node;
  let parent = bindingPath.parentPath;
  // e.g. import fs from 'fs';
  if (parent.isImportDeclaration()) {
    if (
      bindingPath.isImportSpecifier() &&
      bindingPath.node.imported.name !== method
    ) {
      return false;
    }
    return parent.node.source.value === name;
    // e.g. var fs = require('fs');
  } else if (
    t.isVariableDeclarator(bindingNode) ||
    t.isAssignmentExpression(bindingNode)
  ) {
    let left = bindingNode.id || bindingNode.left;
    let right = bindingNode.init || bindingNode.right;
    // e.g. var {readFileSync} = require('fs');
    if (t.isObjectPattern(left)) {
      let prop = left.properties.find(p => p.value.name === callee.name);
      if (!prop || prop.key.name !== method) {
        return false;
      }
    } else if (!t.isIdentifier(left)) {
      return false;
    }
    return isRequire(right, name, method);
  }
  return false;
}
// Looks up the binding path for `name` in the scope at `path`, if any.
function getBindingPath(path, name) {
  const binding = path.scope.getBinding(name);
  return binding ? binding.path : binding;
}
// Error raised when an fs call's arguments cannot be statically evaluated.
// Fix: previously a bare constructor function, so instances had no stack
// trace and failed `instanceof Error`; now a proper Error subclass while
// keeping the same message/node/loc fields and instanceof behavior.
class NodeNotEvaluatedError extends Error {
  constructor(node) {
    super('Cannot statically evaluate fs argument');
    this.name = 'NodeNotEvaluatedError';
    this.node = node;
    this.loc = node.loc.start;
  }
}
// Statically evaluates an argument path after inlining the variables in
// `vars` (e.g. __dirname / __filename). Throws NodeNotEvaluatedError when
// the expression is not a compile-time constant.
function evaluate(path, vars) {
  // Inline variables
  path.traverse({
    Identifier(ident) {
      const key = ident.node.name;
      if (key in vars) {
        ident.replaceWith(t.valueToNode(vars[key]));
      }
    }
  });

  const res = path.evaluate();
  if (!res.confident) {
    throw new NodeNotEvaluatedError(path.node);
  }
  return res.value;
}

View file

@ -0,0 +1,56 @@
const Path = require('path');
const types = require('babel-types');
// Shims for node globals, keyed by identifier name. Each entry returns a
// JS declaration snippet and may register extra dependencies (process,
// buffer) on the asset as a side effect.
const VARS = {
  process: asset => {
    asset.addDependency('process');
    return 'var process = require("process");';
  },
  // arguments[3] is supplied by the module wrapper at runtime.
  global: () => 'var global = arguments[3];',
  __dirname: asset =>
    `var __dirname = ${JSON.stringify(Path.dirname(asset.name))};`,
  __filename: asset => `var __filename = ${JSON.stringify(asset.name)};`,
  Buffer: asset => {
    asset.addDependency('buffer');
    return 'var Buffer = require("buffer").Buffer;';
  },
  // Prevent AMD defines from working when loading UMD bundles.
  // Ideally the CommonJS check would come before the AMD check, but many
  // existing modules do the checks the opposite way leading to modules
  // not exporting anything to Parcel.
  define: () => 'var define;'
};
// Visitors that detect references to node globals (process, Buffer, etc.)
// and record the corresponding shim declarations on asset.globals.
module.exports = {
  Identifier(node, asset, ancestors) {
    let parent = ancestors[ancestors.length - 2];
    // Only shim identifiers that are actual references (not declarations
    // or property keys) and haven't been shimmed already.
    if (
      VARS.hasOwnProperty(node.name) &&
      !asset.globals.has(node.name) &&
      types.isReferenced(node, parent)
    ) {
      asset.globals.set(node.name, VARS[node.name](asset));
    }
  },
  Declaration(node, asset, ancestors) {
    // If there is a global declaration of one of the variables, remove our declaration
    let identifiers = types.getBindingIdentifiers(node);
    for (let id in identifiers) {
      if (VARS.hasOwnProperty(id) && !inScope(ancestors)) {
        // Don't delete entirely, so we don't add it again when the declaration is referenced
        asset.globals.set(id, '');
      }
    }
  }
};
// True when any ancestor above the immediate parent introduces a scope
// other than the Program itself, i.e. the declaration is not module-level.
function inScope(ancestors) {
  for (let i = ancestors.length - 2; i >= 0; i--) {
    const ancestor = ancestors[i];
    if (types.isScope(ancestor) && !types.isProgram(ancestor)) {
      return true;
    }
  }
  return false;
}

View file

@ -0,0 +1,36 @@
const types = require('babel-types');
// from babel-types. remove when we upgrade to babel 7.
// https://github.com/babel/babel/blob/0189b387026c35472dccf45d14d58312d249f799/packages/babel-types/src/index.js#L347
//
// Tests whether `member` is a member expression matching the dotted
// pattern `match` (a string like 'process.env' or an array of parts).
// `allowPartial` permits the expression to be longer than the pattern.
module.exports = function matchesPattern(member, match, allowPartial) {
  // not a member expression
  if (!types.isMemberExpression(member)) return false;
  const parts = Array.isArray(match) ? match : match.split('.');
  const nodes = [];
  let node;
  // Collect property nodes outermost-first, then the innermost object.
  for (node = member; types.isMemberExpression(node); node = node.object) {
    nodes.push(node.property);
  }
  nodes.push(node);
  if (nodes.length < parts.length) return false;
  if (!allowPartial && nodes.length > parts.length) return false;
  // Compare innermost-first against the pattern parts; identifiers and
  // string-literal computed keys both count, anything else fails.
  for (let i = 0, j = nodes.length - 1; i < parts.length; i++, j--) {
    const node = nodes[j];
    let value;
    if (types.isIdentifier(node)) {
      value = node.name;
    } else if (types.isStringLiteral(node)) {
      value = node.value;
    } else {
      return false;
    }
    if (parts[i] !== value) return false;
  }
  return true;
};

39
VISUALIZACION/node_modules/parcel-bundler/src/worker.js generated vendored Executable file
View file

@ -0,0 +1,39 @@
require('v8-compile-cache');
const Pipeline = require('./Pipeline');
const child = require('./workerfarm/child');
const WorkerFarm = require('./workerfarm/WorkerFarm');
let pipeline;
// Initializes this worker process: constructs the asset Pipeline and
// copies the bundler's env (plus HMR settings) into process.env.
// `isLocal` marks an in-process worker rather than a forked child.
function init(options, isLocal = false) {
  pipeline = new Pipeline(options || {});
  Object.assign(process.env, options.env || {});
  process.env.HMR_PORT = options.hmrPort;
  process.env.HMR_HOSTNAME = options.hmrHostname;
  if (isLocal) {
    process.env.WORKER_TYPE = 'parcel-worker';
  }
}
// Processes one asset through the pipeline, tagging any error with the
// file that caused it before rethrowing.
async function run(path, pkg, options, isWarmUp) {
  options.isWarmUp = isWarmUp;
  try {
    return await pipeline.process(path, pkg, options);
  } catch (e) {
    e.fileName = path;
    throw e;
  }
}
// request.location is a module path relative to src or lib
// Routes a call either over IPC (when running as a forked parcel worker)
// or straight to the shared WorkerFarm in the master process.
async function addCall(request, awaitResponse = true) {
  if (process.send && process.env.WORKER_TYPE === 'parcel-worker') {
    return child.addCall(request, awaitResponse);
  } else {
    return WorkerFarm.getShared().processRequest(request);
  }
}
exports.init = init;
exports.run = run;
exports.addCall = addCall;

View file

@ -0,0 +1,141 @@
const childProcess = require('child_process');
const {EventEmitter} = require('events');
const errorUtils = require('./errorUtils');
const childModule =
parseInt(process.versions.node, 10) < 8
? require.resolve('../../lib/workerfarm/child')
: require.resolve('../../src/workerfarm/child');
// Monotonic id so each forked worker can be addressed individually.
let WORKER_ID = 0;

/**
 * Wraps one forked child process and provides a call/response protocol
 * over node IPC. Emits 'request', 'response', 'exit' and 'error' events
 * for the WorkerFarm to react to.
 */
class Worker extends EventEmitter {
  constructor(forkModule, options) {
    super();
    this.options = options;
    this.id = WORKER_ID++;
    // Messages buffered while IPC sends are throttled (see send()).
    this.sendQueue = [];
    this.processQueue = true;
    // In-flight calls keyed by call index, awaiting a 'response' message.
    this.calls = new Map();
    this.exitCode = null;
    this.callId = 0;
    this.stopped = false;
    this.fork(forkModule);
  }
  fork(forkModule) {
    // Strip debugger flags so the child doesn't fight over the inspect port.
    let filteredArgs = process.execArgv.filter(
      v => !/^--(debug|inspect)/.test(v)
    );
    let options = {
      execArgv: filteredArgs,
      env: process.env,
      cwd: process.cwd()
    };
    this.child = childProcess.fork(childModule, process.argv, options);
    // First message tells the child which module to load and its id.
    this.send({
      type: 'module',
      module: forkModule,
      child: this.id
    });
    this.child.on('message', this.receive.bind(this));
    this.child.once('exit', code => {
      this.exitCode = code;
      this.emit('exit', code);
    });
    this.child.on('error', err => {
      this.emit('error', err);
    });
  }
  send(data) {
    // While throttled, buffer instead of writing to the IPC channel.
    if (!this.processQueue) {
      return this.sendQueue.push(data);
    }
    let result = this.child.send(data, error => {
      if (error && error instanceof Error) {
        // Ignore this, the workerfarm handles child errors
        return;
      }
      // Send completed: resume and flush anything buffered meanwhile.
      this.processQueue = true;
      if (this.sendQueue.length > 0) {
        let queueCopy = this.sendQueue.slice(0);
        this.sendQueue = [];
        queueCopy.forEach(entry => this.send(entry));
      }
    });
    if (!result || /^win/.test(process.platform)) {
      // child.send returned false (channel backed up), or we're on Windows
      // — throttle further sends until the current one's callback fires.
      this.processQueue = false;
    }
  }
  call(call) {
    // Register the call for response matching, then forward it.
    let idx = this.callId++;
    this.calls.set(idx, call);
    this.send({
      type: 'request',
      idx: idx,
      child: this.id,
      method: call.method,
      args: call.args
    });
  }
  receive(data) {
    if (this.stopped) {
      return;
    }
    let idx = data.idx;
    let type = data.type;
    let content = data.content;
    let contentType = data.contentType;
    if (type === 'request') {
      // Reverse request from the child; the farm handles it.
      this.emit('request', data);
    } else if (type === 'response') {
      let call = this.calls.get(idx);
      if (!call) {
        // Return for unknown calls, these might occur if a third party process uses workers
        return;
      }
      if (contentType === 'error') {
        call.reject(errorUtils.jsonToError(content));
      } else {
        call.resolve(content);
      }
      this.calls.delete(idx);
      this.emit('response', data);
    }
  }
  stop() {
    // Ask the child to exit gracefully, then SIGKILL it if it hasn't
    // exited within forcedKillTime.
    this.stopped = true;
    this.send('die');
    setTimeout(() => {
      if (this.exitCode === null) {
        this.child.kill('SIGKILL');
      }
    }, this.options.forcedKillTime);
  }
}
module.exports = Worker;

View file

@ -0,0 +1,281 @@
const {EventEmitter} = require('events');
const os = require('os');
const errorUtils = require('./errorUtils');
const Worker = require('./Worker');
// Singleton instance returned by WorkerFarm.getShared().
let shared = null;

/**
 * Manages a pool of forked Worker processes and distributes calls among
 * them, running on the main thread until the remote workers have started
 * and warmed up.
 */
class WorkerFarm extends EventEmitter {
  constructor(options, farmOptions = {}) {
    super();
    this.options = Object.assign(
      {
        maxConcurrentWorkers: WorkerFarm.getNumWorkers(),
        maxConcurrentCallsPerWorker: 10,
        forcedKillTime: 100,
        warmWorkers: true,
        useLocalWorker: true,
        workerPath: '../worker'
      },
      farmOptions
    );
    this.started = false;
    this.warmWorkers = 0;
    this.children = new Map();
    this.callQueue = [];
    this.localWorker = require(this.options.workerPath);
    this.run = this.mkhandle('run');
    this.init(options);
  }
  warmupWorker(method, args) {
    // Workers have started, but are not warmed up yet.
    // Send the job to a remote worker in the background,
    // but use the result from the local worker - it will be faster.
    if (this.started) {
      let promise = this.addCall(method, [...args, true]);
      if (promise) {
        promise
          .then(() => {
            this.warmWorkers++;
            if (this.warmWorkers >= this.children.size) {
              this.emit('warmedup');
            }
          })
          .catch(() => {});
      }
    }
  }
  mkhandle(method) {
    return function(...args) {
      // Child process workers are slow to start (~600ms).
      // While we're waiting, just run on the main thread.
      // This significantly speeds up startup time.
      if (this.shouldUseRemoteWorkers()) {
        return this.addCall(method, [...args, false]);
      } else {
        if (this.options.warmWorkers) {
          this.warmupWorker(method, args);
        }
        return this.localWorker[method](...args, false);
      }
    }.bind(this);
  }
  onError(error, childId) {
    // Handle ipc errors
    if (error.code === 'ERR_IPC_CHANNEL_CLOSED') {
      return this.stopChild(childId);
    }
  }
  onExit(childId) {
    // delay this to give any sends a chance to finish
    setTimeout(() => {
      let doQueue = false;
      let child = this.children.get(childId);
      if (child && child.calls.size) {
        // Requeue calls that were in flight when the child died.
        for (let call of child.calls.values()) {
          call.retries++;
          this.callQueue.unshift(call);
          doQueue = true;
        }
      }
      this.stopChild(childId);
      if (doQueue) {
        this.processQueue();
      }
    }, 10);
  }
  startChild() {
    let worker = new Worker(this.options.workerPath, this.options);
    worker.on('request', data => {
      this.processRequest(data, worker);
    });
    worker.on('response', () => {
      // allow any outstanding calls to be processed
      this.processQueue();
    });
    worker.once('exit', () => {
      this.onExit(worker.id);
    });
    worker.on('error', err => {
      this.onError(err, worker.id);
    });
    this.children.set(worker.id, worker);
  }
  stopChild(childId) {
    let child = this.children.get(childId);
    if (child) {
      child.stop();
      this.children.delete(childId);
    }
  }
  async processQueue() {
    if (this.ending || !this.callQueue.length) return;
    // Lazily grow the pool up to the configured maximum.
    if (this.children.size < this.options.maxConcurrentWorkers) {
      this.startChild();
    }
    // Hand queued calls to any child with spare call capacity.
    for (let child of this.children.values()) {
      if (!this.callQueue.length) {
        break;
      }
      if (child.calls.size < this.options.maxConcurrentCallsPerWorker) {
        child.call(this.callQueue.shift());
      }
    }
  }
  async processRequest(data, child = false) {
    // Handles a reverse request from a worker (or a direct call in the
    // master): requires `data.location` and invokes it, or `data.method`
    // on it, replying to the child (or returning the result) when asked.
    let result = {
      idx: data.idx,
      type: 'response'
    };
    let method = data.method;
    let args = data.args;
    let location = data.location;
    let awaitResponse = data.awaitResponse;
    if (!location) {
      throw new Error('Unknown request');
    }
    const mod = require(location);
    try {
      let func;
      if (method) {
        func = mod[method];
      } else {
        func = mod;
      }
      result.contentType = 'data';
      result.content = await func(...args);
    } catch (e) {
      result.contentType = 'error';
      result.content = errorUtils.errorToJson(e);
    }
    if (awaitResponse) {
      if (child) {
        child.send(result);
      } else {
        return result;
      }
    }
  }
  addCall(method, args) {
    if (this.ending) return; // don't add anything new to the queue
    return new Promise((resolve, reject) => {
      this.callQueue.push({
        method,
        args: args,
        retries: 0,
        resolve,
        reject
      });
      this.processQueue();
    });
  }
  async end() {
    this.ending = true;
    for (let childId of this.children.keys()) {
      this.stopChild(childId);
    }
    this.ending = false;
    shared = null;
  }
  init(options) {
    // Initialize the in-process worker immediately, then the remote ones.
    this.localWorker.init(options, true);
    this.initRemoteWorkers(options);
  }
  async initRemoteWorkers(options) {
    this.started = false;
    this.warmWorkers = 0;
    // Start workers if there isn't enough workers already
    for (
      let i = this.children.size;
      i < this.options.maxConcurrentWorkers;
      i++
    ) {
      this.startChild();
    }
    // Reliable way of initialising workers
    let promises = [];
    for (let child of this.children.values()) {
      promises.push(
        new Promise((resolve, reject) => {
          child.call({
            method: 'init',
            args: [options],
            retries: 0,
            resolve,
            reject
          });
        })
      );
    }
    await Promise.all(promises);
    if (this.options.maxConcurrentWorkers > 0) {
      this.started = true;
      this.emit('started');
    }
  }
  shouldUseRemoteWorkers() {
    // Remote workers take over once started and (if warming is enabled)
    // fully warmed; useLocalWorker=false forces them unconditionally.
    return (
      !this.options.useLocalWorker ||
      (this.started &&
        (this.warmWorkers >= this.children.size || !this.options.warmWorkers))
    );
  }
  static getShared(options) {
    // Returns the singleton farm, re-initializing it when options are given.
    if (!shared) {
      shared = new WorkerFarm(options);
    } else if (options) {
      shared.init(options);
    }
    return shared;
  }
  static getNumWorkers() {
    // PARCEL_WORKERS overrides; otherwise prefer the physical core count.
    if (process.env.PARCEL_WORKERS) {
      return parseInt(process.env.PARCEL_WORKERS, 10);
    }
    let cores;
    try {
      cores = require('physical-cpu-count');
    } catch (err) {
      cores = os.cpus().length;
    }
    return cores || 1;
  }
}
module.exports = WorkerFarm;

View file

@ -0,0 +1,140 @@
const errorUtils = require('./errorUtils');
class Child {
  constructor() {
    // Worker module to dispatch requests to; set by the 'module' message.
    this.module = undefined;
    // Id assigned by the parent Worker; echoed back on every message.
    this.childId = undefined;
    // Outbound requests waiting to be sent (bounded by maxConcurrentCalls).
    this.callQueue = [];
    // In-flight requests keyed by response id.
    this.responseQueue = new Map();
    this.responseId = 0;
    this.maxConcurrentCalls = 10;
  }
  // Handles IPC messages from the parent: the 'die' signal, one-time
  // module setup, and request/response dispatch.
  messageListener(data) {
    if (data === 'die') {
      return this.end();
    }
    // First message loads the worker module and records our child id.
    if (data.type === 'module' && data.module && !this.module) {
      this.module = require(data.module);
      this.childId = data.child;
      if (this.module.setChildReference) {
        this.module.setChildReference(this);
      }
      return;
    }
    let type = data.type;
    if (type === 'response') {
      return this.handleResponse(data);
    } else if (type === 'request') {
      return this.handleRequest(data);
    }
  }
async send(data) {
process.send(data, err => {
if (err && err instanceof Error) {
if (err.code === 'ERR_IPC_CHANNEL_CLOSED') {
// IPC connection closed
// no need to keep the worker running if it can't send or receive data
return this.end();
}
}
});
}
async handleRequest(data) {
let idx = data.idx;
let child = data.child;
let method = data.method;
let args = data.args;
let result = {idx, child, type: 'response'};
try {
result.contentType = 'data';
result.content = await this.module[method](...args);
} catch (e) {
result.contentType = 'error';
result.content = errorUtils.errorToJson(e);
}
this.send(result);
}
async handleResponse(data) {
let idx = data.idx;
let contentType = data.contentType;
let content = data.content;
let call = this.responseQueue.get(idx);
if (contentType === 'error') {
call.reject(errorUtils.jsonToError(content));
} else {
call.resolve(content);
}
this.responseQueue.delete(idx);
// Process the next call
this.processQueue();
}
// Keep in mind to make sure responses to these calls are JSON.Stringify safe
async addCall(request, awaitResponse = true) {
let call = request;
call.type = 'request';
call.child = this.childId;
call.awaitResponse = awaitResponse;
let promise;
if (awaitResponse) {
promise = new Promise((resolve, reject) => {
call.resolve = resolve;
call.reject = reject;
});
}
this.callQueue.push(call);
this.processQueue();
return promise;
}
async sendRequest(call) {
let idx;
if (call.awaitResponse) {
idx = this.responseId++;
this.responseQueue.set(idx, call);
}
this.send({
idx: idx,
child: call.child,
type: call.type,
location: call.location,
method: call.method,
args: call.args,
awaitResponse: call.awaitResponse
});
}
async processQueue() {
if (!this.callQueue.length) {
return;
}
if (this.responseQueue.size < this.maxConcurrentCalls) {
this.sendRequest(this.callQueue.shift());
}
}
end() {
return process.exit(0);
}
}
// Module-level singleton: the parent drives this instance over IPC, and the
// hosted module can reach it via setChildReference / module.exports.
let child = new Child();
process.on('message', child.messageListener.bind(child));
module.exports = child;

View file

@ -0,0 +1,23 @@
/**
 * Convert an Error into a plain JSON-serialisable object so it can cross
 * the IPC boundary. Captures the standard (non-enumerable) Error fields
 * plus any custom enumerable properties (e.g. codeFrame).
 */
function errorToJson(error) {
  const jsonError = {
    message: error.message,
    stack: error.stack,
    name: error.name
  };

  for (const key of Object.keys(error)) {
    jsonError[key] = error[key];
  }

  return jsonError;
}
/**
 * Rehydrate a plain object produced by errorToJson back into an Error,
 * restoring message, name, stack, and any custom properties.
 */
function jsonToError(json) {
  const error = new Error(json.message);
  for (const key of Object.keys(json)) {
    error[key] = json[key];
  }
  return error;
}
// Error (de)serialisation helpers shared by the parent farm and workers.
exports.errorToJson = errorToJson;
exports.jsonToError = jsonToError;