flow like the river
This commit is contained in:
commit
013fe673f3
42435 changed files with 5764238 additions and 0 deletions
109
BACK_BACK/node_modules/parcel-bundler/lib/utils/PromiseQueue.js
generated
vendored
Executable file
109
BACK_BACK/node_modules/parcel-bundler/lib/utils/PromiseQueue.js
generated
vendored
Executable file
|
|
@ -0,0 +1,109 @@
|
|||
"use strict";
|
||||
|
||||
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
|
||||
|
||||
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
|
||||
|
||||
/**
 * A queue that runs an async callback for each added job with a concurrency
 * limit. `run()` resolves with the Set of processed jobs once the queue drains,
 * or rejects on the first job error.
 */
class PromiseQueue {
  /**
   * @param callback async function invoked as `callback(job, ...args)` per job
   *                 (its result is awaited, so it is expected to return a promise)
   * @param options  {maxConcurrent?: number, retry?: boolean} — retry defaults to true
   */
  constructor(callback, options = {}) {
    this.process = callback;
    this.maxConcurrent = options.maxConcurrent || Infinity;
    this.retry = options.retry !== false;
    this.queue = []; // pending [job, args] tuples
    this.processing = new Set(); // jobs added but not yet successfully processed
    this.processed = new Set(); // jobs that completed successfully
    this.numRunning = 0;
    this.runPromise = null; // non-null while a run() is in flight
    this.resolve = null;
    this.reject = null;
  }

  /**
   * Enqueue a job. Duplicate jobs (already in `processing`) are ignored.
   * If a run is active and there is spare concurrency, starts immediately;
   * otherwise the job waits in the queue.
   */
  add(job, ...args) {
    if (this.processing.has(job)) {
      return;
    }

    if (this.runPromise && this.numRunning < this.maxConcurrent) {
      this._runJob(job, args);
    } else {
      this.queue.push([job, args]);
    }

    this.processing.add(job);
  }

  /**
   * Start draining the queue. Idempotent: returns the existing promise if a
   * run is already active. Resolves with the `processed` Set when everything
   * has finished; rejects with the first error thrown by the callback.
   */
  run() {
    if (this.runPromise) {
      return this.runPromise;
    }

    const runPromise = new Promise((resolve, reject) => {
      this.resolve = resolve;
      this.reject = reject;
    });
    this.runPromise = runPromise;

    this._next();

    return runPromise;
  }

  /**
   * Execute a single job. On success the job moves to `processed` and the next
   * queued work is kicked off. On failure the run promise is rejected and state
   * is reset; if `retry` is enabled the job is re-queued (it stays in
   * `processing`) so a subsequent run() can attempt it again.
   */
  _runJob(job, args) {
    var _this = this;

    return (0, _asyncToGenerator2.default)(function* () {
      try {
        _this.numRunning++;
        yield _this.process(job, ...args);

        _this.processing.delete(job);

        _this.processed.add(job);

        _this.numRunning--;

        _this._next();
      } catch (err) {
        _this.numRunning--;

        if (_this.retry) {
          _this.queue.push([job, args]);
        } else {
          _this.processing.delete(job);
        }

        // reject may be null if the run already settled/reset
        if (_this.reject) {
          _this.reject(err);
        }

        _this._reset();
      }
    })();
  }

  /**
   * Scheduler: start queued jobs up to the concurrency limit, or settle the
   * run promise once the queue is empty and nothing is still processing.
   * No-op when no run is active.
   */
  _next() {
    if (!this.runPromise) {
      return;
    }

    if (this.queue.length > 0) {
      while (this.queue.length > 0 && this.numRunning < this.maxConcurrent) {
        this._runJob(...this.queue.shift());
      }
    } else if (this.processing.size === 0) {
      this.resolve(this.processed);

      this._reset();
    }
  }

  // Clear per-run state so the queue can be run() again.
  _reset() {
    this.processed = new Set();
    this.runPromise = null;
    this.resolve = null;
    this.reject = null;
  }

}
|
||||
|
||||
module.exports = PromiseQueue;
|
||||
127
BACK_BACK/node_modules/parcel-bundler/lib/utils/bundleReport.js
generated
vendored
Executable file
127
BACK_BACK/node_modules/parcel-bundler/lib/utils/bundleReport.js
generated
vendored
Executable file
|
|
@ -0,0 +1,127 @@
|
|||
"use strict";
|
||||
|
||||
// Babel-generated runtime helpers emulating `for...of` (with iterator closing
// on early exit) for arrays, array-likes, Maps, Sets and typed arrays.
// Do not edit by hand — emitted by the transpiler.
function _createForOfIteratorHelper(o, allowArrayLike) { var it; if (typeof Symbol === "undefined" || o[Symbol.iterator] == null) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = o[Symbol.iterator](); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; }

function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }

function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) arr2[i] = arr[i]; return arr2; }
|
||||
|
||||
const path = require('path');
|
||||
|
||||
const prettifyTime = require('./prettifyTime');
|
||||
|
||||
const logger = require('@parcel/logger');
|
||||
|
||||
const filesize = require('filesize');
|
||||
|
||||
// Bundles at or above 1 MiB get a warning marker next to their size.
const LARGE_BUNDLE_SIZE = 1024 * 1024;
// How many of a bundle's largest assets to show in detailed mode by default.
const DEFAULT_NUM_LARGE_ASSETS = 10;
// Column alignment spec passed to logger.table().
const COLUMNS = [{
  align: 'left'
}, // name
{
  align: 'right'
}, // size
{
  align: 'right'
} // time
];
|
||||
|
||||
/**
 * Print a table of all (non-empty) bundles reachable from `mainBundle`,
 * largest first, showing name, size and build time.
 *
 * @param mainBundle root bundle to report on
 * @param detailed   false: bundles only; true: also list each bundle's largest
 *                   assets (up to DEFAULT_NUM_LARGE_ASSETS); a number: list that
 *                   many assets; 'all': list every asset
 */
function bundleReport(mainBundle, detailed = false) {
  // Get a list of bundles sorted by size
  let bundles = Array.from(iterateBundles(mainBundle)).sort((a, b) => b.totalSize - a.totalSize);
  let rows = [];

  var _iterator = _createForOfIteratorHelper(bundles),
      _step;

  try {
    for (_iterator.s(); !(_step = _iterator.n()).done;) {
      let bundle = _step.value;
      // Add a row for the bundle
      rows.push([formatFilename(bundle.name, logger.chalk.cyan.bold), logger.chalk.bold(prettifySize(bundle.totalSize, bundle.totalSize > LARGE_BUNDLE_SIZE)), logger.chalk.green.bold(prettifyTime(bundle.bundleTime))]); // If detailed, generate a list of the largest assets in the bundle

      if (detailed && bundle.assets.size > 1) {
        // Only assets of the bundle's own type, biggest first.
        let assets = Array.from(bundle.assets).filter(a => a.type === bundle.type).sort((a, b) => b.bundledSize - a.bundledSize);

        let largestAssets = (() => {
          if (detailed === 'all') {
            return assets;
          }

          // `detailed` may be a count; booleans/NaN fall back to the default.
          return assets.slice(0, isNaN(detailed) || typeof detailed === 'boolean' ? DEFAULT_NUM_LARGE_ASSETS : parseInt(detailed, 10));
        })();

        var _iterator2 = _createForOfIteratorHelper(largestAssets),
            _step2;

        try {
          for (_iterator2.s(); !(_step2 = _iterator2.n()).done;) {
            let asset = _step2.value;
            // Add a row for the asset (tree-drawing prefix marks the last one).
            rows.push([(asset == assets[assets.length - 1] ? '└── ' : '├── ') + formatFilename(asset.name, logger.chalk.reset), logger.chalk.dim(prettifySize(asset.bundledSize)), logger.chalk.dim(logger.chalk.green(prettifyTime(asset.buildTime)))]);
          } // Show how many more assets there are

        } catch (err) {
          _iterator2.e(err);
        } finally {
          _iterator2.f();
        }

        if (assets.length > largestAssets.length) {
          rows.push(['└── ' + logger.chalk.dim(`+ ${assets.length - largestAssets.length} more assets`)]);
        } // If this isn't the last bundle, add an empty row before the next one


        if (bundle !== bundles[bundles.length - 1]) {
          rows.push([]);
        }
      }
    }
  } // Render table

  catch (err) {
    _iterator.e(err);
  } finally {
    _iterator.f();
  }

  logger.log('');
  logger.table(COLUMNS, rows);
}
|
||||
|
||||
module.exports = bundleReport;
|
||||
|
||||
/**
 * Depth-first generator over a bundle tree, yielding every non-empty bundle
 * (the root first, then children recursively).
 *
 * Idiom fix: the transpiled `_createForOfIteratorHelper` emulation is replaced
 * with a native `for...of` loop — this file already relies on native generators
 * (`function*`), so `for...of` is guaranteed to be available, and it has the
 * same iterator-closing semantics as the helper.
 *
 * @param bundle root bundle; `childBundles` is assumed iterable (a Set in
 *               parcel — TODO confirm against Bundle implementation)
 */
function* iterateBundles(bundle) {
  if (!bundle.isEmpty) {
    yield bundle;
  }

  for (const child of bundle.childBundles) {
    yield* iterateBundles(child);
  }
}
|
||||
|
||||
/**
 * Render a byte count as a human-readable size string; large sizes are
 * highlighted in yellow with a warning emoji, others in magenta.
 */
function prettifySize(size, isLarge) {
  const formatted = filesize(size);
  return isLarge
    ? logger.chalk.yellow(logger.emoji.warning + ' ' + formatted)
    : logger.chalk.magenta(formatted);
}
|
||||
|
||||
/**
 * Format a path for display: the directory (relative to cwd) is dimmed,
 * the basename is rendered with `color`.
 */
function formatFilename(filename, color = logger.chalk.reset) {
  const relDir = path.relative(process.cwd(), path.dirname(filename));
  const dimmedDir = logger.chalk.dim(relDir + (relDir ? path.sep : ''));
  return dimmedDir + color(path.basename(filename));
}
|
||||
97
BACK_BACK/node_modules/parcel-bundler/lib/utils/config.js
generated
vendored
Executable file
97
BACK_BACK/node_modules/parcel-bundler/lib/utils/config.js
generated
vendored
Executable file
|
|
@ -0,0 +1,97 @@
|
|||
"use strict";
|
||||
|
||||
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
|
||||
|
||||
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
|
||||
|
||||
// Babel-generated runtime helpers emulating `for...of` over iterables and
// array-likes. Do not edit by hand — emitted by the transpiler.
function _createForOfIteratorHelper(o, allowArrayLike) { var it; if (typeof Symbol === "undefined" || o[Symbol.iterator] == null) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = o[Symbol.iterator](); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; }

function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }

function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) arr2[i] = arr[i]; return arr2; }
|
||||
|
||||
const fs = require('@parcel/fs');
|
||||
|
||||
const path = require('path');
|
||||
|
||||
const clone = require('clone');
|
||||
|
||||
// Parsers by config-file extension; json5 doubles as the fallback parser.
const PARSERS = {
  json: require('json5').parse,
  toml: require('@iarna/toml').parse
};
// Cache of paths known to exist, shared by resolve()/load() to skip fs checks.
// Note: only positive results are cached; load() evicts on ENOENT.
const existsCache = new Map();
|
||||
|
||||
/**
 * Walk up from `filepath` looking for the first existing file named in
 * `filenames`. Resolves with its absolute path, or null if none is found
 * before the filesystem root / a node_modules boundary.
 * (Babel async wrapper — real logic is in _resolve below.)
 */
function resolve(_x, _x2) {
  return _resolve.apply(this, arguments);
}
|
||||
|
||||
/**
 * Implementation of resolve(): recursively searches parent directories of
 * `filepath` for any of `filenames`, stopping at `root` (defaults to the
 * filesystem root of `filepath`) or at a node_modules directory.
 * Positive filesystem hits are memoized in existsCache.
 */
function _resolve() {
  _resolve = (0, _asyncToGenerator2.default)(function* (filepath, filenames, root = path.parse(filepath).root) {
    filepath = path.dirname(filepath); // Don't traverse above the module root

    if (filepath === root || path.basename(filepath) === 'node_modules') {
      return null;
    }

    var _iterator = _createForOfIteratorHelper(filenames),
        _step;

    try {
      for (_iterator.s(); !(_step = _iterator.n()).done;) {
        const filename = _step.value;
        let file = path.join(filepath, filename);
        // Consult the cache first; fall back to an fs check.
        let exists = existsCache.has(file) ? existsCache.get(file) : yield fs.exists(file);

        if (exists) {
          existsCache.set(file, true);
          return file;
        }
      }
    } catch (err) {
      _iterator.e(err);
    } finally {
      _iterator.f();
    }

    // Nothing at this level — recurse into the parent directory.
    return resolve(filepath, filenames, root);
  });
  return _resolve.apply(this, arguments);
}
|
||||
|
||||
/**
 * Find (via resolve()) and parse the nearest config file. `.js` configs are
 * require()d and deep-cloned; other extensions go through PARSERS.
 * Resolves with the parsed config, or null when absent/empty.
 * (Babel async wrapper — real logic is in _load below.)
 */
function load(_x3, _x4) {
  return _load.apply(this, arguments);
}
|
||||
|
||||
/**
 * Implementation of load(): resolves the config file, then parses it by
 * extension. A file that disappears between resolve and read (ENOENT /
 * MODULE_NOT_FOUND) is evicted from existsCache and treated as absent;
 * other errors (e.g. parse failures) propagate.
 */
function _load() {
  _load = (0, _asyncToGenerator2.default)(function* (filepath, filenames, root = path.parse(filepath).root) {
    let configFile = yield resolve(filepath, filenames, root);

    if (configFile) {
      try {
        let extname = path.extname(configFile).slice(1);

        if (extname === 'js') {
          // Clone so callers can mutate without poisoning the require cache.
          return clone(require(configFile));
        }

        let configContent = (yield fs.readFile(configFile)).toString();
        let parse = PARSERS[extname] || PARSERS.json;
        return configContent ? parse(configContent) : null;
      } catch (err) {
        if (err.code === 'MODULE_NOT_FOUND' || err.code === 'ENOENT') {
          // The cached existence is stale — drop it and report "no config".
          existsCache.delete(configFile);
          return null;
        }

        throw err;
      }
    }

    return null;
  });
  return _load.apply(this, arguments);
}
|
||||
|
||||
exports.resolve = resolve;
|
||||
exports.load = load;
|
||||
18
BACK_BACK/node_modules/parcel-bundler/lib/utils/customErrors.js
generated
vendored
Executable file
18
BACK_BACK/node_modules/parcel-bundler/lib/utils/customErrors.js
generated
vendored
Executable file
|
|
@ -0,0 +1,18 @@
|
|||
"use strict";
|
||||
|
||||
// Friendly messages for well-known server startup error codes; `{port}` is
// substituted with the actual port number.
const serverErrorList = {
  EACCES: "You don't have access to bind the server to port {port}.",
  EADDRINUSE: 'There is already a process listening on port {port}.'
};

/**
 * Build a human-readable description for a server startup error.
 * Known codes get a specific message; anything else gets a generic one.
 *
 * @param err  error object carrying a `code` property
 * @param port port the server attempted to bind
 * @returns the formatted description string
 */
function serverErrors(err, port) {
  const template = serverErrorList[err.code];

  if (template) {
    return template.replace(/{port}/g, port);
  }

  return `Error: ${err.code} occurred while setting up server on port ${port}.`;
}
|
||||
|
||||
module.exports.serverErrors = serverErrors;
|
||||
44
BACK_BACK/node_modules/parcel-bundler/lib/utils/env.js
generated
vendored
Executable file
44
BACK_BACK/node_modules/parcel-bundler/lib/utils/env.js
generated
vendored
Executable file
|
|
@ -0,0 +1,44 @@
|
|||
"use strict";
|
||||
|
||||
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
|
||||
|
||||
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
|
||||
|
||||
const config = require('./config');
|
||||
|
||||
const dotenv = require('dotenv');
|
||||
|
||||
const variableExpansion = require('dotenv-expand');
|
||||
|
||||
/**
 * Load .env files (dotenv + dotenv-expand) relevant to `filepath`, most
 * specific first: `.env.<NODE_ENV>.local`, `.env.<NODE_ENV>`, `.env.local`
 * (skipped under test), `.env`. Each file is resolved upward from `filepath`
 * via config.resolve. (Babel async wrapper — real logic is in _loadEnv.)
 */
function loadEnv(_x) {
  return _loadEnv.apply(this, arguments);
}

function _loadEnv() {
  _loadEnv = (0, _asyncToGenerator2.default)(function* (filepath) {
    const NODE_ENV = process.env.NODE_ENV || 'development';
    const dotenvFiles = [`.env.${NODE_ENV}.local`, `.env.${NODE_ENV}`, // Don't include `.env.local` for `test` environment
    // since normally you expect tests to produce the same
    // results for everyone
    NODE_ENV !== 'test' && '.env.local', '.env'].filter(Boolean);
    // All candidate files are resolved and applied concurrently.
    // NOTE(review): dotenv does not overwrite existing process.env keys, which
    // presumably makes this ordering safe — verify against dotenv docs.
    yield Promise.all(dotenvFiles.map( /*#__PURE__*/function () {
      var _ref = (0, _asyncToGenerator2.default)(function* (dotenvFile) {
        const envPath = yield config.resolve(filepath, [dotenvFile]);

        if (envPath) {
          const envs = dotenv.config({
            path: envPath
          });
          // Expand ${VAR} references inside loaded values.
          variableExpansion(envs);
        }
      });

      return function (_x2) {
        return _ref.apply(this, arguments);
      };
    }()));
  });
  return _loadEnv.apply(this, arguments);
}
|
||||
|
||||
module.exports = loadEnv;
|
||||
115
BACK_BACK/node_modules/parcel-bundler/lib/utils/generateCertificate.js
generated
vendored
Executable file
115
BACK_BACK/node_modules/parcel-bundler/lib/utils/generateCertificate.js
generated
vendored
Executable file
|
|
@ -0,0 +1,115 @@
|
|||
"use strict";
|
||||
|
||||
const forge = require('node-forge');
|
||||
|
||||
const fs = require('fs');
|
||||
|
||||
const mkdirp = require('mkdirp');
|
||||
|
||||
const path = require('path');
|
||||
|
||||
const logger = require('@parcel/logger');
|
||||
|
||||
/**
 * Create (or load from cache) a self-signed TLS certificate for the dev
 * server, valid for one year.
 *
 * @param options {cacheDir: string, cache?: boolean} — when `cache` is set,
 *                key/cert PEMs are persisted under cacheDir and reused.
 * @returns {key, cert} — Buffers when read from cache, PEM strings when
 *          freshly generated.
 */
function generateCertificate(options = {}) {
  const privateKeyPath = path.join(options.cacheDir, 'private.pem');
  const certPath = path.join(options.cacheDir, 'primary.crt');

  // Reuse a previously generated pair if both files are present.
  if (options.cache) {
    const cachedKey = fs.existsSync(privateKeyPath) && fs.readFileSync(privateKeyPath);
    const cachedCert = fs.existsSync(certPath) && fs.readFileSync(certPath);

    if (cachedKey && cachedCert) {
      return {
        key: cachedKey,
        cert: cachedCert
      };
    }
  }

  logger.progress('Generating SSL Certificate...');
  const pki = forge.pki;
  // 2048-bit RSA keypair; generation is synchronous and CPU-bound.
  const keys = pki.rsa.generateKeyPair(2048);
  const cert = pki.createCertificate();
  cert.publicKey = keys.publicKey;
  // Timestamp as serial keeps regenerated certs distinct.
  cert.serialNumber = Date.now().toString();
  cert.validity.notBefore = new Date();
  cert.validity.notAfter = new Date();
  cert.validity.notAfter.setFullYear(cert.validity.notBefore.getFullYear() + 1);
  // Self-signed: the same attributes serve as both subject and issuer.
  const attrs = [{
    name: 'commonName',
    value: 'parceljs.org'
  }, {
    name: 'countryName',
    value: 'US'
  }, {
    shortName: 'ST',
    value: 'Virginia'
  }, {
    name: 'localityName',
    value: 'Blacksburg'
  }, {
    name: 'organizationName',
    value: 'parcelBundler'
  }, {
    shortName: 'OU',
    value: 'Test'
  }];
  cert.setSubject(attrs);
  cert.setIssuer(attrs);
  cert.setExtensions([{
    name: 'basicConstraints',
    cA: true
  }, {
    name: 'keyUsage',
    keyCertSign: true,
    digitalSignature: true,
    nonRepudiation: true,
    keyEncipherment: true,
    dataEncipherment: true
  }, {
    name: 'extKeyUsage',
    serverAuth: true,
    clientAuth: true,
    codeSigning: true,
    emailProtection: true,
    timeStamping: true
  }, {
    name: 'nsCertType',
    client: true,
    server: true,
    email: true,
    objsign: true,
    sslCA: true,
    emailCA: true,
    objCA: true
  }, {
    name: 'subjectAltName',
    altNames: [{
      type: 6,
      // URI
      value: 'http://example.org/webid#me'
    }, {
      type: 7,
      // IP
      ip: '127.0.0.1'
    }]
  }, {
    name: 'subjectKeyIdentifier'
  }]);
  cert.sign(keys.privateKey, forge.md.sha256.create());
  const privPem = pki.privateKeyToPem(keys.privateKey);
  const certPem = pki.certificateToPem(cert);

  // Persist for next time.
  if (options.cache) {
    mkdirp.sync(options.cacheDir);
    fs.writeFileSync(privateKeyPath, privPem);
    fs.writeFileSync(certPath, certPem);
  }

  return {
    key: privPem,
    cert: certPem
  };
}
|
||||
|
||||
module.exports = generateCertificate;
|
||||
29
BACK_BACK/node_modules/parcel-bundler/lib/utils/getCertificate.js
generated
vendored
Executable file
29
BACK_BACK/node_modules/parcel-bundler/lib/utils/getCertificate.js
generated
vendored
Executable file
|
|
@ -0,0 +1,29 @@
|
|||
"use strict";
|
||||
|
||||
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
|
||||
|
||||
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
|
||||
|
||||
const fs = require('@parcel/fs');
|
||||
|
||||
/**
 * Read a user-supplied TLS cert/key pair from disk.
 *
 * @param options {cert: string, key: string} — file paths
 * @returns {key, cert} file contents (Buffers)
 * @throws Error('Certificate and/or key not found') if either read fails
 * (Babel async wrapper — real logic is in _getCertificate below.)
 */
function getCertificate(_x) {
  return _getCertificate.apply(this, arguments);
}

function _getCertificate() {
  _getCertificate = (0, _asyncToGenerator2.default)(function* (options) {
    try {
      let cert = yield fs.readFile(options.cert);
      let key = yield fs.readFile(options.key);
      return {
        key,
        cert
      };
    } catch (err) {
      // Any read failure is reported uniformly; original error is discarded.
      throw new Error('Certificate and/or key not found');
    }
  });
  return _getCertificate.apply(this, arguments);
}
|
||||
|
||||
module.exports = getCertificate;
|
||||
16
BACK_BACK/node_modules/parcel-bundler/lib/utils/getExisting.js
generated
vendored
Executable file
16
BACK_BACK/node_modules/parcel-bundler/lib/utils/getExisting.js
generated
vendored
Executable file
|
|
@ -0,0 +1,16 @@
|
|||
"use strict";
|
||||
|
||||
const fs = require('fs');
|
||||
/**
|
||||
* Creates an object that contains both source and minified (using the source as a fallback).
|
||||
* e.g. builtins.min.js and builtins.js.
|
||||
*/
|
||||
|
||||
|
||||
/**
 * Read a source file and its minified counterpart, using the source as a
 * fallback when no minified file exists. A trailing semicolon is stripped
 * from the minified text (so it can be embedded in expressions).
 *
 * @param minifiedPath path to the minified variant (may not exist)
 * @param sourcePath   path to the source variant (must exist)
 * @returns {source, minified} trimmed file contents
 */
module.exports = (minifiedPath, sourcePath) => {
  let source = fs.readFileSync(sourcePath, 'utf8').trim();
  return {
    source,
    minified: fs.existsSync(minifiedPath) ? fs.readFileSync(minifiedPath, 'utf8').trim().replace(/;$/, '') : source
  };
};
|
||||
14
BACK_BACK/node_modules/parcel-bundler/lib/utils/getModuleParts.js
generated
vendored
Executable file
14
BACK_BACK/node_modules/parcel-bundler/lib/utils/getModuleParts.js
generated
vendored
Executable file
|
|
@ -0,0 +1,14 @@
|
|||
"use strict";
|
||||
|
||||
const path = require('path');
|
||||
|
||||
module.exports = function (name) {
|
||||
let parts = path.normalize(name).split(path.sep);
|
||||
|
||||
if (parts[0].charAt(0) === '@') {
|
||||
// Scoped module (e.g. @scope/module). Merge the first two parts back together.
|
||||
parts.splice(0, 2, `${parts[0]}/${parts[1]}`);
|
||||
}
|
||||
|
||||
return parts;
|
||||
};
|
||||
51
BACK_BACK/node_modules/parcel-bundler/lib/utils/getRootDir.js
generated
vendored
Executable file
51
BACK_BACK/node_modules/parcel-bundler/lib/utils/getRootDir.js
generated
vendored
Executable file
|
|
@ -0,0 +1,51 @@
|
|||
"use strict";
|
||||
|
||||
// Babel-generated runtime helpers emulating `for...of` over iterables and
// array-likes. Do not edit by hand — emitted by the transpiler.
function _createForOfIteratorHelper(o, allowArrayLike) { var it; if (typeof Symbol === "undefined" || o[Symbol.iterator] == null) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = o[Symbol.iterator](); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; }

function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }

function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) arr2[i] = arr[i]; return arr2; }
|
||||
|
||||
const path = require('path');
|
||||
|
||||
/**
 * Compute the deepest common parent directory of a list of file paths.
 * Falls back to process.cwd() when the list is empty or the paths live on
 * different filesystem roots (e.g. C:\ vs D:\ on Windows).
 *
 * Idiom fix: the transpiled `_createForOfIteratorHelper` loop is replaced with
 * a native `for...of` — `files` is an array of strings here, for which the two
 * forms are equivalent, and this file already uses post-ES6 syntax throughout.
 *
 * @param files array of absolute file paths
 * @returns common parent directory, or process.cwd()
 */
function getRootDir(files) {
  let common = null; // path.parse() result accumulating the common prefix

  for (const file of files) {
    const parsed = path.parse(file);

    if (!common) {
      common = parsed;
    } else if (parsed.root !== common.root) {
      // bail out. there is no common root.
      // this can happen on windows, e.g. C:\foo\bar vs. D:\foo\bar
      return process.cwd();
    } else {
      // find the common path parts.
      const commonParts = common.dir.split(path.sep);
      const newParts = parsed.dir.split(path.sep);
      const len = Math.min(commonParts.length, newParts.length);
      let i = 0;

      while (i < len && commonParts[i] === newParts[i]) {
        i++;
      }

      // i <= 1 means only the root matched — fall back to the root itself.
      common.dir = i > 1 ? commonParts.slice(0, i).join(path.sep) : common.root;
    }
  }

  return common ? common.dir : process.cwd();
}
|
||||
|
||||
module.exports = getRootDir;
|
||||
148
BACK_BACK/node_modules/parcel-bundler/lib/utils/getTargetEngines.js
generated
vendored
Executable file
148
BACK_BACK/node_modules/parcel-bundler/lib/utils/getTargetEngines.js
generated
vendored
Executable file
|
|
@ -0,0 +1,148 @@
|
|||
"use strict";
|
||||
|
||||
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
|
||||
|
||||
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
|
||||
|
||||
const browserslist = require('browserslist');
|
||||
|
||||
const semver = require('semver');
|
||||
|
||||
const Path = require('path');
|
||||
|
||||
// Fallback compile targets used for the target app when no engine info is
// found in package.json / browserslist / babelrc.
const DEFAULT_ENGINES = {
  browsers: ['> 0.25%'],
  node: '8'
};
|
||||
/**
|
||||
* Loads target node and browser versions from the following locations:
|
||||
* - package.json engines field
|
||||
* - package.json browserslist field
|
||||
* - browserslist or .browserslistrc files
|
||||
* - .babelrc or .babelrc.js files with @babel/preset-env
|
||||
*/
|
||||
|
||||
/**
 * Resolve the node/browser compile targets for an asset (see the doc comment
 * above for the lookup order). Resolves with `{node?, browsers?}` or null
 * when no targets apply. (Babel async wrapper — logic in _getTargetEngines.)
 */
function getTargetEngines(_x, _x2) {
  return _getTargetEngines.apply(this, arguments);
}
|
||||
|
||||
/**
 * Implementation of getTargetEngines().
 *
 * @param asset       asset whose config lookups (asset.getConfig) drive the search
 * @param isTargetApp true when resolving for the app being built (enables
 *                    DEFAULT_ENGINES fallback; node_modules deps get stricter rules)
 */
function _getTargetEngines() {
  _getTargetEngines = (0, _asyncToGenerator2.default)(function* (asset, isTargetApp) {
    let targets = {};
    // For the app itself, search from the project root; for deps, from the asset.
    let path = isTargetApp ? Path.join(asset.options.rootDir, 'index') : asset.name;
    let compileTarget = asset.options.target === 'browser' ? 'browsers' : asset.options.target;
    let pkg = yield asset.getConfig(['package.json'], {
      path
    });
    let engines = pkg && pkg.engines;
    let nodeVersion = engines && getMinSemver(engines.node);

    if (compileTarget === 'node') {
      // Use package.engines.node by default if we are compiling for node.
      if (typeof nodeVersion === 'string') {
        targets.node = nodeVersion;
      }
    } else {
      // Browser target: engines.browsers > pkg.browserslist > browserslist
      // config files > @babel/preset-env targets, in that order.
      if (engines && (typeof engines.browsers === 'string' || Array.isArray(engines.browsers))) {
        targets.browsers = engines.browsers;
      } else if (pkg && pkg.browserslist) {
        targets.browsers = pkg.browserslist;
      } else {
        let browserslist = yield loadBrowserslist(asset, path);

        if (browserslist) {
          targets.browsers = browserslist;
        } else {
          let babelTargets = yield loadBabelrc(asset, path);

          if (babelTargets && babelTargets.browsers) {
            targets.browsers = babelTargets.browsers;
          } else if (babelTargets && babelTargets.node && !nodeVersion) {
            nodeVersion = babelTargets.node;
          }
        }
      } // Fall back to package.engines.node for node_modules without any browser target info.


      if (!isTargetApp && !targets.browsers && typeof nodeVersion === 'string') {
        targets.node = nodeVersion;
      }
    } // If we didn't find any targets, set some default engines for the target app.


    if (isTargetApp && !targets[compileTarget] && DEFAULT_ENGINES[compileTarget]) {
      targets[compileTarget] = DEFAULT_ENGINES[compileTarget];
    } // Parse browser targets


    if (targets.browsers) {
      // Environment-keyed browserslist objects: pick the active env's list.
      if (typeof targets.browsers === 'object' && !Array.isArray(targets.browsers)) {
        let env = asset.options.production ? 'production' : process.env.NODE_ENV || 'development';
        targets.browsers = targets.browsers[env] || targets.browsers.defaults;
      }

      if (targets.browsers) {
        // Normalize queries into a sorted, concrete browser list.
        targets.browsers = browserslist(targets.browsers).sort();
      }
    } // Dont compile if we couldn't find any targets


    if (Object.keys(targets).length === 0) {
      return null;
    }

    return targets;
  });
  return _getTargetEngines.apply(this, arguments);
}
|
||||
|
||||
/**
 * Return the lowest concrete version satisfying a semver range string,
 * or null when the range is missing/unparseable.
 */
function getMinSemver(version) {
  try {
    const range = new semver.Range(version);
    const comparatorSets = range.set.sort((a, b) => a[0].semver.compare(b[0].semver));
    return comparatorSets[0][0].semver.version;
  } catch (_err) {
    return null;
  }
}
|
||||
|
||||
/**
 * Locate a `browserslist` / `.browserslistrc` file near `path` and parse it
 * with browserslist's own config reader. Resolves with the queries, or
 * undefined when no config file exists.
 * (Babel async wrapper + implementation.)
 */
function loadBrowserslist(_x3, _x4) {
  return _loadBrowserslist.apply(this, arguments);
}

function _loadBrowserslist() {
  _loadBrowserslist = (0, _asyncToGenerator2.default)(function* (asset, path) {
    // load:false — we only want the file path; browserslist parses it itself.
    let config = yield asset.getConfig(['browserslist', '.browserslistrc'], {
      path,
      load: false
    });

    if (config) {
      return browserslist.readConfig(config);
    }
  });
  return _loadBrowserslist.apply(this, arguments);
}
|
||||
|
||||
/**
 * Read the nearest .babelrc/.babelrc.js and extract the `targets` option of
 * the `env` / `@babel/env` preset, if configured as `[name, options]`.
 * Resolves with the targets object, or undefined.
 * (Babel async wrapper + implementation.)
 */
function loadBabelrc(_x5, _x6) {
  return _loadBabelrc.apply(this, arguments);
}

function _loadBabelrc() {
  _loadBabelrc = (0, _asyncToGenerator2.default)(function* (asset, path) {
    let config = yield asset.getConfig(['.babelrc', '.babelrc.js'], {
      path
    });

    if (config && config.presets) {
      // Only array-form presets carry options; bare string presets have no targets.
      let env = config.presets.find(plugin => Array.isArray(plugin) && (plugin[0] === 'env' || plugin[0] === '@babel/env'));

      if (env && env[1] && env[1].targets) {
        return env[1].targets;
      }
    }
  });
  return _loadBabelrc.apply(this, arguments);
}
|
||||
|
||||
module.exports = getTargetEngines;
|
||||
21
BACK_BACK/node_modules/parcel-bundler/lib/utils/glob.js
generated
vendored
Executable file
21
BACK_BACK/node_modules/parcel-bundler/lib/utils/glob.js
generated
vendored
Executable file
|
|
@ -0,0 +1,21 @@
|
|||
"use strict";
|
||||
|
||||
const isGlob = require('is-glob');
|
||||
|
||||
const fastGlob = require('fast-glob');
|
||||
|
||||
// Convert Windows-style backslash separators to forward slashes, since
// glob patterns always use `/`.
function normalisePath(p) {
  return p.split('\\').join('/');
}
|
||||
|
||||
// Test whether a (possibly Windows-style) path contains glob syntax.
exports.isGlob = function (p) {
  return isGlob(normalisePath(p));
};

// Async glob; returns a promise of matching paths.
exports.glob = function (p, options) {
  return fastGlob(normalisePath(p), options);
};

// Synchronous variant of exports.glob.
exports.glob.sync = function (p, options) {
  return fastGlob.sync(normalisePath(p), options);
};
|
||||
191
BACK_BACK/node_modules/parcel-bundler/lib/utils/installPackage.js
generated
vendored
Executable file
191
BACK_BACK/node_modules/parcel-bundler/lib/utils/installPackage.js
generated
vendored
Executable file
|
|
@ -0,0 +1,191 @@
|
|||
"use strict";
|
||||
|
||||
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
|
||||
|
||||
var _slicedToArray2 = _interopRequireDefault(require("@babel/runtime/helpers/slicedToArray"));
|
||||
|
||||
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
|
||||
|
||||
const config = require('./config');
|
||||
|
||||
const _require = require('@parcel/utils'),
|
||||
promisify = _require.promisify;
|
||||
|
||||
const resolve = promisify(require('resolve'));
|
||||
|
||||
const commandExists = require('command-exists');
|
||||
|
||||
const logger = require('@parcel/logger');
|
||||
|
||||
const pipeSpawn = require('./pipeSpawn');
|
||||
|
||||
const PromiseQueue = require('./PromiseQueue');
|
||||
|
||||
const path = require('path');
|
||||
|
||||
const fs = require('@parcel/fs');
|
||||
|
||||
const WorkerFarm = require('@parcel/workers');
|
||||
|
||||
const YARN_LOCK = 'yarn.lock';
|
||||
|
||||
// Installs one or more npm packages into the project containing `filepath`,
// via yarn or npm. Transpiled async entry point; the real logic lives in the
// lazily-compiled `_install` below.
function install(_x, _x2) {
  return _install.apply(this, arguments);
}

// Implementation of install(modules, filepath, options).
// options: installPeers (default true), saveDev (default true),
// packageManager ('npm' | 'yarn'; auto-detected from yarn.lock when omitted).
function _install() {
  _install = (0, _asyncToGenerator2.default)(function* (modules, filepath, options = {}) {
    // Babel-expanded destructuring of options with defaults.
    let _options$installPeers = options.installPeers,
        installPeers = _options$installPeers === void 0 ? true : _options$installPeers,
        _options$saveDev = options.saveDev,
        saveDev = _options$saveDev === void 0 ? true : _options$saveDev,
        packageManager = options.packageManager;

    // Accept a single module name as well as an array.
    if (typeof modules === 'string') {
      modules = [modules];
    }

    logger.progress(`Installing ${modules.join(', ')}...`);
    // Install relative to the nearest package.json above `filepath`,
    // falling back to the current working directory when none is found.
    let packageLocation = yield config.resolve(filepath, ['package.json']);
    let cwd = packageLocation ? path.dirname(packageLocation) : process.cwd();

    if (!packageManager) {
      packageManager = yield determinePackageManager(filepath);
    }

    // npm uses `install`, yarn uses `add`.
    let commandToUse = packageManager === 'npm' ? 'install' : 'add';
    let args = [commandToUse, ...modules];

    if (saveDev) {
      args.push('-D');
    } else if (packageManager === 'npm') {
      args.push('--save');
    } // npm doesn't auto-create a package.json when installing,
    // so create an empty one if needed.


    if (packageManager === 'npm' && !packageLocation) {
      yield fs.writeFile(path.join(cwd, 'package.json'), '{}');
    }

    try {
      yield pipeSpawn(packageManager, args, {
        cwd
      });
    } catch (err) {
      // The original spawn error is intentionally discarded: its output was
      // already piped to the logger by pipeSpawn.
      throw new Error(`Failed to install ${modules.join(', ')}.`);
    }

    // Install each module's peerDependencies one level deep —
    // installPeerDependencies calls back here with installPeers: false.
    if (installPeers) {
      yield Promise.all(modules.map(m => installPeerDependencies(filepath, m, options)));
    }
  });
  return _install.apply(this, arguments);
}
|
||||
|
||||
// Installs the peerDependencies declared by an already-installed module.
// Transpiled async entry point for _installPeerDependencies below.
function installPeerDependencies(_x3, _x4, _x5) {
  return _installPeerDependencies.apply(this, arguments);
}

// Implementation of installPeerDependencies(filepath, name, options).
function _installPeerDependencies() {
  _installPeerDependencies = (0, _asyncToGenerator2.default)(function* (filepath, name, options) {
    let basedir = path.dirname(filepath);

    // Resolve the installed module's entry file; only the first element of
    // the promisified resolve() result tuple is used.
    const _yield$resolve = yield resolve(name, {
      basedir
    }),
        _yield$resolve2 = (0, _slicedToArray2.default)(_yield$resolve, 1),
        resolved = _yield$resolve2[0];

    // Read the module's own package.json to discover its peer deps.
    const pkg = yield config.load(resolved, ['package.json']);
    const peers = pkg.peerDependencies || {};
    const modules = [];

    // Build "name@range" specs for the package manager.
    for (const peer in peers) {
      modules.push(`${peer}@${peers[peer]}`);
    }

    // installPeers: false stops the recursion after one level.
    if (modules.length) {
      yield install(modules, filepath, Object.assign({}, options, {
        installPeers: false
      }));
    }
  });
  return _installPeerDependencies.apply(this, arguments);
}
|
||||
|
||||
// Picks 'yarn' or 'npm' for the project containing `filepath`.
// Transpiled async entry point for _determinePackageManager below.
function determinePackageManager(_x6) {
  return _determinePackageManager.apply(this, arguments);
}

function _determinePackageManager() {
  _determinePackageManager = (0, _asyncToGenerator2.default)(function* (filepath) {
    // Look for a yarn.lock anywhere above `filepath`.
    const yarnLockFile = yield config.resolve(filepath, [YARN_LOCK]);
    /**
     * no yarn.lock => use npm
     * yarn.lock => Use yarn, fallback to npm
     */

    if (!yarnLockFile) {
      return 'npm';
    }

    // yarn.lock exists, but only use yarn if the command is available.
    const hasYarn = yield checkForYarnCommand();

    if (hasYarn) {
      return 'yarn';
    }

    return 'npm';
  });
  return _determinePackageManager.apply(this, arguments);
}
|
||||
|
||||
// Module-level cache: null = not yet checked, otherwise the boolean result.
let hasYarn = null;

// Checks (once per process) whether the `yarn` executable is on PATH.
// Transpiled async entry point for _checkForYarnCommand below.
function checkForYarnCommand() {
  return _checkForYarnCommand.apply(this, arguments);
}

function _checkForYarnCommand() {
  _checkForYarnCommand = (0, _asyncToGenerator2.default)(function* () {
    // Return the cached answer on every call after the first.
    if (hasYarn != null) {
      return hasYarn;
    }

    try {
      hasYarn = yield commandExists('yarn');
    } catch (err) {
      // commandExists rejects when the command is missing; treat as "no yarn".
      hasYarn = false;
    }

    return hasYarn;
  });
  return _checkForYarnCommand.apply(this, arguments);
}
|
||||
|
||||
// Serialise all installs through a single-concurrency queue so two installs
// never run the package manager at the same time.
let queue = new PromiseQueue(install, {
  maxConcurrent: 1,
  retry: false
});

// Public export: same signature as install(modules, filepath, options), but
// routed through the queue, and forwarded to the master process when invoked
// from a worker.
module.exports = /*#__PURE__*/function () {
  var _ref = (0, _asyncToGenerator2.default)(function* (...args) {
    // Ensure that this function is always called on the master process so we
    // don't call multiple installs in parallel.
    if (WorkerFarm.isWorker()) {
      yield WorkerFarm.callMaster({
        location: __filename,
        args
      });
      return;
    }

    // On the master: enqueue this request and wait for the queue to drain.
    queue.add(...args);
    return queue.run();
  });

  return function () {
    return _ref.apply(this, arguments);
  };
}();
|
||||
12
BACK_BACK/node_modules/parcel-bundler/lib/utils/is-url.js
generated
vendored
Executable file
12
BACK_BACK/node_modules/parcel-bundler/lib/utils/is-url.js
generated
vendored
Executable file
|
|
@ -0,0 +1,12 @@
|
|||
"use strict";
|
||||
|
||||
const isURL = require('is-url'); // Matches anchor (ie: #raptors)
|
||||
|
||||
|
||||
const ANCHOR_REGEXP = /^#/; // Matches scheme (ie: tel:, mailto:, data:, itms-apps:)
|
||||
|
||||
const SCHEME_REGEXP = /^[a-z][a-z0-9\-+.]*:/i;
|
||||
|
||||
module.exports = function (url) {
|
||||
return isURL(url) || ANCHOR_REGEXP.test(url) || SCHEME_REGEXP.test(url);
|
||||
};
|
||||
16
BACK_BACK/node_modules/parcel-bundler/lib/utils/isAccessedVarChanged.js
generated
vendored
Executable file
16
BACK_BACK/node_modules/parcel-bundler/lib/utils/isAccessedVarChanged.js
generated
vendored
Executable file
|
|
@ -0,0 +1,16 @@
|
|||
"use strict";
|
||||
|
||||
/*
|
||||
Checks if any of the used variable from process.env is changed
|
||||
*/
|
||||
/**
 * Reports whether any process.env variable captured in the cache snapshot
 * no longer matches the live environment.
 * @param {{env: Object}} cacheData - cached variable-name -> value map
 * @returns {boolean} true if at least one cached variable differs now
 */
function isAccessedVarChanged(cacheData) {
  return Object.keys(cacheData.env).some(
    key => cacheData.env[key] !== process.env[key]
  );
}
|
||||
|
||||
module.exports = isAccessedVarChanged;
|
||||
15
BACK_BACK/node_modules/parcel-bundler/lib/utils/lineCounter.js
generated
vendored
Executable file
15
BACK_BACK/node_modules/parcel-bundler/lib/utils/lineCounter.js
generated
vendored
Executable file
|
|
@ -0,0 +1,15 @@
|
|||
"use strict";
|
||||
|
||||
/**
 * Counts the number of lines in a string.
 * A string always occupies at least one line; each '\n' starts another.
 * @param {string} string - text to measure
 * @returns {number} line count (always >= 1)
 */
function lineCounter(string) {
  // split yields one array entry per line, so its length is the line count;
  // ''.split('\n') is [''], preserving "empty string is 1 line".
  return string.split('\n').length;
}
|
||||
|
||||
module.exports = lineCounter;
|
||||
52
BACK_BACK/node_modules/parcel-bundler/lib/utils/loadPlugins.js
generated
vendored
Executable file
52
BACK_BACK/node_modules/parcel-bundler/lib/utils/loadPlugins.js
generated
vendored
Executable file
|
|
@ -0,0 +1,52 @@
|
|||
"use strict";
|
||||
|
||||
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
|
||||
|
||||
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
|
||||
|
||||
const localRequire = require('./localRequire');
|
||||
|
||||
// Public export: loadPlugins(plugins, relative) — loads a list or a
// {name: options} map of plugins via loadPlugin, dropping falsy results.
module.exports = /*#__PURE__*/function () {
  var _loadPlugins = (0, _asyncToGenerator2.default)(function* (plugins, relative) {
    // Array form: ['plugin-a', pluginFn, ...] — no per-plugin options.
    if (Array.isArray(plugins)) {
      return Promise.all(plugins.map(p => loadPlugin(p, relative)).filter(Boolean));
    } else if (typeof plugins === 'object') {
      // Object form: {'plugin-a': optionsA, ...} — each value is passed
      // to the plugin as its options.
      let mapPlugins = yield Promise.all(Object.keys(plugins).map(p => loadPlugin(p, relative, plugins[p])));
      return mapPlugins.filter(Boolean);
    } else {
      // Anything else (undefined, null, string, ...) yields no plugins.
      return [];
    }
  });

  function loadPlugins(_x, _x2) {
    return _loadPlugins.apply(this, arguments);
  }

  return loadPlugins;
}();
|
||||
|
||||
// Loads a single plugin: a string is require()d relative to `relative`
// (auto-installing if missing, via localRequire); anything else is
// returned as-is. Transpiled async entry point for _loadPlugin below.
function loadPlugin(_x3, _x4, _x5) {
  return _loadPlugin.apply(this, arguments);
}

function _loadPlugin() {
  _loadPlugin = (0, _asyncToGenerator2.default)(function* (plugin, relative, options) {
    if (typeof plugin === 'string') {
      plugin = yield localRequire(plugin, relative);
      // Unwrap ES-module default exports.
      plugin = plugin.default || plugin;

      if (typeof options !== 'object') {
        options = {};
      }

      // A plugin exporting a factory is called with its options;
      // with no options the export is used directly.
      if (Object.keys(options).length > 0) {
        plugin = plugin(options);
      }

      // The factory itself may also return a module-shaped object.
      plugin = plugin.default || plugin;
    }

    return plugin;
  });
  return _loadPlugin.apply(this, arguments);
}
|
||||
86
BACK_BACK/node_modules/parcel-bundler/lib/utils/loadSourceMap.js
generated
vendored
Executable file
86
BACK_BACK/node_modules/parcel-bundler/lib/utils/loadSourceMap.js
generated
vendored
Executable file
|
|
@ -0,0 +1,86 @@
|
|||
"use strict";
|
||||
|
||||
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
|
||||
|
||||
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
|
||||
|
||||
const logger = require('@parcel/logger');
|
||||
|
||||
const path = require('path');
|
||||
|
||||
const fs = require('@parcel/fs');
|
||||
|
||||
const SOURCEMAP_RE = /(?:\/\*|\/\/)\s*[@#]\s*sourceMappingURL\s*=\s*([^\s*]+)(?:\s*\*\/)?/;
|
||||
const DATA_URL_RE = /^data:[^;]+(?:;charset=[^;]+)?;base64,(.*)/;
|
||||
|
||||
// Extracts an existing source map referenced by an asset's contents —
// either an inline base64 data URL or an external .map file — strips the
// sourceMappingURL comment from the contents, and fills in any missing
// sourcesContent entries from disk. Returns the parsed map object, or
// undefined when none exists or it cannot be loaded.
// Transpiled async entry point for _loadSourceMap below.
function loadSourceMap(_x) {
  return _loadSourceMap.apply(this, arguments);
}

function _loadSourceMap() {
  _loadSourceMap = (0, _asyncToGenerator2.default)(function* (asset) {
    // Get original sourcemap if there is any
    let match = asset.contents.match(SOURCEMAP_RE);
    let sourceMap;

    if (match) {
      // Remove the sourceMappingURL comment from the asset's contents
      // (mutates asset.contents in place).
      asset.contents = asset.contents.replace(SOURCEMAP_RE, '');
      let url = match[1];
      let dataURLMatch = url.match(DATA_URL_RE);

      try {
        let json, filename;

        if (dataURLMatch) {
          // Inline map: decode the base64 payload of the data URL.
          filename = asset.name;
          json = Buffer.from(dataURLMatch[1], 'base64').toString();
        } else {
          // External map: resolve relative to the asset and read it.
          filename = path.join(path.dirname(asset.name), url);
          json = yield fs.readFile(filename, 'utf8'); // Add as a dep so we watch the source map for changes.

          asset.addDependency(filename, {
            includedInParent: true
          });
        }

        sourceMap = JSON.parse(json); // Attempt to read missing source contents

        if (!sourceMap.sourcesContent) {
          sourceMap.sourcesContent = [];
        }

        // Sources without a matching sourcesContent entry (the arrays are
        // parallel, so the tail of `sources` is what's missing).
        let missingSources = sourceMap.sources.slice(sourceMap.sourcesContent.length);

        if (missingSources.length) {
          let contents = yield Promise.all(missingSources.map( /*#__PURE__*/function () {
            var _ref = (0, _asyncToGenerator2.default)(function* (source) {
              try {
                // Source paths are relative to the map file plus its
                // optional sourceRoot.
                let sourceFile = path.join(path.dirname(filename), sourceMap.sourceRoot || '', source);
                let result = yield fs.readFile(sourceFile, 'utf8');
                asset.addDependency(sourceFile, {
                  includedInParent: true
                });
                return result;
              } catch (err) {
                // Best-effort: a missing source file only warns; the
                // corresponding sourcesContent entry becomes undefined.
                logger.warn(`Could not load source file "${source}" in source map of "${asset.relativeName}".`);
              }
            });

            return function (_x2) {
              return _ref.apply(this, arguments);
            };
          }()));
          sourceMap.sourcesContent = sourceMap.sourcesContent.concat(contents);
        }
      } catch (e) {
        // Malformed JSON or unreadable map file: warn and proceed with no map.
        logger.warn(`Could not load existing sourcemap of "${asset.relativeName}".`);
        sourceMap = undefined;
      }
    }

    return sourceMap;
  });
  return _loadSourceMap.apply(this, arguments);
}
|
||||
|
||||
module.exports = loadSourceMap;
|
||||
72
BACK_BACK/node_modules/parcel-bundler/lib/utils/localRequire.js
generated
vendored
Executable file
72
BACK_BACK/node_modules/parcel-bundler/lib/utils/localRequire.js
generated
vendored
Executable file
|
|
@ -0,0 +1,72 @@
|
|||
"use strict";
|
||||
|
||||
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
|
||||
|
||||
var _slicedToArray2 = _interopRequireDefault(require("@babel/runtime/helpers/slicedToArray"));
|
||||
|
||||
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
|
||||
|
||||
const _require = require('path'),
|
||||
dirname = _require.dirname;
|
||||
|
||||
const _require2 = require('@parcel/utils'),
|
||||
promisify = _require2.promisify;
|
||||
|
||||
const resolve = promisify(require('resolve'));
|
||||
|
||||
const installPackage = require('./installPackage');
|
||||
|
||||
const getModuleParts = require('./getModuleParts');
|
||||
|
||||
const cache = new Map();
|
||||
|
||||
// require()s a module relative to another file's path, resolving (and
// auto-installing, on first failure) via localResolve.
// Transpiled async entry point for _localRequire below.
function localRequire(_x, _x2) {
  return _localRequire.apply(this, arguments);
}

function _localRequire() {
  _localRequire = (0, _asyncToGenerator2.default)(function* (name, path, triedInstall = false) {
    // localResolve returns a tuple; only the resolved filename is used.
    let _yield$localResolve = yield localResolve(name, path, triedInstall),
        _yield$localResolve2 = (0, _slicedToArray2.default)(_yield$localResolve, 1),
        resolved = _yield$localResolve2[0];

    return require(resolved);
  });
  return _localRequire.apply(this, arguments);
}
|
||||
|
||||
// Resolves a module name relative to another file, with a module-level
// cache keyed by "basedir:name". On MODULE_NOT_FOUND it installs the
// package once and retries; any other error (or a second failure)
// propagates. Transpiled async entry point for _localResolve below.
function localResolve(_x3, _x4) {
  return _localResolve.apply(this, arguments);
}

function _localResolve() {
  _localResolve = (0, _asyncToGenerator2.default)(function* (name, path, triedInstall = false) {
    let basedir = dirname(path);
    // Cache key combines the resolution base dir and the requested name.
    let key = basedir + ':' + name;
    let resolved = cache.get(key);

    if (!resolved) {
      try {
        resolved = yield resolve(name, {
          basedir
        });
      } catch (e) {
        if (e.code === 'MODULE_NOT_FOUND' && !triedInstall) {
          // Install only the package root (e.g. "pkg" from "pkg/sub/path"),
          // then retry exactly once (triedInstall = true).
          const packageName = getModuleParts(name)[0];
          yield installPackage(packageName, path);
          return localResolve(name, path, true);
        }

        throw e;
      }

      cache.set(key, resolved);
    }

    return resolved;
  });
  return _localResolve.apply(this, arguments);
}
|
||||
|
||||
localRequire.resolve = localResolve;
|
||||
module.exports = localRequire;
|
||||
19
BACK_BACK/node_modules/parcel-bundler/lib/utils/md5.js
generated
vendored
Executable file
19
BACK_BACK/node_modules/parcel-bundler/lib/utils/md5.js
generated
vendored
Executable file
|
|
@ -0,0 +1,19 @@
|
|||
"use strict";
|
||||
|
||||
const crypto = require('crypto');
|
||||
|
||||
const fs = require('fs');
|
||||
|
||||
/**
 * Hashes data with MD5.
 * @param {string|Buffer} string - data to hash
 * @param {string} [encoding='hex'] - digest encoding (e.g. 'hex', 'base64')
 * @returns {string} the MD5 digest in the requested encoding
 */
function md5(string, encoding = 'hex') {
  const hash = crypto.createHash('md5');
  hash.update(string);
  return hash.digest(encoding);
}
|
||||
|
||||
// Streams a file through an MD5 hash and resolves with the hex digest.
// The hash stream is put in 'hex' encoding mode, so `this.read()` inside
// the 'finish' handler yields the digest string (hence the non-arrow
// callback: `this` must be the hash stream). Read errors reject.
md5.file = function (filename) {
  return new Promise((resolve, reject) => {
    fs.createReadStream(filename).on('error', reject).pipe(crypto.createHash('md5').setEncoding('hex')).on('finish', function () {
      resolve(this.read());
    });
  });
};
|
||||
|
||||
module.exports = md5;
|
||||
37
BACK_BACK/node_modules/parcel-bundler/lib/utils/objectHash.js
generated
vendored
Executable file
37
BACK_BACK/node_modules/parcel-bundler/lib/utils/objectHash.js
generated
vendored
Executable file
|
|
@ -0,0 +1,37 @@
|
|||
"use strict";
|
||||
|
||||
// Babel-generated helper: spec-compliant for...of iteration (start/next/
// error/finish handles) used by objectHash to walk sorted object keys.
function _createForOfIteratorHelper(o, allowArrayLike) { var it; if (typeof Symbol === "undefined" || o[Symbol.iterator] == null) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = o[Symbol.iterator](); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; }

// Babel-generated helper: converts strings, Map/Set, Arguments and
// typed-array-likes into real arrays for the iterator helper above.
function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }

// Babel-generated helper: shallow-copies the first `len` elements of an
// array-like into a new Array.
function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) arr2[i] = arr[i]; return arr2; }
|
||||
|
||||
const crypto = require('crypto');
|
||||
|
||||
/**
 * Computes a stable MD5 hex digest of a plain object.
 * Keys are visited in sorted order so insertion order never affects the
 * result; nested non-null objects are hashed recursively and their hex
 * digest is folded into the parent hash. Other values are stringified by
 * concatenation ("" + value).
 * @param {Object} object - object to fingerprint
 * @returns {string} 32-character hex MD5 digest
 */
function objectHash(object) {
  const hasher = crypto.createHash('md5');

  for (const key of Object.keys(object).sort()) {
    const value = object[key];

    if (typeof value === 'object' && value) {
      hasher.update(key + objectHash(value));
    } else {
      hasher.update(key + value);
    }
  }

  return hasher.digest('hex');
}
|
||||
|
||||
module.exports = objectHash;
|
||||
27
BACK_BACK/node_modules/parcel-bundler/lib/utils/openInBrowser.js
generated
vendored
Executable file
27
BACK_BACK/node_modules/parcel-bundler/lib/utils/openInBrowser.js
generated
vendored
Executable file
|
|
@ -0,0 +1,27 @@
|
|||
"use strict";
|
||||
|
||||
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
|
||||
|
||||
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
|
||||
|
||||
const opn = require('opn');
|
||||
|
||||
// Opens a URL in the user's browser via opn. When `browser` is a string it
// names the app to launch; otherwise the system default is used. Failures
// are logged to the console and swallowed — opening a browser is
// best-effort and must not crash the build.
const openInBrowser = /*#__PURE__*/function () {
  var _ref = (0, _asyncToGenerator2.default)(function* (url, browser) {
    try {
      const options = typeof browser === 'string' ? {
        app: browser
      } : undefined;
      yield opn(url, options);
    } catch (err) {
      console.error(`Unexpected error while opening in browser: ${browser}`);
      console.error(err);
    }
  });

  return function openInBrowser(_x, _x2) {
    return _ref.apply(this, arguments);
  };
}();
|
||||
|
||||
module.exports = openInBrowser;
|
||||
13
BACK_BACK/node_modules/parcel-bundler/lib/utils/parseCSSImport.js
generated
vendored
Executable file
13
BACK_BACK/node_modules/parcel-bundler/lib/utils/parseCSSImport.js
generated
vendored
Executable file
|
|
@ -0,0 +1,13 @@
|
|||
"use strict";
|
||||
|
||||
/**
 * Normalizes a CSS @import specifier:
 *  - a bare name ("foo.css") becomes an explicit relative path ("./foo.css");
 *  - a tilde module reference ("~module/foo.css") has the tilde stripped
 *    ("module/foo.css"); "~/..." is left untouched;
 *  - "./" and "/" prefixed paths pass through unchanged.
 * @param {string} url - raw import specifier
 * @returns {string} normalized specifier
 */
function parseCSSImport(url) {
  const hasKnownPrefix = /^(~|\.\/|\/)/.test(url);

  if (!hasKnownPrefix) {
    return './' + url;
  }

  if (!/^(~\/|\.\/|\/)/.test(url)) {
    // "~module/..." form: drop the leading tilde.
    return url.substring(1);
  }

  return url;
}
|
||||
|
||||
module.exports = parseCSSImport;
|
||||
33
BACK_BACK/node_modules/parcel-bundler/lib/utils/pipeSpawn.js
generated
vendored
Executable file
33
BACK_BACK/node_modules/parcel-bundler/lib/utils/pipeSpawn.js
generated
vendored
Executable file
|
|
@ -0,0 +1,33 @@
|
|||
"use strict";
|
||||
|
||||
const spawn = require('cross-spawn');
|
||||
|
||||
const logger = require('@parcel/logger');
|
||||
|
||||
// Spawns a child process, pipes its stdout/stderr through the logger, and
// returns a promise that resolves on exit code 0 and rejects on spawn
// errors or any non-zero exit code.
function pipeSpawn(cmd, params, opts) {
  // Env merge order matters: the color/progress hints come first so the
  // real process.env can override them, and NODE_ENV is nulled last so it
  // is never inherited by the child.
  const cp = spawn(cmd, params, Object.assign({
    env: Object.assign({
      FORCE_COLOR: logger.color,
      npm_config_color: logger.color ? 'always' : '',
      npm_config_progress: true
    }, process.env, {
      NODE_ENV: null
    } // Passing NODE_ENV through causes strange issues with yarn
    )
  }, opts));
  // Forward both output streams verbatim to the logger.
  cp.stdout.setEncoding('utf8').on('data', d => logger.writeRaw(d));
  cp.stderr.setEncoding('utf8').on('data', d => logger.writeRaw(d));
  return new Promise((resolve, reject) => {
    cp.on('error', reject);
    cp.on('close', function (code) {
      if (code !== 0) {
        return reject(new Error(cmd + ' failed.'));
      }

      // Success: wipe the progress output the child produced.
      logger.clear();
      return resolve();
    });
  });
}
|
||||
|
||||
module.exports = pipeSpawn;
|
||||
5
BACK_BACK/node_modules/parcel-bundler/lib/utils/prettifyTime.js
generated
vendored
Executable file
5
BACK_BACK/node_modules/parcel-bundler/lib/utils/prettifyTime.js
generated
vendored
Executable file
|
|
@ -0,0 +1,5 @@
|
|||
"use strict";
|
||||
|
||||
module.exports = function (time) {
|
||||
return time < 1000 ? `${time}ms` : `${(time / 1000).toFixed(2)}s`;
|
||||
};
|
||||
24
BACK_BACK/node_modules/parcel-bundler/lib/utils/serializeObject.js
generated
vendored
Executable file
24
BACK_BACK/node_modules/parcel-bundler/lib/utils/serializeObject.js
generated
vendored
Executable file
|
|
@ -0,0 +1,24 @@
|
|||
"use strict";
|
||||
|
||||
const _require = require('terser'),
|
||||
minify = _require.minify;
|
||||
|
||||
const serialize = require('serialize-to-js');
|
||||
|
||||
// Serializes an object into a CommonJS module source string
// ("module.exports = ...;") using serialize-to-js, optionally minifying
// the result with terser.
// @param obj - value to serialize
// @param shouldMinify - when true, run the generated code through terser
// @returns the module source code string
// @throws terser's error object when minification fails
function serializeObject(obj, shouldMinify = false) {
  let code = `module.exports = ${serialize(obj)};`;

  if (shouldMinify) {
    let minified = minify(code);

    // terser reports failure via a result.error property, not by throwing.
    if (minified.error) {
      throw minified.error;
    }

    code = minified.code;
  }

  return code;
}
|
||||
|
||||
module.exports = serializeObject;
|
||||
29
BACK_BACK/node_modules/parcel-bundler/lib/utils/syncPromise.js
generated
vendored
Executable file
29
BACK_BACK/node_modules/parcel-bundler/lib/utils/syncPromise.js
generated
vendored
Executable file
|
|
@ -0,0 +1,29 @@
|
|||
"use strict";
|
||||
|
||||
const deasync = require('deasync');
|
||||
/**
|
||||
* Synchronously waits for a promise to return by
|
||||
* yielding to the node event loop as needed.
|
||||
*/
|
||||
|
||||
|
||||
// Synchronously waits for a promise by spinning the node event loop with
// deasync until the promise settles. Rethrows a rejection; otherwise
// returns the resolved value.
// NOTE(review): a falsy rejection value (e.g. rejecting with undefined)
// would not be rethrown here — presumably callers always reject with an
// Error; confirm before relying on that.
function syncPromise(promise) {
  let isDone = false;
  let res, err;
  // Record the outcome; isDone flips in either case so the loop below ends.
  promise.then(value => {
    res = value;
    isDone = true;
  }, error => {
    err = error;
    isDone = true;
  });
  // Blocks this call while still pumping the event loop, allowing the
  // promise's async work to make progress.
  deasync.loopWhile(() => !isDone);

  if (err) {
    throw err;
  }

  return res;
}
|
||||
|
||||
module.exports = syncPromise;
|
||||
19
BACK_BACK/node_modules/parcel-bundler/lib/utils/urlJoin.js
generated
vendored
Executable file
19
BACK_BACK/node_modules/parcel-bundler/lib/utils/urlJoin.js
generated
vendored
Executable file
|
|
@ -0,0 +1,19 @@
|
|||
"use strict";
|
||||
|
||||
const URL = require('url');
|
||||
|
||||
const path = require('path');
|
||||
/**
|
||||
* Joins a path onto a URL, and normalizes Windows paths
|
||||
* e.g. from \path\to\res.js to /path/to/res.js.
|
||||
*/
|
||||
|
||||
|
||||
module.exports = function (publicURL, assetPath) {
|
||||
const url = URL.parse(publicURL, false, true);
|
||||
const assetUrl = URL.parse(assetPath);
|
||||
url.pathname = path.posix.join(url.pathname, assetUrl.pathname);
|
||||
url.search = assetUrl.search;
|
||||
url.hash = assetUrl.hash;
|
||||
return URL.format(url);
|
||||
};
|
||||
Loading…
Add table
Add a link
Reference in a new issue