flow like the river

This commit is contained in:
root 2025-11-07 00:06:12 +01:00
commit 013fe673f3
42435 changed files with 5764238 additions and 0 deletions

21
BACK_BACK/node_modules/@parcel/fs/LICENSE generated vendored Executable file
View file

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2017-present Devon Govett
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

5
BACK_BACK/node_modules/@parcel/fs/index.js generated vendored Executable file
View file

@ -0,0 +1,5 @@
// Entry point: choose the build variant based on the running Node version.
// Node >= 8 ships native async/await, so the untranspiled source in src/
// can be used directly; older runtimes get the Babel-compiled code in lib/.
const majorNodeVersion = parseInt(process.versions.node, 10);
module.exports =
  majorNodeVersion >= 8 ? require('./src/fs') : require('./lib/fs');

31
BACK_BACK/node_modules/@parcel/fs/package.json generated vendored Executable file
View file

@ -0,0 +1,31 @@
{
"name": "@parcel/fs",
"version": "1.11.0",
"description": "Blazing fast, zero configuration web application bundler",
"main": "index.js",
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/parcel-bundler/parcel.git"
},
"engines": {
"node": ">= 6.0.0"
},
"publishConfig": {
"access": "public"
},
"scripts": {
"test": "echo this package has no tests yet",
"test-ci": "yarn build && yarn test",
"format": "prettier --write \"./{src,bin,test}/**/*.{js,json,md}\"",
"lint": "eslint . && prettier \"./{src,bin,test}/**/*.{js,json,md}\" --list-different",
"build": "babel src -d lib",
"prepublish": "yarn build"
},
"dependencies": {
"@parcel/utils": "^1.11.0",
"mkdirp": "^0.5.1",
"rimraf": "^2.6.2"
},
"gitHead": "34eb91e8e6991073e594bff731c333d09b0403b5"
}

8
BACK_BACK/node_modules/@parcel/fs/src/.babelrc generated vendored Executable file
View file

@ -0,0 +1,8 @@
{
"presets": [["@babel/preset-env", {
"targets": {
"node": "6"
}
}]],
"plugins": ["@babel/plugin-transform-runtime"]
}

3
BACK_BACK/node_modules/@parcel/fs/src/.eslintrc.json generated vendored Executable file
View file

@ -0,0 +1,3 @@
{
"extends": "../../../../.eslintrc.json"
}

29
BACK_BACK/node_modules/@parcel/fs/src/fs.js generated vendored Executable file
View file

@ -0,0 +1,29 @@
const {promisify} = require('@parcel/utils');
const fs = require('fs');
const mkdirp = require('mkdirp');
const rimraf = require('rimraf');
exports.readFile = promisify(fs.readFile);
exports.writeFile = promisify(fs.writeFile);
exports.stat = promisify(fs.stat);
exports.readdir = promisify(fs.readdir);
exports.unlink = promisify(fs.unlink);
exports.rimraf = promisify(rimraf);
exports.realpath = async function(path) {
const realpath = promisify(fs.realpath);
try {
path = await realpath(path);
} catch (e) {
// do nothing
}
return path;
};
exports.lstat = promisify(fs.lstat);
exports.exists = function(filename) {
return new Promise(resolve => {
fs.exists(filename, resolve);
});
};
exports.mkdirp = promisify(mkdirp);

21
BACK_BACK/node_modules/@parcel/logger/LICENSE generated vendored Executable file
View file

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2017-present Devon Govett
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

5
BACK_BACK/node_modules/@parcel/logger/index.js generated vendored Executable file
View file

@ -0,0 +1,5 @@
// Entry point: choose the build variant based on the running Node version.
// Node >= 8 ships native async/await, so the untranspiled source in src/
// can be used directly; older runtimes get the Babel-compiled code in lib/.
const majorNodeVersion = parseInt(process.versions.node, 10);
module.exports =
  majorNodeVersion >= 8 ? require('./src/Logger') : require('./lib/Logger');

38
BACK_BACK/node_modules/@parcel/logger/package.json generated vendored Executable file
View file

@ -0,0 +1,38 @@
{
"name": "@parcel/logger",
"version": "1.11.1",
"description": "Blazing fast, zero configuration web application bundler",
"main": "index.js",
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/parcel-bundler/parcel.git"
},
"engines": {
"node": ">= 6.0.0"
},
"publishConfig": {
"access": "public"
},
"scripts": {
"test": "cross-env NODE_ENV=test mocha",
"test-ci": "yarn build && yarn test",
"format": "prettier --write \"./{src,bin,test}/**/*.{js,json,md}\"",
"lint": "eslint . && prettier \"./{src,bin,test}/**/*.{js,json,md}\" --list-different",
"build": "babel src -d lib",
"prepublish": "yarn build"
},
"dependencies": {
"@parcel/workers": "^1.11.0",
"chalk": "^2.1.0",
"grapheme-breaker": "^0.3.2",
"ora": "^2.1.0",
"strip-ansi": "^4.0.0"
},
"devDependencies": {
"@parcel/babel-register": "^1.11.1",
"mocha": "^5.2.0",
"sinon": "^5.0.1"
},
"gitHead": "d9ec7af22f85134dc1a97fe00f35950f2fe1f57a"
}

8
BACK_BACK/node_modules/@parcel/logger/src/.babelrc generated vendored Executable file
View file

@ -0,0 +1,8 @@
{
"presets": [["@babel/preset-env", {
"targets": {
"node": "6"
}
}]],
"plugins": ["@babel/plugin-transform-runtime"]
}

3
BACK_BACK/node_modules/@parcel/logger/src/.eslintrc.json generated vendored Executable file
View file

@ -0,0 +1,3 @@
{
"extends": "../../../../.eslintrc.json"
}

248
BACK_BACK/node_modules/@parcel/logger/src/Logger.js generated vendored Executable file
View file

@ -0,0 +1,248 @@
const chalk = require('chalk');
const readline = require('readline');
const prettyError = require('./prettyError');
const emoji = require('./emoji');
const {countBreaks} = require('grapheme-breaker');
const stripAnsi = require('strip-ansi');
const ora = require('ora');
const WorkerFarm = require('@parcel/workers');
const path = require('path');
const fs = require('fs');
// Console logger for Parcel. Tracks how many terminal lines have been
// written since the last clear() so they can be erased in place, and
// multiplexes progress output through a single ora spinner.
class Logger {
  constructor(options) {
    this.lines = 0; // terminal rows written since last clear()
    this.spinner = null; // active ora spinner, or null when idle
    this.setOptions(options);
  }

  // Apply or refresh configuration. logLevel: 0 silent, 1 errors,
  // 2 +warnings, 3 +info (default), 4 +verbose, >4 also mirrors verbose
  // output to a debug log file.
  setOptions(options) {
    this.logLevel =
      options && isNaN(options.logLevel) === false
        ? Number(options.logLevel)
        : 3;
    this.color =
      options && typeof options.color === 'boolean'
        ? options.color
        : chalk.supportsColor;
    this.emoji = (options && options.emoji) || emoji;
    // Dedicated chalk instance so color can be toggled per-logger.
    this.chalk = new chalk.constructor({enabled: this.color});
    this.isTest =
      options && typeof options.isTest === 'boolean'
        ? options.isTest
        : process.env.NODE_ENV === 'test';
  }

  // Number of terminal rows `message` occupies, accounting for soft
  // wrapping at the current stdout width (ANSI escapes stripped first).
  // Falls back to one row per '\n'-separated line when width is unknown.
  countLines(message) {
    return stripAnsi(message)
      .split('\n')
      .reduce((p, line) => {
        if (process.stdout.columns) {
          return p + Math.ceil((line.length || 1) / process.stdout.columns);
        }
        return p + 1;
      }, 0);
  }

  // Write without appending a newline. The `- 1` keeps the still-open
  // final line out of the erase count until it is completed.
  writeRaw(message) {
    this.stopSpinner();
    this.lines += this.countLines(message) - 1;
    process.stdout.write(message);
  }

  // Write a message line. `persistent` lines are excluded from the line
  // count so clear() leaves them on screen.
  write(message, persistent = false) {
    if (this.logLevel > 3) {
      // In verbose mode everything is routed through verbose() instead.
      return this.verbose(message);
    }
    if (!persistent) {
      this.lines += this.countLines(message);
    }
    this.stopSpinner();
    this._log(message);
  }

  // Timestamped output for logLevel >= 4. At logLevel > 4 the message is
  // additionally appended (ANSI-stripped) to a parcel-debug-*.log file in
  // the current working directory; the stream is created lazily once.
  verbose(message) {
    if (this.logLevel < 4) {
      return;
    }
    let currDate = new Date();
    message = `[${currDate.toLocaleTimeString()}]: ${message}`;
    if (this.logLevel > 4) {
      if (!this.logFile) {
        this.logFile = fs.createWriteStream(
          path.join(process.cwd(), `parcel-debug-${currDate.toISOString()}.log`)
        );
      }
      this.logFile.write(stripAnsi(message) + '\n');
    }
    this._log(message);
  }

  // Info-level message (logLevel >= 3).
  log(message) {
    if (this.logLevel < 3) {
      return;
    }
    this.write(message);
  }

  // Bold info-level message that survives clear().
  persistent(message) {
    if (this.logLevel < 3) {
      return;
    }
    this.write(this.chalk.bold(message), true);
  }

  // Warning (logLevel >= 2), rendered yellow with the warning emoji.
  warn(err) {
    if (this.logLevel < 2) {
      return;
    }
    this._writeError(err, this.emoji.warning, this.chalk.yellow);
  }

  // Error (logLevel >= 1), rendered bold red with the error emoji.
  error(err) {
    if (this.logLevel < 1) {
      return;
    }
    this._writeError(err, this.emoji.error, this.chalk.red.bold);
  }

  success(message) {
    this.log(`${this.emoji.success} ${this.chalk.green.bold(message)}`);
  }

  // Normalize an error into {message, stack} via prettyError.
  formatError(err, opts) {
    return prettyError(err, opts);
  }

  // Shared renderer for warn()/error(): colored message line, then the
  // stack/code frame (uncolored) when one is available.
  _writeError(err, emoji, color) {
    let {message, stack} = this.formatError(err, {color: this.color});
    this.write(color(`${emoji} ${message}`));
    if (stack) {
      this.write(stack);
    }
  }

  // Erase all non-persistent lines written since the last clear().
  // No-op without color support, in tests, or in verbose mode, where
  // rewriting the terminal would scramble output.
  clear() {
    if (!this.color || this.isTest || this.logLevel > 3) {
      return;
    }
    while (this.lines > 0) {
      readline.clearLine(process.stdout, 0);
      readline.moveCursor(process.stdout, 0, -1);
      this.lines--;
    }
    readline.cursorTo(process.stdout, 0);
    this.stopSpinner();
  }

  // Show/update the spinner with a progress message (logLevel exactly 3;
  // verbose mode logs plain timestamped lines instead).
  progress(message) {
    if (this.logLevel < 3) {
      return;
    }
    if (this.logLevel > 3) {
      return this.verbose(message);
    }
    let styledMessage = this.chalk.gray.bold(message);
    if (!this.spinner) {
      this.spinner = ora({
        text: styledMessage,
        stream: process.stdout,
        enabled: this.isTest ? false : undefined // fall back to ora default unless we need to explicitly disable it.
      }).start();
    } else {
      this.spinner.text = styledMessage;
    }
  }

  stopSpinner() {
    if (this.spinner) {
      this.spinner.stop();
      this.spinner = null;
    }
  }

  // IPC entry point: dispatch a {method, args} message from a worker
  // (sent via the LoggerProxy defined below) to the matching method.
  handleMessage(options) {
    this[options.method](...options.args);
  }

  _log(message) {
    // eslint-disable-next-line no-console
    console.log(message);
  }

  // Render `table` (array of rows) with columns padded to the widest cell.
  // `columns` supplies per-column alignment ('left'/'right').
  table(columns, table) {
    // Measure column widths
    let colWidths = [];
    for (let row of table) {
      let i = 0;
      for (let item of row) {
        colWidths[i] = Math.max(colWidths[i] || 0, stringWidth(item));
        i++;
      }
    }
    // Render rows
    for (let row of table) {
      let items = row.map((item, i) => {
        // Add padding between columns unless the alignment is the opposite to the
        // next column and pad to the column width.
        let padding =
          !columns[i + 1] || columns[i + 1].align === columns[i].align ? 4 : 0;
        return pad(item, colWidths[i] + padding, columns[i].align);
      });
      this.log(items.join(''));
    }
  }
}
// Pad a string with spaces on either side
// Pad `text` with spaces to reach `length` visible characters. `align`
// chooses the side the padding goes on: 'left' (default, pad appended)
// or 'right' (pad prepended).
function pad(text, length, align = 'left') {
  // Clamp at zero: a text wider than the target length would otherwise
  // make String.prototype.repeat throw a RangeError on a negative count.
  let pad = ' '.repeat(Math.max(0, length - stringWidth(text)));
  if (align === 'right') {
    return pad + text;
  }
  return text + pad;
}
// Count visible characters in a string
// Count visible characters (grapheme clusters) in a string, ignoring
// ANSI escape sequences.
function stringWidth(string) {
  const visible = stripAnsi('' + string);
  return countBreaks(visible);
}
// If we are in a worker, make a proxy class which will
// send the logger calls to the main process via IPC.
// These are handled in WorkerFarm and directed to handleMessage above.
// If we are in a worker, make a proxy class which will
// send the logger calls to the main process via IPC.
// These are handled in WorkerFarm and directed to handleMessage above.
if (WorkerFarm.isWorker()) {
  class LoggerProxy {}
  // Mirror every Logger method with a fire-and-forget IPC call; the
  // trailing `false` tells callMaster not to await a response.
  for (let method of Object.getOwnPropertyNames(Logger.prototype)) {
    LoggerProxy.prototype[method] = (...args) => {
      WorkerFarm.callMaster(
        {
          location: __filename,
          method,
          args
        },
        false
      );
    };
  }
  module.exports = new LoggerProxy();
} else {
  // Main process: export a shared singleton Logger with default options.
  module.exports = new Logger();
}

8
BACK_BACK/node_modules/@parcel/logger/src/emoji.js generated vendored Executable file
View file

@ -0,0 +1,8 @@
// Emoji are assumed renderable everywhere except the classic Windows
// console; an xterm-256color TERM (e.g. mintty) opts back in on win32.
const onLegacyWindowsConsole =
  process.platform === 'win32' && process.env.TERM !== 'xterm-256color';
const supportsEmoji = !onLegacyWindowsConsole;

// Fallback symbols for Windows from https://en.wikipedia.org/wiki/Code_page_437
exports.progress = supportsEmoji ? '⏳' : '∞';
exports.success = supportsEmoji ? '✨' : '√';
exports.error = supportsEmoji ? '🚨' : '×';
exports.warning = supportsEmoji ? '⚠️' : '‼';

24
BACK_BACK/node_modules/@parcel/logger/src/prettyError.js generated vendored Executable file
View file

@ -0,0 +1,24 @@
module.exports = function(err, opts = {}) {
let message = typeof err === 'string' ? err : err.message;
if (!message) {
message = 'Unknown error';
}
if (err.fileName) {
let fileName = err.fileName;
if (err.loc) {
fileName += `:${err.loc.line}:${err.loc.column}`;
}
message = `${fileName}: ${message}`;
}
let stack;
if (err.codeFrame) {
stack = (opts.color && err.highlightedCodeFrame) || err.codeFrame;
} else if (err.stack) {
stack = err.stack.slice(err.stack.indexOf('\n') + 1);
}
return {message, stack};
};

8
BACK_BACK/node_modules/@parcel/logger/test/.babelrc generated vendored Executable file
View file

@ -0,0 +1,8 @@
{
"presets": [["@babel/preset-env", {
"targets": {
"node": "current"
}
}]],
"plugins": ["@babel/plugin-transform-runtime"]
}

6
BACK_BACK/node_modules/@parcel/logger/test/.eslintrc.json generated vendored Executable file
View file

@ -0,0 +1,6 @@
{
"extends": "../../../../.eslintrc.json",
"env": {
"mocha": true
}
}

123
BACK_BACK/node_modules/@parcel/logger/test/logger.js generated vendored Executable file
View file

@ -0,0 +1,123 @@
const assert = require('assert');
const sinon = require('sinon');
const Logger = require('../src/Logger');
describe('Logger', () => {
let log;
beforeEach(function() {
log = [];
});
const stub = instance => {
sinon.stub(instance, '_log').callsFake(message => {
log.push(message);
});
};
it('should log message on write', () => {
const l = new Logger.constructor({});
stub(l);
l.write('hello');
assert.equal(log[0], 'hello');
});
it('should track number of lines on persist false', () => {
const l = new Logger.constructor({});
stub(l);
const count = l.lines;
l.write('hello\nworld', false);
assert.equal(l.lines, count + 2);
});
it('should not track number of lines on persist true', () => {
const l = new Logger.constructor({});
stub(l);
const count = l.lines;
l.write('hello\nworld', true);
assert.equal(l.lines, count);
});
it('should respect log levels', () => {
const l = new Logger.constructor({logLevel: 2, color: false});
stub(l);
l.log('message');
l.persistent('message');
l.progress('message');
l.logLevel = 1;
l.warn('message');
l.logLevel = 0;
l.error({message: 'message', stack: 'stack'});
assert.equal(log.length, 0);
l.logLevel = 1;
l.error({message: 'message', stack: 'stack'});
assert.equal(log.length, 2);
l.logLevel = 2;
l.warn('message');
assert.equal(log.length, 3);
l.logLevel = 3;
l.log('message');
l.persistent('message');
l.progress('message');
assert.equal(log.length, 5);
});
it('should handle lack of color support with alternatives', () => {
const l = new Logger.constructor({color: false});
stub(l);
// clear is a no-op
l.lines = 4;
l.clear();
assert.equal(l.lines, 4);
});
it('should reset on clear', () => {
const l = new Logger.constructor({color: true, isTest: false});
stub(l);
// stub readline so we don't actually clear the test output
const sandbox = sinon.createSandbox();
sandbox.stub(require('readline'));
l.lines = 10;
l.clear();
assert.equal(l.lines, 0);
sandbox.restore();
});
it('should use ora for progress', () => {
const l = new Logger.constructor({color: false});
l.progress('message');
assert(l.spinner);
assert(l.spinner.text.includes('message'));
});
it('should use internal _log function for writes', () => {
const l = new Logger.constructor({color: false});
const sandbox = sinon.createSandbox(); // use sandbox to silence console.log
let spy;
try {
spy = sandbox.spy(l, '_log');
sandbox.stub(console, 'log');
l.write('hello world');
} finally {
l._log.restore();
sandbox.restore();
}
assert(spy.called);
});
});

2
BACK_BACK/node_modules/@parcel/logger/test/mocha.opts generated vendored Executable file
View file

@ -0,0 +1,2 @@
--require @parcel/babel-register
--exit

104
BACK_BACK/node_modules/@parcel/logger/test/prettyError.js generated vendored Executable file
View file

@ -0,0 +1,104 @@
const assert = require('assert');
const prettyError = require('../src/prettyError');
const message = 'Error Message!';
const fileName = 'Test.js';
const codeFrame = '<code>frame</code>';
const stack =
'Error: Uh-oh. Something went wrong. Line 88. \n Oh no. Something else went wrong. Line 77 \n';
describe('prettyError', () => {
it('should handle passing error as string', () => {
const err = prettyError(message);
assert.equal(err.message, message);
assert.equal(err.stack, undefined);
});
it('should handle passing error as object', () => {
const err = prettyError({message});
assert.equal(err.message, message);
assert.equal(err.stack, undefined);
});
it('should handle unknown input', () => {
const err = prettyError(Number.NaN);
assert(err.message.length); // non-empty error message
assert.equal(err.stack, undefined);
});
it('should prepend fileName', () => {
const err = prettyError({
message,
fileName
});
assert(err.message.startsWith(fileName));
assert.equal(err.stack, undefined);
});
it('should prepend line and column location', () => {
const err = prettyError({
message,
fileName,
loc: {
line: 1,
column: 10
}
});
assert(err.message.startsWith(`${fileName}:1:10`));
assert.equal(err.stack, undefined);
});
it('should support providing a codeFrame as stack', () => {
const err = prettyError({
message,
stack,
codeFrame: codeFrame
});
assert.equal(err.message, message);
assert.equal(err.stack, codeFrame);
});
it('should support highlightedCodeFrame when opts.color is true', () => {
let err = prettyError(
{
message,
stack,
codeFrame: '<not>a code frame</not>',
highlightedCodeFrame: codeFrame
},
{color: true}
);
assert.equal(err.message, message);
assert.equal(err.stack, codeFrame);
err = prettyError(
{
message,
stack,
codeFrame: codeFrame,
highlightedCodeFrame: '<not>a code frame</not>'
},
{color: false}
);
assert.equal(err.message, message);
assert.equal(err.stack, codeFrame);
});
it('should support stack', () => {
const err = prettyError({
message,
stack
});
assert.equal(err.message, message);
assert(err.stack.includes('Line'));
});
});

9578
BACK_BACK/node_modules/@parcel/logger/yarn-error.log generated vendored Executable file

File diff suppressed because it is too large Load diff

21
BACK_BACK/node_modules/@parcel/utils/LICENSE generated vendored Executable file
View file

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2017-present Devon Govett
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

10
BACK_BACK/node_modules/@parcel/utils/index.js generated vendored Executable file
View file

@ -0,0 +1,10 @@
// Node 8 supports native async functions - no need to use compiled code!
exports.promisify =
parseInt(process.versions.node, 10) < 8
? require('./lib/promisify')
: require('./src/promisify');
exports.errorUtils =
parseInt(process.versions.node, 10) < 8
? require('./lib/errorUtils')
: require('./src/errorUtils');

26
BACK_BACK/node_modules/@parcel/utils/package.json generated vendored Executable file
View file

@ -0,0 +1,26 @@
{
"name": "@parcel/utils",
"version": "1.11.0",
"description": "Blazing fast, zero configuration web application bundler",
"main": "index.js",
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/parcel-bundler/parcel.git"
},
"engines": {
"node": ">= 6.0.0"
},
"publishConfig": {
"access": "public"
},
"scripts": {
"test": "echo this package has no tests yet",
"test-ci": "yarn build && yarn test",
"format": "prettier --write \"./{src,bin,test}/**/*.{js,json,md}\"",
"lint": "eslint . && prettier \"./{src,bin,test}/**/*.{js,json,md}\" --list-different",
"build": "babel src -d lib",
"prepublish": "yarn build"
},
"gitHead": "34eb91e8e6991073e594bff731c333d09b0403b5"
}

8
BACK_BACK/node_modules/@parcel/utils/src/.babelrc generated vendored Executable file
View file

@ -0,0 +1,8 @@
{
"presets": [["@babel/preset-env", {
"targets": {
"node": "6"
}
}]],
"plugins": ["@babel/plugin-transform-runtime"]
}

3
BACK_BACK/node_modules/@parcel/utils/src/.eslintrc.json generated vendored Executable file
View file

@ -0,0 +1,3 @@
{
"extends": "../../../../.eslintrc.json"
}

31
BACK_BACK/node_modules/@parcel/utils/src/errorUtils.js generated vendored Executable file
View file

@ -0,0 +1,31 @@
// Serialize an error into a plain JSON-safe object (e.g. to cross an IPC
// boundary). Strings become {message}; Error instances keep message,
// stack and name plus any own enumerable properties (such as codeFrame);
// any other input yields undefined.
function errorToJson(error) {
  if (typeof error === 'string') {
    return {message: error};
  }
  if (error instanceof Error) {
    const jsonError = {
      message: error.message,
      stack: error.stack,
      name: error.name
    };
    // Add all custom codeFrame properties
    for (const key of Object.keys(error)) {
      jsonError[key] = error[key];
    }
    return jsonError;
  }
}
// Rebuild an Error from the JSON shape produced by errorToJson, copying
// every serialized property (message, stack, name, custom fields) onto
// the new instance. Falsy input yields undefined.
function jsonToError(json) {
  if (!json) {
    return;
  }
  const error = new Error(json.message);
  for (const [key, value] of Object.entries(json)) {
    error[key] = value;
  }
  return error;
}
// Public API: paired serializer/deserializer for transferring errors.
exports.errorToJson = errorToJson;
exports.jsonToError = jsonToError;

13
BACK_BACK/node_modules/@parcel/utils/src/promisify.js generated vendored Executable file
View file

@ -0,0 +1,13 @@
module.exports = function(fn) {
return function(...args) {
return new Promise(function(resolve, reject) {
fn(...args, function(err, ...res) {
if (err) return reject(err);
if (res.length === 1) return resolve(res[0]);
resolve(res);
});
});
};
};

13
BACK_BACK/node_modules/@parcel/watcher/.babelrc generated vendored Executable file
View file

@ -0,0 +1,13 @@
{
"presets": [
[
"@babel/preset-env",
{
"targets": {
"node": "6"
}
}
]
],
"plugins": ["@babel/plugin-transform-runtime"]
}

21
BACK_BACK/node_modules/@parcel/watcher/LICENSE generated vendored Executable file
View file

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2017-present Devon Govett
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

5
BACK_BACK/node_modules/@parcel/watcher/index.js generated vendored Executable file
View file

@ -0,0 +1,5 @@
// Entry point: choose the build variant based on the running Node version.
// Node >= 8 ships native async/await, so the untranspiled source in src/
// can be used directly; older runtimes get the Babel-compiled code in lib/.
const majorNodeVersion = parseInt(process.versions.node, 10);
module.exports =
  majorNodeVersion >= 8 ? require('./src/Watcher') : require('./lib/Watcher');

415
BACK_BACK/node_modules/@parcel/watcher/lib/Watcher.js generated vendored Executable file
View file

@ -0,0 +1,415 @@
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
const fork = require('child_process').fork;
const optionsTransfer = require('./options');
const Path = require('path');
const _require = require('events'),
EventEmitter = _require.EventEmitter;
const _require2 = require('@parcel/utils'),
errorUtils = _require2.errorUtils;
/**
* This watcher wraps chokidar so that we watch directories rather than individual files on macOS.
* This prevents us from hitting EMFILE errors when running out of file descriptors.
* Chokidar does not have support for watching directories on non-macOS platforms, so we disable
* this behavior in order to prevent watching more individual files than necessary (e.g. node_modules).
*/
class Watcher extends EventEmitter {
constructor(options = {
// FS events on macOS are flakey in the tests, which write lots of files very quickly
// See https://github.com/paulmillr/chokidar/issues/612
useFsEvents: process.platform === 'darwin' && process.env.NODE_ENV !== 'test',
ignoreInitial: true,
ignorePermissionErrors: true,
ignored: /(^|[/\\])\.(git|cache)/
}) {
super();
this.options = optionsTransfer.encode(options);
this.watchedPaths = new Set();
this.child = null;
this.ready = false;
this.readyQueue = [];
this.watchedDirectories = new Map();
this.stopped = false;
this.on('ready', () => {
this.ready = true;
var _iteratorNormalCompletion = true;
var _didIteratorError = false;
var _iteratorError = undefined;
try {
for (var _iterator = this.readyQueue[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
let func = _step.value;
func();
}
} catch (err) {
_didIteratorError = true;
_iteratorError = err;
} finally {
try {
if (!_iteratorNormalCompletion && _iterator.return != null) {
_iterator.return();
}
} finally {
if (_didIteratorError) {
throw _iteratorError;
}
}
}
this.readyQueue = [];
});
this.startchild();
}
startchild() {
if (this.child) return;
let filteredArgs = process.execArgv.filter(v => !/^--(debug|inspect)/.test(v));
let options = {
execArgv: filteredArgs,
env: process.env,
cwd: process.cwd()
};
this.child = fork(Path.join(__dirname, 'child'), process.argv, options);
if (this.watchedPaths.size > 0) {
this.sendCommand('add', [Array.from(this.watchedPaths)]);
}
this.child.send({
type: 'init',
options: this.options
});
this.child.on('message', msg => this.handleEmit(msg.event, msg.path));
this.child.on('error', () => {});
this.child.on('exit', () => this.handleClosed()); // this.child.on('close', () => this.handleClosed());
}
handleClosed() {
if (!this.stopped) {
// Restart the child
this.child = null;
this.ready = false;
this.startchild();
}
this.emit('childDead');
}
handleEmit(event, data) {
if (event === 'watcherError') {
data = errorUtils.jsonToError(data);
}
this.emit(event, data);
}
sendCommand(func, args) {
if (!this.ready) {
return this.readyQueue.push(() => this.sendCommand(func, args));
}
this.child.send({
type: 'function',
name: func,
args: args
});
}
_addPath(path) {
if (!this.watchedPaths.has(path)) {
this.watchedPaths.add(path);
return true;
}
}
add(paths) {
let added = false;
if (Array.isArray(paths)) {
var _iteratorNormalCompletion2 = true;
var _didIteratorError2 = false;
var _iteratorError2 = undefined;
try {
for (var _iterator2 = paths[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) {
let path = _step2.value;
added = !added ? this._addPath(path) : true;
}
} catch (err) {
_didIteratorError2 = true;
_iteratorError2 = err;
} finally {
try {
if (!_iteratorNormalCompletion2 && _iterator2.return != null) {
_iterator2.return();
}
} finally {
if (_didIteratorError2) {
throw _iteratorError2;
}
}
}
} else {
added = this._addPath(paths);
}
if (added) this.sendCommand('add', [paths]);
}
_closePath(path) {
if (this.watchedPaths.has(path)) {
this.watchedPaths.delete(path);
}
this.sendCommand('_closePath', [path]);
}
_emulateChildDead() {
if (!this.child) {
return;
}
this.child.send({
type: 'die'
});
}
_emulateChildError() {
if (!this.child) {
return;
}
this.child.send({
type: 'emulate_error'
});
}
getWatched() {
let watchList = {};
var _iteratorNormalCompletion3 = true;
var _didIteratorError3 = false;
var _iteratorError3 = undefined;
try {
for (var _iterator3 = this.watchedPaths[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) {
let path = _step3.value;
let key = this.options.cwd ? Path.relative(this.options.cwd, path) : path;
watchList[key || '.'] = [];
}
} catch (err) {
_didIteratorError3 = true;
_iteratorError3 = err;
} finally {
try {
if (!_iteratorNormalCompletion3 && _iterator3.return != null) {
_iterator3.return();
}
} finally {
if (_didIteratorError3) {
throw _iteratorError3;
}
}
}
return watchList;
}
/**
* Find a parent directory of `path` which is already watched
*/
getWatchedParent(path) {
path = Path.dirname(path);
let root = Path.parse(path).root;
while (path !== root) {
if (this.watchedDirectories.has(path)) {
return path;
}
path = Path.dirname(path);
}
return null;
}
/**
* Find a list of child directories of `path` which are already watched
*/
getWatchedChildren(path) {
path = Path.dirname(path) + Path.sep;
let res = [];
var _iteratorNormalCompletion4 = true;
var _didIteratorError4 = false;
var _iteratorError4 = undefined;
try {
for (var _iterator4 = this.watchedDirectories.keys()[Symbol.iterator](), _step4; !(_iteratorNormalCompletion4 = (_step4 = _iterator4.next()).done); _iteratorNormalCompletion4 = true) {
let dir = _step4.value;
if (dir.startsWith(path)) {
res.push(dir);
}
}
} catch (err) {
_didIteratorError4 = true;
_iteratorError4 = err;
} finally {
try {
if (!_iteratorNormalCompletion4 && _iterator4.return != null) {
_iterator4.return();
}
} finally {
if (_didIteratorError4) {
throw _iteratorError4;
}
}
}
return res;
}
/**
* Add a path to the watcher
*/
watch(path) {
if (this.shouldWatchDirs) {
// If there is no parent directory already watching this path, add a new watcher.
let parent = this.getWatchedParent(path);
if (!parent) {
// Find watchers on child directories, and remove them. They will be handled by the new parent watcher.
let children = this.getWatchedChildren(path);
let count = 1;
var _iteratorNormalCompletion5 = true;
var _didIteratorError5 = false;
var _iteratorError5 = undefined;
try {
for (var _iterator5 = children[Symbol.iterator](), _step5; !(_iteratorNormalCompletion5 = (_step5 = _iterator5.next()).done); _iteratorNormalCompletion5 = true) {
let dir = _step5.value;
count += this.watchedDirectories.get(dir);
this._closePath(dir);
this.watchedDirectories.delete(dir);
}
} catch (err) {
_didIteratorError5 = true;
_iteratorError5 = err;
} finally {
try {
if (!_iteratorNormalCompletion5 && _iterator5.return != null) {
_iterator5.return();
}
} finally {
if (_didIteratorError5) {
throw _iteratorError5;
}
}
}
let dir = Path.dirname(path);
this.add(dir);
this.watchedDirectories.set(dir, count);
} else {
// Otherwise, increment the reference count of the parent watcher.
this.watchedDirectories.set(parent, this.watchedDirectories.get(parent) + 1);
}
} else {
this.add(path);
}
}
_unwatch(paths) {
let removed = false;
if (Array.isArray(paths)) {
var _iteratorNormalCompletion6 = true;
var _didIteratorError6 = false;
var _iteratorError6 = undefined;
try {
for (var _iterator6 = paths[Symbol.iterator](), _step6; !(_iteratorNormalCompletion6 = (_step6 = _iterator6.next()).done); _iteratorNormalCompletion6 = true) {
let p = _step6.value;
removed = !removed ? this.watchedPaths.delete(p) : true;
}
} catch (err) {
_didIteratorError6 = true;
_iteratorError6 = err;
} finally {
try {
if (!_iteratorNormalCompletion6 && _iterator6.return != null) {
_iterator6.return();
}
} finally {
if (_didIteratorError6) {
throw _iteratorError6;
}
}
}
} else {
removed = this.watchedPaths.delete(paths);
}
if (removed) this.sendCommand('unwatch', [paths]);
}
/**
* Remove a path from the watcher
*/
unwatch(path) {
if (this.shouldWatchDirs) {
let dir = this.getWatchedParent(path);
if (dir) {
// When the count of files watching a directory reaches zero, unwatch it.
let count = this.watchedDirectories.get(dir) - 1;
if (count === 0) {
this.watchedDirectories.delete(dir);
this._unwatch(dir);
} else {
this.watchedDirectories.set(dir, count);
}
}
} else {
this._unwatch(path);
}
}
  /**
   * Stop watching all paths
   *
   * Marks the watcher as stopped so the exit handler does not respawn the
   * child, kills the child process, and resolves once `childDead` is emitted.
   */
  stop() {
    var _this = this;
    // Compiled async method: the lib/ build targets Node versions without
    // native async/await, hence the generator + _asyncToGenerator wrapper.
    return (0, _asyncToGenerator2.default)(function* () {
      _this.stopped = true;
      if (_this.child) {
        _this.child.kill();
        // Resolves when the child's exit handler emits `childDead`.
        return new Promise(resolve => _this.once('childDead', resolve));
      }
    })();
  }
}
module.exports = Watcher;

61
BACK_BACK/node_modules/@parcel/watcher/lib/child.js generated vendored Executable file
View file

@ -0,0 +1,61 @@
"use strict";
const _require = require('chokidar'),
FSWatcher = _require.FSWatcher;
const _require2 = require('@parcel/utils'),
errorUtils = _require2.errorUtils;
const optionsTransfer = require('./options');
let watcher;
function sendEvent(event, path) {
  // Forward a watcher event to the parent process over the IPC channel.
  const message = {
    event: event,
    path: path
  };
  process.send(message);
}
function handleError(e) {
  // Error objects do not survive IPC serialization; send a JSON form.
  const serialized = errorUtils.errorToJson(e);
  sendEvent('watcherError', serialized);
}
function init(options) {
  // Rebuild RegExp instances that were stringified for the IPC transfer.
  const decoded = optionsTransfer.decode(options);
  watcher = new FSWatcher(decoded);
  watcher.on('all', sendEvent);
  // Tell the parent we are ready to accept commands.
  sendEvent('ready');
}
function executeFunction(functionName, args) {
  // Proxy a method call from the parent onto the chokidar instance;
  // report failures to the parent instead of crashing the child.
  try {
    watcher[functionName].apply(watcher, args);
  } catch (e) {
    handleError(e);
  }
}
process.on('message', msg => {
  // Dispatch parent commands by message type; unknown types are ignored.
  const type = msg.type;
  if (type === 'init') {
    init(msg.options);
  } else if (type === 'function') {
    executeFunction(msg.name, msg.args);
  } else if (type === 'die') {
    process.exit();
  } else if (type === 'emulate_error') {
    throw new Error('this is an emulated error');
  }
});
// Report any child-side error to the parent rather than dying silently.
process.on('error', handleError);
process.on('uncaughtException', handleError);
// If the parent disconnects there is nobody left to report to; exit.
process.on('disconnect', () => {
  process.exit();
});

66
BACK_BACK/node_modules/@parcel/watcher/lib/options.js generated vendored Executable file
View file

@ -0,0 +1,66 @@
"use strict";
function type(options) {
  // Tag-based type name: "[object X]" -> "X" (e.g. "Array", "RegExp").
  const tag = Object.prototype.toString.call(options);
  return tag.slice(8, -1);
}
function encode(options) {
  // Prepare watcher options for IPC: RegExp values inside `ignored`
  // cannot be serialized, so they are replaced by their source strings
  // and their positions recorded in `_regIndexs` for decode().
  // Mutates and returns the same object.
  if (options && options.ignored) {
    if (type(options.ignored) !== 'Array') {
      // Normalize a single ignored entry into an array.
      options.ignored = [options.ignored];
    }
    options.ignored.forEach((value, index) => {
      if (type(value) !== 'RegExp') return;
      options.ignored[index] = value.source;
      if (!options._regIndexs) {
        options._regIndexs = [];
      }
      options._regIndexs.push(index);
    });
  }
  return options;
}
function decode(options) {
  // Reverse encode(): rebuild RegExp objects from their source strings at
  // the recorded indices, then drop the bookkeeping key. Mutates in place.
  if (options && options.ignored && options._regIndexs) {
    for (let index of options._regIndexs) {
      options.ignored[index] = new RegExp(options.ignored[index]);
    }
    delete options._regIndexs;
  }
  return options;
}
exports.encode = encode;
exports.decode = decode;

33
BACK_BACK/node_modules/@parcel/watcher/package.json generated vendored Executable file
View file

@ -0,0 +1,33 @@
{
"name": "@parcel/watcher",
"version": "1.12.1",
"description": "Blazing fast, zero configuration web application bundler",
"main": "index.js",
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/parcel-bundler/parcel.git"
},
"publishConfig": {
"access": "public"
},
"scripts": {
"test": "cross-env NODE_ENV=test mocha",
"test-ci": "yarn build && yarn test",
"format": "prettier --write \"./{src,bin,test}/**/*.{js,json,md}\"",
"lint": "eslint . && prettier \"./{src,bin,test}/**/*.{js,json,md}\" --list-different",
"build": "babel src -d lib",
"prepublish": "yarn build"
},
"dependencies": {
"@parcel/utils": "^1.11.0",
"chokidar": "^2.1.5"
},
"devDependencies": {
"@parcel/babel-register": "^1.11.1",
"@parcel/fs": "^1.11.0",
"@parcel/test-utils": "^1.12.0",
"mocha": "^5.2.0"
},
"gitHead": "d9ec7af22f85134dc1a97fe00f35950f2fe1f57a"
}

275
BACK_BACK/node_modules/@parcel/watcher/src/Watcher.js generated vendored Executable file
View file

@ -0,0 +1,275 @@
const fork = require('child_process').fork;
const optionsTransfer = require('./options');
const Path = require('path');
const {EventEmitter} = require('events');
const {errorUtils} = require('@parcel/utils');
/**
* This watcher wraps chokidar so that we watch directories rather than individual files on macOS.
* This prevents us from hitting EMFILE errors when running out of file descriptors.
* Chokidar does not have support for watching directories on non-macOS platforms, so we disable
* this behavior in order to prevent watching more individual files than necessary (e.g. node_modules).
*/
class Watcher extends EventEmitter {
  constructor(
    options = {
      // FS events on macOS are flakey in the tests, which write lots of files very quickly
      // See https://github.com/paulmillr/chokidar/issues/612
      useFsEvents:
        process.platform === 'darwin' && process.env.NODE_ENV !== 'test',
      ignoreInitial: true,
      ignorePermissionErrors: true,
      ignored: /(^|[/\\])\.(git|cache)/
    }
  ) {
    super();
    // Encode options so RegExp values survive the IPC boundary.
    this.options = optionsTransfer.encode(options);
    this.watchedPaths = new Set();
    this.child = null;
    this.ready = false;
    this.readyQueue = [];
    // directory -> number of watched files it covers (dir-watch mode).
    this.watchedDirectories = new Map();
    this.stopped = false;
    // NOTE(review): `this.shouldWatchDirs` is read by watch()/unwatch()
    // but never assigned anywhere in this class, so the directory-watching
    // branch is currently inert — confirm whether it should be set here.
    this.on('ready', () => {
      this.ready = true;
      // Flush commands queued while the child process was starting.
      for (let func of this.readyQueue) {
        func();
      }
      this.readyQueue = [];
    });
    this.startchild();
  }
  /**
   * Fork the chokidar child process and replay the current watch state.
   * No-op if a child already exists.
   */
  startchild() {
    if (this.child) return;
    // Strip debugger flags so the child doesn't fight over the inspector port.
    let filteredArgs = process.execArgv.filter(
      v => !/^--(debug|inspect)/.test(v)
    );
    let options = {
      execArgv: filteredArgs,
      env: process.env,
      cwd: process.cwd()
    };
    this.child = fork(Path.join(__dirname, 'child'), process.argv, options);
    if (this.watchedPaths.size > 0) {
      // Re-add previously watched paths after a child restart.
      this.sendCommand('add', [Array.from(this.watchedPaths)]);
    }
    this.child.send({
      type: 'init',
      options: this.options
    });
    this.child.on('message', msg => this.handleEmit(msg.event, msg.path));
    this.child.on('error', () => {});
    this.child.on('exit', () => this.handleClosed());
  }
  /**
   * Called when the child exits; respawns it unless stop() was requested.
   */
  handleClosed() {
    if (!this.stopped) {
      // Restart the child
      this.child = null;
      this.ready = false;
      this.startchild();
    }
    this.emit('childDead');
  }
  /**
   * Re-emit a child event locally, rehydrating serialized errors.
   */
  handleEmit(event, data) {
    if (event === 'watcherError') {
      data = errorUtils.jsonToError(data);
    }
    this.emit(event, data);
  }
  /**
   * Invoke a method on the child's chokidar instance; commands issued
   * before the child reports 'ready' are queued and flushed later.
   */
  sendCommand(func, args) {
    if (!this.ready) {
      return this.readyQueue.push(() => this.sendCommand(func, args));
    }
    this.child.send({
      type: 'function',
      name: func,
      args: args
    });
  }
  // Returns true when the path was not yet watched; undefined otherwise.
  _addPath(path) {
    if (!this.watchedPaths.has(path)) {
      this.watchedPaths.add(path);
      return true;
    }
  }
  /**
   * Start watching one path or an array of paths.
   * Only notifies the child when at least one path was new.
   */
  add(paths) {
    let added = false;
    if (Array.isArray(paths)) {
      for (let path of paths) {
        // Register every path (previously, paths after the first new one
        // were skipped and never entered `watchedPaths`).
        added = this._addPath(path) || added;
      }
    } else {
      added = this._addPath(paths);
    }
    if (added) this.sendCommand('add', [paths]);
  }
  _closePath(path) {
    if (this.watchedPaths.has(path)) {
      this.watchedPaths.delete(path);
    }
    this.sendCommand('_closePath', [path]);
  }
  // Test hook: ask the child process to exit.
  _emulateChildDead() {
    if (!this.child) {
      return;
    }
    this.child.send({
      type: 'die'
    });
  }
  // Test hook: make the child throw an error.
  _emulateChildError() {
    if (!this.child) {
      return;
    }
    this.child.send({
      type: 'emulate_error'
    });
  }
  /**
   * Chokidar-compatible view of the watched paths (values always empty).
   */
  getWatched() {
    let watchList = {};
    for (let path of this.watchedPaths) {
      let key = this.options.cwd ? Path.relative(this.options.cwd, path) : path;
      watchList[key || '.'] = [];
    }
    return watchList;
  }
  /**
   * Find a parent directory of `path` which is already watched
   */
  getWatchedParent(path) {
    path = Path.dirname(path);
    let root = Path.parse(path).root;
    while (path !== root) {
      if (this.watchedDirectories.has(path)) {
        return path;
      }
      path = Path.dirname(path);
    }
    return null;
  }
  /**
   * Find a list of child directories of `path` which are already watched
   */
  getWatchedChildren(path) {
    path = Path.dirname(path) + Path.sep;
    let res = [];
    for (let dir of this.watchedDirectories.keys()) {
      if (dir.startsWith(path)) {
        res.push(dir);
      }
    }
    return res;
  }
  /**
   * Add a path to the watcher
   */
  watch(path) {
    if (this.shouldWatchDirs) {
      // If there is no parent directory already watching this path, add a new watcher.
      let parent = this.getWatchedParent(path);
      if (!parent) {
        // Find watchers on child directories, and remove them. They will be handled by the new parent watcher.
        let children = this.getWatchedChildren(path);
        let count = 1;
        for (let dir of children) {
          count += this.watchedDirectories.get(dir);
          this._closePath(dir);
          this.watchedDirectories.delete(dir);
        }
        let dir = Path.dirname(path);
        this.add(dir);
        this.watchedDirectories.set(dir, count);
      } else {
        // Otherwise, increment the reference count of the parent watcher.
        this.watchedDirectories.set(
          parent,
          this.watchedDirectories.get(parent) + 1
        );
      }
    } else {
      this.add(path);
    }
  }
  /**
   * Remove path(s) from the watched set; only notifies the child when at
   * least one path was actually being watched.
   */
  _unwatch(paths) {
    let removed = false;
    if (Array.isArray(paths)) {
      for (let p of paths) {
        // Delete every path (previously, paths after the first hit were
        // left behind in `watchedPaths`).
        removed = this.watchedPaths.delete(p) || removed;
      }
    } else {
      removed = this.watchedPaths.delete(paths);
    }
    if (removed) this.sendCommand('unwatch', [paths]);
  }
  /**
   * Remove a path from the watcher
   */
  unwatch(path) {
    if (this.shouldWatchDirs) {
      let dir = this.getWatchedParent(path);
      if (dir) {
        // When the count of files watching a directory reaches zero, unwatch it.
        let count = this.watchedDirectories.get(dir) - 1;
        if (count === 0) {
          this.watchedDirectories.delete(dir);
          this._unwatch(dir);
        } else {
          this.watchedDirectories.set(dir, count);
        }
      }
    } else {
      this._unwatch(path);
    }
  }
  /**
   * Stop watching all paths
   */
  async stop() {
    this.stopped = true;
    if (this.child) {
      this.child.kill();
      // Resolves once handleClosed() emits `childDead`.
      return new Promise(resolve => this.once('childDead', resolve));
    }
  }
}
module.exports = Watcher;

52
BACK_BACK/node_modules/@parcel/watcher/src/child.js generated vendored Executable file
View file

@ -0,0 +1,52 @@
const {FSWatcher} = require('chokidar');
const {errorUtils} = require('@parcel/utils');
const optionsTransfer = require('./options');
let watcher;
function sendEvent(event, path) {
  // Relay a watcher event to the parent over the IPC channel.
  process.send({event, path});
}
function handleError(e) {
  // Errors can't be structured-cloned over IPC; send a JSON form instead.
  const payload = errorUtils.errorToJson(e);
  sendEvent('watcherError', payload);
}
function init(options) {
  // Restore the RegExps that encode() stringified, then start chokidar
  // and tell the parent we are ready for commands.
  watcher = new FSWatcher(optionsTransfer.decode(options));
  watcher.on('all', sendEvent);
  sendEvent('ready');
}
function executeFunction(functionName, args) {
  // Proxy a method call from the parent onto the chokidar instance;
  // failures are reported back instead of crashing the child.
  try {
    watcher[functionName].apply(watcher, args);
  } catch (e) {
    handleError(e);
  }
}
// Command dispatch table for messages coming from the parent process.
// Unknown message types are silently ignored, as before.
const handlers = new Map([
  ['init', msg => init(msg.options)],
  ['function', msg => executeFunction(msg.name, msg.args)],
  ['die', () => process.exit()],
  [
    'emulate_error',
    () => {
      throw new Error('this is an emulated error');
    }
  ]
]);
process.on('message', msg => {
  const handler = handlers.get(msg.type);
  if (handler) {
    handler(msg);
  }
});
// Report any child-side error to the parent rather than dying silently.
process.on('error', handleError);
process.on('uncaughtException', handleError);
// If the parent disconnects there is nobody left to report to; exit.
process.on('disconnect', () => {
  process.exit();
});

39
BACK_BACK/node_modules/@parcel/watcher/src/options.js generated vendored Executable file
View file

@ -0,0 +1,39 @@
function type(options) {
  // Tag-based type name: "[object X]" -> "X" (e.g. "Array", "RegExp").
  return Object.prototype.toString.call(options).slice(8, -1);
}
function encode(options) {
  // Make `options` IPC-safe: RegExps in `ignored` become their source
  // strings, with their indices recorded in `_regIndexs` so decode()
  // can restore them on the other side. Mutates and returns `options`.
  if (!options || !options.ignored) {
    return options;
  }
  if (type(options.ignored) !== 'Array') {
    // Normalize a single ignored entry into an array.
    options.ignored = [options.ignored];
  }
  for (let index = 0; index < options.ignored.length; index++) {
    const value = options.ignored[index];
    if (type(value) === 'RegExp') {
      options.ignored[index] = value.source;
      if (!options._regIndexs) {
        options._regIndexs = [];
      }
      options._regIndexs.push(index);
    }
  }
  return options;
}
function decode(options) {
  // Reverse encode(): rebuild RegExp objects from their source strings at
  // the recorded indices, then drop the bookkeeping key. Mutates in place.
  if (options && options.ignored && options._regIndexs) {
    options._regIndexs.forEach(index => {
      options.ignored[index] = new RegExp(options.ignored[index]);
    });
    delete options._regIndexs;
  }
  return options;
}
exports.encode = encode;
exports.decode = decode;

8
BACK_BACK/node_modules/@parcel/watcher/test/.babelrc generated vendored Executable file
View file

@ -0,0 +1,8 @@
{
"presets": [["@babel/preset-env", {
"targets": {
"node": "current"
}
}]],
"plugins": ["@babel/plugin-transform-runtime"]
}

6
BACK_BACK/node_modules/@parcel/watcher/test/.eslintrc.json generated vendored Executable file
View file

@ -0,0 +1,6 @@
{
"extends": "../../../../.eslintrc.json",
"env": {
"mocha": true
}
}

104
BACK_BACK/node_modules/@parcel/watcher/test/changeEvent.js generated vendored Executable file
View file

@ -0,0 +1,104 @@
const Watcher = require('../index');
const fs = require('@parcel/fs');
const path = require('path');
const assert = require('assert');
const {sleep} = require('@parcel/test-utils');
describe('change event', function() {
  let tmpFolder = path.join(__dirname, './tmp/');
  before(() => {
    // Return the promise so mocha waits for the directory to exist before
    // the first test runs (previously the promise was fire-and-forget,
    // racing directory creation against the first writeFile).
    return fs.mkdirp(tmpFolder);
  });
  it('Should emit event on filechange', async () => {
    let watcher = new Watcher({});
    let filepath = path.join(tmpFolder, 'file1.txt');
    await fs.writeFile(filepath, 'this is a text document');
    watcher.add(filepath);
    let changed = false;
    watcher.once('change', () => {
      changed = true;
    });
    if (!watcher.ready) {
      await new Promise(resolve => watcher.once('ready', resolve));
    }
    // Give chokidar time to register the path before mutating it.
    await sleep(250);
    await fs.writeFile(filepath, 'this is not a text document');
    await sleep(500);
    assert(changed, 'File should be flagged as changed.');
    await watcher.stop();
  });
  it('Should emit event on filechange using arrays', async () => {
    let watcher = new Watcher({});
    let filepath = path.join(tmpFolder, 'file1.txt');
    await fs.writeFile(filepath, 'this is a text document');
    watcher.add([filepath]);
    let changed = false;
    watcher.once('change', () => {
      changed = true;
    });
    if (!watcher.ready) {
      await new Promise(resolve => watcher.once('ready', resolve));
    }
    await sleep(250);
    await fs.writeFile(filepath, 'this is not a text document');
    await sleep(500);
    assert(changed, 'File should be flagged as changed.');
    await watcher.stop();
  });
  it('Should not emit event if file has been added and removed', async () => {
    let watcher = new Watcher({});
    let filepath = path.join(tmpFolder, 'file1.txt');
    await fs.writeFile(filepath, 'this is a text document');
    await sleep(250);
    watcher.add(filepath);
    let changed = false;
    watcher.once('change', () => {
      changed = true;
    });
    if (!watcher.ready) {
      await new Promise(resolve => watcher.once('ready', resolve));
    }
    await sleep(250);
    watcher.unwatch(filepath);
    await fs.writeFile(filepath, 'this is not a text document');
    await sleep(500);
    assert(!changed, 'Should not have emitted a change event.');
    await watcher.stop();
  });
});

84
BACK_BACK/node_modules/@parcel/watcher/test/errorHandling.js generated vendored Executable file
View file

@ -0,0 +1,84 @@
const Watcher = require('../index');
const fs = require('@parcel/fs');
const path = require('path');
const assert = require('assert');
const {sleep} = require('@parcel/test-utils');
describe('error handling', function() {
  let tmpFolder = path.join(__dirname, './tmp/');
  before(() => {
    // Return the promise so mocha waits for the directory to exist before
    // the first test runs (previously the promise was fire-and-forget).
    return fs.mkdirp(tmpFolder);
  });
  it('Should restart child process if it dies', async () => {
    let watcher = new Watcher({});
    let filepath = path.join(tmpFolder, 'file1.txt');
    await fs.writeFile(filepath, 'this is a text document');
    watcher.add(filepath);
    let changed = false;
    watcher.once('change', () => {
      changed = true;
    });
    if (!watcher.ready) {
      await new Promise(resolve => watcher.once('ready', resolve));
    }
    await sleep(250);
    // Kill the child; the watcher should respawn it and keep watching.
    watcher._emulateChildDead();
    await sleep(1000);
    await fs.writeFile(filepath, 'this is not a text document');
    await sleep(500);
    assert(changed, 'Should have emitted a change event.');
    await watcher.stop();
  });
  it('Should restart child process on errors', async () => {
    let watcher = new Watcher({});
    let filepath = path.join(tmpFolder, 'file1.txt');
    await fs.writeFile(filepath, 'this is a text document');
    watcher.add(filepath);
    let hasThrown = false;
    watcher.on('watcherError', () => (hasThrown = true));
    let changed = false;
    watcher.once('change', () => {
      changed = true;
    });
    if (!watcher.ready) {
      await new Promise(resolve => watcher.once('ready', resolve));
    }
    await sleep(250);
    // Trigger an error inside the child; it must surface as 'watcherError'
    // and not prevent further change events.
    watcher._emulateChildError();
    await sleep(1000);
    await fs.writeFile(filepath, 'this is not a text document');
    await sleep(500);
    assert(changed, 'Should have emitted a change event.');
    await watcher.stop();
    assert(hasThrown, 'Should have emitted an error event.');
  });
});

31
BACK_BACK/node_modules/@parcel/watcher/test/fswatcher.js generated vendored Executable file
View file

@ -0,0 +1,31 @@
const Watcher = require('../index');
const {sleep} = require('@parcel/test-utils');
const assert = require('assert');
describe('Watcher', function() {
  // NOTE(review): timing-based integration tests; the fixed 1s sleeps
  // assume the forked child becomes ready within that window.
  it('Should be able to create a new watcher', async () => {
    let watcher = new Watcher();
    // The child is forked synchronously in the constructor, but readiness
    // is reported asynchronously over IPC.
    assert(!!watcher.child);
    assert(!watcher.ready);
    await sleep(1000);
    assert(!!watcher.child);
    assert(watcher.ready);
    await watcher.stop();
  });
  it('Should be able to properly destroy the watcher', async () => {
    let watcher = new Watcher();
    await sleep(1000);
    assert(!!watcher.child);
    assert(watcher.ready);
    await watcher.stop();
    // stop() kills the forked child process.
    assert(watcher.child.killed);
  });
});

3
BACK_BACK/node_modules/@parcel/watcher/test/mocha.opts generated vendored Executable file
View file

@ -0,0 +1,3 @@
--require @parcel/babel-register
--timeout 10000
--exit

124
BACK_BACK/node_modules/@parcel/watcher/test/options.js generated vendored Executable file
View file

@ -0,0 +1,124 @@
const Watcher = require('../index');
const fs = require('@parcel/fs');
const path = require('path');
const assert = require('assert');
const {sleep} = require('@parcel/test-utils');
describe('options', function() {
  let tmpFolder = path.join(__dirname, './tmp/');
  before(() => {
    // Return the promise so mocha waits for the directory to exist before
    // the first test runs (previously the promise was fire-and-forget).
    return fs.mkdirp(tmpFolder);
  });
  it('Should pass init options with correct ignored regex', async () => {
    // The RegExp must survive the encode/IPC/decode round trip.
    let watcher = new Watcher({
      ignored: /file/
    });
    let filepath = path.join(tmpFolder, 'file1.txt');
    await fs.writeFile(filepath, 'this is a text document');
    watcher.add(filepath);
    let changed = false;
    watcher.once('change', () => {
      changed = true;
    });
    if (!watcher.ready) {
      await new Promise(resolve => watcher.once('ready', resolve));
    }
    await sleep(250);
    await fs.writeFile(filepath, 'this is not a text document');
    await sleep(500);
    assert(!changed, 'File should not be flagged as changed.');
    await watcher.stop();
  });
  it('Should pass init options with a more complex ignored regex', async () => {
    let watcher = new Watcher({
      ignored: /file|config/
    });
    let filepaths = [
      path.join(tmpFolder, 'file1.txt'),
      path.join(tmpFolder, 'config.json')
    ];
    for (let filepath of filepaths) {
      await fs.writeFile(filepath, 'this is a text document');
      watcher.add(filepath);
    }
    let changed = false;
    watcher.once('change', () => {
      changed = true;
    });
    if (!watcher.ready) {
      await new Promise(resolve => watcher.once('ready', resolve));
    }
    await sleep(250);
    for (let filepath of filepaths) {
      await fs.writeFile(filepath, 'this is not a text document');
      watcher.add(filepath);
    }
    await sleep(500);
    assert(!changed, 'File should not be flagged as changed.');
    await watcher.stop();
  });
  it('Should not ignore any files outside of the regex', async () => {
    let watcher = new Watcher({
      ignored: /file|config/
    });
    let filepaths = [
      path.join(tmpFolder, 'file1.txt'),
      path.join(tmpFolder, 'config.json'),
      path.join(tmpFolder, 'something')
    ];
    for (let filepath of filepaths) {
      await fs.writeFile(filepath, 'this is a text document');
      watcher.add(filepath);
    }
    let changed = 0;
    // `once` caps the count at 1; only the non-ignored file should fire.
    watcher.once('change', () => {
      changed++;
    });
    if (!watcher.ready) {
      await new Promise(resolve => watcher.once('ready', resolve));
    }
    await sleep(250);
    for (let filepath of filepaths) {
      await fs.writeFile(filepath, 'this is not a text document');
      watcher.add(filepath);
    }
    await sleep(500);
    assert.equal(changed, 1, 'One file should have changed once.');
    await watcher.stop();
  });
});

View file

@ -0,0 +1 @@
this is not a text document

1
BACK_BACK/node_modules/@parcel/watcher/test/tmp/file1.txt generated vendored Executable file
View file

@ -0,0 +1 @@
this is a text document

1
BACK_BACK/node_modules/@parcel/watcher/test/tmp/something generated vendored Executable file
View file

@ -0,0 +1 @@
this is not a text document

28
BACK_BACK/node_modules/@parcel/watcher/test/watched.js generated vendored Executable file
View file

@ -0,0 +1,28 @@
const Watcher = require('../index');
const fs = require('@parcel/fs');
const path = require('path');
const assert = require('assert');
describe('watched paths', function() {
  let tmpFolder = path.join(__dirname, './tmp/');
  before(() => {
    // Return the promise so mocha waits for the directory to exist before
    // the test runs (previously the promise was fire-and-forget).
    return fs.mkdirp(tmpFolder);
  });
  it('Should return watched paths', async () => {
    let watcher = new Watcher({});
    let filepath = path.join(tmpFolder, 'file1.txt');
    await fs.writeFile(filepath, 'this is a text document');
    watcher.add(filepath);
    assert(
      Object.keys(watcher.getWatched())[0] === filepath,
      'getWatched should return all the watched paths.'
    );
    await watcher.stop();
  });
});

21
BACK_BACK/node_modules/@parcel/workers/LICENSE generated vendored Executable file
View file

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2017-present Devon Govett
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

5
BACK_BACK/node_modules/@parcel/workers/index.js generated vendored Executable file
View file

@ -0,0 +1,5 @@
// Node 8 supports native async functions - no need to use compiled code!
const nodeMajorVersion = parseInt(process.versions.node, 10);
module.exports =
  nodeMajorVersion < 8
    ? require('./lib/WorkerFarm') // pre-compiled (Babel) build
    : require('./src/WorkerFarm'); // untranspiled source

33
BACK_BACK/node_modules/@parcel/workers/package.json generated vendored Executable file
View file

@ -0,0 +1,33 @@
{
"name": "@parcel/workers",
"version": "1.11.0",
"description": "Blazing fast, zero configuration web application bundler",
"main": "index.js",
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/parcel-bundler/parcel.git"
},
"engines": {
"node": ">= 6.0.0"
},
"publishConfig": {
"access": "public"
},
"scripts": {
"test": "cross-env NODE_ENV=test mocha",
"test-ci": "yarn build && yarn test",
"format": "prettier --write \"./{src,bin,test}/**/*.{js,json,md}\"",
"lint": "eslint . && prettier \"./{src,bin,test}/**/*.{js,json,md}\" --list-different",
"build": "babel src -d lib",
"prepublish": "yarn build"
},
"devDependencies": {
"mocha": "^5.2.0"
},
"dependencies": {
"@parcel/utils": "^1.11.0",
"physical-cpu-count": "^2.0.0"
},
"gitHead": "34eb91e8e6991073e594bff731c333d09b0403b5"
}

8
BACK_BACK/node_modules/@parcel/workers/src/.babelrc generated vendored Executable file
View file

@ -0,0 +1,8 @@
{
"presets": [["@babel/preset-env", {
"targets": {
"node": "6"
}
}]],
"plugins": ["@babel/plugin-transform-runtime"]
}

3
BACK_BACK/node_modules/@parcel/workers/src/.eslintrc.json generated vendored Executable file
View file

@ -0,0 +1,3 @@
{
"extends": "../../../../.eslintrc.json"
}

176
BACK_BACK/node_modules/@parcel/workers/src/Worker.js generated vendored Executable file
View file

@ -0,0 +1,176 @@
const childProcess = require('child_process');
const {EventEmitter} = require('events');
const {errorUtils} = require('@parcel/utils');
const childModule = require.resolve('./child');
let WORKER_ID = 0;
// Wrapper around a single forked child process. Tracks in-flight calls,
// buffers outgoing IPC messages under backpressure, and re-emits child
// lifecycle events ('exit', 'error', 'ready', 'request', 'response').
class Worker extends EventEmitter {
  constructor(options) {
    super();
    this.options = options;
    // Unique id across all Worker instances in this process.
    this.id = WORKER_ID++;
    // Messages buffered while the IPC channel is throttled.
    this.sendQueue = [];
    this.processQueue = true;
    // In-flight calls keyed by call index; settled in receive().
    this.calls = new Map();
    this.exitCode = null;
    this.callId = 0;
    this.ready = false;
    this.stopped = false;
    this.isStopping = false;
  }
  // Fork the child process, run its `childInit` with the module to load,
  // then initialize it with the bundler options.
  async fork(forkModule, bundlerOptions) {
    // Strip debugger flags so the child doesn't fight over the inspector port.
    let filteredArgs = process.execArgv.filter(
      v => !/^--(debug|inspect)/.test(v)
    );
    let options = {
      execArgv: filteredArgs,
      env: process.env,
      cwd: process.cwd()
    };
    this.child = childProcess.fork(childModule, process.argv, options);
    this.child.on('message', data => this.receive(data));
    this.child.once('exit', code => {
      this.exitCode = code;
      this.emit('exit', code);
    });
    this.child.on('error', err => {
      this.emit('error', err);
    });
    await new Promise((resolve, reject) => {
      this.call({
        method: 'childInit',
        args: [forkModule],
        retries: 0,
        resolve,
        reject
      });
    });
    await this.init(bundlerOptions);
  }
  // (Re)initialize the child; `ready` flips true once the child confirms.
  async init(bundlerOptions) {
    this.ready = false;
    return new Promise((resolve, reject) => {
      this.call({
        method: 'init',
        args: [bundlerOptions],
        retries: 0,
        resolve: (...args) => {
          this.ready = true;
          this.emit('ready');
          resolve(...args);
        },
        reject
      });
    });
  }
  // Send over IPC, buffering messages while the channel reports backpressure.
  send(data) {
    if (!this.processQueue) {
      return this.sendQueue.push(data);
    }
    let result = this.child.send(data, error => {
      if (error && error instanceof Error) {
        // Ignore this, the workerfarm handles child errors
        return;
      }
      // Channel drained: flush anything buffered in the meantime.
      this.processQueue = true;
      if (this.sendQueue.length > 0) {
        let queueCopy = this.sendQueue.slice(0);
        this.sendQueue = [];
        queueCopy.forEach(entry => this.send(entry));
      }
    });
    if (!result || /^win/.test(process.platform)) {
      // Queue is handling too many messages; throttle it.
      this.processQueue = false;
    }
  }
  // Register a pending call and forward the request to the child.
  call(call) {
    if (this.stopped || this.isStopping) {
      return;
    }
    let idx = this.callId++;
    this.calls.set(idx, call);
    this.send({
      type: 'request',
      idx: idx,
      child: this.id,
      method: call.method,
      args: call.args
    });
  }
  // Route child messages: requests bubble up to the farm, responses settle
  // the matching pending call.
  receive(data) {
    if (this.stopped || this.isStopping) {
      return;
    }
    let idx = data.idx;
    let type = data.type;
    let content = data.content;
    let contentType = data.contentType;
    if (type === 'request') {
      this.emit('request', data);
    } else if (type === 'response') {
      let call = this.calls.get(idx);
      if (!call) {
        // Return for unknown calls, these might occur if a third party process uses workers
        return;
      }
      if (contentType === 'error') {
        call.reject(errorUtils.jsonToError(content));
      } else {
        call.resolve(content);
      }
      this.calls.delete(idx);
      this.emit('response', data);
    }
  }
  // Ask the child to exit gracefully; force-kill after `forcedKillTime` ms.
  async stop() {
    if (!this.stopped) {
      this.stopped = true;
      if (this.child) {
        this.child.send('die');
        let forceKill = setTimeout(
          () => this.child.kill('SIGINT'),
          this.options.forcedKillTime
        );
        await new Promise(resolve => {
          this.child.once('exit', resolve);
        });
        clearTimeout(forceKill);
      }
    }
  }
}
module.exports = Worker;

300
BACK_BACK/node_modules/@parcel/workers/src/WorkerFarm.js generated vendored Executable file
View file

@ -0,0 +1,300 @@
const {EventEmitter} = require('events');
const {errorUtils} = require('@parcel/utils');
const Worker = require('./Worker');
const cpuCount = require('./cpuCount');
let shared = null;
/**
* workerPath should always be defined inside farmOptions
*/
// Manages a pool of Worker child processes, load-balancing calls across
// them and falling back to an in-process "local worker" during warmup.
class WorkerFarm extends EventEmitter {
  constructor(options, farmOptions = {}) {
    super();
    // Defaults, overridable via farmOptions.
    this.options = {
      maxConcurrentWorkers: WorkerFarm.getNumWorkers(),
      maxConcurrentCallsPerWorker: WorkerFarm.getConcurrentCallsPerWorker(),
      forcedKillTime: 500,
      warmWorkers: true,
      useLocalWorker: true
    };
    if (farmOptions) {
      this.options = Object.assign(this.options, farmOptions);
    }
    this.warmWorkers = 0;
    this.workers = new Map();
    this.callQueue = [];
    if (!this.options.workerPath) {
      throw new Error('Please provide a worker path!');
    }
    // Same module the children load; used while remote workers warm up.
    this.localWorker = require(this.options.workerPath);
    this.run = this.mkhandle('run');
    this.init(options);
  }
  // Send the call to a remote worker in the background purely to warm it up;
  // the caller uses the local worker's (faster) result meanwhile.
  warmupWorker(method, args) {
    // Workers are already stopping
    if (this.ending) {
      return;
    }
    // Workers are not warmed up yet.
    // Send the job to a remote worker in the background,
    // but use the result from the local worker - it will be faster.
    let promise = this.addCall(method, [...args, true]);
    if (promise) {
      promise
        .then(() => {
          this.warmWorkers++;
          if (this.warmWorkers >= this.workers.size) {
            this.emit('warmedup');
          }
        })
        .catch(() => {});
    }
  }
  shouldStartRemoteWorkers() {
    return (
      this.options.maxConcurrentWorkers > 1 ||
      process.env.NODE_ENV === 'test' ||
      !this.options.useLocalWorker
    );
  }
  // Build a facade method that transparently picks local vs remote execution.
  mkhandle(method) {
    return (...args) => {
      // Child process workers are slow to start (~600ms).
      // While we're waiting, just run on the main thread.
      // This significantly speeds up startup time.
      if (this.shouldUseRemoteWorkers()) {
        return this.addCall(method, [...args, false]);
      } else {
        if (this.options.warmWorkers && this.shouldStartRemoteWorkers()) {
          this.warmupWorker(method, args);
        }
        return this.localWorker[method](...args, false);
      }
    };
  }
  onError(error, worker) {
    // Handle ipc errors
    if (error.code === 'ERR_IPC_CHANNEL_CLOSED') {
      return this.stopWorker(worker);
    }
  }
  // Fork a new worker and wire its lifecycle events into the farm.
  startChild() {
    let worker = new Worker(this.options);
    worker.fork(this.options.workerPath, this.bundlerOptions);
    worker.on('request', data => this.processRequest(data, worker));
    worker.on('ready', () => this.processQueue());
    worker.on('response', () => this.processQueue());
    worker.on('error', err => this.onError(err, worker));
    worker.once('exit', () => this.stopWorker(worker));
    this.workers.set(worker.id, worker);
  }
  // Retire a worker, requeueing any calls it had in flight.
  async stopWorker(worker) {
    if (!worker.stopped) {
      this.workers.delete(worker.id);
      worker.isStopping = true;
      if (worker.calls.size) {
        for (let call of worker.calls.values()) {
          call.retries++;
          this.callQueue.unshift(call);
        }
      }
      worker.calls = null;
      await worker.stop();
      // Process any requests that failed and start a new worker
      this.processQueue();
    }
  }
  // Dispatch queued calls to ready workers, spawning more if under the cap.
  async processQueue() {
    if (this.ending || !this.callQueue.length) return;
    if (this.workers.size < this.options.maxConcurrentWorkers) {
      this.startChild();
    }
    for (let worker of this.workers.values()) {
      if (!this.callQueue.length) {
        break;
      }
      if (!worker.ready || worker.stopped || worker.isStopping) {
        continue;
      }
      if (worker.calls.size < this.options.maxConcurrentCallsPerWorker) {
        worker.call(this.callQueue.shift());
      }
    }
  }
  // Execute a request coming FROM a worker on the master process.
  async processRequest(data, worker = false) {
    let result = {
      idx: data.idx,
      type: 'response'
    };
    let method = data.method;
    let args = data.args;
    let location = data.location;
    let awaitResponse = data.awaitResponse;
    if (!location) {
      throw new Error('Unknown request');
    }
    const mod = require(location);
    try {
      result.contentType = 'data';
      if (method) {
        result.content = await mod[method](...args);
      } else {
        result.content = await mod(...args);
      }
    } catch (e) {
      result.contentType = 'error';
      result.content = errorUtils.errorToJson(e);
    }
    if (awaitResponse) {
      if (worker) {
        worker.send(result);
      } else {
        return result;
      }
    }
  }
  // Queue a call; resolves when some worker completes it.
  addCall(method, args) {
    if (this.ending) {
      throw new Error('Cannot add a worker call if workerfarm is ending.');
    }
    return new Promise((resolve, reject) => {
      this.callQueue.push({
        method,
        args: args,
        retries: 0,
        resolve,
        reject
      });
      this.processQueue();
    });
  }
  // Shut every worker down and release the shared farm singleton.
  async end() {
    this.ending = true;
    await Promise.all(
      Array.from(this.workers.values()).map(worker => this.stopWorker(worker))
    );
    this.ending = false;
    shared = null;
  }
  init(bundlerOptions) {
    this.bundlerOptions = bundlerOptions;
    if (this.shouldStartRemoteWorkers()) {
      this.persistBundlerOptions();
    }
    this.localWorker.init(bundlerOptions);
    this.startMaxWorkers();
  }
  persistBundlerOptions() {
    for (let worker of this.workers.values()) {
      worker.init(this.bundlerOptions);
    }
  }
  startMaxWorkers() {
    // Starts workers until the maximum is reached
    if (this.workers.size < this.options.maxConcurrentWorkers) {
      // Compute the count up front: startChild() grows `this.workers`
      // synchronously, so re-reading `this.workers.size` in the loop
      // condition (as before) undershoots the target worker count.
      let toStart = this.options.maxConcurrentWorkers - this.workers.size;
      for (let i = 0; i < toStart; i++) {
        this.startChild();
      }
    }
  }
  shouldUseRemoteWorkers() {
    return (
      !this.options.useLocalWorker ||
      (this.warmWorkers >= this.workers.size || !this.options.warmWorkers)
    );
  }
  // Singleton accessor; passing farmOptions forces a fresh farm instance.
  static async getShared(options, farmOptions) {
    // Farm options shouldn't be considered safe to overwrite
    // and require an entire new instance to be created
    if (shared && farmOptions) {
      await shared.end();
      shared = null;
    }
    if (!shared) {
      shared = new WorkerFarm(options, farmOptions);
    } else if (options) {
      shared.init(options);
    }
    if (!shared && !options) {
      throw new Error('Workerfarm should be initialised using options');
    }
    return shared;
  }
  static getNumWorkers() {
    return process.env.PARCEL_WORKERS
      ? parseInt(process.env.PARCEL_WORKERS, 10)
      : cpuCount();
  }
  // Route a request to the master, whether called from a worker or master.
  static async callMaster(request, awaitResponse = true) {
    if (WorkerFarm.isWorker()) {
      const child = require('./child');
      return child.addCall(request, awaitResponse);
    } else {
      return (await WorkerFarm.getShared()).processRequest(request);
    }
  }
  static isWorker() {
    return process.send && require.main.filename === require.resolve('./child');
  }
  static getConcurrentCallsPerWorker() {
    return parseInt(process.env.PARCEL_MAX_CONCURRENT_CALLS, 10) || 5;
  }
}
// Public entry point: the farm class itself; WorkerFarm.getShared() hands
// out the per-process singleton.
module.exports = WorkerFarm;

144
BACK_BACK/node_modules/@parcel/workers/src/child.js generated vendored Executable file
View file

@ -0,0 +1,144 @@
const {errorUtils} = require('@parcel/utils');
// Runs inside a spawned worker process: receives requests from the master
// over IPC, executes them against the loaded worker module, and lets the
// worker issue its own calls back to the master (bounded by
// maxConcurrentCalls in-flight requests).
class Child {
  constructor() {
    if (!process.send) {
      throw new Error('Only create Child instances in a worker!');
    }

    this.module = undefined;
    this.childId = undefined;

    // Outbound calls waiting to be sent, plus in-flight calls keyed by
    // the response id we expect back from the master.
    this.callQueue = [];
    this.responseQueue = new Map();
    this.responseId = 0;
    this.maxConcurrentCalls = 10;
  }

  // Dispatches raw IPC messages from the master.
  messageListener(data) {
    if (data === 'die') {
      return this.end();
    }

    let type = data.type;
    if (type === 'response') {
      return this.handleResponse(data);
    } else if (type === 'request') {
      return this.handleRequest(data);
    }
  }

  // Sends a message to the master; if the IPC channel is gone there is no
  // point keeping this worker alive, so it exits.
  async send(data) {
    process.send(data, err => {
      if (err && err instanceof Error) {
        if (err.code === 'ERR_IPC_CHANNEL_CLOSED') {
          // IPC connection closed
          // no need to keep the worker running if it can't send or receive data
          return this.end();
        }
      }
    });
  }

  // Loads the worker module and records this child's id.
  childInit(module, childId) {
    this.module = require(module);
    this.childId = childId;
  }

  // Executes a request from the master against the loaded module and
  // replies with either the result or a JSON-serialised error.
  async handleRequest(data) {
    let idx = data.idx;
    let child = data.child;
    let method = data.method;
    let args = data.args;

    let result = {idx, child, type: 'response'};
    try {
      result.contentType = 'data';
      if (method === 'childInit') {
        result.content = this.childInit(...args, child);
      } else {
        result.content = await this.module[method](...args);
      }
    } catch (e) {
      result.contentType = 'error';
      result.content = errorUtils.errorToJson(e);
    }

    this.send(result);
  }

  // Settles the pending call matching a response from the master.
  async handleResponse(data) {
    let idx = data.idx;
    let contentType = data.contentType;
    let content = data.content;
    let call = this.responseQueue.get(idx);

    // Ignore responses we have no record of (e.g. duplicates or responses
    // to fire-and-forget calls) instead of crashing on an undefined call.
    if (!call) {
      return;
    }

    if (contentType === 'error') {
      call.reject(errorUtils.jsonToError(content));
    } else {
      call.resolve(content);
    }

    this.responseQueue.delete(idx);

    // Process the next call
    this.processQueue();
  }

  // Keep in mind to make sure responses to these calls are JSON.Stringify safe
  async addCall(request, awaitResponse = true) {
    let call = request;
    call.type = 'request';
    call.child = this.childId;
    call.awaitResponse = awaitResponse;

    let promise;
    if (awaitResponse) {
      promise = new Promise((resolve, reject) => {
        call.resolve = resolve;
        call.reject = reject;
      });
    }

    this.callQueue.push(call);
    this.processQueue();

    return promise;
  }

  // Ships one queued call to the master, registering it for a response
  // when the caller asked for one.
  async sendRequest(call) {
    let idx;
    if (call.awaitResponse) {
      idx = this.responseId++;
      this.responseQueue.set(idx, call);
    }
    this.send({
      idx: idx,
      child: call.child,
      type: call.type,
      location: call.location,
      method: call.method,
      args: call.args,
      awaitResponse: call.awaitResponse
    });
  }

  // Drains the queue while there is spare in-flight capacity. The previous
  // `if` dispatched at most one request per invocation, leaving capacity
  // unused until the next response arrived.
  async processQueue() {
    while (
      this.callQueue.length > 0 &&
      this.responseQueue.size < this.maxConcurrentCalls
    ) {
      this.sendRequest(this.callQueue.shift());
    }
  }

  end() {
    process.exit();
  }
}
// One Child instance per worker process, wired straight to the IPC channel
// and exported so WorkerFarm.callMaster can reach it from worker code.
let child = new Child();
process.on('message', child.messageListener.bind(child));
module.exports = child;

11
BACK_BACK/node_modules/@parcel/workers/src/cpuCount.js generated vendored Executable file
View file

@ -0,0 +1,11 @@
const os = require('os');
module.exports = function() {
let cores;
try {
cores = require('physical-cpu-count');
} catch (err) {
cores = os.cpus().length;
}
return cores || 1;
};

9
BACK_BACK/node_modules/@parcel/workers/test/.babelrc generated vendored Executable file
View file

@ -0,0 +1,9 @@
{
"presets": [["@babel/preset-env", {
"targets": {
"node": "current"
}
}]],
"plugins": ["@babel/plugin-transform-runtime"],
"ignore": ["integration"]
}

6
BACK_BACK/node_modules/@parcel/workers/test/.eslintrc.json generated vendored Executable file
View file

@ -0,0 +1,6 @@
{
"extends": "../../../../.eslintrc.json",
"env": {
"mocha": true
}
}

View file

@ -0,0 +1,10 @@
// Echo fixture used by the WorkerFarm tests.

// Returns its argument untouched so tests can round-trip payloads.
function run(data) {
  return data;
}

// This fixture needs no per-worker setup.
function init() {}
// Entry points picked up by the worker child process.
exports.run = run;
exports.init = init;

View file

@ -0,0 +1,12 @@
// Fixture that reports back the options it was initialised with, used to
// verify that WorkerFarm re-inits reach every worker.

// Module-level store for the options handed to init(); read by run().
let options = {};

// Returns the most recently initialised options object.
function run() {
  return options;
}

// Records the bundler options for later inspection via run().
function init(opt) {
  options = opt;
}
// Entry points picked up by the worker child process.
exports.run = run;
exports.init = init;

View file

@ -0,0 +1,25 @@
const WorkerFarm = require(`../../../${
parseInt(process.versions.node, 10) < 8 ? 'lib' : 'src'
}/WorkerFarm`);
// Collects [workerPid, masterPid]: asks the master for its pid over IPC and
// prepends this worker's own pid, so the test can prove the call crossed
// process boundaries.
// Rewritten to chain the promise callMaster already returns instead of
// wrapping it in `new Promise(...)` (the promise-constructor anti-pattern,
// which also silently dropped nothing here but obscured the flow).
function run() {
  return WorkerFarm.callMaster({
    location: require.resolve('./master-process-id.js'),
    args: []
  }).then(pid => [process.pid, pid]);
}
// This fixture needs no per-worker setup.
function init() {
  // Do nothing
}

// Entry points picked up by the worker child process.
exports.run = run;
exports.init = init;

View file

@ -0,0 +1,17 @@
const WorkerFarm = require(`../../../${
parseInt(process.versions.node, 10) < 8 ? 'lib' : 'src'
}/WorkerFarm`);
// Delegates the addition to the master process over IPC and resolves with
// the sum it computes.
function run(a, b) {
  const request = {
    location: require.resolve('./master-sum.js'),
    args: [a, b]
  };
  return WorkerFarm.callMaster(request);
}
// This fixture needs no per-worker setup.
function init() {
  // Do nothing
}

// Entry points picked up by the worker child process.
exports.run = run;
exports.init = init;

View file

@ -0,0 +1,3 @@
// Runs in the master process: replies with the master's pid so the
// ipc-pid fixture can verify a worker's callMaster actually reached the
// master and not another worker.
module.exports = function() {
  return process.pid;
};

View file

@ -0,0 +1,3 @@
// Runs in the master process: adds the two numbers a worker sends over IPC.
module.exports = function(a, b) {
  return a + b;
};

View file

@ -0,0 +1,10 @@
// Minimal liveness fixture: every run() call answers 'pong'.
function run() {
  return 'pong';
}

// This fixture needs no per-worker setup.
function init() {}
// Entry points picked up by the worker child process.
exports.run = run;
exports.init = init;

3
BACK_BACK/node_modules/@parcel/workers/test/mocha.opts generated vendored Executable file
View file

@ -0,0 +1,3 @@
--require @parcel/babel-register
--exit
--timeout 20s

178
BACK_BACK/node_modules/@parcel/workers/test/workerfarm.js generated vendored Executable file
View file

@ -0,0 +1,178 @@
const assert = require('assert');
const WorkerFarm = require('../index');
// Integration tests for WorkerFarm: startup, throughput, repeated re-init,
// warm-up behaviour, local-worker mode, and bi-directional
// (worker -> master) IPC, including a large-payload stress case.
describe('WorkerFarm', () => {
  it('Should start up workers', async () => {
    let workerfarm = new WorkerFarm(
      {},
      {
        warmWorkers: false,
        useLocalWorker: false,
        workerPath: require.resolve('./integration/workerfarm/ping.js')
      }
    );
    assert.equal(await workerfarm.run(), 'pong');
    await workerfarm.end();
  });

  it('Should handle 1000 requests without any issue', async () => {
    let workerfarm = new WorkerFarm(
      {},
      {
        warmWorkers: false,
        useLocalWorker: false,
        workerPath: require.resolve('./integration/workerfarm/echo.js')
      }
    );
    // Fire all requests concurrently to stress the call queue.
    let promises = [];
    for (let i = 0; i < 1000; i++) {
      promises.push(workerfarm.run(i));
    }
    await Promise.all(promises);
    await workerfarm.end();
  });

  it('Should consistently initialise workers, even after 100 re-inits', async () => {
    let options = {
      key: 0
    };
    let workerfarm = new WorkerFarm(options, {
      warmWorkers: false,
      useLocalWorker: false,
      workerPath: require.resolve('./integration/workerfarm/init.js')
    });
    for (let i = 0; i < 100; i++) {
      options.key = i;
      workerfarm.init(options);
      // NOTE(review): this inner `i` shadows the outer loop variable; it is
      // only used to run once per live worker, so behaviour is unaffected.
      for (let i = 0; i < workerfarm.workers.size; i++) {
        assert.equal((await workerfarm.run()).key, options.key);
      }
      assert.equal(workerfarm.shouldUseRemoteWorkers(), true);
    }
    await workerfarm.end();
  });

  it('Should warm up workers', async () => {
    let workerfarm = new WorkerFarm(
      {},
      {
        warmWorkers: true,
        useLocalWorker: true,
        workerPath: require.resolve('./integration/workerfarm/echo.js')
      }
    );
    // While warming up, calls are answered by the in-process local worker.
    for (let i = 0; i < 100; i++) {
      assert.equal(await workerfarm.run(i), i);
    }
    // 'warmedup' fires once the remote workers are ready to take over.
    await new Promise(resolve => workerfarm.once('warmedup', resolve));
    assert(workerfarm.workers.size > 0, 'Should have spawned workers.');
    assert(
      workerfarm.warmWorkers >= workerfarm.workers.size,
      'Should have warmed up workers.'
    );
    await workerfarm.end();
  });

  it('Should use the local worker', async () => {
    let workerfarm = new WorkerFarm(
      {},
      {
        warmWorkers: true,
        useLocalWorker: true,
        workerPath: require.resolve('./integration/workerfarm/echo.js')
      }
    );
    assert.equal(await workerfarm.run('hello world'), 'hello world');
    // Before warm-up completes, remote workers must not be used yet.
    assert.equal(workerfarm.shouldUseRemoteWorkers(), false);
    await workerfarm.end();
  });

  it('Should be able to use bi-directional communication', async () => {
    let workerfarm = new WorkerFarm(
      {},
      {
        warmWorkers: false,
        useLocalWorker: false,
        workerPath: require.resolve('./integration/workerfarm/ipc.js')
      }
    );
    // The worker asks the master to add the numbers (see ipc.js fixture).
    assert.equal(await workerfarm.run(1, 2), 3);
    await workerfarm.end();
  });

  it('Should be able to handle 1000 bi-directional calls', async () => {
    let workerfarm = new WorkerFarm(
      {},
      {
        warmWorkers: false,
        useLocalWorker: false,
        workerPath: require.resolve('./integration/workerfarm/ipc.js')
      }
    );
    for (let i = 0; i < 1000; i++) {
      assert.equal(await workerfarm.run(1 + i, 2), 3 + i);
    }
    await workerfarm.end();
  });

  it('Bi-directional call should return masters pid', async () => {
    let workerfarm = new WorkerFarm(
      {},
      {
        warmWorkers: false,
        useLocalWorker: false,
        workerPath: require.resolve('./integration/workerfarm/ipc-pid.js')
      }
    );
    // Fixture returns [workerPid, masterPid]; the master pid must be ours
    // and the worker pid must belong to a different process.
    let result = await workerfarm.run();
    assert.equal(result.length, 2);
    assert.equal(result[1], process.pid);
    assert.notEqual(result[0], process.pid);
    await workerfarm.end();
  });

  it('Should handle 10 big concurrent requests without any issue', async () => {
    // This emulates the node.js ipc bug for win32
    let workerfarm = new WorkerFarm(
      {},
      {
        warmWorkers: false,
        useLocalWorker: false,
        workerPath: require.resolve('./integration/workerfarm/echo.js')
      }
    );
    let bigData = [];
    for (let i = 0; i < 10000; i++) {
      bigData.push('This is some big data');
    }
    let promises = [];
    for (let i = 0; i < 10; i++) {
      promises.push(workerfarm.run(bigData));
    }
    await Promise.all(promises);
    await workerfarm.end();
  });
});