Initial
This commit is contained in:
145
resources/app/node_modules/peggy/lib/compiler/asts.js
generated
vendored
Normal file
145
resources/app/node_modules/peggy/lib/compiler/asts.js
generated
vendored
Normal file
@@ -0,0 +1,145 @@
|
||||
"use strict";
|
||||
|
||||
const visitor = require("./visitor");
|
||||
|
||||
/**
 * Combine two things, each of which might be an array, into a single value,
 * in the order [...a, ...b].
 *
 * @template T
 * @param {T | T[]} a
 * @param {T | T[]} b
 * @returns {T | T[]}
 */
function combinePossibleArrays(a, b) {
  // Either side may be null/undefined; if so, the other side wins unchanged.
  if (!a || !b) {
    return a || b;
  }
  // When |a| is already an array it is extended in place; otherwise a new
  // two-element array is created.  |b| is never an array here.
  const result = Array.isArray(a) ? a : [a];
  result.push(b);
  return result;
}
|
||||
|
||||
// AST utilities.
const asts = {
  /**
   * Find the rule with the given name, if it exists.
   *
   * @param {PEG.ast.Grammar} ast
   * @param {string} name
   * @returns {PEG.ast.Rule | undefined}
   */
  findRule(ast, name) {
    return ast.rules.find(rule => rule.name === name);
  },

  /**
   * Find the index of the rule with the given name, if it exists.
   * Otherwise returns -1.
   *
   * @param {PEG.ast.Grammar} ast
   * @param {string} name
   * @returns {number}
   */
  indexOfRule(ast, name) {
    // Callers verify rule existence through other checks, so the -1 branch
    // is not expected to be reached in practice.
    return ast.rules.findIndex(rule => rule.name === name);
  },

  /**
   * Determine whether a successful match of |node| is guaranteed to consume
   * at least one character of input.  Returns undefined when the answer
   * depends on a rule that is missing from the grammar.
   *
   * @param {PEG.ast.Grammar} ast
   * @param {PEG.ast.Node} node
   * @returns {boolean | undefined}
   */
  alwaysConsumesOnSuccess(ast, node) {
    const yes = () => true;
    const no = () => false;

    const consumes = visitor.build({
      choice(n) {
        // Every alternative must consume for the whole choice to consume.
        return n.alternatives.every(consumes);
      },

      sequence(n) {
        // A single consuming element makes the whole sequence consume.
        return n.elements.some(consumes);
      },

      simple_and: no,
      simple_not: no,
      optional: no,
      zero_or_more: no,
      repeated(n) {
        // A |null| minimum means the range was written as |exact|, so the
        // minimum equals the maximum.
        const min = n.min ? n.min : n.max;

        // A variable lower bound may evaluate to zero, and an expression
        // repeated zero times matches without consuming any input.
        if (min.type !== "constant" || min.value === 0) {
          return false;
        }
        if (consumes(n.expression)) {
          return true;
        }
        // The delimiter only runs between elements, i.e. when the
        // expression matches at least twice.  The first guard above filtered
        // out non-constant minimums, so |min.value| is a constant here.
        if (min.value > 1 && n.delimiter && consumes(n.delimiter)) {
          return true;
        }

        return false;
      },
      semantic_and: no,
      semantic_not: no,

      rule_ref(n) {
        const rule = asts.findRule(ast, n.name);

        // All checks run in a single stage, so a referenced rule may be
        // missing; missing-rule detection runs in parallel to this check.
        return rule ? consumes(rule) : undefined;
      },

      library_ref() {
        // No way to know for external rules.
        return false;
      },

      literal(n) {
        return n.value !== "";
      },

      class: yes,
      any: yes,
    });

    return consumes(node);
  },

  /**
   * Fold several grammar ASTs into the first one, concatenating their
   * top-level initializers, initializers, and rules.
   *
   * @param {PEG.ast.Grammar[]} grammars
   * @returns {PEG.ast.Grammar}
   */
  combine(grammars) {
    return grammars.reduce((combined, ast) => {
      combined.topLevelInitializer = combinePossibleArrays(
        combined.topLevelInitializer,
        ast.topLevelInitializer
      );
      combined.initializer = combinePossibleArrays(
        combined.initializer,
        ast.initializer
      );
      combined.rules = combined.rules.concat(ast.rules);
      return combined;
    });
  },
};
|
||||
|
||||
module.exports = asts;
|
||||
170
resources/app/node_modules/peggy/lib/compiler/index.js
generated
vendored
Normal file
170
resources/app/node_modules/peggy/lib/compiler/index.js
generated
vendored
Normal file
@@ -0,0 +1,170 @@
|
||||
"use strict";
|
||||
|
||||
const addImportedRules = require("./passes/add-imported-rules");
|
||||
const fixLibraryNumbers = require("./passes/fix-library-numbers");
|
||||
const generateBytecode = require("./passes/generate-bytecode");
|
||||
const generateJS = require("./passes/generate-js");
|
||||
const inferenceMatchResult = require("./passes/inference-match-result");
|
||||
const removeProxyRules = require("./passes/remove-proxy-rules");
|
||||
const mergeCharacterClasses = require("./passes/merge-character-classes");
|
||||
const reportDuplicateImports = require("./passes/report-duplicate-imports");
|
||||
const reportDuplicateLabels = require("./passes/report-duplicate-labels");
|
||||
const reportDuplicateRules = require("./passes/report-duplicate-rules");
|
||||
const reportInfiniteRecursion = require("./passes/report-infinite-recursion");
|
||||
const reportInfiniteRepetition = require("./passes/report-infinite-repetition");
|
||||
const reportUndefinedRules = require("./passes/report-undefined-rules");
|
||||
const reportIncorrectPlucking = require("./passes/report-incorrect-plucking");
|
||||
const Session = require("./session");
|
||||
const visitor = require("./visitor");
|
||||
const { base64 } = require("./utils");
|
||||
|
||||
/**
 * Shallow-merge |options| over |defaults| into a fresh object.  A default is
 * applied only when the corresponding key is completely absent from
 * |options| (an explicit `undefined` value in |options| still wins).
 *
 * @param {object} options - caller-supplied options; not mutated
 * @param {object} defaults - fallback values
 * @returns {object} new object with options taking precedence
 */
function processOptions(options, defaults) {
  const processedOptions = {};

  for (const key of Object.keys(options)) {
    processedOptions[key] = options[key];
  }

  for (const key of Object.keys(defaults)) {
    if (!Object.prototype.hasOwnProperty.call(processedOptions, key)) {
      processedOptions[key] = defaults[key];
    }
  }

  return processedOptions;
}
|
||||
|
||||
/**
 * Check whether |target| can serve as a grammarSource for source-map
 * generation: a non-empty string, or an object exposing an offset()
 * method (e.g. a GrammarLocation).
 *
 * @param {unknown} target - candidate grammarSource
 * @returns {boolean | unknown} true/false for strings; otherwise a truthy
 *   or falsy value (the original falsy |target| is passed through)
 */
function isSourceMapCapable(target) {
  return typeof target === "string"
    ? target.length > 0
    : target && (typeof target.offset === "function");
}
|
||||
|
||||
const compiler = {
  // AST node visitor builder. Useful mainly for plugins which manipulate the
  // AST.
  visitor,

  // Compiler passes, grouped by stage.
  //
  // Each pass is a function that is passed the AST. It can perform checks on
  // it or modify it as needed. If the pass encounters a semantic error, it
  // throws |peg.GrammarError|.
  passes: {
    prepare: [
      addImportedRules,
      reportInfiniteRecursion,
    ],
    check: [
      reportUndefinedRules,
      reportDuplicateRules,
      reportDuplicateLabels,
      reportInfiniteRepetition,
      reportIncorrectPlucking,
      reportDuplicateImports,
    ],
    transform: [
      fixLibraryNumbers,
      removeProxyRules,
      mergeCharacterClasses,
      inferenceMatchResult,
    ],
    generate: [
      generateBytecode,
      generateJS,
    ],
  },

  // Generates a parser from a specified grammar AST. Throws |peg.GrammarError|
  // if the AST contains a semantic error. Note that not all errors are
  // detected during the generation and some may protrude to the generated
  // parser and cause its malfunction.
  compile(ast, passes, options) {
    options = processOptions(options === undefined ? {} : options, {
      allowedStartRules: [ast.rules[0].name],
      cache: false,
      dependencies: {},
      exportVar: null,
      format: "bare",
      output: "parser",
      trace: false,
    });

    if (!Array.isArray(options.allowedStartRules)) {
      throw new Error("allowedStartRules must be an array");
    }
    if (options.allowedStartRules.length === 0) {
      throw new Error("Must have at least one start rule");
    }

    const allRules = ast.rules.map(r => r.name);
    // "*" means all rules are start rules. "*" is not a valid rule name.
    if (options.allowedStartRules.includes("*")) {
      options.allowedStartRules = allRules;
    } else {
      for (const rule of options.allowedStartRules) {
        if (!allRules.includes(rule)) {
          throw new Error(`Unknown start rule "${rule}"`);
        }
      }
    }

    // Due to https://github.com/mozilla/source-map/issues/444
    // grammarSource is required when a source map is requested.
    const wantsSourceMap = (options.output === "source-and-map")
      || (options.output === "source-with-inline-map");
    if (wantsSourceMap && !isSourceMapCapable(options.grammarSource)) {
      throw new Error("Must provide grammarSource (as a string or GrammarLocation) in order to generate source maps");
    }

    const session = new Session(options);
    for (const [stage, stagePasses] of Object.entries(passes)) {
      session.stage = stage;
      session.info(`Process stage ${stage}`);

      for (const pass of stagePasses) {
        session.info(`Process pass ${stage}.${pass.name}`);
        pass(ast, options, session);
      }

      // Collect all errors by stage
      session.checkErrors();
    }

    switch (options.output) {
      case "parser":
        // eslint-disable-next-line no-eval -- Required
        return eval(ast.code.toString());

      case "source":
        return ast.code.toString();

      case "source-and-map":
        return ast.code;

      case "source-with-inline-map": {
        if (typeof TextEncoder === "undefined") {
          throw new Error("TextEncoder is not supported by this platform");
        }
        const sourceMap = ast.code.toStringWithSourceMap();
        const encoder = new TextEncoder();
        const b64 = base64(
          encoder.encode(JSON.stringify(sourceMap.map.toJSON()))
        );
        // \x23 is "#"; written escaped so this file itself is not mistaken
        // for one carrying a sourceMappingURL.
        return sourceMap.code + `\
//\x23 sourceMappingURL=data:application/json;charset=utf-8;base64,${b64}
`;
      }

      case "ast":
        return ast;

      default:
        throw new Error("Invalid output format: " + options.output + ".");
    }
  },
};
|
||||
|
||||
module.exports = compiler;
|
||||
77
resources/app/node_modules/peggy/lib/compiler/intern.js
generated
vendored
Normal file
77
resources/app/node_modules/peggy/lib/compiler/intern.js
generated
vendored
Normal file
@@ -0,0 +1,77 @@
|
||||
// @ts-check
|
||||
"use strict";
|
||||
|
||||
/**
 * Intern strings or objects, so there is only one copy of each, by value.
 * Objects may need to be converted to another representation before storing.
 * Each input corresponds to a number, starting with 0.
 *
 * @template [T=string],[V=T]
 */
class Intern {
  /**
   * @typedef {object} InternOptions
   * @property {(input: V) => string} [stringify=String] Represent the
   *   converted input as a string, for value comparison.
   * @property {(input: T) => V} [convert=(x) => x] Convert the input to its
   *   stored form.  Required if type V is not the same as type T.  Return
   *   falsy value to have this input not be added; add() will return -1 in
   *   this case.
   */

  /**
   * @param {InternOptions} [options]
   */
  constructor(options) {
    /** @type {Required<InternOptions>} */
    this.options = {
      stringify: String,
      convert: x => /** @type {V} */ (/** @type {unknown} */ (x)),
      ...options,
    };
    /** @type {V[]} */
    this.items = [];
    // Null-prototype object, so stringified inputs such as "__proto__",
    // "constructor", or "hasOwnProperty" cannot collide with keys inherited
    // from Object.prototype.  With a plain `{}`, `this.offsets[s]` for those
    // inputs reads an inherited value, so add() returned that value instead
    // of interning the input.
    /** @type {Record<string, number>} */
    this.offsets = Object.create(null);
  }

  /**
   * Intern an item, getting its associated number.  Returns -1 for falsy
   * inputs.  O(1) with constants tied to the convert and stringify options.
   *
   * @param {T} input
   * @return {number} index of the interned item, or -1 if convert() rejected
   *   the input
   */
  add(input) {
    const c = this.options.convert(input);
    if (!c) {
      return -1;
    }
    const s = this.options.stringify(c);
    let num = this.offsets[s];
    if (num === undefined) {
      // First time this value is seen: store it and remember its index.
      num = this.items.push(c) - 1;
      this.offsets[s] = num;
    }
    return num;
  }

  /**
   * Retrieve a stored item by its number.
   *
   * @param {number} i
   * @returns {V}
   */
  get(i) {
    return this.items[i];
  }

  /**
   * Map over all stored items, in insertion order.
   *
   * @template U
   * @param {(value: V, index: number, array: V[]) => U} fn
   * @returns {U[]}
   */
  map(fn) {
    return this.items.map(fn);
  }
}
|
||||
|
||||
module.exports = Intern;
|
||||
83
resources/app/node_modules/peggy/lib/compiler/opcodes.js
generated
vendored
Normal file
83
resources/app/node_modules/peggy/lib/compiler/opcodes.js
generated
vendored
Normal file
@@ -0,0 +1,83 @@
|
||||
"use strict";
|
||||
|
||||
// Bytecode instruction opcodes.
// The trailing comment on each entry shows the instruction's operand layout
// as it appears in the bytecode stream.
const opcodes = {
  // Stack Manipulation

  /** @deprecated Unused */
  PUSH: 0, // PUSH c
  PUSH_EMPTY_STRING: 35, // PUSH_EMPTY_STRING
  PUSH_UNDEFINED: 1, // PUSH_UNDEFINED
  PUSH_NULL: 2, // PUSH_NULL
  PUSH_FAILED: 3, // PUSH_FAILED
  PUSH_EMPTY_ARRAY: 4, // PUSH_EMPTY_ARRAY
  PUSH_CURR_POS: 5, // PUSH_CURR_POS
  POP: 6, // POP
  POP_CURR_POS: 7, // POP_CURR_POS
  POP_N: 8, // POP_N n
  NIP: 9, // NIP
  APPEND: 10, // APPEND
  WRAP: 11, // WRAP n
  TEXT: 12, // TEXT
  PLUCK: 36, // PLUCK n, k, p1, ..., pK

  // Conditions and Loops

  IF: 13, // IF t, f
  IF_ERROR: 14, // IF_ERROR t, f
  IF_NOT_ERROR: 15, // IF_NOT_ERROR t, f
  IF_LT: 30, // IF_LT min, t, f
  IF_GE: 31, // IF_GE max, t, f
  IF_LT_DYNAMIC: 32, // IF_LT_DYNAMIC min, t, f
  IF_GE_DYNAMIC: 33, // IF_GE_DYNAMIC max, t, f
  WHILE_NOT_ERROR: 16, // WHILE_NOT_ERROR b

  // Matching

  MATCH_ANY: 17, // MATCH_ANY a, f, ...
  MATCH_STRING: 18, // MATCH_STRING s, a, f, ...
  MATCH_STRING_IC: 19, // MATCH_STRING_IC s, a, f, ...
  MATCH_CHAR_CLASS: 20, // MATCH_CHAR_CLASS c, a, f, ...
  // NOTE: intentionally the same number as MATCH_CHAR_CLASS — it is an alias,
  // not a distinct instruction.
  /** @deprecated Replaced with `MATCH_CHAR_CLASS` */
  MATCH_REGEXP: 20, // MATCH_REGEXP r, a, f, ...
  ACCEPT_N: 21, // ACCEPT_N n
  ACCEPT_STRING: 22, // ACCEPT_STRING s
  FAIL: 23, // FAIL e

  // Calls

  LOAD_SAVED_POS: 24, // LOAD_SAVED_POS p
  UPDATE_SAVED_POS: 25, // UPDATE_SAVED_POS
  CALL: 26, // CALL f, n, pc, p1, p2, ..., pN

  // Rules

  RULE: 27, // RULE r
  LIBRARY_RULE: 41, // LIBRARY_RULE moduleIndex, whatIndex

  // Failure Reporting

  SILENT_FAILS_ON: 28, // SILENT_FAILS_ON
  SILENT_FAILS_OFF: 29, // SILENT_FAILS_OFF

  // Because the tests have hard-coded opcode numbers, don't renumber
  // existing opcodes. New opcodes that have been put in the correct
  // sections above are repeated here in order to ensure we don't
  // reuse them.
  //
  // IF_LT: 30
  // IF_GE: 31
  // IF_LT_DYNAMIC: 32
  // IF_GE_DYNAMIC: 33
  // 34 reserved for @mingun
  // PUSH_EMPTY_STRING: 35
  // PLUCK: 36

  SOURCE_MAP_PUSH: 37, // SOURCE_MAP_PUSH loc-index
  SOURCE_MAP_POP: 38, // SOURCE_MAP_POP
  SOURCE_MAP_LABEL_PUSH: 39, // SOURCE_MAP_LABEL_PUSH sp, literal-index, loc-index
  SOURCE_MAP_LABEL_POP: 40, // SOURCE_MAP_LABEL_POP sp
  // LIBRARY_RULE: 41,
};
|
||||
|
||||
module.exports = opcodes;
|
||||
52
resources/app/node_modules/peggy/lib/compiler/passes/add-imported-rules.js
generated
vendored
Normal file
52
resources/app/node_modules/peggy/lib/compiler/passes/add-imported-rules.js
generated
vendored
Normal file
@@ -0,0 +1,52 @@
|
||||
// @ts-check
|
||||
"use strict";
|
||||
|
||||
/**
 * Generate trampoline stubs for each rule imported into this namespace.
 *
 * @example
 * import bar from "./lib.js" // Default rule imported into this namespace
 * import {baz} from "./lib.js" // One rule imported into this namespace by name
 *
 * @type {PEG.Pass}
 */
function addImportedRules(ast) {
  // The import's position in |ast.imports| doubles as its library number.
  ast.imports.forEach((imp, libraryNumber) => {
    for (const what of imp.what) {
      // Name of the rule inside the imported module.  Stays undefined for
      // default imports, which resolve to the module's default rule.
      let original;
      switch (what.type) {
        case "import_binding_all":
          // Namespace imports get no stub.
          continue;
        case "import_binding_default":
          break;
        case "import_binding":
          original = what.binding;
          break;
        case "import_binding_rename":
          original = what.rename;
          break;
        default:
          throw new TypeError("Unknown binding type");
      }
      // Trampoline stub: a local rule that just delegates to the
      // library rule.
      ast.rules.push({
        type: "rule",
        name: what.binding,
        nameLocation: what.location,
        expression: {
          type: "library_ref",
          name: original,
          library: imp.from.module,
          libraryNumber,
          location: what.location,
        },
        location: imp.from.location,
      });
    }
  });
}
|
||||
|
||||
module.exports = addImportedRules;
|
||||
43
resources/app/node_modules/peggy/lib/compiler/passes/fix-library-numbers.js
generated
vendored
Normal file
43
resources/app/node_modules/peggy/lib/compiler/passes/fix-library-numbers.js
generated
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
// @ts-check
|
||||
"use strict";
|
||||
|
||||
const visitor = require("../visitor");
|
||||
|
||||
/**
 * Look up the library number of the namespace import whose binding is
 * |name|.  The library number is the import's position in |ast.imports|.
 *
 * @param {PEG.ast.Grammar} ast
 * @param {string} name
 * @returns {number} index of the matching import, or -1 if none matches
 */
function findLibraryNumber(ast, name) {
  for (let i = 0; i < ast.imports.length; i++) {
    const matches = ast.imports[i].what.some(
      what => (what.type === "import_binding_all") && (what.binding === name)
    );
    if (matches) {
      return i;
    }
  }

  return -1;
}
|
||||
|
||||
/**
 * Resolve placeholder library numbers (-1) on library_ref nodes by matching
 * the referenced module against the grammar's namespace imports.  Reports an
 * error through the session when no matching import exists.
 *
 * @type {PEG.Pass}
 */
function fixLibraryNumbers(ast, _options, session) {
  const check = visitor.build({
    library_ref(/** @type {PEG.ast.LibraryReference} */ node) {
      if (node.libraryNumber !== -1) {
        // Already resolved.
        return;
      }
      node.libraryNumber = findLibraryNumber(ast, node.library);
      if (node.libraryNumber === -1) {
        session.error(
          `Unknown module "${node.library}"`,
          node.location
        );
      }
    },
  });
  check(ast);
}
|
||||
|
||||
module.exports = fixLibraryNumbers;
|
||||
1166
resources/app/node_modules/peggy/lib/compiler/passes/generate-bytecode.js
generated
vendored
Normal file
1166
resources/app/node_modules/peggy/lib/compiler/passes/generate-bytecode.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1677
resources/app/node_modules/peggy/lib/compiler/passes/generate-js.js
generated
vendored
Normal file
1677
resources/app/node_modules/peggy/lib/compiler/passes/generate-js.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
190
resources/app/node_modules/peggy/lib/compiler/passes/inference-match-result.js
generated
vendored
Normal file
190
resources/app/node_modules/peggy/lib/compiler/passes/inference-match-result.js
generated
vendored
Normal file
@@ -0,0 +1,190 @@
|
||||
"use strict";
|
||||
|
||||
const visitor = require("../visitor");
|
||||
const asts = require("../asts");
|
||||
const GrammarError = require("../../grammar-error");
|
||||
|
||||
// Tri-state match results stored on each node's |match| property.
const ALWAYS_MATCH = 1;
const SOMETIMES_MATCH = 0;
const NEVER_MATCH = -1;

// Inference match result of the each node. Can be:
// -1: negative result, matching of that node always fails
// 0: neutral result, may be fail, may be match
// 1: positive result, always match
function inferenceMatchResult(ast) {
  // Leaf helper: node may match or fail depending on input.
  function sometimesMatch(node) { return (node.match = SOMETIMES_MATCH); }
  // Leaf helper: node always matches; still recurse so the child's |match|
  // property is calculated as well.
  function alwaysMatch(node) {
    // eslint-disable-next-line no-use-before-define -- Mutual recursion
    inference(node.expression);

    return (node.match = ALWAYS_MATCH);
  }

  // Node's result is exactly its single child's result.
  function inferenceExpression(node) {
    // eslint-disable-next-line no-use-before-define -- Mutual recursion
    return (node.match = inference(node.expression));
  }
  // Fold the results of |elements|.  For a choice (forChoice=true) the node
  // fails only when every alternative fails; for a sequence it fails when
  // any element fails.
  function inferenceElements(elements, forChoice) {
    const length = elements.length;
    let always = 0;
    let never = 0;

    for (let i = 0; i < length; ++i) {
      // eslint-disable-next-line no-use-before-define -- Mutual recursion
      const result = inference(elements[i]);

      if (result === ALWAYS_MATCH) { ++always; }
      if (result === NEVER_MATCH) { ++never; }
    }

    if (always === length) {
      return ALWAYS_MATCH;
    }
    if (forChoice) {
      return never === length ? NEVER_MATCH : SOMETIMES_MATCH;
    }

    return never > 0 ? NEVER_MATCH : SOMETIMES_MATCH;
  }

  const inference = visitor.build({
    rule(node) {
      let oldResult = undefined;
      let count = 0;

      // If property not yet calculated, do that
      if (typeof node.match === "undefined") {
        // Seed with the neutral value so recursive references to this rule
        // see some result, then iterate to a fixed point.
        node.match = SOMETIMES_MATCH;
        do {
          oldResult = node.match;
          node.match = inference(node.expression);
          // 6 == 3! -- permutations count for all transitions from one match
          // state to another.
          // After 6 iterations the cycle with guarantee begins
          // For example, an input of `start = [] start` will generate the
          // sequence: 0 -> -1 -> -1 (then stop)
          //
          // A more complex grammar theoretically would generate the
          // sequence: 0 -> 1 -> 0 -> -1 -> 0 -> 1 -> ... (then cycle)
          // but there are no examples of such grammars yet (possible, they
          // do not exist at all)

          // istanbul ignore next This is canary test, shouldn't trigger in real life
          if (++count > 6) {
            throw new GrammarError(
              "Infinity cycle detected when trying to evaluate node match result",
              node.location
            );
          }
        } while (oldResult !== node.match);
      }

      return node.match;
    },
    named: inferenceExpression,
    choice(node) {
      return (node.match = inferenceElements(node.alternatives, true));
    },
    action: inferenceExpression,
    sequence(node) {
      return (node.match = inferenceElements(node.elements, false));
    },
    labeled: inferenceExpression,
    text: inferenceExpression,
    simple_and: inferenceExpression,
    simple_not(node) {
      // Negation flips ALWAYS (1) and NEVER (-1) and keeps SOMETIMES (0).
      return (node.match = -inference(node.expression));
    },
    optional: alwaysMatch,
    zero_or_more: alwaysMatch,
    one_or_more: inferenceExpression,
    repeated(node) {
      const match = inference(node.expression);
      const dMatch = node.delimiter ? inference(node.delimiter) : NEVER_MATCH;
      // If minimum is `null` it is equals to maximum (parsed from `|exact|` syntax)
      const min = node.min ? node.min : node.max;

      // If any boundary are variable - it can be negative, and it that case
      // node does not match, but it may be match with some other values
      if (min.type !== "constant" || node.max.type !== "constant") {
        return (node.match = SOMETIMES_MATCH);
      }
      // Now both boundaries is constants
      // If the upper boundary is zero or minimum exceeds maximum,
      // matching is impossible
      if (node.max.value === 0
        || (node.max.value !== null && min.value > node.max.value)
      ) {
        return (node.match = NEVER_MATCH);
      }

      if (match === NEVER_MATCH) {
        // If an expression always fails, a range will also always fail
        // (with the one exception - never matched expression repeated
        // zero times always match and returns an empty array).
        return (node.match = min.value === 0 ? ALWAYS_MATCH : NEVER_MATCH);
      }
      if (match === ALWAYS_MATCH) {
        if (node.delimiter && min.value >= 2) {
          // If an expression always match the final result determined only
          // by the delimiter, but delimiter used only when count of elements
          // two and more
          return (node.match = dMatch);
        }

        return (node.match = ALWAYS_MATCH);
      }

      // Here `match === SOMETIMES_MATCH`
      if (node.delimiter && min.value >= 2) {
        // If an expression always match the final result determined only
        // by the delimiter, but delimiter used only when count of elements
        // two and more
        return (
          // If a delimiter never match then the range also never match (because
          // there at least one delimiter)
          node.match = dMatch === NEVER_MATCH ? NEVER_MATCH : SOMETIMES_MATCH
        );
      }

      return (node.match = min.value === 0 ? ALWAYS_MATCH : SOMETIMES_MATCH);
    },
    group: inferenceExpression,
    semantic_and: sometimesMatch,
    semantic_not: sometimesMatch,
    rule_ref(node) {
      const rule = asts.findRule(ast, node.name);
      if (!rule) {
        // NOTE(review): missing rules are reported by a separate pass; the
        // neutral result here is a safe placeholder.
        return SOMETIMES_MATCH;
      }
      return (node.match = inference(rule));
    },
    library_ref() {
      // Can't look into pre-compiled rules.
      // NOTE(review): unlike the other handlers, this one does not set
      // |node.match| — confirm whether that is intentional.
      return 0;
    },
    literal(node) {
      // Empty literal always match on any input
      const match = node.value.length === 0 ? ALWAYS_MATCH : SOMETIMES_MATCH;

      return (node.match = match);
    },
    class(node) {
      // Empty character class never match on any input
      const match = node.parts.length === 0 ? NEVER_MATCH : SOMETIMES_MATCH;

      return (node.match = match);
    },
    // |any| not match on empty input
    any: sometimesMatch,
  });

  inference(ast);
}

// Expose the tri-state constants for other passes and for tests.
inferenceMatchResult.ALWAYS_MATCH = ALWAYS_MATCH;
inferenceMatchResult.SOMETIMES_MATCH = SOMETIMES_MATCH;
inferenceMatchResult.NEVER_MATCH = NEVER_MATCH;
|
||||
|
||||
module.exports = inferenceMatchResult;
|
||||
191
resources/app/node_modules/peggy/lib/compiler/passes/merge-character-classes.js
generated
vendored
Normal file
191
resources/app/node_modules/peggy/lib/compiler/passes/merge-character-classes.js
generated
vendored
Normal file
@@ -0,0 +1,191 @@
|
||||
// @ts-check
|
||||
"use strict";
|
||||
|
||||
/**
|
||||
* @typedef {import("../../peg")} PEG
|
||||
*/
|
||||
|
||||
/** @type {PEG.compiler.visitor} */
|
||||
const visitor = require("../visitor");
|
||||
|
||||
/**
 * Replace |target|'s contents, in place, with |source|'s: every existing key
 * is deleted, then every key of |source| is shallow-copied over.  Used to
 * rewrite an AST node into another node while keeping object identity.
 *
 * @param {unknown} target
 * @param {unknown} source
 */
function cloneOver(target, source) {
  const t = /** @type {Record<string,unknown>} */ (target);
  const s = /** @type {Record<string,unknown>} */ (source);
  for (const key of Object.keys(t)) {
    delete t[key];
  }
  for (const [key, value] of Object.entries(s)) {
    t[key] = value;
  }
}
|
||||
|
||||
/**
 * Clean up the parts array of a `class` node, in place: sort the parts,
 * drop ranges fully contained in an earlier range, and merge ranges that
 * overlap or are adjacent.
 *
 * @param {PEG.ast.CharacterClass["parts"]} parts
 * @returns {PEG.ast.CharacterClass["parts"]} the same (mutated) array
 */
function cleanParts(parts) {
  // Normalize a part (single char or [start, end] pair) to a range.
  const bounds = part => (Array.isArray(part) ? part : [part, part]);

  // Sort on increasing start, then decreasing end, so that a containing
  // range always precedes the ranges it contains.
  parts.sort((x, y) => {
    const [xStart, xEnd] = bounds(x);
    const [yStart, yEnd] = bounds(y);
    if (xStart !== yStart) {
      return xStart < yStart ? -1 : 1;
    }
    if (xEnd !== yEnd) {
      return xEnd > yEnd ? -1 : 1;
    }
    return 0;
  });

  // Empty strings compare below every character, and "".charCodeAt(0) is
  // NaN, so the first iteration never triggers either branch.
  let lastStart = "";
  let lastEnd = "";
  for (let i = 0; i < parts.length; i++) {
    const [curStart, curEnd] = bounds(parts[i]);
    if (curEnd <= lastEnd) {
      // Contained in the previous range; drop it.
      parts.splice(i--, 1);
      continue;
    }
    if (lastEnd.charCodeAt(0) + 1 >= curStart.charCodeAt(0)) {
      // Overlapping or adjacent: drop the current part and widen the
      // previous one.
      parts.splice(i--, 1);
      parts[i] = [lastStart, lastEnd = curEnd];
      continue;
    }
    lastStart = curStart;
    lastEnd = curEnd;
  }
  return parts;
}
|
||||
|
||||
/**
 * Merges a choice's character-class-like alternatives (classes, single-char
 * literals, and references to rules that reduce to either) into a single
 * character class, in place.
 *
 * @param {PEG.ast.Grammar} ast
 */
function mergeCharacterClasses(ast) {
  // Build a map from rule names to rules for quick lookup of
  // ref_rules.
  const rules = Object.create(null);
  ast.rules.forEach(rule => (rules[rule.name] = rule.expression));
  // Keep a map of which rules have been processed, so that when
  // we find a ref_rule, we can make sure its processed, before we
  // try to use it.
  const processedRules = Object.create(null);
  // |asClass| and |merge| call each other, hence the paired destructuring
  // assignment.
  const [asClass, merge] = [
    /**
     * Determine whether a node can be represented as a simple character class,
     * and return that class if so.
     *
     * @param {PEG.ast.Expression} node - the node to inspect
     * @param {boolean} [clone] - if true, always return a new node that
     *   can be modified by the caller
     * @returns {PEG.ast.CharacterClass | null}
     */
    (node, clone) => {
      if (node.type === "class" && !node.inverted) {
        if (clone) {
          // Shallow copy, with |parts| copied too so the caller may append.
          node = { ...node };
          node.parts = [...node.parts];
        }
        return node;
      }
      if (node.type === "literal" && node.value.length === 1) {
        // A one-character literal is equivalent to a one-element class.
        return {
          type: "class",
          parts: [node.value],
          inverted: false,
          ignoreCase: node.ignoreCase,
          location: node.location,
        };
      }
      if (node.type === "rule_ref") {
        const ref = rules[node.name];
        if (ref) {
          // Merge the referenced rule first so its classes are collapsed
          // before we inspect it.
          if (!processedRules[node.name]) {
            processedRules[node.name] = true;
            merge(ref);
          }
          const cls = asClass(ref, true);
          if (cls) {
            // Report the location of the reference, not of the rule body.
            cls.location = node.location;
          }
          return cls;
        }
      }
      return null;
    },
    visitor.build({
      choice(node) {
        /** @type {PEG.ast.CharacterClass | null} */
        let prev = null;
        let changed = false;
        node.alternatives.forEach((alt, i) => {
          merge(alt);
          const cls = asClass(alt);
          if (!cls) {
            // A non-class alternative breaks the run of mergeable classes.
            prev = null;
            return;
          }
          if (prev && prev.ignoreCase === cls.ignoreCase) {
            prev.parts.push(...cls.parts);
            // Point both slots at the merged class; the duplicate
            // references are filtered out below.
            node.alternatives[i - 1] = prev;
            node.alternatives[i] = prev;
            prev.location = {
              source: prev.location.source,
              start: prev.location.start,
              end: cls.location.end,
            };
            changed = true;
          } else {
            prev = cls;
          }
        });
        if (changed) {
          // Drop the duplicate references created by the merging above.
          node.alternatives = node.alternatives.filter(
            (alt, i, arr) => !i || alt !== arr[i - 1]
          );
          node.alternatives.forEach((alt, i) => {
            if (alt.type === "class") {
              alt.parts = cleanParts(alt.parts);
              // A one-character, non-inverted class degenerates back to a
              // literal.
              if (alt.parts.length === 1
                && !Array.isArray(alt.parts[0])
                && !alt.inverted) {
                node.alternatives[i] = {
                  type: "literal",
                  value: alt.parts[0],
                  ignoreCase: alt.ignoreCase,
                  location: alt.location,
                };
              }
            }
          });
          if (node.alternatives.length === 1) {
            // The whole choice collapsed into one node; replace in place so
            // the parent keeps the same object reference.
            cloneOver(node, node.alternatives[0]);
          }
        }
      },
      text(node) {
        merge(node.expression);
        // A text() wrapper around a class or literal is dropped here —
        // presumably because both already evaluate to the matched text, so
        // the wrapper is redundant.  The original location is kept.
        if (node.expression.type === "class"
          || node.expression.type === "literal") {
          const location = node.location;
          cloneOver(node, node.expression);
          node.location = location;
        }
      },
    }),
  ];

  ast.rules.forEach(rule => {
    processedRules[rule.name] = true;
    merge(rule.expression);
  });
}
|
||||
|
||||
module.exports = mergeCharacterClasses;
|
||||
49
resources/app/node_modules/peggy/lib/compiler/passes/remove-proxy-rules.js
generated
vendored
Normal file
49
resources/app/node_modules/peggy/lib/compiler/passes/remove-proxy-rules.js
generated
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
"use strict";
|
||||
|
||||
const asts = require("../asts");
|
||||
const visitor = require("../visitor");
|
||||
|
||||
// Removes proxy rules -- that is, rules that only delegate to other rule.
function removeProxyRules(ast, options, session) {
  // A proxy rule's whole expression is a single reference to another rule.
  const isProxyRule = node => node.type === "rule" && node.expression.type === "rule_ref";

  // Rewrite every reference to `from` so that it points directly at `to`.
  const redirectRefs = (grammar, from, to) => {
    const walk = visitor.build({
      rule_ref(node) {
        if (node.name !== from) {
          return;
        }
        node.name = to;

        session.info(
          `Proxy rule "${from}" replaced by the rule "${to}"`,
          node.location,
          [{
            message: "This rule will be used",
            location: asts.findRule(grammar, to).nameLocation,
          }]
        );
      },
    });

    walk(grammar);
  };

  // Indices of proxy rules that are safe to delete. A proxy listed in
  // allowedStartRules must be kept: callers may start parsing from it.
  const removable = [];

  for (let i = 0; i < ast.rules.length; i++) {
    const rule = ast.rules[i];
    if (isProxyRule(rule)) {
      redirectRefs(ast, rule.name, rule.expression.name);
      if (options.allowedStartRules.indexOf(rule.name) === -1) {
        removable.push(i);
      }
    }
  }

  // Delete from the highest index down so earlier indices stay valid.
  for (let k = removable.length - 1; k >= 0; k--) {
    ast.rules.splice(removable[k], 1);
  }
}
|
||||
|
||||
module.exports = removeProxyRules;
|
||||
28
resources/app/node_modules/peggy/lib/compiler/passes/report-duplicate-imports.js
generated
vendored
Normal file
28
resources/app/node_modules/peggy/lib/compiler/passes/report-duplicate-imports.js
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
// @ts-check
|
||||
"use strict";
|
||||
|
||||
/** @type {PEG.Pass} */
function reportDuplicateImports(ast, _options, session) {
  /**
   * Location of each "* as name" binding seen so far, keyed by binding name.
   * @type {Record<string, PEG.LocationRange>}
   */
  const seen = {};

  ast.imports.forEach(imp => {
    imp.what.forEach(what => {
      if (what.type !== "import_binding_all") {
        return;
      }
      if (Object.prototype.hasOwnProperty.call(seen, what.binding)) {
        session.error(
          `Module "${what.binding}" is already imported`,
          what.location,
          [{
            message: "Original module location",
            location: seen[what.binding],
          }]
        );
      }
      // Always record the latest location, matching the original pass:
      // a third duplicate is reported against the second, not the first.
      seen[what.binding] = what.location;
    });
  });
}
|
||||
|
||||
module.exports = reportDuplicateImports;
|
||||
72
resources/app/node_modules/peggy/lib/compiler/passes/report-duplicate-labels.js
generated
vendored
Normal file
72
resources/app/node_modules/peggy/lib/compiler/passes/report-duplicate-labels.js
generated
vendored
Normal file
@@ -0,0 +1,72 @@
|
||||
"use strict";
|
||||
|
||||
const visitor = require("../visitor");
|
||||
|
||||
// Checks that each label is defined only once within each scope.
function reportDuplicateLabels(ast, options, session) {
  // Shallow copy so sibling scopes cannot observe each other's labels.
  function copyEnv(env) {
    return Object.assign({}, env);
  }

  // Visit `node.expression` inside a fresh copy of the environment.
  function checkInOwnScope(node, env) {
    // eslint-disable-next-line no-use-before-define -- Mutual recursion
    check(node.expression, copyEnv(env));
  }

  const check = visitor.build({
    rule(node) {
      // Every rule starts with an empty label environment.
      check(node.expression, { });
    },

    choice(node, env) {
      // Each alternative is its own scope.
      node.alternatives.forEach(alternative => {
        check(alternative, copyEnv(env));
      });
    },

    action: checkInOwnScope,

    labeled(node, env) {
      const label = node.label;
      if (label && Object.prototype.hasOwnProperty.call(env, label)) {
        session.error(
          `Label "${node.label}" is already defined`,
          node.labelLocation,
          [{
            message: "Original label location",
            location: env[label],
          }]
        );
      }

      check(node.expression, env);

      // Record the label only after descending into the expression.
      env[node.label] = node.labelLocation;
    },

    text: checkInOwnScope,
    simple_and: checkInOwnScope,
    simple_not: checkInOwnScope,
    optional: checkInOwnScope,
    zero_or_more: checkInOwnScope,
    one_or_more: checkInOwnScope,

    repeated(node, env) {
      // Delimiter and body each get their own scope.
      if (node.delimiter) {
        check(node.delimiter, copyEnv(env));
      }

      check(node.expression, copyEnv(env));
    },

    group: checkInOwnScope,
  });

  check(ast);
}
|
||||
|
||||
module.exports = reportDuplicateLabels;
|
||||
32
resources/app/node_modules/peggy/lib/compiler/passes/report-duplicate-rules.js
generated
vendored
Normal file
32
resources/app/node_modules/peggy/lib/compiler/passes/report-duplicate-rules.js
generated
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
"use strict";
|
||||
|
||||
const visitor = require("../visitor");
|
||||
|
||||
// Checks that each rule is defined only once.
function reportDuplicateRules(ast, options, session) {
  /** Name -> nameLocation of the first definition seen. */
  const seen = {};

  const check = visitor.build({
    rule(node) {
      if (!Object.prototype.hasOwnProperty.call(seen, node.name)) {
        seen[node.name] = node.nameLocation;
        return;
      }

      // Keep the first definition's location so that every later
      // duplicate is reported against the original, not a previous copy.
      session.error(
        `Rule "${node.name}" is already defined`,
        node.nameLocation,
        [{
          message: "Original rule location",
          location: seen[node.name],
        }]
      );
    },
  });

  check(ast);
}
|
||||
|
||||
module.exports = reportDuplicateRules;
|
||||
37
resources/app/node_modules/peggy/lib/compiler/passes/report-incorrect-plucking.js
generated
vendored
Normal file
37
resources/app/node_modules/peggy/lib/compiler/passes/report-incorrect-plucking.js
generated
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
"use strict";
|
||||
|
||||
const visitor = require("../visitor");
|
||||
|
||||
//
// Compiler pass to ensure the following are enforced:
//
//   - plucking can not be done with an action block
//
function reportIncorrectPlucking(ast, options, session) {
  const check = visitor.build({
    action(node) {
      // Forward the action node so labels below know they sit under one.
      check(node.expression, node);
    },

    labeled(node, action) {
      if (node.pick && action) {
        session.error(
          "\"@\" cannot be used with an action block",
          node.labelLocation,
          [{
            message: "Action block location",
            location: action.codeLocation,
          }]
        );
      }

      // Descend without forwarding the action: the second argument is
      // reset to undefined for anything nested under this label.
      check(node.expression);
    },
  });

  check(ast);
}
|
||||
|
||||
module.exports = reportIncorrectPlucking;
|
||||
101
resources/app/node_modules/peggy/lib/compiler/passes/report-infinite-recursion.js
generated
vendored
Normal file
101
resources/app/node_modules/peggy/lib/compiler/passes/report-infinite-recursion.js
generated
vendored
Normal file
@@ -0,0 +1,101 @@
|
||||
"use strict";
|
||||
|
||||
const asts = require("../asts");
|
||||
const visitor = require("../visitor");
|
||||
|
||||
// Reports left recursion in the grammar, which prevents infinite recursion in
// the generated parser.
//
// Both direct and indirect recursion is detected. The pass also correctly
// reports cases like this:
//
//   start = "a"? start
//
// In general, if a rule reference can be reached without consuming any input,
// it can lead to left recursion.
function reportInfiniteRecursion(ast, options, session) {
  // Array with rule names for error message
  const visitedRules = [];
  // Array with rule_refs for diagnostic
  const backtraceRefs = [];

  const check = visitor.build({
    rule(node) {
      // Stop visiting once an error has been reported (session.errors is the
      // running error count kept by the session).
      if (session.errors > 0) {
        return;
      }
      visitedRules.push(node.name);
      check(node.expression);
      visitedRules.pop();
    },

    sequence(node) {
      if (session.errors > 0) {
        return;
      }
      // Only elements reachable without any prior input consumption can start
      // a left-recursive cycle, so stop at the first element that always
      // consumes on success (every() short-circuits on the first false).
      node.elements.every(element => {
        check(element);
        if (session.errors > 0) {
          return false;
        }
        return !asts.alwaysConsumesOnSuccess(ast, element);
      });
    },

    repeated(node) {
      if (session.errors > 0) {
        return;
      }
      check(node.expression);

      // If an expression does not consume input then recursion
      // over delimiter is possible
      if (node.delimiter
        && !asts.alwaysConsumesOnSuccess(ast, node.expression)
      ) {
        check(node.delimiter);
      }
    },

    rule_ref(node) {
      if (session.errors > 0) {
        return;
      }

      backtraceRefs.push(node);

      const rule = asts.findRule(ast, node.name);

      // A name already on the visited stack means we came back to a rule
      // without consuming input: a left-recursive cycle.
      if (visitedRules.indexOf(node.name) !== -1) {
        // Push the repeated name so the message shows the full cycle,
        // e.g. "a -> b -> a".
        visitedRules.push(node.name);

        session.error(
          "Possible infinite loop when parsing (left recursion: "
            + visitedRules.join(" -> ")
            + ")",
          rule.nameLocation,
          backtraceRefs.map((ref, i, a) => ({
            message: i + 1 !== a.length
              ? `Step ${i + 1}: call of the rule "${ref.name}" without input consumption`
              : `Step ${i + 1}: call itself without input consumption - left recursion`,
            location: ref.location,
          }))
        );

        // Because we enter into recursion we should break it
        return;
      }

      // Because we run all checks in one stage, some rules could be missing - this check
      // executed in parallel
      if (rule) {
        check(rule);
      }
      backtraceRefs.pop();
    },
  });

  check(ast);
}
|
||||
|
||||
module.exports = reportInfiniteRecursion;
|
||||
64
resources/app/node_modules/peggy/lib/compiler/passes/report-infinite-repetition.js
generated
vendored
Normal file
64
resources/app/node_modules/peggy/lib/compiler/passes/report-infinite-repetition.js
generated
vendored
Normal file
@@ -0,0 +1,64 @@
|
||||
"use strict";
|
||||
|
||||
const asts = require("../asts");
|
||||
const visitor = require("../visitor");
|
||||
|
||||
// Reports expressions that don't consume any input inside |*|, |+| or repeated in the
// grammar, which prevents infinite loops in the generated parser.
function reportInfiniteRepetition(ast, options, session) {
  // Shared handler for |*| and |+|: both loop forever when the body can
  // succeed without consuming any input.
  function checkUnboundedLoop(node) {
    if (!asts.alwaysConsumesOnSuccess(ast, node.expression)) {
      session.error(
        "Possible infinite loop when parsing (repetition used with an expression that may not consume any input)",
        node.location
      );
    }
  }

  const check = visitor.build({
    zero_or_more: checkUnboundedLoop,
    one_or_more: checkUnboundedLoop,

    repeated(node) {
      // No need to check min or max. They can only be numbers, variable
      // names, or code blocks.

      if (node.delimiter) {
        check(node.delimiter);
      }

      // Safe if either the body or the delimiter always consumes input.
      const consumes = asts.alwaysConsumesOnSuccess(ast, node.expression)
        || (node.delimiter
            && asts.alwaysConsumesOnSuccess(ast, node.delimiter));
      if (consumes) {
        return;
      }

      if (node.max.value === null) {
        session.error(
          "Possible infinite loop when parsing (unbounded range repetition used with an expression that may not consume any input)",
          node.location
        );
        return;
      }

      // If minimum is `null` it is equals to maximum (parsed from `|exact|` syntax)
      const min = node.min ? node.min : node.max;

      // Because the high boundary is defined, infinity repetition is not possible
      // but the grammar will waste of CPU
      session.warning(
        min.type === "constant" && node.max.type === "constant"
          ? `An expression may not consume any input and may always match ${node.max.value} times`
          : "An expression may not consume any input and may always match with a maximum repetition count",
        node.location
      );
    },
  });

  check(ast);
}
|
||||
|
||||
module.exports = reportInfiniteRepetition;
|
||||
22
resources/app/node_modules/peggy/lib/compiler/passes/report-undefined-rules.js
generated
vendored
Normal file
22
resources/app/node_modules/peggy/lib/compiler/passes/report-undefined-rules.js
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
"use strict";
|
||||
|
||||
const asts = require("../asts");
|
||||
const visitor = require("../visitor");
|
||||
|
||||
// Checks that all referenced rules exist.
function reportUndefinedRules(ast, options, session) {
  const check = visitor.build({
    rule_ref(node) {
      const target = asts.findRule(ast, node.name);
      if (target) {
        return;
      }

      session.error(
        `Rule "${node.name}" is not defined`,
        node.location
      );
    },
  });

  check(ast);
}
|
||||
|
||||
module.exports = reportUndefinedRules;
|
||||
84
resources/app/node_modules/peggy/lib/compiler/session.js
generated
vendored
Normal file
84
resources/app/node_modules/peggy/lib/compiler/session.js
generated
vendored
Normal file
@@ -0,0 +1,84 @@
|
||||
"use strict";
|
||||
|
||||
const GrammarError = require("../grammar-error");
|
||||
|
||||
/**
 * Holds the diagnostic callbacks for a compilation session. Callbacks the
 * caller supplies shadow the no-op prototype methods; missing or
 * non-function entries fall back to the no-ops.
 */
class Defaults {
  /**
   * @param {{error?: Function, warning?: Function, info?: Function}} [options]
   */
  constructor(options = {}) {
    for (const name of ["error", "warning", "info"]) {
      // Only functions are accepted; anything else keeps the no-op default.
      if (typeof options[name] === "function") {
        this[name] = options[name];
      }
    }
  }

  // eslint-disable-next-line class-methods-use-this -- Abstract
  error() {
    // Intentionally empty placeholder
  }

  // eslint-disable-next-line class-methods-use-this -- Abstract
  warning() {
    // Intentionally empty placeholder
  }

  // eslint-disable-next-line class-methods-use-this -- Abstract
  info() {
    // Intentionally empty placeholder
  }
}
|
||||
|
||||
/**
 * Collects diagnostics (errors, warnings, info messages) produced by compiler
 * passes and forwards each one to the user-supplied callbacks.
 */
class Session {
  /**
   * @param {{error?: Function, warning?: Function, info?: Function}} [options]
   *   Optional diagnostic callbacks; missing ones default to no-ops.
   */
  constructor(options) {
    this._callbacks = new Defaults(options);
    // First error turned into a GrammarError; thrown later by checkErrors().
    this._firstError = null;
    // Count of error-severity diagnostics registered so far.
    this.errors = 0;
    /** @type {import("../peg").Problem[]} */
    this.problems = [];
    /** @type {import("../peg").Stage} */
    this.stage = null;
  }

  /**
   * Registers an error-severity problem and notifies the `error` callback.
   * Does not throw immediately; see `checkErrors()`.
   */
  error(...args) {
    ++this.errors;
    // In order to preserve backward compatibility we cannot change `GrammarError`
    // constructor, nor throw another class of error:
    // - if we change `GrammarError` constructor, this will break plugins that
    //   throws `GrammarError`
    // - if we throw another Error class, this will break parser clients that
    //   catches GrammarError
    //
    // So we use a compromise: we throw an `GrammarError` with all found problems
    // in the `problems` property, but the thrown error itself is the first
    // registered error.
    //
    // Thus when the old client catches the error it can find all properties on
    // the Grammar error that it want. On the other hand the new client can
    // inspect the `problems` property to get all problems.
    if (this._firstError === null) {
      this._firstError = new GrammarError(...args);
      this._firstError.stage = this.stage;
      // Shares the live array, so problems reported later still show up on
      // the error that is eventually thrown.
      this._firstError.problems = this.problems;
    }

    this.problems.push(["error", ...args]);
    this._callbacks.error(this.stage, ...args);
  }

  /** Registers a warning-severity problem and notifies the `warning` callback. */
  warning(...args) {
    this.problems.push(["warning", ...args]);
    this._callbacks.warning(this.stage, ...args);
  }

  /** Registers an info-severity problem and notifies the `info` callback. */
  info(...args) {
    this.problems.push(["info", ...args]);
    this._callbacks.info(this.stage, ...args);
  }

  /** Throws the first registered error if any error was reported. */
  checkErrors() {
    if (this.errors !== 0) {
      throw this._firstError;
    }
  }
}
|
||||
|
||||
module.exports = Session;
|
||||
365
resources/app/node_modules/peggy/lib/compiler/stack.js
generated
vendored
Normal file
365
resources/app/node_modules/peggy/lib/compiler/stack.js
generated
vendored
Normal file
@@ -0,0 +1,365 @@
|
||||
// @ts-check
|
||||
"use strict";
|
||||
|
||||
const { SourceNode } = require("source-map-generator");
|
||||
const GrammarLocation = require("../grammar-location.js");
|
||||
|
||||
/**
|
||||
* @typedef {(string|SourceNode)[]} SourceArray
|
||||
*/
|
||||
|
||||
/** Utility class that helps generating code for C-like languages. */
class Stack {
  /**
   * Constructs the helper for tracking variable slots of the stack virtual machine
   *
   * @param {string} ruleName The name of rule that will be used in error messages
   * @param {string} varName The prefix for generated names of variables
   * @param {string} type The type of the variables. For JavaScript there are `var` or `let`
   * @param {number[]} bytecode Bytecode for error messages
   */
  constructor(ruleName, varName, type, bytecode) {
    /** Last used variable in the stack. */
    this.sp = -1;
    /** Maximum stack size. */
    this.maxSp = -1;
    this.varName = varName;
    this.ruleName = ruleName;
    this.type = type;
    this.bytecode = bytecode;
    /**
     * Map from stack index, to label targetting that index
     * @type {Record<number,{label:string,location:import("../peg.js").LocationRange}>}
     */
    this.labels = {};
    /**
     * Stack of in-flight source mappings
     * @type {[SourceArray, number, PEG.LocationRange][]}
     */
    this.sourceMapStack = [];
  }

  /**
   * Returns name of the variable at the index `i`.
   *
   * @param {number} i Index for which name must be generated
   * @return {string} Generated name
   *
   * @throws {RangeError} If `i < 0`, which means a stack underflow (there are more `pop`s than `push`es)
   */
  name(i) {
    if (i < 0) {
      throw new RangeError(
        `Rule '${this.ruleName}': The variable stack underflow: attempt to use a variable '${this.varName}<x>' at an index ${i}.\nBytecode: ${this.bytecode}`
      );
    }

    return this.varName + i;
  }

  /**
   * Builds a SourceNode whose mapping starts at `location`.
   *
   * @param {PEG.LocationRange} location
   * @param {SourceArray} chunks
   * @param {string} [name]
   * @returns {SourceNode}
   */
  static sourceNode(location, chunks, name) {
    const start = GrammarLocation.offsetStart(location);
    return new SourceNode(
      start.line,
      // SourceNode columns are 0-based; grammar columns are 1-based.
      start.column ? start.column - 1 : null,
      String(location.source),
      chunks,
      name
    );
  }

  /**
   * Assigns `exprCode` to the new variable in the stack, returns generated code.
   * As the result, the size of a stack increases on 1.
   *
   * @param {string} exprCode Any expression code that must be assigned to the new variable in the stack
   * @return {string|SourceNode} Assignment code
   */
  push(exprCode) {
    if (++this.sp > this.maxSp) { this.maxSp = this.sp; }

    const label = this.labels[this.sp];
    const code = [this.name(this.sp), " = ", exprCode, ";"];
    if (label) {
      if (this.sourceMapStack.length) {
        // Map the "<var> = " prefix to the label's own location, named after
        // the label; the rest of the assignment stays with the outer mapping.
        const sourceNode = Stack.sourceNode(
          label.location,
          code.splice(0, 2),
          label.label
        );
        const { parts, location } = this.sourceMapPopInternal();
        // If the in-flight mapping started before the label ends, narrow it
        // so the labelled prefix keeps its own mapping.
        const newLoc = (location.start.offset < label.location.end.offset)
          ? {
              start: label.location.end,
              end: location.end,
              source: location.source,
            }
          : location;

        const outerNode = Stack.sourceNode(
          newLoc,
          code.concat("\n")
        );
        this.sourceMapStack.push([parts, parts.length + 1, location]);
        return new SourceNode(
          null,
          null,
          label.location.source,
          [sourceNode, outerNode]
        );
      } else {
        return Stack.sourceNode(
          label.location,
          code.concat("\n")
        );
      }
    }
    return code.join("");
  }

  /**
   * @overload
   * @param {undefined} [n]
   * @return {string}
   */
  /**
   * @overload
   * @param {number} n
   * @return {string[]}
   */
  /**
   * Returns name or `n` names of the variable(s) from the top of the stack.
   *
   * @param {number} [n] Quantity of variables, which need to be removed from the stack
   * @returns {string[]|string} Generated name(s). If n is defined then it returns an
   *          array of length `n`
   *
   * @throws {RangeError} If the stack underflow (there are more `pop`s than `push`es)
   */
  pop(n) {
    if (n !== undefined) {
      this.sp -= n;

      return Array.from({ length: n }, (v, i) => this.name(this.sp + 1 + i));
    }

    return this.name(this.sp--);
  }

  /**
   * Returns name of the first free variable. The same as `index(0)`.
   *
   * @return {string} Generated name
   *
   * @throws {RangeError} If the stack is empty (there was no `push`'s yet)
   */
  top() { return this.name(this.sp); }

  /**
   * Returns name of the variable at index `i`.
   *
   * @param {number} i Index of the variable from top of the stack
   * @return {string} Generated name
   *
   * @throws {RangeError} If `i < 0` or more than the stack size
   */
  index(i) {
    if (i < 0) {
      throw new RangeError(
        `Rule '${this.ruleName}': The variable stack overflow: attempt to get a variable at a negative index ${i}.\nBytecode: ${this.bytecode}`
      );
    }

    return this.name(this.sp - i);
  }

  /**
   * Returns variable name that contains result (bottom of the stack).
   *
   * @return {string} Generated name
   *
   * @throws {RangeError} If the stack is empty (there was no `push`es yet)
   */
  result() {
    if (this.maxSp < 0) {
      throw new RangeError(
        `Rule '${this.ruleName}': The variable stack is empty, can't get the result.\nBytecode: ${this.bytecode}`
      );
    }

    return this.name(0);
  }

  /**
   * Returns defines of all used variables.
   *
   * @return {string} Generated define variable expression with the type `this.type`.
   *         If the stack is empty, returns empty string
   */
  defines() {
    if (this.maxSp < 0) {
      return "";
    }

    return this.type + " " + Array.from({ length: this.maxSp + 1 }, (v, i) => this.name(i)).join(", ") + ";";
  }

  /**
   * Checks that code in the `generateIf` and `generateElse` move the stack pointer in the same way.
   *
   * @template T
   * @param {number} pos Opcode number for error messages
   * @param {() => T} generateIf First function that works with this stack
   * @param {(() => T)|null} [generateElse] Second function that works with this stack
   * @return {T[]}
   *
   * @throws {Error} If `generateElse` is defined and the stack pointer moved differently in the
   *         `generateIf` and `generateElse`
   */
  checkedIf(pos, generateIf, generateElse) {
    const baseSp = this.sp;

    const ifResult = generateIf();

    if (!generateElse) {
      return [ifResult];
    }
    const thenSp = this.sp;

    // Both branches must start from the same stack state.
    this.sp = baseSp;
    const elseResult = generateElse();

    if (thenSp !== this.sp) {
      throw new Error(
        "Rule '" + this.ruleName + "', position " + pos + ": "
        + "Branches of a condition can't move the stack pointer differently "
        + "(before: " + baseSp + ", after then: " + thenSp + ", after else: " + this.sp + "). "
        + "Bytecode: " + this.bytecode
      );
    }
    return [ifResult, elseResult];
  }

  /**
   * Checks that code in the `generateBody` do not move stack pointer.
   *
   * @template T
   * @param {number} pos Opcode number for error messages
   * @param {() => T} generateBody Function that works with this stack
   * @return {T}
   *
   * @throws {Error} If `generateBody` move the stack pointer (if it contains unbalanced `push`es and `pop`s)
   */
  checkedLoop(pos, generateBody) {
    const baseSp = this.sp;

    const result = generateBody();

    if (baseSp !== this.sp) {
      throw new Error(
        "Rule '" + this.ruleName + "', position " + pos + ": "
        + "Body of a loop can't move the stack pointer "
        + "(before: " + baseSp + ", after: " + this.sp + "). "
        + "Bytecode: " + this.bytecode
      );
    }
    return result;
  }

  /**
   * Opens a new in-flight source mapping over `parts` for `location`.
   *
   * @param {SourceArray} parts
   * @param {PEG.LocationRange} location
   */
  sourceMapPush(parts, location) {
    if (this.sourceMapStack.length) {
      const top = this.sourceMapStack[this.sourceMapStack.length - 1];
      // If the current top of stack starts at the same location as
      // the about to be pushed item, we should update its start location to
      // be past the new one. Otherwise any code it generates will
      // get allocated to the inner node.
      if (top[2].start.offset === location.start.offset
        && top[2].end.offset > location.end.offset) {
        top[2] = {
          start: location.end,
          end: top[2].end,
          source: top[2].source,
        };
      }
    }
    this.sourceMapStack.push([
      parts,
      parts.length,
      location,
    ]);
  }

  /**
   * Closes the topmost in-flight mapping: wraps everything appended to
   * `parts` since the matching push into a single SourceNode.
   *
   * @returns {{parts:SourceArray,location:PEG.LocationRange}}
   */
  sourceMapPopInternal() {
    const elt = this.sourceMapStack.pop();
    if (!elt) {
      throw new RangeError(
        `Rule '${this.ruleName}': Attempting to pop an empty source map stack.\nBytecode: ${this.bytecode}`
      );
    }
    const [
      parts,
      index,
      location,
    ] = elt;
    // Everything appended after `index` belongs to this mapping.
    const chunks = parts.splice(index).map(
      chunk => (chunk instanceof SourceNode
        ? chunk
        : chunk + "\n"
      )
    );
    if (chunks.length) {
      const start = GrammarLocation.offsetStart(location);
      parts.push(new SourceNode(
        start.line,
        start.column - 1,
        String(location.source),
        chunks
      ));
    }
    return { parts, location };
  }

  /**
   * @param {number} [offset]
   * @returns {[SourceArray, number, PEG.LocationRange]|undefined}
   */
  sourceMapPop(offset) {
    const { location } = this.sourceMapPopInternal();
    if (this.sourceMapStack.length
      && location.end.offset
        < this.sourceMapStack[this.sourceMapStack.length - 1][2].end.offset) {
      // The just-closed mapping ends inside the enclosing one: re-pop the
      // enclosing mapping and re-push it narrowed to start after this one.
      const { parts, location: outer } = this.sourceMapPopInternal();
      const newLoc = (outer.start.offset < location.end.offset)
        ? {
            start: location.end,
            end: outer.end,
            source: outer.source,
          }
        : outer;

      this.sourceMapStack.push([
        parts,
        parts.length + (offset || 0),
        newLoc,
      ]);
    }
    return undefined;
  }
}
|
||||
|
||||
module.exports = Stack;
|
||||
91
resources/app/node_modules/peggy/lib/compiler/utils.js
generated
vendored
Normal file
91
resources/app/node_modules/peggy/lib/compiler/utils.js
generated
vendored
Normal file
@@ -0,0 +1,91 @@
|
||||
"use strict";
|
||||
|
||||
/** Uppercase hexadecimal code of a character's first UTF-16 code unit. */
function hex(ch) {
  const code = ch.charCodeAt(0);
  return code.toString(16).toUpperCase();
}
|
||||
exports.hex = hex;
|
||||
|
||||
function stringEscape(s) {
  // ECMA-262, 5th ed., 7.8.4: All characters may appear literally in a string
  // literal except for the closing quote character, backslash, carriage
  // return, line separator, paragraph separator, and line feed. Any character
  // may appear in the form of an escape sequence.
  //
  // For portability, we also escape all control and non-ASCII characters.
  //
  // Replacement order matters: backslashes are escaped first so that the
  // backslashes introduced by later steps are not escaped again.
  const steps = [
    [/\\/g, "\\\\"],   // Backslash
    [/"/g, "\\\""],    // Closing double quote
    [/\0/g, "\\0"],    // Null
    [/\x08/g, "\\b"],  // Backspace
    [/\t/g, "\\t"],    // Horizontal tab
    [/\n/g, "\\n"],    // Line feed
    [/\v/g, "\\v"],    // Vertical tab
    [/\f/g, "\\f"],    // Form feed
    [/\r/g, "\\r"],    // Carriage return
    [/[\x00-\x0F]/g, ch => "\\x0" + hex(ch)],
    [/[\x10-\x1F\x7F-\xFF]/g, ch => "\\x" + hex(ch)],
    [/[\u0100-\u0FFF]/g, ch => "\\u0" + hex(ch)],
    [/[\u1000-\uFFFF]/g, ch => "\\u" + hex(ch)],
  ];

  return steps.reduce(
    (out, [pattern, replacement]) => out.replace(pattern, replacement),
    s
  );
}
|
||||
exports.stringEscape = stringEscape;
|
||||
|
||||
function regexpClassEscape(s) {
  // Based on ECMA-262, 5th ed., 7.8.5 & 15.10.1.
  //
  // For portability, we also escape all control and non-ASCII characters.
  //
  // Backslash is handled first so later escapes are not double-escaped.
  const steps = [
    [/\\/g, "\\\\"],   // Backslash
    [/\//g, "\\/"],    // Closing slash
    [/]/g, "\\]"],     // Closing bracket
    [/\^/g, "\\^"],    // Caret
    [/-/g, "\\-"],     // Dash
    [/\0/g, "\\0"],    // Null
    [/\x08/g, "\\b"],  // Backspace
    [/\t/g, "\\t"],    // Horizontal tab
    [/\n/g, "\\n"],    // Line feed
    [/\v/g, "\\v"],    // Vertical tab
    [/\f/g, "\\f"],    // Form feed
    [/\r/g, "\\r"],    // Carriage return
    [/[\x00-\x0F]/g, ch => "\\x0" + hex(ch)],
    [/[\x10-\x1F\x7F-\xFF]/g, ch => "\\x" + hex(ch)],
    [/[\u0100-\u0FFF]/g, ch => "\\u0" + hex(ch)],
    [/[\u1000-\uFFFF]/g, ch => "\\u" + hex(ch)],
  ];

  return steps.reduce(
    (out, [pattern, replacement]) => out.replace(pattern, replacement),
    s
  );
}
|
||||
exports.regexpClassEscape = regexpClassEscape;
|
||||
|
||||
/**
 * Base64 encode a Uint8Array. Needed for browser compatibility where
 * the Buffer class is not available.
 *
 * @param {Uint8Array} u8 Bytes to encode
 * @returns {string} Base64 encoded string
 */
function base64(u8) {
  // Note: btoa has the worst API, and even mentioning Buffer here will
  // cause rollup to suck it in.

  // See RFC4648, sec. 4.
  // https://datatracker.ietf.org/doc/html/rfc4648#section-4
  const A = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
  const rem = u8.length % 3;
  const len = u8.length - rem;
  const out = [];

  for (let i = 0; i < len; i += 3) {
    // Pack three bytes into one 24-bit word, then emit four 6-bit digits.
    const word = (u8[i] << 16) | (u8[i + 1] << 8) | u8[i + 2];
    out.push(
      A[(word >> 18) & 0x3f],
      A[(word >> 12) & 0x3f],
      A[(word >> 6) & 0x3f],
      A[word & 0x3f]
    );
  }

  // Pad the final group per RFC 4648.
  if (rem === 1) {
    out.push(
      A[u8[len] >> 2],
      A[(u8[len] & 0x3) << 4],
      "=="
    );
  } else if (rem === 2) {
    out.push(
      A[u8[len] >> 2],
      A[((u8[len] & 0x3) << 4) | (u8[len + 1] >> 4)],
      A[(u8[len + 1] & 0xf) << 2],
      "="
    );
  }

  return out.join("");
}
|
||||
exports.base64 = base64;
|
||||
100
resources/app/node_modules/peggy/lib/compiler/visitor.js
generated
vendored
Normal file
100
resources/app/node_modules/peggy/lib/compiler/visitor.js
generated
vendored
Normal file
@@ -0,0 +1,100 @@
|
||||
"use strict";
|
||||
|
||||
// Simple AST node visitor builder.
const visitor = {
  /**
   * Build a visit function from a map of per-node-type handlers.
   *
   * Any node type missing from `functions` is given a default handler
   * that simply recurses into child nodes; the passed-in object is
   * mutated to hold those defaults.
   *
   * @param {Object<string, Function>} functions Handlers keyed by node type
   * @returns {Function} visit(node, ...args) dispatching on node.type
   */
  build(functions) {
    // Dispatch on the node's type; extra args are threaded through.
    const visit = (node, ...args) => functions[node.type](node, ...args);

    // Default for leaf nodes: nothing to recurse into.
    const ignore = () => {
      // Do nothing.
    };

    // Default for wrapper nodes holding a single `expression` child.
    const intoExpression = (node, ...args) => visit(node.expression, ...args);

    // Default for nodes whose children live in an array-valued property.
    //
    // .map() is deliberately not used here: if you need the results of
    // applying the visitor to children, you probably also need to process
    // them somehow, so you have to override the handler anyway. When that
    // is not needed, we avoid wasting time and memory on an output array.
    const intoEach = (property) => (node, ...args) => {
      node[property].forEach(child => visit(child, ...args));
    };

    // Visit a possibly-absent initializer slot that may hold either a
    // single node or an array of nodes.
    const visitInitializers = (init, args) => {
      if (!init) {
        return;
      }
      if (Array.isArray(init)) {
        for (const one of init) {
          visit(one, ...args);
        }
      } else {
        visit(init, ...args);
      }
    };

    const defaults = {
      grammar(node, ...args) {
        for (const imp of node.imports) {
          visit(imp, ...args);
        }

        visitInitializers(node.topLevelInitializer, args);
        visitInitializers(node.initializer, args);

        node.rules.forEach(rule => visit(rule, ...args));
      },

      grammar_import: ignore,
      top_level_initializer: ignore,
      initializer: ignore,
      rule: intoExpression,
      named: intoExpression,
      choice: intoEach("alternatives"),
      action: intoExpression,
      sequence: intoEach("elements"),
      labeled: intoExpression,
      text: intoExpression,
      simple_and: intoExpression,
      simple_not: intoExpression,
      optional: intoExpression,
      zero_or_more: intoExpression,
      one_or_more: intoExpression,
      repeated(node, ...args) {
        // The delimiter is visited first, when present.
        if (node.delimiter) {
          visit(node.delimiter, ...args);
        }

        return visit(node.expression, ...args);
      },
      group: intoExpression,
      semantic_and: ignore,
      semantic_not: ignore,
      rule_ref: ignore,
      library_ref: ignore,
      literal: ignore,
      class: ignore,
      any: ignore,
    };

    // Fill in defaults for every type the caller did not override.
    for (const [type, fn] of Object.entries(defaults)) {
      if (!Object.prototype.hasOwnProperty.call(functions, type)) {
        functions[type] = fn;
      }
    }

    return visit;
  },
};
|
||||
|
||||
module.exports = visitor;
|
||||
175
resources/app/node_modules/peggy/lib/grammar-error.js
generated
vendored
Normal file
175
resources/app/node_modules/peggy/lib/grammar-error.js
generated
vendored
Normal file
@@ -0,0 +1,175 @@
|
||||
"use strict";
|
||||
|
||||
const GrammarLocation = require("./grammar-location");
|
||||
|
||||
// See: https://github.com/Microsoft/TypeScript-wiki/blob/master/Breaking-Changes.md#extending-built-ins-like-error-array-and-map-may-no-longer-work
// This is roughly what typescript generates, it's not called after super(), where it's needed.
// istanbul ignore next This is a special black magic that cannot be covered everywhere
//
// Picks the best available mechanism for re-pointing an object's prototype:
// 1. Native Object.setPrototypeOf, when present.
// 2. Direct __proto__ assignment, when the environment honors it
//    (probed via `{ __proto__: [] } instanceof Array`).
// 3. Last resort: shallow-copy own enumerable properties (only an
//    approximation of prototype replacement).
const setProtoOf = Object.setPrototypeOf
  || ({ __proto__: [] } instanceof Array
    && function(d, b) {
      // eslint-disable-next-line no-proto -- Backward-compatibility
      d.__proto__ = b;
    })
  || function(d, b) {
    // No way to mutate the prototype; copy own properties instead.
    for (const p in b) {
      if (Object.prototype.hasOwnProperty.call(b, p)) {
        d[p] = b[p];
      }
    }
  };
|
||||
|
||||
// Thrown when the grammar contains an error.
/** @type {import("./peg").GrammarError} */
class GrammarError extends Error {
  /**
   * @param {string} message Human-readable description of the problem.
   * @param {import("./peg").LocationRange} [location] Where in the grammar
   *   the problem occurred.
   * @param {import("./peg").DiagnosticNote[]} [diagnostics] Additional notes
   *   pointing at related grammar locations. Defaults to [].
   */
  constructor(message, location, diagnostics) {
    super(message);
    // Re-attach the prototype chain, which extending Error can break in
    // downleveled code (see the setProtoOf shim in this file).
    setProtoOf(this, GrammarError.prototype);
    this.name = "GrammarError";
    this.location = location;
    if (diagnostics === undefined) {
      diagnostics = [];
    }
    this.diagnostics = diagnostics;
    // All problems if this error is thrown by the plugin and not at stage
    // checking phase
    this.stage = null;
    // Each problem is a [severity, message, location, diagnostics] tuple;
    // format() consumes this shape below.
    this.problems = [["error", message, location, diagnostics]];
  }

  /**
   * Plain-text rendering: the message plus "at"/"from" location lines for
   * the main location and each diagnostic note.
   *
   * @returns {string}
   */
  toString() {
    let str = super.toString();
    if (this.location) {
      str += "\n at ";
      // Only prefix the source when one was given (null/undefined skipped).
      if ((this.location.source !== undefined)
          && (this.location.source !== null)) {
        str += `${this.location.source}:`;
      }
      str += `${this.location.start.line}:${this.location.start.column}`;
    }
    for (const diag of this.diagnostics) {
      str += "\n from ";
      if ((diag.location.source !== undefined)
          && (diag.location.source !== null)) {
        str += `${diag.location.source}:`;
      }
      str += `${diag.location.start.line}:${diag.location.start.column}: ${diag.message}`;
    }

    return str;
  }

  /**
   * Format the error with associated sources. The `location.source` should have
   * a `toString()` representation in order the result to look nice. If source
   * is `null` or `undefined`, it is skipped from the output
   *
   * Sample output:
   * ```
   * Error: Label "head" is already defined
   *  --> examples/arithmetics.pegjs:15:17
   *    |
   * 15 |   = head:Factor head:(_ ("*" / "/") _ Factor)* {
   *    |                 ^^^^
   * note: Original label location
   *  --> examples/arithmetics.pegjs:15:5
   *    |
   * 15 |   = head:Factor head:(_ ("*" / "/") _ Factor)* {
   *    |     ^^^^
   * ```
   *
   * @param {import("./peg").SourceText[]} sources mapping from location source to source text
   *
   * @returns {string} the formatted error
   */
  format(sources) {
    // Pre-split every source text into lines so entry() can extract the
    // line a location points at. Missing text yields an empty line list.
    const srcLines = sources.map(({ source, text }) => ({
      source,
      text: (text !== null && text !== undefined)
        ? String(text).split(/\r\n|\n|\r/g)
        : [],
    }));

    /**
     * Returns a highlighted piece of source to which the `location` points
     *
     * @param {import("./peg").LocationRange} location
     * @param {number} indent How much width in symbols line number strip should have
     * @param {string} message Additional message that will be shown after location
     * @returns {string}
     */
    function entry(location, indent, message = "") {
      let str = "";
      const src = srcLines.find(({ source }) => source === location.source);
      const s = location.start;
      // Translate to coordinates in the enclosing file when the grammar
      // is embedded (GrammarLocation source); otherwise the start as-is.
      const offset_s = GrammarLocation.offsetStart(location);
      if (src) {
        const e = location.end;
        const line = src.text[s.line - 1];
        // Underline runs to the end column on single-line ranges, or to
        // the end of this line otherwise; always at least one "^".
        const last = s.line === e.line ? e.column : line.length + 1;
        const hatLen = (last - s.column) || 1;
        if (message) {
          str += `\nnote: ${message}`;
        }
        str += `
 --> ${location.source}:${offset_s.line}:${offset_s.column}
${"".padEnd(indent)} |
${offset_s.line.toString().padStart(indent)} | ${line}
${"".padEnd(indent)} | ${"".padEnd(s.column - 1)}${"".padEnd(hatLen, "^")}`;
      } else {
        // No source text available: emit a bare location reference.
        str += `\n at ${location.source}:${offset_s.line}:${offset_s.column}`;
        if (message) {
          str += `: ${message}`;
        }
      }

      return str;
    }

    /**
     * Returns a formatted representation of the one problem in the error.
     *
     * @param {import("./peg").Severity} severity Importance of the message
     * @param {string} message Text message of the problem
     * @param {import("./peg").LocationRange?} location Location of the problem in the source
     * @param {import("./peg").DiagnosticNote[]} diagnostics Additional notes about the problem
     * @returns {string}
     */
    function formatProblem(severity, message, location, diagnostics = []) {
      // Calculate maximum width of all lines
      let maxLine = -Infinity;
      if (location) {
        maxLine = diagnostics.reduce(
          (t, { location }) => Math.max(
            t, GrammarLocation.offsetStart(location).line
          ),
          location.start.line
        );
      } else {
        maxLine = Math.max.apply(
          null,
          diagnostics.map(d => d.location.start.line)
        );
      }
      // Reused as the character width of the widest line number, which
      // sets the indent of the "|" gutter in entry().
      maxLine = maxLine.toString().length;

      let str = `${severity}: ${message}`;
      if (location) {
        str += entry(location, maxLine);
      }
      for (const diag of diagnostics) {
        str += entry(diag.location, maxLine, diag.message);
      }

      return str;
    }

    // "info" problems are only appropriate if in verbose mode.
    // Handle them separately.
    return this.problems
      .filter(p => p[0] !== "info")
      .map(p => formatProblem(...p)).join("\n\n");
  }
}
|
||||
|
||||
module.exports = GrammarError;
|
||||
81
resources/app/node_modules/peggy/lib/grammar-location.js
generated
vendored
Normal file
81
resources/app/node_modules/peggy/lib/grammar-location.js
generated
vendored
Normal file
@@ -0,0 +1,81 @@
|
||||
"use strict";
|
||||
|
||||
/**
 * When used as a grammarSource, allows grammars embedded in larger files to
 * specify their offset. The start location is the first character in the
 * grammar. The first line is often moved to the right by some number of
 * columns, but subsequent lines all start at the first column.
 */
class GrammarLocation {
  /**
   * Create an instance.
   *
   * @param {any} source The original grammarSource. Should be a string or
   *   have a toString() method.
   * @param {import("./peg").Location} start The starting offset for the
   *   grammar in the larger file.
   */
  constructor(source, start) {
    this.source = source;
    this.start = start;
  }

  /**
   * Coerce to a string.
   *
   * @returns {string} The source, stringified.
   */
  toString() {
    return String(this.source);
  }

  /**
   * Return a new Location offset from the given location by the start of the
   * grammar.
   *
   * @param {import("./peg").Location} loc The location as if the start of the
   *   grammar was the start of the file.
   * @returns {import("./peg").Location} The offset location.
   */
  offset(loc) {
    const { line, column, offset } = loc;
    // Only the grammar's first line is shifted right by the start column;
    // every later line already begins at column one of the larger file.
    const shiftedColumn = (line === 1)
      ? column + this.start.column - 1
      : column;
    return {
      line: line + this.start.line - 1,
      column: shiftedColumn,
      offset: offset + this.start.offset,
    };
  }

  /**
   * If the range has a grammarSource that is a GrammarLocation, offset the
   * start of that range by the GrammarLocation.
   *
   * @param {import("./peg").LocationRange} range The range to extract from.
   * @returns {import("./peg").Location} The offset start if possible, or the
   *   original start.
   */
  static offsetStart(range) {
    return GrammarLocation.#maybeOffset(range, range.start);
  }

  /**
   * If the range has a grammarSource that is a GrammarLocation, offset the
   * end of that range by the GrammarLocation.
   *
   * @param {import("./peg").LocationRange} range The range to extract from.
   * @returns {import("./peg").Location} The offset end if possible, or the
   *   original end.
   */
  static offsetEnd(range) {
    return GrammarLocation.#maybeOffset(range, range.end);
  }

  /**
   * Apply range.source.offset() to loc when the source supports it,
   * otherwise return loc unchanged.
   *
   * @param {import("./peg").LocationRange} range
   * @param {import("./peg").Location} loc
   * @returns {import("./peg").Location}
   */
  static #maybeOffset(range, loc) {
    const src = range.source;
    return (src && (typeof src.offset === "function"))
      ? src.offset(loc)
      : loc;
  }
}
|
||||
|
||||
module.exports = GrammarLocation;
|
||||
4117
resources/app/node_modules/peggy/lib/parser.js
generated
vendored
Normal file
4117
resources/app/node_modules/peggy/lib/parser.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
172
resources/app/node_modules/peggy/lib/peg.js
generated
vendored
Normal file
172
resources/app/node_modules/peggy/lib/peg.js
generated
vendored
Normal file
@@ -0,0 +1,172 @@
|
||||
"use strict";
|
||||
|
||||
const GrammarError = require("./grammar-error");
|
||||
const GrammarLocation = require("./grammar-location");
|
||||
const asts = require("./compiler/asts.js");
|
||||
const compiler = require("./compiler");
|
||||
const parser = require("./parser");
|
||||
const VERSION = require("./version");
|
||||
|
||||
// Identifiers that generated parsers must not use as variable names; exposed
// as peg.RESERVED_WORDS and copied into each generate() run's config.
const RESERVED_WORDS = [
  // Reserved keywords as of ECMAScript 2015
  "break",
  "case",
  "catch",
  "class",
  "const",
  "continue",
  "debugger",
  "default",
  "delete",
  "do",
  "else",
  "export",
  "extends",
  "finally",
  "for",
  "function",
  "if",
  "import",
  "in",
  "instanceof",
  "new",
  "return",
  "super",
  "switch",
  "this",
  "throw",
  "try",
  "typeof",
  "var",
  "void",
  "while",
  "with",

  // Special constants
  "null",
  "true",
  "false",

  // These are always reserved:
  "enum",

  // The following are only reserved when they are found in strict mode code
  // Peggy generates code in strict mode, so they are applicable
  "implements",
  "interface",
  "let",
  "package",
  "private",
  "protected",
  "public",
  "static",
  "yield",

  // The following are only reserved when they are found in module code:
  "await",

  // The following are reserved as future keywords by ECMAScript 1..3
  // specifications, but not any more in modern ECMAScript. We don't need these
  // because the code-generation of Peggy only targets ECMAScript >= 5.
  //
  // - abstract
  // - boolean
  // - byte
  // - char
  // - double
  // - final
  // - float
  // - goto
  // - int
  // - long
  // - native
  // - short
  // - synchronized
  // - throws
  // - transient
  // - volatile

  // These are not reserved keywords, but using them as variable names is problematic.
  "arguments", // Conflicts with a special variable available inside functions.
  "eval", // Redeclaring eval() is prohibited in strict mode

  // A few identifiers have a special meaning in some contexts without being
  // reserved words of any kind. These we don't need to worry about as they can
  // all be safely used as variable names.
  //
  // - as
  // - async
  // - from
  // - get
  // - of
  // - set
];
|
||||
|
||||
const peg = {
  // Peggy version (filled in by /tools/release).
  VERSION,

  /**
   * Default list of reserved words. Contains list of currently and future
   * JavaScript (ECMAScript 2015) reserved words.
   *
   * @see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Lexical_grammar#reserved_words
   */
  RESERVED_WORDS,

  GrammarError,
  GrammarLocation,
  parser,
  compiler,

  // Generates a parser from a specified grammar and returns it.
  //
  // The grammar must be a string in the format described by the meta-grammar in
  // the parser.pegjs file.
  //
  // Throws |peg.parser.SyntaxError| if the grammar contains a syntax error or
  // |peg.GrammarError| if it contains a semantic error. Note that not all
  // errors are detected during the generation and some may protrude to the
  // generated parser and cause its malfunction.
  generate(grammar, options) {
    options = options !== undefined ? options : {};

    // Copy the default pass lists deeply enough that plugins can add or
    // remove passes without mutating the shared defaults.
    const copyPasses = (passes) => {
      const converted = {};
      for (const stage of Object.keys(passes)) {
        converted[stage] = passes[stage].slice();
      }

      return converted;
    };

    const plugins = "plugins" in options ? options.plugins : [];
    const config = {
      parser: peg.parser,
      passes: copyPasses(peg.compiler.passes),
      reservedWords: peg.RESERVED_WORDS.slice(),
    };

    // Let each plugin swap out the parser, adjust passes, or extend the
    // reserved-word list before compilation starts.
    for (const p of plugins) {
      p.use(config, options);
    }

    // Normalize a bare grammar string into the multi-source array form.
    const sources = Array.isArray(grammar)
      ? grammar
      : [{
          source: options.grammarSource,
          text: grammar,
        }];

    // Parse every source and merge the resulting ASTs into one grammar.
    const combined = asts.combine(
      sources.map(({ source, text }) => config.parser.parse(text, {
        grammarSource: source,
        reservedWords: config.reservedWords,
      }))
    );

    return peg.compiler.compile(
      combined,
      config.passes,
      options
    );
  },
};
|
||||
|
||||
module.exports = peg;
|
||||
9
resources/app/node_modules/peggy/lib/version.js
generated
vendored
Normal file
9
resources/app/node_modules/peggy/lib/version.js
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
|
||||
// This file is generated.
// Do not edit it! Your work will be overwritten.
//
// Instead, please look at ./tools/set_version.js

"use strict";

// Current Peggy release version, re-exported as peg.VERSION.
module.exports = "4.0.2";
|
||||
Reference in New Issue
Block a user