init
This commit is contained in:
parent 3b6b7abb38
commit cc1baf077e
15 node_modules/.bin/webpack generated vendored Normal file
@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
    *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
  "$basedir/node" "$basedir/../webpack/bin/webpack.js" "$@"
  ret=$?
else
  node "$basedir/../webpack/bin/webpack.js" "$@"
  ret=$?
fi
exit $ret
17 node_modules/.bin/webpack.cmd generated vendored Normal file
@@ -0,0 +1,17 @@
@ECHO off
SETLOCAL
CALL :find_dp0

IF EXIST "%dp0%\node.exe" (
  SET "_prog=%dp0%\node.exe"
) ELSE (
  SET "_prog=node"
  SET PATHEXT=%PATHEXT:;.JS;=;%
)

"%_prog%" "%dp0%\..\webpack\bin\webpack.js" %*
ENDLOCAL
EXIT /b %errorlevel%
:find_dp0
SET dp0=%~dp0
EXIT /b
18 node_modules/.bin/webpack.ps1 generated vendored Normal file
@@ -0,0 +1,18 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent

$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
  # Fix case when both the Windows and Linux builds of Node
  # are installed in the same directory
  $exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
  & "$basedir/node$exe" "$basedir/../webpack/bin/webpack.js" $args
  $ret=$LASTEXITCODE
} else {
  & "node$exe" "$basedir/../webpack/bin/webpack.js" $args
  $ret=$LASTEXITCODE
}
exit $ret
22 node_modules/@babel/code-frame/LICENSE generated vendored Normal file
@@ -0,0 +1,22 @@
MIT License

Copyright (c) 2014-present Sebastian McKenzie and other contributors

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
19 node_modules/@babel/code-frame/README.md generated vendored Normal file
@@ -0,0 +1,19 @@
# @babel/code-frame

> Generate errors that contain a code frame that points to source locations.

See our website [@babel/code-frame](https://babeljs.io/docs/babel-code-frame) for more information.

## Install

Using npm:

```sh
npm install --save-dev @babel/code-frame
```

or using yarn:

```sh
yarn add @babel/code-frame --dev
```
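The README stops at installation; the upstream @babel/code-frame docs also show a short usage example of the `codeFrameColumns` export that the `lib/index.js` diff below implements (adapted here; the source text and message are illustrative):

```js
const { codeFrameColumns } = require('@babel/code-frame');

const rawLines = `class Foo {
  constructor()
}`;
const location = { start: { line: 2, column: 16 } };

const result = codeFrameColumns(rawLines, location, { message: 'Missing {' });
console.log(result);
//   1 | class Foo {
// > 2 |   constructor()
//     |                ^ Missing {
//   3 | }
```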
216 node_modules/@babel/code-frame/lib/index.js generated vendored Normal file
@@ -0,0 +1,216 @@
'use strict';

Object.defineProperty(exports, '__esModule', { value: true });

var picocolors = require('picocolors');
var jsTokens = require('js-tokens');
var helperValidatorIdentifier = require('@babel/helper-validator-identifier');

function isColorSupported() {
  return (typeof process === "object" && (process.env.FORCE_COLOR === "0" || process.env.FORCE_COLOR === "false") ? false : picocolors.isColorSupported
  );
}
const compose = (f, g) => v => f(g(v));
function buildDefs(colors) {
  return {
    keyword: colors.cyan,
    capitalized: colors.yellow,
    jsxIdentifier: colors.yellow,
    punctuator: colors.yellow,
    number: colors.magenta,
    string: colors.green,
    regex: colors.magenta,
    comment: colors.gray,
    invalid: compose(compose(colors.white, colors.bgRed), colors.bold),
    gutter: colors.gray,
    marker: compose(colors.red, colors.bold),
    message: compose(colors.red, colors.bold),
    reset: colors.reset
  };
}
const defsOn = buildDefs(picocolors.createColors(true));
const defsOff = buildDefs(picocolors.createColors(false));
function getDefs(enabled) {
  return enabled ? defsOn : defsOff;
}

const sometimesKeywords = new Set(["as", "async", "from", "get", "of", "set"]);
const NEWLINE$1 = /\r\n|[\n\r\u2028\u2029]/;
const BRACKET = /^[()[\]{}]$/;
let tokenize;
{
  const JSX_TAG = /^[a-z][\w-]*$/i;
  const getTokenType = function (token, offset, text) {
    if (token.type === "name") {
      if (helperValidatorIdentifier.isKeyword(token.value) || helperValidatorIdentifier.isStrictReservedWord(token.value, true) || sometimesKeywords.has(token.value)) {
        return "keyword";
      }
      if (JSX_TAG.test(token.value) && (text[offset - 1] === "<" || text.slice(offset - 2, offset) === "</")) {
        return "jsxIdentifier";
      }
      if (token.value[0] !== token.value[0].toLowerCase()) {
        return "capitalized";
      }
    }
    if (token.type === "punctuator" && BRACKET.test(token.value)) {
      return "bracket";
    }
    if (token.type === "invalid" && (token.value === "@" || token.value === "#")) {
      return "punctuator";
    }
    return token.type;
  };
  tokenize = function* (text) {
    let match;
    while (match = jsTokens.default.exec(text)) {
      const token = jsTokens.matchToToken(match);
      yield {
        type: getTokenType(token, match.index, text),
        value: token.value
      };
    }
  };
}
function highlight(text) {
  if (text === "") return "";
  const defs = getDefs(true);
  let highlighted = "";
  for (const {
    type,
    value
  } of tokenize(text)) {
    if (type in defs) {
      highlighted += value.split(NEWLINE$1).map(str => defs[type](str)).join("\n");
    } else {
      highlighted += value;
    }
  }
  return highlighted;
}

let deprecationWarningShown = false;
const NEWLINE = /\r\n|[\n\r\u2028\u2029]/;
function getMarkerLines(loc, source, opts) {
  const startLoc = Object.assign({
    column: 0,
    line: -1
  }, loc.start);
  const endLoc = Object.assign({}, startLoc, loc.end);
  const {
    linesAbove = 2,
    linesBelow = 3
  } = opts || {};
  const startLine = startLoc.line;
  const startColumn = startLoc.column;
  const endLine = endLoc.line;
  const endColumn = endLoc.column;
  let start = Math.max(startLine - (linesAbove + 1), 0);
  let end = Math.min(source.length, endLine + linesBelow);
  if (startLine === -1) {
    start = 0;
  }
  if (endLine === -1) {
    end = source.length;
  }
  const lineDiff = endLine - startLine;
  const markerLines = {};
  if (lineDiff) {
    for (let i = 0; i <= lineDiff; i++) {
      const lineNumber = i + startLine;
      if (!startColumn) {
        markerLines[lineNumber] = true;
      } else if (i === 0) {
        const sourceLength = source[lineNumber - 1].length;
        markerLines[lineNumber] = [startColumn, sourceLength - startColumn + 1];
      } else if (i === lineDiff) {
        markerLines[lineNumber] = [0, endColumn];
      } else {
        const sourceLength = source[lineNumber - i].length;
        markerLines[lineNumber] = [0, sourceLength];
      }
    }
  } else {
    if (startColumn === endColumn) {
      if (startColumn) {
        markerLines[startLine] = [startColumn, 0];
      } else {
        markerLines[startLine] = true;
      }
    } else {
      markerLines[startLine] = [startColumn, endColumn - startColumn];
    }
  }
  return {
    start,
    end,
    markerLines
  };
}
function codeFrameColumns(rawLines, loc, opts = {}) {
  const shouldHighlight = opts.forceColor || isColorSupported() && opts.highlightCode;
  const defs = getDefs(shouldHighlight);
  const lines = rawLines.split(NEWLINE);
  const {
    start,
    end,
    markerLines
  } = getMarkerLines(loc, lines, opts);
  const hasColumns = loc.start && typeof loc.start.column === "number";
  const numberMaxWidth = String(end).length;
  const highlightedLines = shouldHighlight ? highlight(rawLines) : rawLines;
  let frame = highlightedLines.split(NEWLINE, end).slice(start, end).map((line, index) => {
    const number = start + 1 + index;
    const paddedNumber = ` ${number}`.slice(-numberMaxWidth);
    const gutter = ` ${paddedNumber} |`;
    const hasMarker = markerLines[number];
    const lastMarkerLine = !markerLines[number + 1];
    if (hasMarker) {
      let markerLine = "";
      if (Array.isArray(hasMarker)) {
        const markerSpacing = line.slice(0, Math.max(hasMarker[0] - 1, 0)).replace(/[^\t]/g, " ");
        const numberOfMarkers = hasMarker[1] || 1;
        markerLine = ["\n ", defs.gutter(gutter.replace(/\d/g, " ")), " ", markerSpacing, defs.marker("^").repeat(numberOfMarkers)].join("");
        if (lastMarkerLine && opts.message) {
          markerLine += " " + defs.message(opts.message);
        }
      }
      return [defs.marker(">"), defs.gutter(gutter), line.length > 0 ? ` ${line}` : "", markerLine].join("");
    } else {
      return ` ${defs.gutter(gutter)}${line.length > 0 ? ` ${line}` : ""}`;
    }
  }).join("\n");
  if (opts.message && !hasColumns) {
    frame = `${" ".repeat(numberMaxWidth + 1)}${opts.message}\n${frame}`;
  }
  if (shouldHighlight) {
    return defs.reset(frame);
  } else {
    return frame;
  }
}
function index (rawLines, lineNumber, colNumber, opts = {}) {
  if (!deprecationWarningShown) {
    deprecationWarningShown = true;
    const message = "Passing lineNumber and colNumber is deprecated to @babel/code-frame. Please use `codeFrameColumns`.";
    if (process.emitWarning) {
      process.emitWarning(message, "DeprecationWarning");
    } else {
      const deprecationError = new Error(message);
      deprecationError.name = "DeprecationWarning";
      console.warn(new Error(message));
    }
  }
  colNumber = Math.max(colNumber, 0);
  const location = {
    start: {
      column: colNumber,
      line: lineNumber
    }
  };
  return codeFrameColumns(rawLines, location, opts);
}

exports.codeFrameColumns = codeFrameColumns;
exports.default = index;
exports.highlight = highlight;
//# sourceMappingURL=index.js.map
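For readers tracing `getMarkerLines()` in the file above: each displayed line maps to either `true` (flag the line with `>`, no carets) or `[column, count]` (draw `count` `^` markers starting at `column`). A worked single-line case, derived from the code rather than from the package docs:

```js
// loc = { start: { line: 2, column: 3 }, end: { line: 2, column: 7 } }
// lineDiff === 0 and startColumn !== endColumn, so:
//   markerLines[2] = [3, 7 - 3] = [3, 4]    // four carets starting at column 3
// start = max(2 - (linesAbove + 1), 0) = 0  // linesAbove defaults to 2
// end   = min(source.length, 2 + 3)         // linesBelow defaults to 3
```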
1 node_modules/@babel/code-frame/lib/index.js.map generated vendored Normal file
File diff suppressed because one or more lines are too long
35 node_modules/@babel/code-frame/package.json generated vendored Normal file
@@ -0,0 +1,35 @@
{
  "name": "@babel/code-frame",
  "version": "7.26.2",
  "description": "Generate errors that contain a code frame that point to source locations.",
  "author": "The Babel Team (https://babel.dev/team)",
  "homepage": "https://babel.dev/docs/en/next/babel-code-frame",
  "bugs": "https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+is%3Aopen",
  "license": "MIT",
  "publishConfig": {
    "access": "public"
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/babel/babel.git",
    "directory": "packages/babel-code-frame"
  },
  "main": "./lib/index.js",
  "dependencies": {
    "@babel/helper-validator-identifier": "^7.25.9",
    "js-tokens": "^4.0.0",
    "picocolors": "^1.0.0"
  },
  "devDependencies": {
    "import-meta-resolve": "^4.1.0",
    "strip-ansi": "^4.0.0"
  },
  "engines": {
    "node": ">=6.9.0"
  },
  "type": "commonjs"

,"_resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz"
,"_integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ=="
,"_from": "@babel/code-frame@^7.0.0"
}
22 node_modules/@babel/helper-validator-identifier/LICENSE generated vendored Normal file
@@ -0,0 +1,22 @@
MIT License

Copyright (c) 2014-present Sebastian McKenzie and other contributors

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
19 node_modules/@babel/helper-validator-identifier/README.md generated vendored Normal file
@@ -0,0 +1,19 @@
# @babel/helper-validator-identifier

> Validate identifier/keyword names

See our website [@babel/helper-validator-identifier](https://babeljs.io/docs/babel-helper-validator-identifier) for more information.

## Install

Using npm:

```sh
npm install --save @babel/helper-validator-identifier
```

or using yarn:

```sh
yarn add @babel/helper-validator-identifier
```
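A quick usage sketch of the exports implemented in the `lib/` files that follow; each result can be traced through the checks in `identifier.js` below (inputs are illustrative):

```js
const {
  isIdentifierName,
  isIdentifierStart,
  isIdentifierChar,
} = require('@babel/helper-validator-identifier');

isIdentifierName('foo');                // true
isIdentifierName('foo bar');            // false (space is not an identifier char)
isIdentifierName('9lives');             // false ("9" may continue, but not start, a name)
isIdentifierStart('$'.codePointAt(0));  // true  (code 36 is special-cased)
isIdentifierChar('-'.codePointAt(0));   // false
```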
70 node_modules/@babel/helper-validator-identifier/lib/identifier.js generated vendored Normal file
@@ -0,0 +1,70 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.isIdentifierChar = isIdentifierChar;
exports.isIdentifierName = isIdentifierName;
exports.isIdentifierStart = isIdentifierStart;
let nonASCIIidentifierStartChars = "\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376\u0377\u037a-\u037d\u037f\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u052f\u0531-\u0556\u0559\u0560-\u0588\u05d0-\u05ea\u05ef-\u05f2\u0620-\u064a\u066e\u066f\u0671-\u06d3\u06d5\u06e5\u06e6\u06ee\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4\u07f5\u07fa\u0800-\u0815\u081a\u0824\u0828\u0840-\u0858\u0860-\u086a\u0870-\u0887\u0889-\u088e\u08a0-\u08c9\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971-\u0980\u0985-\u098c\u098f\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc\u09dd\u09df-\u09e1\u09f0\u09f1\u09fc\u0a05-\u0a0a\u0a0f\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32\u0a33\u0a35\u0a36\u0a38\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0\u0ae1\u0af9\u0b05-\u0b0c\u0b0f\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32\u0b33\u0b35-\u0b39\u0b3d\u0b5c\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99\u0b9a\u0b9c\u0b9e\u0b9f\u0ba3\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c5d\u0c60\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cdd\u0cde\u0ce0\u0ce1\u0cf1\u0cf2\u0d04-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e33\u0e40-\u0e46\u0e81\u0e82\u0e84\u0e86-\u0e8a\u0e8c-\u0ea3\u0ea5\u0ea7-\u0eb0\u0eb2\u0eb3\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u1711\u171f-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1878\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1aa7\u1b05-\u1b33\u1b45-\u1b4c\u1b83-\u1ba0\u1bae\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1c80-\u1c8a\u1c90-\u1cba\u1cbd-\u1cbf\u1ce9-\u1cec\u1cee-\u1cf3\u1cf5\u1cf6\u1cfa\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u209c\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2ce4\u2ceb-\u2cee\u2cf2\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303c\u3041-\u3096\u309b-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31bf\u31f0-\u31ff\u3400-\u4dbf\u4e00-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua61f\ua62a\ua62b\ua640-\ua66e\ua67f-\ua69d\ua6a0-\ua6ef\ua717-\ua71f\ua722-\ua788\ua78b-\ua7cd\ua7d0\ua7d1\ua7d3\ua7d5-\ua7dc\ua7f2-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd\ua8fe\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9cf\ua9e0-\ua9e4\ua9e6-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadd\uaae0-\uaaea\uaaf2-\uaaf4\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab69\uab70-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40\ufb41\ufb43\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc";
let nonASCIIidentifierChars = "\xb7\u0300-\u036f\u0387\u0483-\u0487\u0591-\u05bd\u05bf\u05c1\u05c2\u05c4\u05c5\u05c7\u0610-\u061a\u064b-\u0669\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7\u06e8\u06ea-\u06ed\u06f0-\u06f9\u0711\u0730-\u074a\u07a6-\u07b0\u07c0-\u07c9\u07eb-\u07f3\u07fd\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0859-\u085b\u0897-\u089f\u08ca-\u08e1\u08e3-\u0903\u093a-\u093c\u093e-\u094f\u0951-\u0957\u0962\u0963\u0966-\u096f\u0981-\u0983\u09bc\u09be-\u09c4\u09c7\u09c8\u09cb-\u09cd\u09d7\u09e2\u09e3\u09e6-\u09ef\u09fe\u0a01-\u0a03\u0a3c\u0a3e-\u0a42\u0a47\u0a48\u0a4b-\u0a4d\u0a51\u0a66-\u0a71\u0a75\u0a81-\u0a83\u0abc\u0abe-\u0ac5\u0ac7-\u0ac9\u0acb-\u0acd\u0ae2\u0ae3\u0ae6-\u0aef\u0afa-\u0aff\u0b01-\u0b03\u0b3c\u0b3e-\u0b44\u0b47\u0b48\u0b4b-\u0b4d\u0b55-\u0b57\u0b62\u0b63\u0b66-\u0b6f\u0b82\u0bbe-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcd\u0bd7\u0be6-\u0bef\u0c00-\u0c04\u0c3c\u0c3e-\u0c44\u0c46-\u0c48\u0c4a-\u0c4d\u0c55\u0c56\u0c62\u0c63\u0c66-\u0c6f\u0c81-\u0c83\u0cbc\u0cbe-\u0cc4\u0cc6-\u0cc8\u0cca-\u0ccd\u0cd5\u0cd6\u0ce2\u0ce3\u0ce6-\u0cef\u0cf3\u0d00-\u0d03\u0d3b\u0d3c\u0d3e-\u0d44\u0d46-\u0d48\u0d4a-\u0d4d\u0d57\u0d62\u0d63\u0d66-\u0d6f\u0d81-\u0d83\u0dca\u0dcf-\u0dd4\u0dd6\u0dd8-\u0ddf\u0de6-\u0def\u0df2\u0df3\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0e50-\u0e59\u0eb1\u0eb4-\u0ebc\u0ec8-\u0ece\u0ed0-\u0ed9\u0f18\u0f19\u0f20-\u0f29\u0f35\u0f37\u0f39\u0f3e\u0f3f\u0f71-\u0f84\u0f86\u0f87\u0f8d-\u0f97\u0f99-\u0fbc\u0fc6\u102b-\u103e\u1040-\u1049\u1056-\u1059\u105e-\u1060\u1062-\u1064\u1067-\u106d\u1071-\u1074\u1082-\u108d\u108f-\u109d\u135d-\u135f\u1369-\u1371\u1712-\u1715\u1732-\u1734\u1752\u1753\u1772\u1773\u17b4-\u17d3\u17dd\u17e0-\u17e9\u180b-\u180d\u180f-\u1819\u18a9\u1920-\u192b\u1930-\u193b\u1946-\u194f\u19d0-\u19da\u1a17-\u1a1b\u1a55-\u1a5e\u1a60-\u1a7c\u1a7f-\u1a89\u1a90-\u1a99\u1ab0-\u1abd\u1abf-\u1ace\u1b00-\u1b04\u1b34-\u1b44\u1b50-\u1b59\u1b6b-\u1b73\u1b80-\u1b82\u1ba1-\u1bad\u1bb0-\u1bb9\u1be6-\u1bf3\u1c24-\u1c37\u1c40-\u1c49\u1c50-\u1c59\u1cd0-\u1cd2\u1cd4-\u1ce8\u1ced\u1cf4\u1cf7-\u1cf9\u1dc0-\u1dff\u200c\u200d\u203f\u2040\u2054\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2d7f\u2de0-\u2dff\u302a-\u302f\u3099\u309a\u30fb\ua620-\ua629\ua66f\ua674-\ua67d\ua69e\ua69f\ua6f0\ua6f1\ua802\ua806\ua80b\ua823-\ua827\ua82c\ua880\ua881\ua8b4-\ua8c5\ua8d0-\ua8d9\ua8e0-\ua8f1\ua8ff-\ua909\ua926-\ua92d\ua947-\ua953\ua980-\ua983\ua9b3-\ua9c0\ua9d0-\ua9d9\ua9e5\ua9f0-\ua9f9\uaa29-\uaa36\uaa43\uaa4c\uaa4d\uaa50-\uaa59\uaa7b-\uaa7d\uaab0\uaab2-\uaab4\uaab7\uaab8\uaabe\uaabf\uaac1\uaaeb-\uaaef\uaaf5\uaaf6\uabe3-\uabea\uabec\uabed\uabf0-\uabf9\ufb1e\ufe00-\ufe0f\ufe20-\ufe2f\ufe33\ufe34\ufe4d-\ufe4f\uff10-\uff19\uff3f\uff65";
const nonASCIIidentifierStart = new RegExp("[" + nonASCIIidentifierStartChars + "]");
const nonASCIIidentifier = new RegExp("[" + nonASCIIidentifierStartChars + nonASCIIidentifierChars + "]");
nonASCIIidentifierStartChars = nonASCIIidentifierChars = null;
const astralIdentifierStartCodes = [0, 11, 2, 25, 2, 18, 2, 1, 2, 14, 3, 13, 35, 122, 70, 52, 268, 28, 4, 48, 48, 31, 14, 29, 6, 37, 11, 29, 3, 35, 5, 7, 2, 4, 43, 157, 19, 35, 5, 35, 5, 39, 9, 51, 13, 10, 2, 14, 2, 6, 2, 1, 2, 10, 2, 14, 2, 6, 2, 1, 4, 51, 13, 310, 10, 21, 11, 7, 25, 5, 2, 41, 2, 8, 70, 5, 3, 0, 2, 43, 2, 1, 4, 0, 3, 22, 11, 22, 10, 30, 66, 18, 2, 1, 11, 21, 11, 25, 71, 55, 7, 1, 65, 0, 16, 3, 2, 2, 2, 28, 43, 28, 4, 28, 36, 7, 2, 27, 28, 53, 11, 21, 11, 18, 14, 17, 111, 72, 56, 50, 14, 50, 14, 35, 39, 27, 10, 22, 251, 41, 7, 1, 17, 2, 60, 28, 11, 0, 9, 21, 43, 17, 47, 20, 28, 22, 13, 52, 58, 1, 3, 0, 14, 44, 33, 24, 27, 35, 30, 0, 3, 0, 9, 34, 4, 0, 13, 47, 15, 3, 22, 0, 2, 0, 36, 17, 2, 24, 20, 1, 64, 6, 2, 0, 2, 3, 2, 14, 2, 9, 8, 46, 39, 7, 3, 1, 3, 21, 2, 6, 2, 1, 2, 4, 4, 0, 19, 0, 13, 4, 31, 9, 2, 0, 3, 0, 2, 37, 2, 0, 26, 0, 2, 0, 45, 52, 19, 3, 21, 2, 31, 47, 21, 1, 2, 0, 185, 46, 42, 3, 37, 47, 21, 0, 60, 42, 14, 0, 72, 26, 38, 6, 186, 43, 117, 63, 32, 7, 3, 0, 3, 7, 2, 1, 2, 23, 16, 0, 2, 0, 95, 7, 3, 38, 17, 0, 2, 0, 29, 0, 11, 39, 8, 0, 22, 0, 12, 45, 20, 0, 19, 72, 200, 32, 32, 8, 2, 36, 18, 0, 50, 29, 113, 6, 2, 1, 2, 37, 22, 0, 26, 5, 2, 1, 2, 31, 15, 0, 328, 18, 16, 0, 2, 12, 2, 33, 125, 0, 80, 921, 103, 110, 18, 195, 2637, 96, 16, 1071, 18, 5, 26, 3994, 6, 582, 6842, 29, 1763, 568, 8, 30, 18, 78, 18, 29, 19, 47, 17, 3, 32, 20, 6, 18, 433, 44, 212, 63, 129, 74, 6, 0, 67, 12, 65, 1, 2, 0, 29, 6135, 9, 1237, 42, 9, 8936, 3, 2, 6, 2, 1, 2, 290, 16, 0, 30, 2, 3, 0, 15, 3, 9, 395, 2309, 106, 6, 12, 4, 8, 8, 9, 5991, 84, 2, 70, 2, 1, 3, 0, 3, 1, 3, 3, 2, 11, 2, 0, 2, 6, 2, 64, 2, 3, 3, 7, 2, 6, 2, 27, 2, 3, 2, 4, 2, 0, 4, 6, 2, 339, 3, 24, 2, 24, 2, 30, 2, 24, 2, 30, 2, 24, 2, 30, 2, 24, 2, 30, 2, 24, 2, 7, 1845, 30, 7, 5, 262, 61, 147, 44, 11, 6, 17, 0, 322, 29, 19, 43, 485, 27, 229, 29, 3, 0, 496, 6, 2, 3, 2, 1, 2, 14, 2, 196, 60, 67, 8, 0, 1205, 3, 2, 26, 2, 1, 2, 0, 3, 0, 2, 9, 2, 3, 2, 0, 2, 0, 7, 0, 5, 0, 2, 0, 2, 0, 2, 2, 2, 1, 2, 0, 3, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 1, 2, 0, 3, 3, 2, 6, 2, 3, 2, 3, 2, 0, 2, 9, 2, 16, 6, 2, 2, 4, 2, 16, 4421, 42719, 33, 4153, 7, 221, 3, 5761, 15, 7472, 16, 621, 2467, 541, 1507, 4938, 6, 4191];
const astralIdentifierCodes = [509, 0, 227, 0, 150, 4, 294, 9, 1368, 2, 2, 1, 6, 3, 41, 2, 5, 0, 166, 1, 574, 3, 9, 9, 7, 9, 32, 4, 318, 1, 80, 3, 71, 10, 50, 3, 123, 2, 54, 14, 32, 10, 3, 1, 11, 3, 46, 10, 8, 0, 46, 9, 7, 2, 37, 13, 2, 9, 6, 1, 45, 0, 13, 2, 49, 13, 9, 3, 2, 11, 83, 11, 7, 0, 3, 0, 158, 11, 6, 9, 7, 3, 56, 1, 2, 6, 3, 1, 3, 2, 10, 0, 11, 1, 3, 6, 4, 4, 68, 8, 2, 0, 3, 0, 2, 3, 2, 4, 2, 0, 15, 1, 83, 17, 10, 9, 5, 0, 82, 19, 13, 9, 214, 6, 3, 8, 28, 1, 83, 16, 16, 9, 82, 12, 9, 9, 7, 19, 58, 14, 5, 9, 243, 14, 166, 9, 71, 5, 2, 1, 3, 3, 2, 0, 2, 1, 13, 9, 120, 6, 3, 6, 4, 0, 29, 9, 41, 6, 2, 3, 9, 0, 10, 10, 47, 15, 343, 9, 54, 7, 2, 7, 17, 9, 57, 21, 2, 13, 123, 5, 4, 0, 2, 1, 2, 6, 2, 0, 9, 9, 49, 4, 2, 1, 2, 4, 9, 9, 330, 3, 10, 1, 2, 0, 49, 6, 4, 4, 14, 10, 5350, 0, 7, 14, 11465, 27, 2343, 9, 87, 9, 39, 4, 60, 6, 26, 9, 535, 9, 470, 0, 2, 54, 8, 3, 82, 0, 12, 1, 19628, 1, 4178, 9, 519, 45, 3, 22, 543, 4, 4, 5, 9, 7, 3, 6, 31, 3, 149, 2, 1418, 49, 513, 54, 5, 49, 9, 0, 15, 0, 23, 4, 2, 14, 1361, 6, 2, 16, 3, 6, 2, 1, 2, 4, 101, 0, 161, 6, 10, 9, 357, 0, 62, 13, 499, 13, 245, 1, 2, 9, 726, 6, 110, 6, 6, 9, 4759, 9, 787719, 239];
function isInAstralSet(code, set) {
  let pos = 0x10000;
  for (let i = 0, length = set.length; i < length; i += 2) {
    pos += set[i];
    if (pos > code) return false;
    pos += set[i + 1];
    if (pos >= code) return true;
  }
  return false;
}
function isIdentifierStart(code) {
  if (code < 65) return code === 36;
  if (code <= 90) return true;
  if (code < 97) return code === 95;
  if (code <= 122) return true;
  if (code <= 0xffff) {
    return code >= 0xaa && nonASCIIidentifierStart.test(String.fromCharCode(code));
  }
  return isInAstralSet(code, astralIdentifierStartCodes);
}
function isIdentifierChar(code) {
  if (code < 48) return code === 36;
  if (code < 58) return true;
  if (code < 65) return false;
  if (code <= 90) return true;
  if (code < 97) return code === 95;
  if (code <= 122) return true;
  if (code <= 0xffff) {
    return code >= 0xaa && nonASCIIidentifier.test(String.fromCharCode(code));
  }
  return isInAstralSet(code, astralIdentifierStartCodes) || isInAstralSet(code, astralIdentifierCodes);
}
function isIdentifierName(name) {
  let isFirst = true;
  for (let i = 0; i < name.length; i++) {
    let cp = name.charCodeAt(i);
    if ((cp & 0xfc00) === 0xd800 && i + 1 < name.length) {
      const trail = name.charCodeAt(++i);
      if ((trail & 0xfc00) === 0xdc00) {
        cp = 0x10000 + ((cp & 0x3ff) << 10) + (trail & 0x3ff);
      }
    }
    if (isFirst) {
      isFirst = false;
      if (!isIdentifierStart(cp)) {
        return false;
      }
    } else if (!isIdentifierChar(cp)) {
      return false;
    }
  }
  return !isFirst;
}

//# sourceMappingURL=identifier.js.map
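The `astralIdentifierStartCodes` and `astralIdentifierCodes` tables above are delta-encoded: entries alternate between the gap since the previous range's end and the length of the next range, relative to `0x10000`. A hypothetical helper (not part of the package) that mirrors the arithmetic in `isInAstralSet()`:

```js
// Expand a delta-encoded table into absolute [start, end] codepoint ranges
// (both ends inclusive, matching the `pos >= code` check above).
function decodeAstralRanges(set) {
  const ranges = [];
  let pos = 0x10000; // tables are relative to the first astral codepoint
  for (let i = 0; i < set.length; i += 2) {
    pos += set[i];                        // skip the gap before this range
    ranges.push([pos, pos + set[i + 1]]); // record the range
    pos += set[i + 1];                    // advance to the range's end
  }
  return ranges;
}
```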
1 node_modules/@babel/helper-validator-identifier/lib/identifier.js.map generated vendored Normal file
File diff suppressed because one or more lines are too long
57 node_modules/@babel/helper-validator-identifier/lib/index.js generated vendored Normal file
@@ -0,0 +1,57 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
Object.defineProperty(exports, "isIdentifierChar", {
  enumerable: true,
  get: function () {
    return _identifier.isIdentifierChar;
  }
});
Object.defineProperty(exports, "isIdentifierName", {
  enumerable: true,
  get: function () {
    return _identifier.isIdentifierName;
  }
});
Object.defineProperty(exports, "isIdentifierStart", {
  enumerable: true,
  get: function () {
    return _identifier.isIdentifierStart;
  }
});
Object.defineProperty(exports, "isKeyword", {
  enumerable: true,
  get: function () {
    return _keyword.isKeyword;
  }
});
Object.defineProperty(exports, "isReservedWord", {
  enumerable: true,
  get: function () {
    return _keyword.isReservedWord;
  }
});
Object.defineProperty(exports, "isStrictBindOnlyReservedWord", {
  enumerable: true,
  get: function () {
    return _keyword.isStrictBindOnlyReservedWord;
  }
});
Object.defineProperty(exports, "isStrictBindReservedWord", {
  enumerable: true,
  get: function () {
    return _keyword.isStrictBindReservedWord;
  }
});
Object.defineProperty(exports, "isStrictReservedWord", {
  enumerable: true,
  get: function () {
    return _keyword.isStrictReservedWord;
  }
});
var _identifier = require("./identifier.js");
var _keyword = require("./keyword.js");

//# sourceMappingURL=index.js.map
1 node_modules/@babel/helper-validator-identifier/lib/index.js.map generated vendored Normal file
@@ -0,0 +1 @@
{"version":3,"names":["_identifier","require","_keyword"],"sources":["../src/index.ts"],"sourcesContent":["export {\n isIdentifierName,\n isIdentifierChar,\n isIdentifierStart,\n} from \"./identifier.ts\";\nexport {\n isReservedWord,\n isStrictBindOnlyReservedWord,\n isStrictBindReservedWord,\n isStrictReservedWord,\n isKeyword,\n} from \"./keyword.ts\";\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,IAAAA,WAAA,GAAAC,OAAA;AAKA,IAAAC,QAAA,GAAAD,OAAA","ignoreList":[]}
35 node_modules/@babel/helper-validator-identifier/lib/keyword.js generated vendored Normal file
@@ -0,0 +1,35 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.isKeyword = isKeyword;
exports.isReservedWord = isReservedWord;
exports.isStrictBindOnlyReservedWord = isStrictBindOnlyReservedWord;
exports.isStrictBindReservedWord = isStrictBindReservedWord;
exports.isStrictReservedWord = isStrictReservedWord;
const reservedWords = {
  keyword: ["break", "case", "catch", "continue", "debugger", "default", "do", "else", "finally", "for", "function", "if", "return", "switch", "throw", "try", "var", "const", "while", "with", "new", "this", "super", "class", "extends", "export", "import", "null", "true", "false", "in", "instanceof", "typeof", "void", "delete"],
  strict: ["implements", "interface", "let", "package", "private", "protected", "public", "static", "yield"],
  strictBind: ["eval", "arguments"]
};
const keywords = new Set(reservedWords.keyword);
const reservedWordsStrictSet = new Set(reservedWords.strict);
const reservedWordsStrictBindSet = new Set(reservedWords.strictBind);
function isReservedWord(word, inModule) {
  return inModule && word === "await" || word === "enum";
}
function isStrictReservedWord(word, inModule) {
  return isReservedWord(word, inModule) || reservedWordsStrictSet.has(word);
}
function isStrictBindOnlyReservedWord(word) {
  return reservedWordsStrictBindSet.has(word);
}
function isStrictBindReservedWord(word, inModule) {
  return isStrictReservedWord(word, inModule) || isStrictBindOnlyReservedWord(word);
}
function isKeyword(word) {
  return keywords.has(word);
}

//# sourceMappingURL=keyword.js.map
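Reading the tables above: `let` is reserved only in strict mode, while `class` is always a keyword. A few illustrative calls whose results follow directly from the sets defined in the file:

```js
const {
  isKeyword,
  isReservedWord,
  isStrictReservedWord,
} = require('@babel/helper-validator-identifier');

isKeyword('class');                 // true  (always reserved)
isKeyword('let');                   // false (not in the keyword list)
isStrictReservedWord('let', false); // true  (reserved in strict mode only)
isReservedWord('await', true);      // true  (reserved inside modules)
isReservedWord('await', false);     // false
```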
1 node_modules/@babel/helper-validator-identifier/lib/keyword.js.map generated vendored Normal file
@@ -0,0 +1 @@
{"version":3,"names":["reservedWords","keyword","strict","strictBind","keywords","Set","reservedWordsStrictSet","reservedWordsStrictBindSet","isReservedWord","word","inModule","isStrictReservedWord","has","isStrictBindOnlyReservedWord","isStrictBindReservedWord","isKeyword"],"sources":["../src/keyword.ts"],"sourcesContent":["const reservedWords = {\n keyword: [\n \"break\",\n \"case\",\n \"catch\",\n \"continue\",\n \"debugger\",\n \"default\",\n \"do\",\n \"else\",\n \"finally\",\n \"for\",\n \"function\",\n \"if\",\n \"return\",\n \"switch\",\n \"throw\",\n \"try\",\n \"var\",\n \"const\",\n \"while\",\n \"with\",\n \"new\",\n \"this\",\n \"super\",\n \"class\",\n \"extends\",\n \"export\",\n \"import\",\n \"null\",\n \"true\",\n \"false\",\n \"in\",\n \"instanceof\",\n \"typeof\",\n \"void\",\n \"delete\",\n ],\n strict: [\n \"implements\",\n \"interface\",\n \"let\",\n \"package\",\n \"private\",\n \"protected\",\n \"public\",\n \"static\",\n \"yield\",\n ],\n strictBind: [\"eval\", \"arguments\"],\n};\nconst keywords = new Set(reservedWords.keyword);\nconst reservedWordsStrictSet = new Set(reservedWords.strict);\nconst reservedWordsStrictBindSet = new Set(reservedWords.strictBind);\n\n/**\n * Checks if word is a reserved word in non-strict mode\n */\nexport function isReservedWord(word: string, inModule: boolean): boolean {\n return (inModule && word === \"await\") || word === \"enum\";\n}\n\n/**\n * Checks if word is a reserved word in non-binding strict mode\n *\n * Includes non-strict reserved words\n */\nexport function isStrictReservedWord(word: string, inModule: boolean): boolean {\n return isReservedWord(word, inModule) || reservedWordsStrictSet.has(word);\n}\n\n/**\n * Checks if word is a reserved word in binding strict mode, but it is allowed as\n * a normal identifier.\n */\nexport function isStrictBindOnlyReservedWord(word: string): boolean {\n return reservedWordsStrictBindSet.has(word);\n}\n\n/**\n * Checks if word is a reserved word in binding strict mode\n *\n * Includes non-strict reserved words and non-binding strict reserved words\n */\nexport function isStrictBindReservedWord(\n word: string,\n inModule: boolean,\n): boolean {\n return (\n isStrictReservedWord(word, inModule) || isStrictBindOnlyReservedWord(word)\n );\n}\n\nexport function isKeyword(word: string): boolean {\n return 
keywords.has(word);\n}\n"],"mappings":";;;;;;;;;;AAAA,MAAMA,aAAa,GAAG;EACpBC,OAAO,EAAE,CACP,OAAO,EACP,MAAM,EACN,OAAO,EACP,UAAU,EACV,UAAU,EACV,SAAS,EACT,IAAI,EACJ,MAAM,EACN,SAAS,EACT,KAAK,EACL,UAAU,EACV,IAAI,EACJ,QAAQ,EACR,QAAQ,EACR,OAAO,EACP,KAAK,EACL,KAAK,EACL,OAAO,EACP,OAAO,EACP,MAAM,EACN,KAAK,EACL,MAAM,EACN,OAAO,EACP,OAAO,EACP,SAAS,EACT,QAAQ,EACR,QAAQ,EACR,MAAM,EACN,MAAM,EACN,OAAO,EACP,IAAI,EACJ,YAAY,EACZ,QAAQ,EACR,MAAM,EACN,QAAQ,CACT;EACDC,MAAM,EAAE,CACN,YAAY,EACZ,WAAW,EACX,KAAK,EACL,SAAS,EACT,SAAS,EACT,WAAW,EACX,QAAQ,EACR,QAAQ,EACR,OAAO,CACR;EACDC,UAAU,EAAE,CAAC,MAAM,EAAE,WAAW;AAClC,CAAC;AACD,MAAMC,QAAQ,GAAG,IAAIC,GAAG,CAACL,aAAa,CAACC,OAAO,CAAC;AAC/C,MAAMK,sBAAsB,GAAG,IAAID,GAAG,CAACL,aAAa,CAACE,MAAM,CAAC;AAC5D,MAAMK,0BAA0B,GAAG,IAAIF,GAAG,CAACL,aAAa,CAACG,UAAU,CAAC;AAK7D,SAASK,cAAcA,CAACC,IAAY,EAAEC,QAAiB,EAAW;EACvE,OAAQA,QAAQ,IAAID,IAAI,KAAK,OAAO,IAAKA,IAAI,KAAK,MAAM;AAC1D;AAOO,SAASE,oBAAoBA,CAACF,IAAY,EAAEC,QAAiB,EAAW;EAC7E,OAAOF,cAAc,CAACC,IAAI,EAAEC,QAAQ,CAAC,IAAIJ,sBAAsB,CAACM,GAAG,CAACH,IAAI,CAAC;AAC3E;AAMO,SAASI,4BAA4BA,CAACJ,IAAY,EAAW;EAClE,OAAOF,0BAA0B,CAACK,GAAG,CAACH,IAAI,CAAC;AAC7C;AAOO,SAASK,wBAAwBA,CACtCL,IAAY,EACZC,QAAiB,EACR;EACT,OACEC,oBAAoB,CAACF,IAAI,EAAEC,QAAQ,CAAC,IAAIG,4BAA4B,CAACJ,IAAI,CAAC;AAE9E;AAEO,SAASM,SAASA,CAACN,IAAY,EAAW;EAC/C,OAAOL,QAAQ,CAACQ,GAAG,CAACH,IAAI,CAAC;AAC3B","ignoreList":[]}
35 node_modules/@babel/helper-validator-identifier/package.json generated vendored Normal file
@@ -0,0 +1,35 @@
{
  "name": "@babel/helper-validator-identifier",
  "version": "7.25.9",
  "description": "Validate identifier/keywords name",
  "repository": {
    "type": "git",
    "url": "https://github.com/babel/babel.git",
    "directory": "packages/babel-helper-validator-identifier"
  },
  "license": "MIT",
  "publishConfig": {
    "access": "public"
  },
  "main": "./lib/index.js",
  "exports": {
    ".": {
      "types": "./lib/index.d.ts",
      "default": "./lib/index.js"
    },
    "./package.json": "./package.json"
  },
  "devDependencies": {
    "@unicode/unicode-16.0.0": "^1.0.0",
    "charcodes": "^0.2.0"
  },
  "engines": {
    "node": ">=6.9.0"
  },
  "author": "The Babel Team (https://babel.dev/team)",
  "type": "commonjs"

,"_resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz"
,"_integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ=="
,"_from": "@babel/helper-validator-identifier@^7.25.9"
}
21 node_modules/@discoveryjs/json-ext/LICENSE generated vendored Normal file
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2020 Roman Dvornov <rdvornov@gmail.com>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
256 node_modules/@discoveryjs/json-ext/README.md generated vendored Normal file
@@ -0,0 +1,256 @@
# json-ext

[npm package](https://www.npmjs.com/package/@discoveryjs/json-ext)
[CI](https://github.com/discoveryjs/json-ext/actions/workflows/ci.yml)
[Coverage](https://coveralls.io/github/discoveryjs/json-ext?)
[Downloads](https://www.npmjs.com/package/@discoveryjs/json-ext)

A set of utilities that extend the use of JSON. Designed to be fast and memory efficient.

Features:

- [x] `parseChunked()` – Parse JSON that comes in chunks (e.g. an FS readable stream or fetch response stream)
- [x] `stringifyStream()` – Stringify to a stream (Node.js)
- [x] `stringifyInfo()` – Get the estimated size and other facts about `JSON.stringify()` without converting a value to a string
- [ ] **TBD** Support for circular references
- [ ] **TBD** Binary representation [branch](https://github.com/discoveryjs/json-ext/tree/binary)
- [ ] **TBD** WHATWG [Streams](https://streams.spec.whatwg.org/) support

## Install

```bash
npm install @discoveryjs/json-ext
```

## API

- [parseChunked(chunkEmitter)](#parsechunkedchunkemitter)
- [stringifyStream(value[, replacer[, space]])](#stringifystreamvalue-replacer-space)
- [stringifyInfo(value[, replacer[, space[, options]]])](#stringifyinfovalue-replacer-space-options)
    - [Options](#options)
        - [async](#async)
        - [continueOnCircular](#continueoncircular)
- [version](#version)

### parseChunked(chunkEmitter)

Works the same as [`JSON.parse()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse) but takes a `chunkEmitter` instead of a string and returns a [Promise](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise).

> NOTE: The `reviver` parameter is not supported yet, but will be added in a future release.
> NOTE: WHATWG streams aren't supported yet.

When to use:
- You need to avoid freezing the main thread while parsing big JSON, since the work can be spread out over time
- Huge JSON needs to be parsed (e.g. >500MB on Node.js)
- You need to reduce memory pressure. `JSON.parse()` needs to receive the entire JSON before parsing it. With `parseChunked()` you may parse JSON as its first bytes arrive. This approach helps to avoid keeping a huge string in memory at a single point in time, and the GC work that follows.

[Benchmark](https://github.com/discoveryjs/json-ext/tree/master/benchmarks#parse-chunked)

Usage:

```js
const { parseChunked } = require('@discoveryjs/json-ext');

// as a regular Promise
parseChunked(chunkEmitter)
    .then(data => {
        /* data is parsed JSON */
    });

// using await (keep in mind that not every runtime has support for top level await)
const data = await parseChunked(chunkEmitter);
```

The `chunkEmitter` parameter can be:
- a [`ReadableStream`](https://nodejs.org/dist/latest-v14.x/docs/api/stream.html#stream_readable_streams) (Node.js only)
  ```js
  const fs = require('fs');
  const { parseChunked } = require('@discoveryjs/json-ext');

  parseChunked(fs.createReadStream('path/to/file.json'))
  ```
- a generator, async generator or function that returns an iterable of chunks. A chunk might be a `string`, `Uint8Array` or `Buffer` (Node.js only):
  ```js
  const { parseChunked } = require('@discoveryjs/json-ext');
  const encoder = new TextEncoder();

  // generator
  parseChunked(function*() {
      yield '{ "hello":';
      yield Buffer.from(' "wor'); // Node.js only
      yield encoder.encode('ld" }'); // returns Uint8Array(5) [ 108, 100, 34, 32, 125 ]
  });

  // async generator
  parseChunked(async function*() {
      for await (const chunk of someAsyncSource) {
          yield chunk;
      }
  });

  // function that returns an iterable
  parseChunked(() => ['{ "hello":', ' "world"}'])
  ```

Using with [fetch()](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API):

```js
async function loadData(url) {
    const response = await fetch(url);
    const reader = response.body.getReader();

    return parseChunked(async function*() {
        while (true) {
            const { done, value } = await reader.read();

            if (done) {
                break;
            }

            yield value;
        }
    });
}

loadData('https://example.com/data.json')
    .then(data => {
        /* data is parsed JSON */
    })
```

### stringifyStream(value[, replacer[, space]])

Works the same as [`JSON.stringify()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify), but returns an instance of [`ReadableStream`](https://nodejs.org/dist/latest-v14.x/docs/api/stream.html#stream_readable_streams) instead of a string.

> NOTE: WHATWG Streams aren't supported yet, so the function is available for Node.js only for now.

Departs from `JSON.stringify()`:
- Outputs `null` when `JSON.stringify()` returns `undefined` (since streams may not emit `undefined`)
- A promise is resolved and the resulting value is stringified as a regular one
- A stream in non-object mode is piped to the output as is
- A stream in object mode is piped to the output as an array of objects

When to use:
- Huge JSON needs to be generated (e.g. >500MB on Node.js)
- You need to reduce memory pressure. `JSON.stringify()` needs to generate the entire JSON before sending or writing it anywhere. With `stringifyStream()` you may send the result somewhere as its first bytes appear. This approach helps to avoid keeping a huge string in memory at a single point in time.
- The object being serialized contains Promises or Streams (see Usage for examples)

[Benchmark](https://github.com/discoveryjs/json-ext/tree/master/benchmarks#stream-stringifying)

Usage:

```js
const { stringifyStream } = require('@discoveryjs/json-ext');

// handle events
stringifyStream(data)
    .on('data', chunk => console.log(chunk))
    .on('error', error => console.error(error))
    .on('finish', () => console.log('DONE!'));

// pipe into a stream
stringifyStream(data)
    .pipe(writableStream);
```

Using a Promise or ReadableStream in the object being serialized:

```js
const fs = require('fs');
const { stringifyStream } = require('@discoveryjs/json-ext');

// output will be
// {"name":"example","willSerializeResolvedValue":42,"fromFile":[1, 2, 3],"at":{"any":{"level":"promise!"}}}
stringifyStream({
    name: 'example',
    willSerializeResolvedValue: Promise.resolve(42),
    fromFile: fs.createReadStream('path/to/file.json'), // suppose the file content is "[1, 2, 3]"; it'll be inserted as is
    at: {
        any: {
            level: new Promise(resolve => setTimeout(() => resolve('promise!'), 100))
        }
    }
})

// in case several async values are used in an object, it's preferred to put
// the fastest ones first, because values are stringified in order and a slow
// value blocks everything after it
stringifyStream({
    foo: fetch('http://example.com/request_takes_2s').then(req => req.json()),
    bar: fetch('http://example.com/request_takes_5s').then(req => req.json())
});
```

Using with a [`WritableStream`](https://nodejs.org/dist/latest-v14.x/docs/api/stream.html#stream_writable_streams) (Node.js only):

```js
const fs = require('fs');
const { stringifyStream } = require('@discoveryjs/json-ext');

// pipe into the console
stringifyStream(data)
    .pipe(process.stdout);

// pipe into a file
stringifyStream(data)
    .pipe(fs.createWriteStream('path/to/file.json'));

// wrapping into a Promise
new Promise((resolve, reject) => {
    stringifyStream(data)
        .on('error', reject)
        .pipe(stream)
        .on('error', reject)
        .on('finish', resolve);
});
```

### stringifyInfo(value[, replacer[, space[, options]]])

The `value`, `replacer` and `space` arguments are the same as for `JSON.stringify()`.

The result is an object:

```js
{
    minLength: Number, // minimal bytes when the value is stringified
    circular: [...],   // list of circular references
    duplicate: [...],  // list of objects that occur more than once
    async: [...]       // list of async values, i.e. promises and streams
}
```

Example:

```js
const { stringifyInfo } = require('@discoveryjs/json-ext');

console.log(
    stringifyInfo({ test: true }).minLength
);
// > 13
// that equals '{"test":true}'.length
```

#### Options

##### async

Type: `Boolean`
Default: `false`

Whether to collect async values (promises and streams).

##### continueOnCircular

Type: `Boolean`
Default: `false`

Whether to continue collecting info for a value when a circular reference is found. Setting the option to `true` allows finding all circular references.

### version

The version of the library, e.g. `"0.3.1"`.

## License

MIT
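The `async` and `continueOnCircular` options above are described but not demonstrated in the README; a minimal sketch of `stringifyInfo()` with options, assuming the documented return shape (values are illustrative):

```js
const { stringifyInfo } = require('@discoveryjs/json-ext');

const data = { name: 'example' };
data.self = data; // introduce a circular reference

// Without continueOnCircular the walk stops at the first cycle;
// with it, every circular reference is collected.
const info = stringifyInfo(data, null, null, { continueOnCircular: true });

console.log(info.circular.length); // 1, the `data` object itself
console.log(info.minLength);       // the cycle is counted as `null` (4 bytes)
```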
791 node_modules/@discoveryjs/json-ext/dist/json-ext.js generated vendored Normal file
@@ -0,0 +1,791 @@
|
||||
(function (global, factory) {
|
||||
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
|
||||
typeof define === 'function' && define.amd ? define(factory) :
|
||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.jsonExt = factory());
|
||||
})(this, (function () { 'use strict';
|
||||
|
||||
var version = "0.5.7";
|
||||
|
||||
const PrimitiveType = 1;
|
||||
const ObjectType = 2;
|
||||
const ArrayType = 3;
|
||||
const PromiseType = 4;
|
||||
const ReadableStringType = 5;
|
||||
const ReadableObjectType = 6;
|
||||
// https://tc39.es/ecma262/#table-json-single-character-escapes
|
||||
const escapableCharCodeSubstitution$1 = { // JSON Single Character Escape Sequences
|
||||
0x08: '\\b',
|
||||
0x09: '\\t',
|
||||
0x0a: '\\n',
|
||||
0x0c: '\\f',
|
||||
0x0d: '\\r',
|
||||
0x22: '\\\"',
|
||||
0x5c: '\\\\'
|
||||
};
|
||||
|
||||
function isLeadingSurrogate$1(code) {
|
||||
return code >= 0xD800 && code <= 0xDBFF;
|
||||
}
|
||||
|
||||
function isTrailingSurrogate$1(code) {
|
||||
return code >= 0xDC00 && code <= 0xDFFF;
|
||||
}
|
||||
|
||||
function isReadableStream$1(value) {
|
||||
return (
|
||||
typeof value.pipe === 'function' &&
|
||||
typeof value._read === 'function' &&
|
||||
typeof value._readableState === 'object' && value._readableState !== null
|
||||
);
|
||||
}
|
||||
|
||||
function replaceValue$1(holder, key, value, replacer) {
|
||||
if (value && typeof value.toJSON === 'function') {
|
||||
value = value.toJSON();
|
||||
}
|
||||
|
||||
if (replacer !== null) {
|
||||
value = replacer.call(holder, String(key), value);
|
||||
}
|
||||
|
||||
switch (typeof value) {
|
||||
case 'function':
|
||||
case 'symbol':
|
||||
value = undefined;
|
||||
break;
|
||||
|
||||
case 'object':
|
||||
if (value !== null) {
|
||||
const cls = value.constructor;
|
||||
if (cls === String || cls === Number || cls === Boolean) {
|
||||
value = value.valueOf();
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
function getTypeNative$1(value) {
|
||||
if (value === null || typeof value !== 'object') {
|
||||
return PrimitiveType;
|
||||
}
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
return ArrayType;
|
||||
}
|
||||
|
||||
return ObjectType;
|
||||
}
|
||||
|
||||
function getTypeAsync$1(value) {
|
||||
if (value === null || typeof value !== 'object') {
|
||||
return PrimitiveType;
|
||||
}
|
||||
|
||||
if (typeof value.then === 'function') {
|
||||
return PromiseType;
|
||||
}
|
||||
|
||||
if (isReadableStream$1(value)) {
|
||||
return value._readableState.objectMode ? ReadableObjectType : ReadableStringType;
|
||||
}
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
return ArrayType;
|
||||
}
|
||||
|
||||
return ObjectType;
|
||||
}
|
||||
|
||||
function normalizeReplacer$1(replacer) {
|
||||
if (typeof replacer === 'function') {
|
||||
return replacer;
|
||||
}
|
||||
|
||||
if (Array.isArray(replacer)) {
|
||||
const allowlist = new Set(replacer
|
||||
.map(item => {
|
||||
const cls = item && item.constructor;
|
||||
return cls === String || cls === Number ? String(item) : null;
|
||||
})
|
||||
.filter(item => typeof item === 'string')
|
||||
);
|
||||
|
||||
return [...allowlist];
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
function normalizeSpace$1(space) {
|
||||
if (typeof space === 'number') {
|
||||
if (!Number.isFinite(space) || space < 1) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return ' '.repeat(Math.min(space, 10));
|
||||
}
|
||||
|
||||
if (typeof space === 'string') {
|
||||
return space.slice(0, 10) || false;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
var utils = {
|
||||
escapableCharCodeSubstitution: escapableCharCodeSubstitution$1,
|
||||
isLeadingSurrogate: isLeadingSurrogate$1,
|
||||
isTrailingSurrogate: isTrailingSurrogate$1,
|
||||
type: {
|
||||
PRIMITIVE: PrimitiveType,
|
||||
PROMISE: PromiseType,
|
||||
ARRAY: ArrayType,
|
||||
OBJECT: ObjectType,
|
||||
STRING_STREAM: ReadableStringType,
|
||||
OBJECT_STREAM: ReadableObjectType
|
||||
},
|
||||
|
||||
isReadableStream: isReadableStream$1,
|
||||
replaceValue: replaceValue$1,
|
||||
getTypeNative: getTypeNative$1,
|
||||
getTypeAsync: getTypeAsync$1,
|
||||
normalizeReplacer: normalizeReplacer$1,
|
||||
normalizeSpace: normalizeSpace$1
|
||||
};
|
||||
|
||||
const {
|
||||
normalizeReplacer,
|
||||
normalizeSpace,
|
||||
replaceValue,
|
||||
getTypeNative,
|
||||
getTypeAsync,
|
||||
isLeadingSurrogate,
|
||||
isTrailingSurrogate,
|
||||
escapableCharCodeSubstitution,
|
||||
type: {
|
||||
PRIMITIVE,
|
||||
OBJECT,
|
||||
ARRAY,
|
||||
PROMISE,
|
||||
STRING_STREAM,
|
||||
OBJECT_STREAM
|
||||
}
|
||||
} = utils;
|
||||
const charLength2048 = Array.from({ length: 2048 }).map((_, code) => {
|
||||
if (escapableCharCodeSubstitution.hasOwnProperty(code)) {
|
||||
return 2; // \X
|
||||
}
|
||||
|
||||
if (code < 0x20) {
|
||||
return 6; // \uXXXX
|
||||
}
|
||||
|
||||
return code < 128 ? 1 : 2; // UTF8 bytes
|
||||
});
|
||||
|
||||
function stringLength(str) {
|
||||
let len = 0;
|
||||
let prevLeadingSurrogate = false;
|
||||
|
||||
for (let i = 0; i < str.length; i++) {
|
||||
const code = str.charCodeAt(i);
|
||||
|
||||
if (code < 2048) {
|
||||
len += charLength2048[code];
|
||||
} else if (isLeadingSurrogate(code)) {
|
||||
len += 6; // \uXXXX since no pair with trailing surrogate yet
|
||||
prevLeadingSurrogate = true;
|
||||
continue;
|
||||
} else if (isTrailingSurrogate(code)) {
|
||||
len = prevLeadingSurrogate
|
||||
? len - 2 // surrogate pair (4 bytes), since we calculate prev leading surrogate as 6 bytes, substruct 2 bytes
|
||||
: len + 6; // \uXXXX
|
||||
} else {
|
||||
len += 3; // code >= 2048 is 3 bytes length for UTF8
|
||||
}
|
||||
|
||||
prevLeadingSurrogate = false;
|
||||
}
|
||||
|
||||
return len + 2; // +2 for quotes
|
||||
}
function primitiveLength(value) {
    switch (typeof value) {
        case 'string':
            return stringLength(value);

        case 'number':
            return Number.isFinite(value) ? String(value).length : 4 /* null */;

        case 'boolean':
            return value ? 4 /* true */ : 5 /* false */;

        case 'undefined':
        case 'object':
            return 4; /* null */

        default:
            return 0;
    }
}

function spaceLength(space) {
    space = normalizeSpace(space);
    return typeof space === 'string' ? space.length : 0;
}
var stringifyInfo = function jsonStringifyInfo(value, replacer, space, options) {
    function walk(holder, key, value) {
        if (stop) {
            return;
        }

        value = replaceValue(holder, key, value, replacer);

        let type = getType(value);

        // check for circular structure
        if (type !== PRIMITIVE && stack.has(value)) {
            circular.add(value);
            length += 4; // treat as null

            if (!options.continueOnCircular) {
                stop = true;
            }

            return;
        }

        switch (type) {
            case PRIMITIVE:
                if (value !== undefined || Array.isArray(holder)) {
                    length += primitiveLength(value);
                } else if (holder === root) {
                    length += 9; // FIXME: that's the length of undefined, should we normalize behaviour to convert it to null?
                }
                break;

            case OBJECT: {
                if (visited.has(value)) {
                    duplicate.add(value);
                    length += visited.get(value);
                    break;
                }

                const valueLength = length;
                let entries = 0;

                length += 2; // {}

                stack.add(value);

                for (const key in value) {
                    if (hasOwnProperty.call(value, key) && (allowlist === null || allowlist.has(key))) {
                        const prevLength = length;
                        walk(value, key, value[key]);

                        if (prevLength !== length) {
                            // value is printed
                            length += stringLength(key) + 1; // "key":
                            entries++;
                        }
                    }
                }

                if (entries > 1) {
                    length += entries - 1; // commas
                }

                stack.delete(value);

                if (space > 0 && entries > 0) {
                    length += (1 + (stack.size + 1) * space + 1) * entries; // for each key-value: \n{space}
                    length += 1 + stack.size * space; // for }
                }

                visited.set(value, length - valueLength);

                break;
            }

            case ARRAY: {
                if (visited.has(value)) {
                    duplicate.add(value);
                    length += visited.get(value);
                    break;
                }

                const valueLength = length;

                length += 2; // []

                stack.add(value);

                for (let i = 0; i < value.length; i++) {
                    walk(value, i, value[i]);
                }

                if (value.length > 1) {
                    length += value.length - 1; // commas
                }

                stack.delete(value);

                if (space > 0 && value.length > 0) {
                    length += (1 + (stack.size + 1) * space) * value.length; // for each element: \n{space}
                    length += 1 + stack.size * space; // for ]
                }

                visited.set(value, length - valueLength);

                break;
            }

            case PROMISE:
            case STRING_STREAM:
                async.add(value);
                break;

            case OBJECT_STREAM:
                length += 2; // []
                async.add(value);
                break;
        }
    }

    let allowlist = null;
    replacer = normalizeReplacer(replacer);

    if (Array.isArray(replacer)) {
        allowlist = new Set(replacer);
        replacer = null;
    }

    space = spaceLength(space);
    options = options || {};

    const visited = new Map();
    const stack = new Set();
    const duplicate = new Set();
    const circular = new Set();
    const async = new Set();
    const getType = options.async ? getTypeAsync : getTypeNative;
    const root = { '': value };
    let stop = false;
    let length = 0;

    walk(root, '', value);

    return {
        minLength: isNaN(length) ? Infinity : length,
        circular: [...circular],
        duplicate: [...duplicate],
        async: [...async]
    };
};
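For orientation, a usage sketch of `stringifyInfo`: `minLength` matches the byte length `JSON.stringify` would produce for the same value, and `circular` lists values that reference themselves (results hand-computed from the code above):

```js
const { stringifyInfo } = require('@discoveryjs/json-ext');

const info = stringifyInfo({ foo: 1, bar: 'test' });
console.log(info.minLength); // 22 – the length of '{"foo":1,"bar":"test"}'

const loop = {};
loop.self = loop;
console.log(stringifyInfo(loop).circular.length); // 1 – walking stops at the cycle by default
```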
var stringifyStreamBrowser = () => {
    throw new Error('Method is not supported');
};

var textDecoderBrowser = TextDecoder;
const { isReadableStream } = utils;

const STACK_OBJECT = 1;
const STACK_ARRAY = 2;
const decoder = new textDecoderBrowser();

function isObject(value) {
    return value !== null && typeof value === 'object';
}

function adjustPosition(error, parser) {
    if (error.name === 'SyntaxError' && parser.jsonParseOffset) {
        error.message = error.message.replace(/at position (\d+)/, (_, pos) =>
            'at position ' + (Number(pos) + parser.jsonParseOffset)
        );
    }

    return error;
}

function append(array, elements) {
    // Note: Avoid using array.push(...elements) since it may lead to
    // "RangeError: Maximum call stack size exceeded" for long arrays
    const initialLength = array.length;
    array.length += elements.length;

    for (let i = 0; i < elements.length; i++) {
        array[initialLength + i] = elements[i];
    }
}
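The note in `append` refers to a real engine limit: spreading an array into `push()` passes every element as a separate call argument, and argument counts are capped. A quick demonstration of why the indexed copy is used (the array size is illustrative; the exact threshold is engine-dependent):

```js
const big = new Array(1000000).fill(0);
const target = [];

// target.push(...big); // may throw "RangeError: Maximum call stack size exceeded"

append(target, big);        // safe: grows the array once, then copies by index
console.log(target.length); // 1000000
```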
var parseChunked = function(chunkEmitter) {
    let parser = new ChunkParser();

    if (isObject(chunkEmitter) && isReadableStream(chunkEmitter)) {
        return new Promise((resolve, reject) => {
            chunkEmitter
                .on('data', chunk => {
                    try {
                        parser.push(chunk);
                    } catch (e) {
                        reject(adjustPosition(e, parser));
                        parser = null;
                    }
                })
                .on('error', (e) => {
                    parser = null;
                    reject(e);
                })
                .on('end', () => {
                    try {
                        resolve(parser.finish());
                    } catch (e) {
                        reject(adjustPosition(e, parser));
                    } finally {
                        parser = null;
                    }
                });
        });
    }

    if (typeof chunkEmitter === 'function') {
        const iterator = chunkEmitter();

        if (isObject(iterator) && (Symbol.iterator in iterator || Symbol.asyncIterator in iterator)) {
            return new Promise(async (resolve, reject) => {
                try {
                    for await (const chunk of iterator) {
                        parser.push(chunk);
                    }

                    resolve(parser.finish());
                } catch (e) {
                    reject(adjustPosition(e, parser));
                } finally {
                    parser = null;
                }
            });
        }
    }

    throw new Error(
        'Chunk emitter should be readable stream, generator, ' +
        'async generator or function returning an iterable object'
    );
};
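`parseChunked` is the consuming side of the package: it accepts a Node.js readable stream or a (possibly async) generator of string/Buffer chunks and resolves with the parsed value. A usage sketch (the file path is hypothetical):

```js
const { parseChunked } = require('@discoveryjs/json-ext');
const fs = require('fs');

// From a readable stream:
parseChunked(fs.createReadStream('./big.json')) // hypothetical path
    .then(data => console.log('parsed root keys:', Object.keys(data)));

// From a generator of string chunks – boundaries may fall anywhere:
parseChunked(function*() {
    yield '{"hello"';
    yield ': "wor';
    yield 'ld"}';
}).then(data => console.log(data)); // { hello: 'world' }
```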
class ChunkParser {
    constructor() {
        this.value = undefined;
        this.valueStack = null;

        this.stack = new Array(100);
        this.lastFlushDepth = 0;
        this.flushDepth = 0;
        this.stateString = false;
        this.stateStringEscape = false;
        this.pendingByteSeq = null;
        this.pendingChunk = null;
        this.chunkOffset = 0;
        this.jsonParseOffset = 0;
    }

    parseAndAppend(fragment, wrap) {
        // Append new entries or elements
        if (this.stack[this.lastFlushDepth - 1] === STACK_OBJECT) {
            if (wrap) {
                this.jsonParseOffset--;
                fragment = '{' + fragment + '}';
            }

            Object.assign(this.valueStack.value, JSON.parse(fragment));
        } else {
            if (wrap) {
                this.jsonParseOffset--;
                fragment = '[' + fragment + ']';
            }

            append(this.valueStack.value, JSON.parse(fragment));
        }
    }

    prepareAddition(fragment) {
        const { value } = this.valueStack;
        const expectComma = Array.isArray(value)
            ? value.length !== 0
            : Object.keys(value).length !== 0;

        if (expectComma) {
            // Skip a comma at the beginning of the fragment, otherwise it would
            // fail to parse
            if (fragment[0] === ',') {
                this.jsonParseOffset++;
                return fragment.slice(1);
            }

            // When the value (an object or array) is not empty and the fragment
            // doesn't start with a comma, the only valid fragment start is a
            // closing bracket. If it's something else, a prefix is added to force
            // a parse failure. Otherwise, a sequence of chunks could parse
            // successfully although it should not, e.g. ["[{}", "{}]"]
            if (fragment[0] !== '}' && fragment[0] !== ']') {
                this.jsonParseOffset -= 3;
                return '[[]' + fragment;
            }
        }

        return fragment;
    }

    flush(chunk, start, end) {
        let fragment = chunk.slice(start, end);

        // Save the position correction for an error in JSON.parse(), if any
        this.jsonParseOffset = this.chunkOffset + start;

        // Prepend the pending chunk if any
        if (this.pendingChunk !== null) {
            fragment = this.pendingChunk + fragment;
            this.jsonParseOffset -= this.pendingChunk.length;
            this.pendingChunk = null;
        }

        if (this.flushDepth === this.lastFlushDepth) {
            // Depth didn't change, so it's a root value or an entry/element set
            if (this.flushDepth > 0) {
                this.parseAndAppend(this.prepareAddition(fragment), true);
            } else {
                // That's an entire value on the top level
                this.value = JSON.parse(fragment);
                this.valueStack = {
                    value: this.value,
                    prev: null
                };
            }
        } else if (this.flushDepth > this.lastFlushDepth) {
            // Add missing closing brackets
            for (let i = this.flushDepth - 1; i >= this.lastFlushDepth; i--) {
                fragment += this.stack[i] === STACK_OBJECT ? '}' : ']';
            }

            if (this.lastFlushDepth === 0) {
                // That's a root value
                this.value = JSON.parse(fragment);
                this.valueStack = {
                    value: this.value,
                    prev: null
                };
            } else {
                this.parseAndAppend(this.prepareAddition(fragment), true);
            }

            // Walk down the depths to the last object/array, which is now current
            for (let i = this.lastFlushDepth || 1; i < this.flushDepth; i++) {
                let value = this.valueStack.value;

                if (this.stack[i - 1] === STACK_OBJECT) {
                    // find the last entry
                    let key;
                    // eslint-disable-next-line curly
                    for (key in value);
                    value = value[key];
                } else {
                    // last element
                    value = value[value.length - 1];
                }

                this.valueStack = {
                    value,
                    prev: this.valueStack
                };
            }
        } else /* this.flushDepth < this.lastFlushDepth */ {
            fragment = this.prepareAddition(fragment);

            // Add missing opening brackets
            for (let i = this.lastFlushDepth - 1; i >= this.flushDepth; i--) {
                this.jsonParseOffset--;
                fragment = (this.stack[i] === STACK_OBJECT ? '{' : '[') + fragment;
            }

            this.parseAndAppend(fragment, false);

            for (let i = this.lastFlushDepth - 1; i >= this.flushDepth; i--) {
                this.valueStack = this.valueStack.prev;
            }
        }

        this.lastFlushDepth = this.flushDepth;
    }

    push(chunk) {
        if (typeof chunk !== 'string') {
            // Suppose chunk is a Buffer or Uint8Array

            // Prepend an uncompleted byte sequence if any
            if (this.pendingByteSeq !== null) {
                const origRawChunk = chunk;
                chunk = new Uint8Array(this.pendingByteSeq.length + origRawChunk.length);
                chunk.set(this.pendingByteSeq);
                chunk.set(origRawChunk, this.pendingByteSeq.length);
                this.pendingByteSeq = null;
            }

            // In case of a Buffer/Uint8Array, the input is encoded in UTF8.
            // Seek for parts of an uncompleted UTF8 symbol at the ending.
            // This only makes sense if more chunks are expected and the last
            // character may be an incomplete multi-byte sequence.
            if (chunk[chunk.length - 1] > 127) {
                for (let seqLength = 0; seqLength < chunk.length; seqLength++) {
                    const byte = chunk[chunk.length - 1 - seqLength];

                    // 10xxxxxx - 2nd, 3rd or 4th byte
                    // 110xxxxx - first byte of a 2-byte sequence
                    // 1110xxxx - first byte of a 3-byte sequence
                    // 11110xxx - first byte of a 4-byte sequence
                    if (byte >> 6 === 3) {
                        seqLength++;

                        // If the sequence is really incomplete, then preserve it
                        // for the next chunk and cut it off from the current chunk
                        if ((seqLength !== 4 && byte >> 3 === 0b11110) ||
                            (seqLength !== 3 && byte >> 4 === 0b1110) ||
                            (seqLength !== 2 && byte >> 5 === 0b110)) {
                            this.pendingByteSeq = chunk.slice(chunk.length - seqLength);
                            chunk = chunk.slice(0, -seqLength);
                        }

                        break;
                    }
                }
            }

            // Convert the chunk to a string, since a single decode per chunk
            // is much more effective than decoding multiple small substrings
            chunk = decoder.decode(chunk);
        }

        const chunkLength = chunk.length;
        let lastFlushPoint = 0;
        let flushPoint = 0;

        // Main scan loop
        scan: for (let i = 0; i < chunkLength; i++) {
            if (this.stateString) {
                for (; i < chunkLength; i++) {
                    if (this.stateStringEscape) {
                        this.stateStringEscape = false;
                    } else {
                        switch (chunk.charCodeAt(i)) {
                            case 0x22: /* " */
                                this.stateString = false;
                                continue scan;

                            case 0x5C: /* \ */
                                this.stateStringEscape = true;
                        }
                    }
                }

                break;
            }

            switch (chunk.charCodeAt(i)) {
                case 0x22: /* " */
                    this.stateString = true;
                    this.stateStringEscape = false;
                    break;

                case 0x2C: /* , */
                    flushPoint = i;
                    break;

                case 0x7B: /* { */
                    // Open an object
                    flushPoint = i + 1;
                    this.stack[this.flushDepth++] = STACK_OBJECT;
                    break;

                case 0x5B: /* [ */
                    // Open an array
                    flushPoint = i + 1;
                    this.stack[this.flushDepth++] = STACK_ARRAY;
                    break;

                case 0x5D: /* ] */
                case 0x7D: /* } */
                    // Close an object or array
                    flushPoint = i + 1;
                    this.flushDepth--;

                    if (this.flushDepth < this.lastFlushDepth) {
                        this.flush(chunk, lastFlushPoint, flushPoint);
                        lastFlushPoint = flushPoint;
                    }

                    break;

                case 0x09: /* \t */
                case 0x0A: /* \n */
                case 0x0D: /* \r */
                case 0x20: /* space */
                    // Move the points forward when they point at the current position and it's a whitespace
                    if (lastFlushPoint === i) {
                        lastFlushPoint++;
                    }

                    if (flushPoint === i) {
                        flushPoint++;
                    }

                    break;
            }
        }

        if (flushPoint > lastFlushPoint) {
            this.flush(chunk, lastFlushPoint, flushPoint);
        }

        // Produce a pendingChunk if something is left
        if (flushPoint < chunkLength) {
            if (this.pendingChunk !== null) {
                // When there is already a pending chunk then no flush happened,
                // so append the entire chunk to the pending one
                this.pendingChunk += chunk;
            } else {
                // Create a pending chunk; it will start with a non-whitespace since
                // flushPoint was moved forward past whitespace during the scan
                this.pendingChunk = chunk.slice(flushPoint, chunkLength);
            }
        }

        this.chunkOffset += chunkLength;
    }

    finish() {
        if (this.pendingChunk !== null) {
            this.flush('', 0, 0);
            this.pendingChunk = null;
        }

        return this.value;
    }
}
var src = {
    version: version,
    stringifyInfo: stringifyInfo,
    stringifyStream: stringifyStreamBrowser,
    parseChunked: parseChunked
};

return src;

}));
1
node_modules/@discoveryjs/json-ext/dist/json-ext.min.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
node_modules/@discoveryjs/json-ext/dist/version.js
generated
vendored
Normal file
@ -0,0 +1 @@
module.exports = "0.5.7";
31
node_modules/@discoveryjs/json-ext/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,31 @@
declare module '@discoveryjs/json-ext' {
    import { Readable } from 'stream';

    type TReplacer =
        | ((this: any, key: string, value: any) => any)
        | string[]
        | number[]
        | null;
    type TSpace = string | number | null;
    type TChunk = string | Buffer | Uint8Array;

    export function parseChunked(input: Readable): Promise<any>;
    export function parseChunked(input: () => (Iterable<TChunk> | AsyncIterable<TChunk>)): Promise<any>;

    export function stringifyStream(value: any, replacer?: TReplacer, space?: TSpace): Readable;

    export function stringifyInfo(
        value: any,
        replacer?: TReplacer,
        space?: TSpace,
        options?: {
            async?: boolean;
            continueOnCircular?: boolean;
        }
    ): {
        minLength: number;
        circular: any[];
        duplicate: any[];
        async: any[];
    };
}
60
node_modules/@discoveryjs/json-ext/package.json
generated
vendored
Normal file
@ -0,0 +1,60 @@
{
    "name": "@discoveryjs/json-ext",
    "version": "0.5.7",
    "description": "A set of utilities that extend the use of JSON",
    "keywords": [
        "json",
        "utils",
        "stream",
        "async",
        "promise",
        "stringify",
        "info"
    ],
    "author": "Roman Dvornov <rdvornov@gmail.com> (https://github.com/lahmatiy)",
    "license": "MIT",
    "repository": "discoveryjs/json-ext",
    "main": "./src/index",
    "browser": {
        "./src/stringify-stream.js": "./src/stringify-stream-browser.js",
        "./src/text-decoder.js": "./src/text-decoder-browser.js",
        "./src/version.js": "./dist/version.js"
    },
    "types": "./index.d.ts",
    "scripts": {
        "test": "mocha --reporter progress",
        "lint": "eslint src test",
        "lint-and-test": "npm run lint && npm test",
        "build": "rollup --config",
        "test:all": "npm run test:src && npm run test:dist",
        "test:src": "npm test",
        "test:dist": "cross-env MODE=dist npm test && cross-env MODE=dist-min npm test",
        "build-and-test": "npm run build && npm run test:dist",
        "coverage": "c8 --reporter=lcovonly npm test",
        "prepublishOnly": "npm run lint && npm test && npm run build-and-test"
    },
    "devDependencies": {
        "@rollup/plugin-commonjs": "^15.1.0",
        "@rollup/plugin-json": "^4.1.0",
        "@rollup/plugin-node-resolve": "^9.0.0",
        "c8": "^7.10.0",
        "chalk": "^4.1.0",
        "cross-env": "^7.0.3",
        "eslint": "^8.10.0",
        "mocha": "^8.4.0",
        "rollup": "^2.28.2",
        "rollup-plugin-terser": "^7.0.2"
    },
    "engines": {
        "node": ">=10.0.0"
    },
    "files": [
        "dist",
        "src",
        "index.d.ts"
    ]

    ,"_resolved": "https://registry.npmjs.org/@discoveryjs/json-ext/-/json-ext-0.5.7.tgz"
    ,"_integrity": "sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw=="
    ,"_from": "@discoveryjs/json-ext@^0.5.0"
}
6
node_modules/@discoveryjs/json-ext/src/index.js
generated
vendored
Normal file
@ -0,0 +1,6 @@
module.exports = {
    version: require('./version'),
    stringifyInfo: require('./stringify-info'),
    stringifyStream: require('./stringify-stream'),
    parseChunked: require('./parse-chunked')
};
384
node_modules/@discoveryjs/json-ext/src/parse-chunked.js
generated
vendored
Normal file
@ -0,0 +1,384 @@
const { isReadableStream } = require('./utils');
const TextDecoder = require('./text-decoder');

const STACK_OBJECT = 1;
const STACK_ARRAY = 2;
const decoder = new TextDecoder();

function isObject(value) {
    return value !== null && typeof value === 'object';
}

function adjustPosition(error, parser) {
    if (error.name === 'SyntaxError' && parser.jsonParseOffset) {
        error.message = error.message.replace(/at position (\d+)/, (_, pos) =>
            'at position ' + (Number(pos) + parser.jsonParseOffset)
        );
    }

    return error;
}

function append(array, elements) {
    // Note: Avoid using array.push(...elements) since it may lead to
    // "RangeError: Maximum call stack size exceeded" for long arrays
    const initialLength = array.length;
    array.length += elements.length;

    for (let i = 0; i < elements.length; i++) {
        array[initialLength + i] = elements[i];
    }
}

module.exports = function(chunkEmitter) {
    let parser = new ChunkParser();

    if (isObject(chunkEmitter) && isReadableStream(chunkEmitter)) {
        return new Promise((resolve, reject) => {
            chunkEmitter
                .on('data', chunk => {
                    try {
                        parser.push(chunk);
                    } catch (e) {
                        reject(adjustPosition(e, parser));
                        parser = null;
                    }
                })
                .on('error', (e) => {
                    parser = null;
                    reject(e);
                })
                .on('end', () => {
                    try {
                        resolve(parser.finish());
                    } catch (e) {
                        reject(adjustPosition(e, parser));
                    } finally {
                        parser = null;
                    }
                });
        });
    }

    if (typeof chunkEmitter === 'function') {
        const iterator = chunkEmitter();

        if (isObject(iterator) && (Symbol.iterator in iterator || Symbol.asyncIterator in iterator)) {
            return new Promise(async (resolve, reject) => {
                try {
                    for await (const chunk of iterator) {
                        parser.push(chunk);
                    }

                    resolve(parser.finish());
                } catch (e) {
                    reject(adjustPosition(e, parser));
                } finally {
                    parser = null;
                }
            });
        }
    }

    throw new Error(
        'Chunk emitter should be readable stream, generator, ' +
        'async generator or function returning an iterable object'
    );
};

class ChunkParser {
    constructor() {
        this.value = undefined;
        this.valueStack = null;

        this.stack = new Array(100);
        this.lastFlushDepth = 0;
        this.flushDepth = 0;
        this.stateString = false;
        this.stateStringEscape = false;
        this.pendingByteSeq = null;
        this.pendingChunk = null;
        this.chunkOffset = 0;
        this.jsonParseOffset = 0;
    }

    parseAndAppend(fragment, wrap) {
        // Append new entries or elements
        if (this.stack[this.lastFlushDepth - 1] === STACK_OBJECT) {
            if (wrap) {
                this.jsonParseOffset--;
                fragment = '{' + fragment + '}';
            }

            Object.assign(this.valueStack.value, JSON.parse(fragment));
        } else {
            if (wrap) {
                this.jsonParseOffset--;
                fragment = '[' + fragment + ']';
            }

            append(this.valueStack.value, JSON.parse(fragment));
        }
    }

    prepareAddition(fragment) {
        const { value } = this.valueStack;
        const expectComma = Array.isArray(value)
            ? value.length !== 0
            : Object.keys(value).length !== 0;

        if (expectComma) {
            // Skip a comma at the beginning of the fragment, otherwise it would
            // fail to parse
            if (fragment[0] === ',') {
                this.jsonParseOffset++;
                return fragment.slice(1);
            }

            // When the value (an object or array) is not empty and the fragment
            // doesn't start with a comma, the only valid fragment start is a
            // closing bracket. If it's something else, a prefix is added to force
            // a parse failure. Otherwise, a sequence of chunks could parse
            // successfully although it should not, e.g. ["[{}", "{}]"]
            if (fragment[0] !== '}' && fragment[0] !== ']') {
                this.jsonParseOffset -= 3;
                return '[[]' + fragment;
            }
        }

        return fragment;
    }

    flush(chunk, start, end) {
        let fragment = chunk.slice(start, end);

        // Save the position correction for an error in JSON.parse(), if any
        this.jsonParseOffset = this.chunkOffset + start;

        // Prepend the pending chunk if any
        if (this.pendingChunk !== null) {
            fragment = this.pendingChunk + fragment;
            this.jsonParseOffset -= this.pendingChunk.length;
            this.pendingChunk = null;
        }

        if (this.flushDepth === this.lastFlushDepth) {
            // Depth didn't change, so it's a root value or an entry/element set
            if (this.flushDepth > 0) {
                this.parseAndAppend(this.prepareAddition(fragment), true);
            } else {
                // That's an entire value on the top level
                this.value = JSON.parse(fragment);
                this.valueStack = {
                    value: this.value,
                    prev: null
                };
            }
        } else if (this.flushDepth > this.lastFlushDepth) {
            // Add missing closing brackets
            for (let i = this.flushDepth - 1; i >= this.lastFlushDepth; i--) {
                fragment += this.stack[i] === STACK_OBJECT ? '}' : ']';
            }

            if (this.lastFlushDepth === 0) {
                // That's a root value
                this.value = JSON.parse(fragment);
                this.valueStack = {
                    value: this.value,
                    prev: null
                };
            } else {
                this.parseAndAppend(this.prepareAddition(fragment), true);
            }

            // Walk down the depths to the last object/array, which is now current
            for (let i = this.lastFlushDepth || 1; i < this.flushDepth; i++) {
                let value = this.valueStack.value;

                if (this.stack[i - 1] === STACK_OBJECT) {
                    // find the last entry
                    let key;
                    // eslint-disable-next-line curly
                    for (key in value);
                    value = value[key];
                } else {
                    // last element
                    value = value[value.length - 1];
                }

                this.valueStack = {
                    value,
                    prev: this.valueStack
                };
            }
        } else /* this.flushDepth < this.lastFlushDepth */ {
            fragment = this.prepareAddition(fragment);

            // Add missing opening brackets
            for (let i = this.lastFlushDepth - 1; i >= this.flushDepth; i--) {
                this.jsonParseOffset--;
                fragment = (this.stack[i] === STACK_OBJECT ? '{' : '[') + fragment;
            }

            this.parseAndAppend(fragment, false);

            for (let i = this.lastFlushDepth - 1; i >= this.flushDepth; i--) {
                this.valueStack = this.valueStack.prev;
            }
        }

        this.lastFlushDepth = this.flushDepth;
    }

    push(chunk) {
        if (typeof chunk !== 'string') {
            // Suppose chunk is a Buffer or Uint8Array

            // Prepend an uncompleted byte sequence if any
            if (this.pendingByteSeq !== null) {
                const origRawChunk = chunk;
                chunk = new Uint8Array(this.pendingByteSeq.length + origRawChunk.length);
                chunk.set(this.pendingByteSeq);
                chunk.set(origRawChunk, this.pendingByteSeq.length);
                this.pendingByteSeq = null;
            }

            // In case of a Buffer/Uint8Array, the input is encoded in UTF8.
            // Seek for parts of an uncompleted UTF8 symbol at the ending.
            // This only makes sense if more chunks are expected and the last
            // character may be an incomplete multi-byte sequence.
            if (chunk[chunk.length - 1] > 127) {
                for (let seqLength = 0; seqLength < chunk.length; seqLength++) {
                    const byte = chunk[chunk.length - 1 - seqLength];

                    // 10xxxxxx - 2nd, 3rd or 4th byte
                    // 110xxxxx - first byte of a 2-byte sequence
                    // 1110xxxx - first byte of a 3-byte sequence
                    // 11110xxx - first byte of a 4-byte sequence
                    if (byte >> 6 === 3) {
                        seqLength++;

                        // If the sequence is really incomplete, then preserve it
                        // for the next chunk and cut it off from the current chunk
                        if ((seqLength !== 4 && byte >> 3 === 0b11110) ||
                            (seqLength !== 3 && byte >> 4 === 0b1110) ||
                            (seqLength !== 2 && byte >> 5 === 0b110)) {
                            this.pendingByteSeq = chunk.slice(chunk.length - seqLength);
                            chunk = chunk.slice(0, -seqLength);
                        }

                        break;
                    }
                }
            }

            // Convert the chunk to a string, since a single decode per chunk
            // is much more effective than decoding multiple small substrings
            chunk = decoder.decode(chunk);
        }

        const chunkLength = chunk.length;
        let lastFlushPoint = 0;
        let flushPoint = 0;

        // Main scan loop
        scan: for (let i = 0; i < chunkLength; i++) {
            if (this.stateString) {
                for (; i < chunkLength; i++) {
                    if (this.stateStringEscape) {
                        this.stateStringEscape = false;
                    } else {
                        switch (chunk.charCodeAt(i)) {
                            case 0x22: /* " */
                                this.stateString = false;
                                continue scan;

                            case 0x5C: /* \ */
                                this.stateStringEscape = true;
                        }
                    }
                }

                break;
            }

            switch (chunk.charCodeAt(i)) {
                case 0x22: /* " */
                    this.stateString = true;
                    this.stateStringEscape = false;
                    break;

                case 0x2C: /* , */
                    flushPoint = i;
                    break;

                case 0x7B: /* { */
                    // Open an object
                    flushPoint = i + 1;
                    this.stack[this.flushDepth++] = STACK_OBJECT;
                    break;

                case 0x5B: /* [ */
                    // Open an array
                    flushPoint = i + 1;
                    this.stack[this.flushDepth++] = STACK_ARRAY;
                    break;

                case 0x5D: /* ] */
                case 0x7D: /* } */
                    // Close an object or array
                    flushPoint = i + 1;
                    this.flushDepth--;

                    if (this.flushDepth < this.lastFlushDepth) {
                        this.flush(chunk, lastFlushPoint, flushPoint);
                        lastFlushPoint = flushPoint;
                    }

                    break;

                case 0x09: /* \t */
                case 0x0A: /* \n */
                case 0x0D: /* \r */
                case 0x20: /* space */
                    // Move the points forward when they point at the current position and it's a whitespace
                    if (lastFlushPoint === i) {
                        lastFlushPoint++;
                    }

                    if (flushPoint === i) {
                        flushPoint++;
                    }

                    break;
            }
        }

        if (flushPoint > lastFlushPoint) {
            this.flush(chunk, lastFlushPoint, flushPoint);
        }

        // Produce a pendingChunk if something is left
        if (flushPoint < chunkLength) {
            if (this.pendingChunk !== null) {
                // When there is already a pending chunk then no flush happened,
                // so append the entire chunk to the pending one
                this.pendingChunk += chunk;
            } else {
                // Create a pending chunk; it will start with a non-whitespace since
                // flushPoint was moved forward past whitespace during the scan
                this.pendingChunk = chunk.slice(flushPoint, chunkLength);
            }
        }

        this.chunkOffset += chunkLength;
    }

    finish() {
        if (this.pendingChunk !== null) {
            this.flush('', 0, 0);
            this.pendingChunk = null;
        }

        return this.value;
    }
};
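The `pendingByteSeq` machinery above is what makes Buffer input safe: a multi-byte UTF-8 character split across chunk boundaries is held back and re-attached to the next chunk. A sketch exercising that path (the emoji occupies 4 UTF-8 bytes; the split point deliberately falls inside it):

```js
const parseChunked = require('@discoveryjs/json-ext/src/parse-chunked');

const bytes = Buffer.from('{"emoji":"😀"}');
const splitAt = bytes.indexOf(0xF0) + 2; // cut 2 bytes into the 4-byte sequence

parseChunked(function*() {
    yield bytes.slice(0, splitAt); // ends mid-character...
    yield bytes.slice(splitAt);    // ...completed by the next chunk
}).then(data => console.log(data.emoji)); // '😀'
```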
231
node_modules/@discoveryjs/json-ext/src/stringify-info.js
generated
vendored
Normal file
@ -0,0 +1,231 @@
const {
    normalizeReplacer,
    normalizeSpace,
    replaceValue,
    getTypeNative,
    getTypeAsync,
    isLeadingSurrogate,
    isTrailingSurrogate,
    escapableCharCodeSubstitution,
    type: {
        PRIMITIVE,
        OBJECT,
        ARRAY,
        PROMISE,
        STRING_STREAM,
        OBJECT_STREAM
    }
} = require('./utils');
const hasOwnProperty = Object.prototype.hasOwnProperty; // used by walk() below
const charLength2048 = Array.from({ length: 2048 }).map((_, code) => {
    if (escapableCharCodeSubstitution.hasOwnProperty(code)) {
        return 2; // \X
    }

    if (code < 0x20) {
        return 6; // \uXXXX
    }

    return code < 128 ? 1 : 2; // UTF8 bytes
});

function stringLength(str) {
    let len = 0;
    let prevLeadingSurrogate = false;

    for (let i = 0; i < str.length; i++) {
        const code = str.charCodeAt(i);

        if (code < 2048) {
            len += charLength2048[code];
        } else if (isLeadingSurrogate(code)) {
            len += 6; // \uXXXX since no pair with a trailing surrogate yet
            prevLeadingSurrogate = true;
            continue;
        } else if (isTrailingSurrogate(code)) {
            len = prevLeadingSurrogate
                ? len - 2 // a surrogate pair is 4 bytes; the leading surrogate was counted as 6 bytes, so subtract 2
                : len + 6; // \uXXXX
        } else {
            len += 3; // code >= 2048 takes 3 bytes in UTF8
        }

        prevLeadingSurrogate = false;
    }

    return len + 2; // +2 for quotes
}

function primitiveLength(value) {
    switch (typeof value) {
        case 'string':
            return stringLength(value);

        case 'number':
            return Number.isFinite(value) ? String(value).length : 4 /* null */;

        case 'boolean':
            return value ? 4 /* true */ : 5 /* false */;

        case 'undefined':
        case 'object':
            return 4; /* null */

        default:
            return 0;
    }
}

function spaceLength(space) {
    space = normalizeSpace(space);
    return typeof space === 'string' ? space.length : 0;
}

module.exports = function jsonStringifyInfo(value, replacer, space, options) {
    function walk(holder, key, value) {
        if (stop) {
            return;
        }

        value = replaceValue(holder, key, value, replacer);

        let type = getType(value);

        // check for circular structure
        if (type !== PRIMITIVE && stack.has(value)) {
            circular.add(value);
            length += 4; // treat as null

            if (!options.continueOnCircular) {
                stop = true;
            }

            return;
        }

        switch (type) {
            case PRIMITIVE:
                if (value !== undefined || Array.isArray(holder)) {
                    length += primitiveLength(value);
                } else if (holder === root) {
                    length += 9; // FIXME: that's the length of undefined, should we normalize behaviour to convert it to null?
                }
                break;

            case OBJECT: {
                if (visited.has(value)) {
                    duplicate.add(value);
                    length += visited.get(value);
                    break;
                }

                const valueLength = length;
                let entries = 0;

                length += 2; // {}

                stack.add(value);

                for (const key in value) {
                    if (hasOwnProperty.call(value, key) && (allowlist === null || allowlist.has(key))) {
                        const prevLength = length;
                        walk(value, key, value[key]);

                        if (prevLength !== length) {
                            // value is printed
                            length += stringLength(key) + 1; // "key":
                            entries++;
                        }
                    }
                }

                if (entries > 1) {
                    length += entries - 1; // commas
                }

                stack.delete(value);

                if (space > 0 && entries > 0) {
                    length += (1 + (stack.size + 1) * space + 1) * entries; // for each key-value: \n{space}
                    length += 1 + stack.size * space; // for }
                }

                visited.set(value, length - valueLength);

                break;
            }

            case ARRAY: {
                if (visited.has(value)) {
                    duplicate.add(value);
                    length += visited.get(value);
                    break;
                }

                const valueLength = length;

                length += 2; // []

                stack.add(value);

                for (let i = 0; i < value.length; i++) {
                    walk(value, i, value[i]);
                }

                if (value.length > 1) {
                    length += value.length - 1; // commas
                }

                stack.delete(value);

                if (space > 0 && value.length > 0) {
                    length += (1 + (stack.size + 1) * space) * value.length; // for each element: \n{space}
                    length += 1 + stack.size * space; // for ]
                }

                visited.set(value, length - valueLength);

                break;
            }

            case PROMISE:
            case STRING_STREAM:
                async.add(value);
                break;

            case OBJECT_STREAM:
                length += 2; // []
                async.add(value);
                break;
        }
    }

    let allowlist = null;
    replacer = normalizeReplacer(replacer);

    if (Array.isArray(replacer)) {
        allowlist = new Set(replacer);
        replacer = null;
    }

    space = spaceLength(space);
    options = options || {};

    const visited = new Map();
    const stack = new Set();
    const duplicate = new Set();
    const circular = new Set();
    const async = new Set();
    const getType = options.async ? getTypeAsync : getTypeNative;
    const root = { '': value };
    let stop = false;
    let length = 0;

    walk(root, '', value);

    return {
        minLength: isNaN(length) ? Infinity : length,
        circular: [...circular],
        duplicate: [...duplicate],
        async: [...async]
    };
};
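A small sketch of what the `duplicate` set reports: the same object reachable via two keys is counted at full length both times, but flagged so callers can decide whether that is acceptable (values hand-computed from the code above):

```js
const stringifyInfo = require('@discoveryjs/json-ext/src/stringify-info');

const shared = { n: 1 };
const info = stringifyInfo({ a: shared, b: shared });

console.log(info.duplicate); // [ { n: 1 } ] – `shared` would be serialized twice
console.log(info.minLength); // 25 – the length of '{"a":{"n":1},"b":{"n":1}}'
```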
3
node_modules/@discoveryjs/json-ext/src/stringify-stream-browser.js
generated
vendored
Normal file
@ -0,0 +1,3 @@
module.exports = () => {
    throw new Error('Method is not supported');
};
408
node_modules/@discoveryjs/json-ext/src/stringify-stream.js
generated
vendored
Normal file
@ -0,0 +1,408 @@
const { Readable } = require('stream');
const {
    normalizeReplacer,
    normalizeSpace,
    replaceValue,
    getTypeAsync,
    type: {
        PRIMITIVE,
        OBJECT,
        ARRAY,
        PROMISE,
        STRING_STREAM,
        OBJECT_STREAM
    }
} = require('./utils');
const noop = () => {};
const hasOwnProperty = Object.prototype.hasOwnProperty;

// TODO: Remove when dropping support for Node.js 10
// Node.js 10 has no well-formed JSON.stringify()
// https://github.com/tc39/proposal-well-formed-stringify
// Adopted code from https://bugs.chromium.org/p/v8/issues/detail?id=7782#c12
const wellformedStringStringify = JSON.stringify('\ud800') === '"\\ud800"'
    ? JSON.stringify
    : s => JSON.stringify(s).replace(
        /\p{Surrogate}/gu,
        m => `\\u${m.charCodeAt(0).toString(16)}`
    );

function push() {
    this.push(this._stack.value);
    this.popStack();
}

function pushPrimitive(value) {
    switch (typeof value) {
        case 'string':
            this.push(this.encodeString(value));
            break;

        case 'number':
            this.push(Number.isFinite(value) ? this.encodeNumber(value) : 'null');
            break;

        case 'boolean':
            this.push(value ? 'true' : 'false');
            break;

        case 'undefined':
        case 'object': // typeof null === 'object'
            this.push('null');
            break;

        default:
            this.destroy(new TypeError(`Do not know how to serialize a ${value.constructor && value.constructor.name || typeof value}`));
    }
}

function processObjectEntry(key) {
    const current = this._stack;

    if (!current.first) {
        current.first = true;
    } else {
        this.push(',');
    }

    if (this.space) {
        this.push(`\n${this.space.repeat(this._depth)}${this.encodeString(key)}: `);
    } else {
        this.push(this.encodeString(key) + ':');
    }
}

function processObject() {
    const current = this._stack;

    // when no keys are left, remove the object from the stack
    if (current.index === current.keys.length) {
        if (this.space && current.first) {
            this.push(`\n${this.space.repeat(this._depth - 1)}}`);
        } else {
            this.push('}');
        }

        this.popStack();
        return;
    }

    const key = current.keys[current.index];

    this.processValue(current.value, key, current.value[key], processObjectEntry);
    current.index++;
}

function processArrayItem(index) {
    if (index !== 0) {
        this.push(',');
    }

    if (this.space) {
        this.push(`\n${this.space.repeat(this._depth)}`);
    }
}

function processArray() {
    const current = this._stack;

    if (current.index === current.value.length) {
        if (this.space && current.index > 0) {
            this.push(`\n${this.space.repeat(this._depth - 1)}]`);
        } else {
            this.push(']');
        }

        this.popStack();
        return;
    }

    this.processValue(current.value, current.index, current.value[current.index], processArrayItem);
    current.index++;
}

function createStreamReader(fn) {
    return function() {
        const current = this._stack;
        const data = current.value.read(this._readSize);

        if (data !== null) {
            current.first = false;
            fn.call(this, data, current);
        } else {
            if ((current.first && !current.value._readableState.reading) || current.ended) {
                this.popStack();
            } else {
                current.first = true;
                current.awaiting = true;
            }
        }
    };
}

const processReadableObject = createStreamReader(function(data, current) {
    this.processValue(current.value, current.index, data, processArrayItem);
    current.index++;
});

const processReadableString = createStreamReader(function(data) {
    this.push(data);
});

class JsonStringifyStream extends Readable {
    constructor(value, replacer, space) {
        super({
            autoDestroy: true
        });

        this.getKeys = Object.keys;
        this.replacer = normalizeReplacer(replacer);

        if (Array.isArray(this.replacer)) {
            const allowlist = this.replacer;

            this.getKeys = (value) => allowlist.filter(key => hasOwnProperty.call(value, key));
            this.replacer = null;
        }

        this.space = normalizeSpace(space);
        this._depth = 0;

        this.error = null;
        this._processing = false;
        this._ended = false;

        this._readSize = 0;
        this._buffer = '';

        this._stack = null;
        this._visited = new WeakSet();

        this.pushStack({
            handler: () => {
                this.popStack();
                this.processValue({ '': value }, '', value, noop);
            }
        });
    }

    encodeString(value) {
        if (/[^\x20-\uD799]|[\x22\x5c]/.test(value)) {
            return wellformedStringStringify(value);
        }

        return '"' + value + '"';
    }

    encodeNumber(value) {
        return value;
    }

    processValue(holder, key, value, callback) {
        value = replaceValue(holder, key, value, this.replacer);

        let type = getTypeAsync(value);

        switch (type) {
            case PRIMITIVE:
                if (callback !== processObjectEntry || value !== undefined) {
                    callback.call(this, key);
                    pushPrimitive.call(this, value);
                }
                break;

            case OBJECT:
                callback.call(this, key);

                // check for circular structure
                if (this._visited.has(value)) {
                    return this.destroy(new TypeError('Converting circular structure to JSON'));
                }

                this._visited.add(value);
                this._depth++;
                this.push('{');
                this.pushStack({
                    handler: processObject,
                    value,
                    index: 0,
                    first: false,
                    keys: this.getKeys(value)
                });
                break;

            case ARRAY:
                callback.call(this, key);

                // check for circular structure
                if (this._visited.has(value)) {
                    return this.destroy(new TypeError('Converting circular structure to JSON'));
                }

                this._visited.add(value);

                this.push('[');
                this.pushStack({
                    handler: processArray,
                    value,
                    index: 0
                });
                this._depth++;
                break;

            case PROMISE:
                this.pushStack({
                    handler: noop,
                    awaiting: true
                });

                Promise.resolve(value)
                    .then(resolved => {
                        this.popStack();
                        this.processValue(holder, key, resolved, callback);
                        this.processStack();
                    })
                    .catch(error => {
                        this.destroy(error);
                    });
                break;

            case STRING_STREAM:
            case OBJECT_STREAM:
                callback.call(this, key);

                // TODO: Remove when dropping support for Node.js 10
                // `_readableState.endEmitted` is used as a fallback, since Node.js 10 has no `readableEnded` getter
                if (value.readableEnded || value._readableState.endEmitted) {
                    return this.destroy(new Error('Readable Stream has ended before it was serialized. All stream data have been lost'));
                }

                if (value.readableFlowing) {
                    return this.destroy(new Error('Readable Stream is in flowing mode, data may have been lost. Trying to pause stream.'));
                }

                if (type === OBJECT_STREAM) {
                    this.push('[');
                    this.pushStack({
                        handler: push,
                        value: this.space ? '\n' + this.space.repeat(this._depth) + ']' : ']'
                    });
                    this._depth++;
                }

                const self = this.pushStack({
                    handler: type === OBJECT_STREAM ? processReadableObject : processReadableString,
                    value,
                    index: 0,
                    first: false,
                    ended: false,
                    awaiting: !value.readable || value.readableLength === 0
                });
                const continueProcessing = () => {
                    if (self.awaiting) {
                        self.awaiting = false;
                        this.processStack();
                    }
                };

                value.once('error', error => this.destroy(error));
                value.once('end', () => {
                    self.ended = true;
                    continueProcessing();
                });
                value.on('readable', continueProcessing);
                break;
        }
    }

    pushStack(node) {
        node.prev = this._stack;
        return this._stack = node;
    }

    popStack() {
        const { handler, value } = this._stack;

        if (handler === processObject || handler === processArray || handler === processReadableObject) {
            this._visited.delete(value);
            this._depth--;
        }

        this._stack = this._stack.prev;
    }

    processStack() {
        if (this._processing || this._ended) {
            return;
        }

        try {
            this._processing = true;

            while (this._stack !== null && !this._stack.awaiting) {
                this._stack.handler.call(this);

                if (!this._processing) {
                    return;
                }
            }

            this._processing = false;
        } catch (error) {
            this.destroy(error);
            return;
        }

        if (this._stack === null && !this._ended) {
            this._finish();
            this.push(null);
        }
    }

    push(data) {
        if (data !== null) {
            this._buffer += data;

            // keep buffering until the requested read size is reached
            if (this._buffer.length < this._readSize) {
                return;
            }

            // flush buffer
            data = this._buffer;
            this._buffer = '';
            this._processing = false;
        }

        super.push(data);
    }

    _read(size) {
        // start processing
        this._readSize = size || this.readableHighWaterMark;
        this.processStack();
    }

    _finish() {
        this._ended = true;
        this._processing = false;
        this._stack = null;
        this._visited = null;

        if (this._buffer && this._buffer.length) {
            super.push(this._buffer); // flush buffer
        }

        this._buffer = '';
    }

    _destroy(error, cb) {
        this.error = this.error || error;
        this._finish();
        cb(error);
    }
}

module.exports = function createJsonStringifyStream(value, replacer, space) {
    return new JsonStringifyStream(value, replacer, space);
};
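`createJsonStringifyStream` is exposed as `stringifyStream`. Unlike `JSON.stringify` it works incrementally and understands async values: promises are resolved and nested readable streams are drained while the output is being read. A usage sketch (the output path is hypothetical):

```js
const { stringifyStream } = require('@discoveryjs/json-ext');
const fs = require('fs');

stringifyStream({
    status: Promise.resolve('ok'),    // resolved during serialization
    payload: { nested: ['values'] }
}, null, 2)
    .pipe(fs.createWriteStream('./out.json')); // hypothetical destination
```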
1
node_modules/@discoveryjs/json-ext/src/text-decoder-browser.js
generated
vendored
Normal file
@ -0,0 +1 @@
module.exports = TextDecoder;
1
node_modules/@discoveryjs/json-ext/src/text-decoder.js
generated
vendored
Normal file
@ -0,0 +1 @@
module.exports = require('util').TextDecoder;
149
node_modules/@discoveryjs/json-ext/src/utils.js
generated
vendored
Normal file
@ -0,0 +1,149 @@
const PrimitiveType = 1;
const ObjectType = 2;
const ArrayType = 3;
const PromiseType = 4;
const ReadableStringType = 5;
const ReadableObjectType = 6;
// https://tc39.es/ecma262/#table-json-single-character-escapes
const escapableCharCodeSubstitution = { // JSON Single Character Escape Sequences
    0x08: '\\b',
    0x09: '\\t',
    0x0a: '\\n',
    0x0c: '\\f',
    0x0d: '\\r',
    0x22: '\\\"',
    0x5c: '\\\\'
};

function isLeadingSurrogate(code) {
    return code >= 0xD800 && code <= 0xDBFF;
}

function isTrailingSurrogate(code) {
    return code >= 0xDC00 && code <= 0xDFFF;
}

function isReadableStream(value) {
    return (
        typeof value.pipe === 'function' &&
        typeof value._read === 'function' &&
        typeof value._readableState === 'object' && value._readableState !== null
    );
}

function replaceValue(holder, key, value, replacer) {
    if (value && typeof value.toJSON === 'function') {
        value = value.toJSON();
    }

    if (replacer !== null) {
        value = replacer.call(holder, String(key), value);
    }

    switch (typeof value) {
        case 'function':
        case 'symbol':
            value = undefined;
            break;

        case 'object':
            if (value !== null) {
                const cls = value.constructor;
                if (cls === String || cls === Number || cls === Boolean) {
                    value = value.valueOf();
                }
            }
            break;
    }

    return value;
}

function getTypeNative(value) {
    if (value === null || typeof value !== 'object') {
        return PrimitiveType;
    }

    if (Array.isArray(value)) {
        return ArrayType;
    }

    return ObjectType;
}

function getTypeAsync(value) {
    if (value === null || typeof value !== 'object') {
        return PrimitiveType;
    }

    if (typeof value.then === 'function') {
        return PromiseType;
    }

    if (isReadableStream(value)) {
        return value._readableState.objectMode ? ReadableObjectType : ReadableStringType;
    }

    if (Array.isArray(value)) {
        return ArrayType;
    }

    return ObjectType;
}

function normalizeReplacer(replacer) {
    if (typeof replacer === 'function') {
        return replacer;
    }

    if (Array.isArray(replacer)) {
        const allowlist = new Set(replacer
            .map(item => {
                const cls = item && item.constructor;
                return cls === String || cls === Number ? String(item) : null;
            })
            .filter(item => typeof item === 'string')
        );

        return [...allowlist];
    }

    return null;
}

function normalizeSpace(space) {
    if (typeof space === 'number') {
        if (!Number.isFinite(space) || space < 1) {
            return false;
        }

        return ' '.repeat(Math.min(space, 10));
    }

    if (typeof space === 'string') {
        return space.slice(0, 10) || false;
    }

    return false;
}

module.exports = {
    escapableCharCodeSubstitution,
    isLeadingSurrogate,
    isTrailingSurrogate,
    type: {
        PRIMITIVE: PrimitiveType,
        PROMISE: PromiseType,
        ARRAY: ArrayType,
        OBJECT: ObjectType,
        STRING_STREAM: ReadableStringType,
        OBJECT_STREAM: ReadableObjectType
    },

    isReadableStream,
    replaceValue,
    getTypeNative,
    getTypeAsync,
    normalizeReplacer,
    normalizeSpace
};
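`replaceValue` applies the same transformation order as `JSON.stringify`: `toJSON()` first, then the replacer (`null` here means none), then unboxing of `String`/`Number`/`Boolean` wrappers. A few hand-traced examples of the internal helper:

```js
const { replaceValue } = require('@discoveryjs/json-ext/src/utils');

replaceValue({}, 'ts', new Date(0), null);   // '1970-01-01T00:00:00.000Z' – via Date#toJSON
replaceValue({}, 'fn', () => {}, null);      // undefined – functions are dropped
replaceValue({}, 'n', new Number(42), null); // 42 – wrapper objects are unboxed
```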
1
node_modules/@discoveryjs/json-ext/src/version.js
generated
vendored
Normal file
@ -0,0 +1 @@
module.exports = require('../package.json').version;
19
node_modules/@jridgewell/gen-mapping/LICENSE
generated
vendored
Normal file
@ -0,0 +1,19 @@
Copyright 2022 Justin Ridgewell <jridgewell@google.com>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
227
node_modules/@jridgewell/gen-mapping/README.md
generated
vendored
Normal file
@ -0,0 +1,227 @@
# @jridgewell/gen-mapping

> Generate source maps

`gen-mapping` allows you to generate a source map during transpilation or minification.
With a source map, you're able to trace the original location in the source file, either in Chrome's
DevTools or using a library like [`@jridgewell/trace-mapping`][trace-mapping].

You may already be familiar with the [`source-map`][source-map] package's `SourceMapGenerator`. This
provides the same `addMapping` and `setSourceContent` API.

## Installation

```sh
npm install @jridgewell/gen-mapping
```

## Usage

```typescript
import { GenMapping, addMapping, setSourceContent, toEncodedMap, toDecodedMap } from '@jridgewell/gen-mapping';

const map = new GenMapping({
  file: 'output.js',
  sourceRoot: 'https://example.com/',
});

setSourceContent(map, 'input.js', `function foo() {}`);

addMapping(map, {
  // Lines start at line 1, columns at column 0.
  generated: { line: 1, column: 0 },
  source: 'input.js',
  original: { line: 1, column: 0 },
});

addMapping(map, {
  generated: { line: 1, column: 9 },
  source: 'input.js',
  original: { line: 1, column: 9 },
  name: 'foo',
});

assert.deepEqual(toDecodedMap(map), {
  version: 3,
  file: 'output.js',
  names: ['foo'],
  sourceRoot: 'https://example.com/',
  sources: ['input.js'],
  sourcesContent: ['function foo() {}'],
  mappings: [
    [ [0, 0, 0, 0], [9, 0, 0, 9, 0] ]
  ],
});

assert.deepEqual(toEncodedMap(map), {
  version: 3,
  file: 'output.js',
  names: ['foo'],
  sourceRoot: 'https://example.com/',
  sources: ['input.js'],
  sourcesContent: ['function foo() {}'],
  mappings: 'AAAA,SAASA',
});
```

### Smaller Sourcemaps

Not everything needs to be added to a sourcemap, and needless markings can cause significantly
larger file sizes. `gen-mapping` exposes `maybeAddSegment`/`maybeAddMapping` APIs that will
intelligently determine if this marking adds useful information. If not, the marking will be
skipped.

```typescript
import { maybeAddMapping } from '@jridgewell/gen-mapping';

const map = new GenMapping();

// Adding a sourceless marking at the beginning of a line isn't useful.
maybeAddMapping(map, {
  generated: { line: 1, column: 0 },
});

// Adding a new source marking is useful.
maybeAddMapping(map, {
  generated: { line: 1, column: 0 },
  source: 'input.js',
  original: { line: 1, column: 0 },
});

// But adding another marking pointing to the exact same original location isn't, even if the
// generated column changed.
maybeAddMapping(map, {
  generated: { line: 1, column: 9 },
  source: 'input.js',
  original: { line: 1, column: 0 },
});

assert.deepEqual(toEncodedMap(map), {
  version: 3,
  names: [],
  sources: ['input.js'],
  sourcesContent: [null],
  mappings: 'AAAA',
});
```

## Benchmarks

```
node v18.0.0

amp.js.map
Memory Usage:
gen-mapping: addSegment 5852872 bytes
gen-mapping: addMapping 7716042 bytes
source-map-js 6143250 bytes
source-map-0.6.1 6124102 bytes
source-map-0.8.0 6121173 bytes
Smallest memory usage is gen-mapping: addSegment

Adding speed:
gen-mapping: addSegment x 441 ops/sec ±2.07% (90 runs sampled)
gen-mapping: addMapping x 350 ops/sec ±2.40% (86 runs sampled)
source-map-js: addMapping x 169 ops/sec ±2.42% (80 runs sampled)
source-map-0.6.1: addMapping x 167 ops/sec ±2.56% (80 runs sampled)
source-map-0.8.0: addMapping x 168 ops/sec ±2.52% (80 runs sampled)
Fastest is gen-mapping: addSegment

Generate speed:
gen-mapping: decoded output x 150,824,370 ops/sec ±0.07% (102 runs sampled)
gen-mapping: encoded output x 663 ops/sec ±0.22% (98 runs sampled)
source-map-js: encoded output x 197 ops/sec ±0.45% (84 runs sampled)
source-map-0.6.1: encoded output x 198 ops/sec ±0.33% (85 runs sampled)
source-map-0.8.0: encoded output x 197 ops/sec ±0.06% (93 runs sampled)
Fastest is gen-mapping: decoded output


***


babel.min.js.map
Memory Usage:
gen-mapping: addSegment 37578063 bytes
gen-mapping: addMapping 37212897 bytes
source-map-js 47638527 bytes
source-map-0.6.1 47690503 bytes
source-map-0.8.0 47470188 bytes
Smallest memory usage is gen-mapping: addMapping

Adding speed:
gen-mapping: addSegment x 31.05 ops/sec ±8.31% (43 runs sampled)
gen-mapping: addMapping x 29.83 ops/sec ±7.36% (51 runs sampled)
source-map-js: addMapping x 20.73 ops/sec ±6.22% (38 runs sampled)
source-map-0.6.1: addMapping x 20.03 ops/sec ±10.51% (38 runs sampled)
source-map-0.8.0: addMapping x 19.30 ops/sec ±8.27% (37 runs sampled)
Fastest is gen-mapping: addSegment

Generate speed:
gen-mapping: decoded output x 381,379,234 ops/sec ±0.29% (96 runs sampled)
gen-mapping: encoded output x 95.15 ops/sec ±2.98% (72 runs sampled)
source-map-js: encoded output x 15.20 ops/sec ±7.41% (33 runs sampled)
source-map-0.6.1: encoded output x 16.36 ops/sec ±10.46% (31 runs sampled)
source-map-0.8.0: encoded output x 16.06 ops/sec ±6.45% (31 runs sampled)
Fastest is gen-mapping: decoded output


***


preact.js.map
Memory Usage:
gen-mapping: addSegment 416247 bytes
gen-mapping: addMapping 419824 bytes
source-map-js 1024619 bytes
source-map-0.6.1 1146004 bytes
source-map-0.8.0 1113250 bytes
Smallest memory usage is gen-mapping: addSegment

Adding speed:
gen-mapping: addSegment x 13,755 ops/sec ±0.15% (98 runs sampled)
gen-mapping: addMapping x 13,013 ops/sec ±0.11% (101 runs sampled)
source-map-js: addMapping x 4,564 ops/sec ±0.21% (98 runs sampled)
source-map-0.6.1: addMapping x 4,562 ops/sec ±0.11% (99 runs sampled)
source-map-0.8.0: addMapping x 4,593 ops/sec ±0.11% (100 runs sampled)
Fastest is gen-mapping: addSegment

Generate speed:
gen-mapping: decoded output x 379,864,020 ops/sec ±0.23% (93 runs sampled)
gen-mapping: encoded output x 14,368 ops/sec ±4.07% (82 runs sampled)
source-map-js: encoded output x 5,261 ops/sec ±0.21% (99 runs sampled)
source-map-0.6.1: encoded output x 5,124 ops/sec ±0.58% (99 runs sampled)
source-map-0.8.0: encoded output x 5,434 ops/sec ±0.33% (96 runs sampled)
Fastest is gen-mapping: decoded output


***


react.js.map
Memory Usage:
gen-mapping: addSegment 975096 bytes
gen-mapping: addMapping 1102981 bytes
source-map-js 2918836 bytes
source-map-0.6.1 2885435 bytes
source-map-0.8.0 2874336 bytes
Smallest memory usage is gen-mapping: addSegment

Adding speed:
gen-mapping: addSegment x 4,772 ops/sec ±0.15% (100 runs sampled)
gen-mapping: addMapping x 4,456 ops/sec ±0.13% (97 runs sampled)
source-map-js: addMapping x 1,618 ops/sec ±0.24% (97 runs sampled)
source-map-0.6.1: addMapping x 1,622 ops/sec ±0.12% (99 runs sampled)
source-map-0.8.0: addMapping x 1,631 ops/sec ±0.12% (100 runs sampled)
Fastest is gen-mapping: addSegment

Generate speed:
gen-mapping: decoded output x 379,107,695 ops/sec ±0.07% (99 runs sampled)
gen-mapping: encoded output x 5,421 ops/sec ±1.60% (89 runs sampled)
source-map-js: encoded output x 2,113 ops/sec ±1.81% (98 runs sampled)
source-map-0.6.1: encoded output x 2,126 ops/sec ±0.10% (100 runs sampled)
source-map-0.8.0: encoded output x 2,176 ops/sec ±0.39% (98 runs sampled)
Fastest is gen-mapping: decoded output
```

[source-map]: https://www.npmjs.com/package/source-map
[trace-mapping]: https://github.com/jridgewell/trace-mapping
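One caveat worth restating from the README above: the `maybe*` APIs only skip correctly when markings arrive in generated order, because the "does this add information?" check only inspects the segment immediately before the insertion point. A minimal sketch (names and values are illustrative):

```js
import { GenMapping, maybeAddMapping, toEncodedMap } from '@jridgewell/gen-mapping';

const map = new GenMapping();
// In-order: column 0 first, then column 9. The second marking points at the
// same original position as the previous segment, so it is skipped.
maybeAddMapping(map, { generated: { line: 1, column: 0 }, source: 'input.js', original: { line: 1, column: 0 } });
maybeAddMapping(map, { generated: { line: 1, column: 9 }, source: 'input.js', original: { line: 1, column: 0 } }); // skipped
console.log(toEncodedMap(map).mappings); // 'AAAA'
```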
230
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs
generated
vendored
Normal file
@ -0,0 +1,230 @@
import { SetArray, put, remove } from '@jridgewell/set-array';
import { encode } from '@jridgewell/sourcemap-codec';
import { TraceMap, decodedMappings } from '@jridgewell/trace-mapping';

const COLUMN = 0;
const SOURCES_INDEX = 1;
const SOURCE_LINE = 2;
const SOURCE_COLUMN = 3;
const NAMES_INDEX = 4;

const NO_NAME = -1;
/**
 * Provides the state to generate a sourcemap.
 */
class GenMapping {
    constructor({ file, sourceRoot } = {}) {
        this._names = new SetArray();
        this._sources = new SetArray();
        this._sourcesContent = [];
        this._mappings = [];
        this.file = file;
        this.sourceRoot = sourceRoot;
        this._ignoreList = new SetArray();
    }
}
/**
 * TypeScript doesn't allow friend access to private fields, so this just casts the map into a type
 * with public access modifiers.
 */
function cast(map) {
    return map;
}
function addSegment(map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
    return addSegmentInternal(false, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
}
function addMapping(map, mapping) {
    return addMappingInternal(false, map, mapping);
}
/**
 * Same as `addSegment`, but will only add the segment if it generates useful information in the
 * resulting map. This only works correctly if segments are added **in order**, meaning you should
 * not add a segment with a lower generated line/column than one that came before.
 */
const maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
    return addSegmentInternal(true, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
};
/**
 * Same as `addMapping`, but will only add the mapping if it generates useful information in the
 * resulting map. This only works correctly if mappings are added **in order**, meaning you should
 * not add a mapping with a lower generated line/column than one that came before.
 */
const maybeAddMapping = (map, mapping) => {
    return addMappingInternal(true, map, mapping);
};
/**
 * Adds/removes the content of the source file to the source map.
 */
function setSourceContent(map, source, content) {
    const { _sources: sources, _sourcesContent: sourcesContent } = cast(map);
    const index = put(sources, source);
    sourcesContent[index] = content;
}
function setIgnore(map, source, ignore = true) {
    const { _sources: sources, _sourcesContent: sourcesContent, _ignoreList: ignoreList } = cast(map);
    const index = put(sources, source);
    if (index === sourcesContent.length)
        sourcesContent[index] = null;
    if (ignore)
        put(ignoreList, index);
    else
        remove(ignoreList, index);
}
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
function toDecodedMap(map) {
    const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, _ignoreList: ignoreList, } = cast(map);
    removeEmptyFinalLines(mappings);
    return {
        version: 3,
        file: map.file || undefined,
        names: names.array,
        sourceRoot: map.sourceRoot || undefined,
        sources: sources.array,
        sourcesContent,
        mappings,
        ignoreList: ignoreList.array,
    };
}
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
function toEncodedMap(map) {
    const decoded = toDecodedMap(map);
    return Object.assign(Object.assign({}, decoded), { mappings: encode(decoded.mappings) });
}
/**
 * Constructs a new GenMapping, using the already present mappings of the input.
 */
function fromMap(input) {
    const map = new TraceMap(input);
    const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
    putAll(cast(gen)._names, map.names);
    putAll(cast(gen)._sources, map.sources);
    cast(gen)._sourcesContent = map.sourcesContent || map.sources.map(() => null);
    cast(gen)._mappings = decodedMappings(map);
    if (map.ignoreList)
        putAll(cast(gen)._ignoreList, map.ignoreList);
    return gen;
}
/**
 * Returns an array of high-level mapping objects for every recorded segment, which could then be
 * passed to the `source-map` library.
 */
function allMappings(map) {
    const out = [];
    const { _mappings: mappings, _sources: sources, _names: names } = cast(map);
    for (let i = 0; i < mappings.length; i++) {
        const line = mappings[i];
        for (let j = 0; j < line.length; j++) {
            const seg = line[j];
            const generated = { line: i + 1, column: seg[COLUMN] };
            let source = undefined;
            let original = undefined;
            let name = undefined;
            if (seg.length !== 1) {
                source = sources.array[seg[SOURCES_INDEX]];
                original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
                if (seg.length === 5)
                    name = names.array[seg[NAMES_INDEX]];
            }
            out.push({ generated, source, original, name });
        }
    }
    return out;
}
// This split declaration is only so that terser can eliminate the static initialization block.
function addSegmentInternal(skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
    const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = cast(map);
    const line = getLine(mappings, genLine);
    const index = getColumnIndex(line, genColumn);
    if (!source) {
        if (skipable && skipSourceless(line, index))
            return;
        return insert(line, index, [genColumn]);
    }
    const sourcesIndex = put(sources, source);
    const namesIndex = name ? put(names, name) : NO_NAME;
    if (sourcesIndex === sourcesContent.length)
        sourcesContent[sourcesIndex] = content !== null && content !== void 0 ? content : null;
    if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
        return;
    }
    return insert(line, index, name
        ? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
        : [genColumn, sourcesIndex, sourceLine, sourceColumn]);
}
function getLine(mappings, index) {
    for (let i = mappings.length; i <= index; i++) {
        mappings[i] = [];
    }
    return mappings[index];
}
function getColumnIndex(line, genColumn) {
    let index = line.length;
    for (let i = index - 1; i >= 0; index = i--) {
        const current = line[i];
        if (genColumn >= current[COLUMN])
            break;
    }
    return index;
}
function insert(array, index, value) {
    for (let i = array.length; i > index; i--) {
        array[i] = array[i - 1];
    }
    array[index] = value;
}
function removeEmptyFinalLines(mappings) {
    const { length } = mappings;
    let len = length;
    for (let i = len - 1; i >= 0; len = i, i--) {
        if (mappings[i].length > 0)
            break;
    }
    if (len < length)
        mappings.length = len;
}
function putAll(setarr, array) {
    for (let i = 0; i < array.length; i++)
        put(setarr, array[i]);
}
function skipSourceless(line, index) {
    // The start of a line is already sourceless, so adding a sourceless segment to the beginning
    // doesn't generate any useful information.
    if (index === 0)
        return true;
    const prev = line[index - 1];
    // If the previous segment is also sourceless, then adding another sourceless segment doesn't
    // generate any new information. Else, this segment will end the source/named segment and point to
    // a sourceless position, which is useful.
    return prev.length === 1;
}
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
    // A source/named segment at the start of a line gives position at that genColumn.
    if (index === 0)
        return false;
    const prev = line[index - 1];
    // If the previous segment is sourceless, then we're transitioning to a source.
    if (prev.length === 1)
        return false;
    // If the previous segment maps to the exact same source position, then this segment doesn't
    // provide any new position information.
    return (sourcesIndex === prev[SOURCES_INDEX] &&
        sourceLine === prev[SOURCE_LINE] &&
        sourceColumn === prev[SOURCE_COLUMN] &&
        namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME));
}
function addMappingInternal(skipable, map, mapping) {
    const { generated, source, original, name, content } = mapping;
    if (!source) {
        return addSegmentInternal(skipable, map, generated.line - 1, generated.column, null, null, null, null, null);
    }
    return addSegmentInternal(skipable, map, generated.line - 1, generated.column, source, original.line - 1, original.column, name, content);
}

export { GenMapping, addMapping, addSegment, allMappings, fromMap, maybeAddMapping, maybeAddSegment, setIgnore, setSourceContent, toDecodedMap, toEncodedMap };
//# sourceMappingURL=gen-mapping.mjs.map
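A usage sketch for `fromMap` and `setIgnore` from the module above (the input map is illustrative):

```js
import { fromMap, setIgnore, toDecodedMap } from '@jridgewell/gen-mapping';

// Rehydrate a GenMapping from an existing encoded map, then mark its source ignored.
const gen = fromMap({ version: 3, sources: ['input.js'], names: [], mappings: 'AAAA' });
setIgnore(gen, 'input.js');
console.log(toDecodedMap(gen).ignoreList); // [0]
```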
1
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
246
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js
generated
vendored
Normal file
@ -0,0 +1,246 @@
(function (global, factory) {
    typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@jridgewell/set-array'), require('@jridgewell/sourcemap-codec'), require('@jridgewell/trace-mapping')) :
    typeof define === 'function' && define.amd ? define(['exports', '@jridgewell/set-array', '@jridgewell/sourcemap-codec', '@jridgewell/trace-mapping'], factory) :
    (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.genMapping = {}, global.setArray, global.sourcemapCodec, global.traceMapping));
})(this, (function (exports, setArray, sourcemapCodec, traceMapping) { 'use strict';

    const COLUMN = 0;
    const SOURCES_INDEX = 1;
    const SOURCE_LINE = 2;
    const SOURCE_COLUMN = 3;
    const NAMES_INDEX = 4;

    const NO_NAME = -1;
    /**
     * Provides the state to generate a sourcemap.
     */
    class GenMapping {
        constructor({ file, sourceRoot } = {}) {
            this._names = new setArray.SetArray();
            this._sources = new setArray.SetArray();
            this._sourcesContent = [];
            this._mappings = [];
            this.file = file;
            this.sourceRoot = sourceRoot;
            this._ignoreList = new setArray.SetArray();
        }
    }
    /**
     * TypeScript doesn't allow friend access to private fields, so this just casts the map into a type
     * with public access modifiers.
     */
    function cast(map) {
        return map;
    }
    function addSegment(map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
        return addSegmentInternal(false, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
    }
    function addMapping(map, mapping) {
        return addMappingInternal(false, map, mapping);
    }
    /**
     * Same as `addSegment`, but will only add the segment if it generates useful information in the
     * resulting map. This only works correctly if segments are added **in order**, meaning you should
     * not add a segment with a lower generated line/column than one that came before.
     */
    const maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
        return addSegmentInternal(true, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
    };
    /**
     * Same as `addMapping`, but will only add the mapping if it generates useful information in the
     * resulting map. This only works correctly if mappings are added **in order**, meaning you should
     * not add a mapping with a lower generated line/column than one that came before.
     */
    const maybeAddMapping = (map, mapping) => {
        return addMappingInternal(true, map, mapping);
    };
    /**
     * Adds/removes the content of the source file to the source map.
     */
    function setSourceContent(map, source, content) {
        const { _sources: sources, _sourcesContent: sourcesContent } = cast(map);
        const index = setArray.put(sources, source);
        sourcesContent[index] = content;
    }
    function setIgnore(map, source, ignore = true) {
        const { _sources: sources, _sourcesContent: sourcesContent, _ignoreList: ignoreList } = cast(map);
        const index = setArray.put(sources, source);
        if (index === sourcesContent.length)
            sourcesContent[index] = null;
        if (ignore)
            setArray.put(ignoreList, index);
        else
            setArray.remove(ignoreList, index);
    }
    /**
     * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
     * a sourcemap, or to JSON.stringify.
     */
    function toDecodedMap(map) {
        const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, _ignoreList: ignoreList, } = cast(map);
        removeEmptyFinalLines(mappings);
        return {
            version: 3,
            file: map.file || undefined,
            names: names.array,
            sourceRoot: map.sourceRoot || undefined,
            sources: sources.array,
            sourcesContent,
            mappings,
            ignoreList: ignoreList.array,
        };
    }
    /**
     * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
     * a sourcemap, or to JSON.stringify.
     */
    function toEncodedMap(map) {
        const decoded = toDecodedMap(map);
        return Object.assign(Object.assign({}, decoded), { mappings: sourcemapCodec.encode(decoded.mappings) });
    }
    /**
     * Constructs a new GenMapping, using the already present mappings of the input.
     */
    function fromMap(input) {
        const map = new traceMapping.TraceMap(input);
        const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
        putAll(cast(gen)._names, map.names);
        putAll(cast(gen)._sources, map.sources);
        cast(gen)._sourcesContent = map.sourcesContent || map.sources.map(() => null);
        cast(gen)._mappings = traceMapping.decodedMappings(map);
        if (map.ignoreList)
            putAll(cast(gen)._ignoreList, map.ignoreList);
        return gen;
    }
    /**
     * Returns an array of high-level mapping objects for every recorded segment, which could then be
     * passed to the `source-map` library.
     */
    function allMappings(map) {
        const out = [];
        const { _mappings: mappings, _sources: sources, _names: names } = cast(map);
        for (let i = 0; i < mappings.length; i++) {
            const line = mappings[i];
            for (let j = 0; j < line.length; j++) {
                const seg = line[j];
                const generated = { line: i + 1, column: seg[COLUMN] };
                let source = undefined;
                let original = undefined;
                let name = undefined;
                if (seg.length !== 1) {
                    source = sources.array[seg[SOURCES_INDEX]];
                    original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
                    if (seg.length === 5)
                        name = names.array[seg[NAMES_INDEX]];
                }
                out.push({ generated, source, original, name });
            }
        }
        return out;
    }
    // This split declaration is only so that terser can eliminate the static initialization block.
    function addSegmentInternal(skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
        const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = cast(map);
        const line = getLine(mappings, genLine);
        const index = getColumnIndex(line, genColumn);
        if (!source) {
            if (skipable && skipSourceless(line, index))
                return;
            return insert(line, index, [genColumn]);
        }
        const sourcesIndex = setArray.put(sources, source);
        const namesIndex = name ? setArray.put(names, name) : NO_NAME;
        if (sourcesIndex === sourcesContent.length)
            sourcesContent[sourcesIndex] = content !== null && content !== void 0 ? content : null;
        if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
            return;
        }
        return insert(line, index, name
            ? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
            : [genColumn, sourcesIndex, sourceLine, sourceColumn]);
    }
    function getLine(mappings, index) {
        for (let i = mappings.length; i <= index; i++) {
            mappings[i] = [];
        }
        return mappings[index];
    }
    function getColumnIndex(line, genColumn) {
        let index = line.length;
        for (let i = index - 1; i >= 0; index = i--) {
            const current = line[i];
            if (genColumn >= current[COLUMN])
                break;
        }
        return index;
    }
    function insert(array, index, value) {
        for (let i = array.length; i > index; i--) {
            array[i] = array[i - 1];
        }
        array[index] = value;
    }
    function removeEmptyFinalLines(mappings) {
        const { length } = mappings;
        let len = length;
        for (let i = len - 1; i >= 0; len = i, i--) {
            if (mappings[i].length > 0)
                break;
        }
        if (len < length)
            mappings.length = len;
    }
    function putAll(setarr, array) {
        for (let i = 0; i < array.length; i++)
            setArray.put(setarr, array[i]);
    }
    function skipSourceless(line, index) {
        // The start of a line is already sourceless, so adding a sourceless segment to the beginning
        // doesn't generate any useful information.
        if (index === 0)
            return true;
        const prev = line[index - 1];
        // If the previous segment is also sourceless, then adding another sourceless segment doesn't
        // generate any new information. Else, this segment will end the source/named segment and point to
        // a sourceless position, which is useful.
        return prev.length === 1;
    }
    function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
        // A source/named segment at the start of a line gives position at that genColumn.
        if (index === 0)
            return false;
        const prev = line[index - 1];
        // If the previous segment is sourceless, then we're transitioning to a source.
        if (prev.length === 1)
            return false;
        // If the previous segment maps to the exact same source position, then this segment doesn't
        // provide any new position information.
        return (sourcesIndex === prev[SOURCES_INDEX] &&
            sourceLine === prev[SOURCE_LINE] &&
            sourceColumn === prev[SOURCE_COLUMN] &&
            namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME));
    }
    function addMappingInternal(skipable, map, mapping) {
        const { generated, source, original, name, content } = mapping;
        if (!source) {
            return addSegmentInternal(skipable, map, generated.line - 1, generated.column, null, null, null, null, null);
        }
        return addSegmentInternal(skipable, map, generated.line - 1, generated.column, source, original.line - 1, original.column, name, content);
    }

    exports.GenMapping = GenMapping;
    exports.addMapping = addMapping;
    exports.addSegment = addSegment;
    exports.allMappings = allMappings;
    exports.fromMap = fromMap;
    exports.maybeAddMapping = maybeAddMapping;
    exports.maybeAddSegment = maybeAddSegment;
    exports.setIgnore = setIgnore;
    exports.setSourceContent = setSourceContent;
    exports.toDecodedMap = toDecodedMap;
    exports.toEncodedMap = toEncodedMap;

    Object.defineProperty(exports, '__esModule', { value: true });

}));
//# sourceMappingURL=gen-mapping.umd.js.map
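The wrapper at the top of that file is a standard UMD shim: it picks CommonJS, AMD, or a browser global depending on the environment. A sketch of the three entry styles it supports:

```js
// 1. CommonJS (Node): the factory fills `exports`.
const { GenMapping } = require('@jridgewell/gen-mapping/dist/gen-mapping.umd.js');

// 2. AMD: define(['exports', '@jridgewell/set-array', ...], factory)

// 3. Browser globals: after loading the setArray/sourcemapCodec/traceMapping
//    scripts, the API hangs off window.genMapping (see `global.genMapping = {}`).
```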
1
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
88
node_modules/@jridgewell/gen-mapping/dist/types/gen-mapping.d.ts
generated
vendored
Normal file
@ -0,0 +1,88 @@
import type { SourceMapInput } from '@jridgewell/trace-mapping';
import type { DecodedSourceMap, EncodedSourceMap, Pos, Mapping } from './types';
export type { DecodedSourceMap, EncodedSourceMap, Mapping };
export declare type Options = {
    file?: string | null;
    sourceRoot?: string | null;
};
/**
 * Provides the state to generate a sourcemap.
 */
export declare class GenMapping {
    private _names;
    private _sources;
    private _sourcesContent;
    private _mappings;
    private _ignoreList;
    file: string | null | undefined;
    sourceRoot: string | null | undefined;
    constructor({ file, sourceRoot }?: Options);
}
/**
 * A low-level API to associate a generated position with an original source position. Line and
 * column here are 0-based, unlike `addMapping`.
 */
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source?: null, sourceLine?: null, sourceColumn?: null, name?: null, content?: null): void;
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name?: null, content?: string | null): void;
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name: string, content?: string | null): void;
/**
 * A high-level API to associate a generated position with an original source position. Line is
 * 1-based, but column is 0-based, due to legacy behavior in `source-map` library.
 */
export declare function addMapping(map: GenMapping, mapping: {
    generated: Pos;
    source?: null;
    original?: null;
    name?: null;
    content?: null;
}): void;
export declare function addMapping(map: GenMapping, mapping: {
    generated: Pos;
    source: string;
    original: Pos;
    name?: null;
    content?: string | null;
}): void;
export declare function addMapping(map: GenMapping, mapping: {
    generated: Pos;
    source: string;
    original: Pos;
    name: string;
    content?: string | null;
}): void;
/**
 * Same as `addSegment`, but will only add the segment if it generates useful information in the
 * resulting map. This only works correctly if segments are added **in order**, meaning you should
 * not add a segment with a lower generated line/column than one that came before.
 */
export declare const maybeAddSegment: typeof addSegment;
/**
 * Same as `addMapping`, but will only add the mapping if it generates useful information in the
 * resulting map. This only works correctly if mappings are added **in order**, meaning you should
 * not add a mapping with a lower generated line/column than one that came before.
 */
export declare const maybeAddMapping: typeof addMapping;
/**
 * Adds/removes the content of the source file to the source map.
 */
export declare function setSourceContent(map: GenMapping, source: string, content: string | null): void;
export declare function setIgnore(map: GenMapping, source: string, ignore?: boolean): void;
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export declare function toDecodedMap(map: GenMapping): DecodedSourceMap;
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export declare function toEncodedMap(map: GenMapping): EncodedSourceMap;
/**
 * Constructs a new GenMapping, using the already present mappings of the input.
 */
export declare function fromMap(input: SourceMapInput): GenMapping;
/**
 * Returns an array of high-level mapping objects for every recorded segment, which could then be
 * passed to the `source-map` library.
 */
export declare function allMappings(map: GenMapping): Mapping[];
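The declarations above encode a convention split that is easy to trip over: `addSegment` is 0-based for both lines and columns, while `addMapping` uses 1-based lines (a legacy of `source-map`). A sketch of the same mapping expressed both ways:

```js
import { GenMapping, addMapping, addSegment, toEncodedMap } from '@jridgewell/gen-mapping';

const viaMapping = new GenMapping();
addMapping(viaMapping, { generated: { line: 1, column: 0 }, source: 's.js', original: { line: 1, column: 0 } });

const viaSegment = new GenMapping();
addSegment(viaSegment, 0, 0, 's.js', 0, 0); // same mapping, 0-based

console.log(toEncodedMap(viaMapping).mappings === toEncodedMap(viaSegment).mappings); // true ('AAAA')
```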
12
node_modules/@jridgewell/gen-mapping/dist/types/sourcemap-segment.d.ts
generated
vendored
Normal file
@ -0,0 +1,12 @@
declare type GeneratedColumn = number;
declare type SourcesIndex = number;
declare type SourceLine = number;
declare type SourceColumn = number;
declare type NamesIndex = number;
export declare type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
export declare const COLUMN = 0;
export declare const SOURCES_INDEX = 1;
export declare const SOURCE_LINE = 2;
export declare const SOURCE_COLUMN = 3;
export declare const NAMES_INDEX = 4;
export {};
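For concreteness, the three tuple arities declared above look like this in a decoded `mappings` array (values illustrative):

```js
// [generatedColumn]                                         -- sourceless segment
const sourceless = [9];
// [generatedColumn, sourcesIndex, sourceLine, sourceColumn] -- sourced segment
const sourced = [9, 0, 0, 9];
// ...plus namesIndex                                        -- named segment
const named = [9, 0, 0, 9, 0];
```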
36
node_modules/@jridgewell/gen-mapping/dist/types/types.d.ts
generated
vendored
Normal file
@ -0,0 +1,36 @@
import type { SourceMapSegment } from './sourcemap-segment';
export interface SourceMapV3 {
    file?: string | null;
    names: readonly string[];
    sourceRoot?: string;
    sources: readonly (string | null)[];
    sourcesContent?: readonly (string | null)[];
    version: 3;
    ignoreList?: readonly number[];
}
export interface EncodedSourceMap extends SourceMapV3 {
    mappings: string;
}
export interface DecodedSourceMap extends SourceMapV3 {
    mappings: readonly SourceMapSegment[][];
}
export interface Pos {
    line: number;
    column: number;
}
export declare type Mapping = {
    generated: Pos;
    source: undefined;
    original: undefined;
    name: undefined;
} | {
    generated: Pos;
    source: string;
    original: Pos;
    name: string;
} | {
    generated: Pos;
    source: string;
    original: Pos;
    name: undefined;
};
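The `Mapping` union above is exactly the shape `allMappings` yields. A small sketch:

```js
import { GenMapping, addSegment, allMappings } from '@jridgewell/gen-mapping';

const map = new GenMapping();
addSegment(map, 0, 10, 'src.js', 2, 4, 'foo'); // 0-based inputs
console.log(allMappings(map));
// [ { generated: { line: 1, column: 10 },   // lines come back 1-based
//     source: 'src.js',
//     original: { line: 3, column: 4 },
//     name: 'foo' } ]
```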
80
node_modules/@jridgewell/gen-mapping/package.json
generated
vendored
Normal file
@ -0,0 +1,80 @@
{
  "name": "@jridgewell/gen-mapping",
  "version": "0.3.8",
  "description": "Generate source maps",
  "keywords": [
    "source",
    "map"
  ],
  "author": "Justin Ridgewell <justin@ridgewell.name>",
  "license": "MIT",
  "repository": "https://github.com/jridgewell/gen-mapping",
  "main": "dist/gen-mapping.umd.js",
  "module": "dist/gen-mapping.mjs",
  "types": "dist/types/gen-mapping.d.ts",
  "exports": {
    ".": [
      {
        "types": "./dist/types/gen-mapping.d.ts",
        "browser": "./dist/gen-mapping.umd.js",
        "require": "./dist/gen-mapping.umd.js",
        "import": "./dist/gen-mapping.mjs"
      },
      "./dist/gen-mapping.umd.js"
    ],
    "./package.json": "./package.json"
  },
  "files": [
    "dist"
  ],
  "engines": {
    "node": ">=6.0.0"
  },
  "scripts": {
    "benchmark": "run-s build:rollup benchmark:*",
    "benchmark:install": "cd benchmark && npm install",
    "benchmark:only": "node benchmark/index.mjs",
    "prebuild": "rm -rf dist",
    "build": "run-s -n build:*",
    "build:rollup": "rollup -c rollup.config.js",
    "build:ts": "tsc --project tsconfig.build.json",
    "lint": "run-s -n lint:*",
    "lint:prettier": "npm run test:lint:prettier -- --write",
    "lint:ts": "npm run test:lint:ts -- --fix",
    "test": "run-s -n test:lint test:only",
    "test:debug": "mocha --inspect-brk",
    "test:lint": "run-s -n test:lint:*",
    "test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
    "test:lint:ts": "eslint '{src,test}/**/*.ts'",
    "test:only": "c8 mocha",
    "test:watch": "mocha --watch",
    "prepublishOnly": "npm run preversion",
    "preversion": "run-s test build"
  },
  "devDependencies": {
    "@rollup/plugin-typescript": "8.3.2",
    "@types/mocha": "9.1.1",
    "@types/node": "17.0.29",
    "@typescript-eslint/eslint-plugin": "5.21.0",
    "@typescript-eslint/parser": "5.21.0",
    "benchmark": "2.1.4",
    "c8": "7.11.2",
    "eslint": "8.14.0",
    "eslint-config-prettier": "8.5.0",
    "mocha": "9.2.2",
    "npm-run-all": "4.1.5",
    "prettier": "2.6.2",
    "rollup": "2.70.2",
    "tsx": "4.7.1",
    "typescript": "4.6.3"
  },
  "dependencies": {
    "@jridgewell/set-array": "^1.2.1",
    "@jridgewell/sourcemap-codec": "^1.4.10",
    "@jridgewell/trace-mapping": "^0.3.24"
  }

,"_resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz"
,"_integrity": "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA=="
,"_from": "@jridgewell/gen-mapping@^0.3.5"
}
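The `"exports"` map above is a conditional-exports block: the same specifier resolves to the UMD build under `require`, the ESM build under `import`, and `dist/types` for TypeScript, with the bare string entry as a fallback for resolvers that ignore conditions. A sketch of the two resolution paths (assuming a Node version with conditional-exports support):

```js
// CommonJS resolves the "require" condition -> dist/gen-mapping.umd.js:
const cjs = require('@jridgewell/gen-mapping');
// ESM resolves the "import" condition -> dist/gen-mapping.mjs:
//   import * as esm from '@jridgewell/gen-mapping';
console.log(typeof cjs.GenMapping); // 'function'
```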
19
node_modules/@jridgewell/resolve-uri/LICENSE
generated
vendored
Normal file
@ -0,0 +1,19 @@
Copyright 2019 Justin Ridgewell <jridgewell@google.com>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
40
node_modules/@jridgewell/resolve-uri/README.md
generated
vendored
Normal file
@ -0,0 +1,40 @@
# @jridgewell/resolve-uri

> Resolve a URI relative to an optional base URI

Resolve any combination of absolute URIs, protocol-relative URIs, absolute paths, or relative paths.

## Installation

```sh
npm install @jridgewell/resolve-uri
```

## Usage

```typescript
function resolve(input: string, base?: string): string;
```

```js
import resolve from '@jridgewell/resolve-uri';

resolve('foo', 'https://example.com'); // => 'https://example.com/foo'
```

| Input                 | Base                    | Resolution                     | Explanation                                                  |
|-----------------------|-------------------------|--------------------------------|--------------------------------------------------------------|
| `https://example.com` | _any_                   | `https://example.com/`         | Input is normalized only                                     |
| `//example.com`       | `https://base.com/`     | `https://example.com/`         | Input inherits the base's protocol                           |
| `//example.com`       | _rest_                  | `//example.com/`               | Input is normalized only                                     |
| `/example`            | `https://base.com/`     | `https://base.com/example`     | Input inherits the base's origin                             |
| `/example`            | `//base.com/`           | `//base.com/example`           | Input inherits the base's host and remains protocol relative |
| `/example`            | _rest_                  | `/example`                     | Input is normalized only                                     |
| `example`             | `https://base.com/dir/` | `https://base.com/dir/example` | Input is joined with the base                                |
| `example`             | `https://base.com/file` | `https://base.com/example`     | Input is joined with the base without its file               |
| `example`             | `//base.com/dir/`       | `//base.com/dir/example`       | Input is joined with the base's last directory               |
| `example`             | `//base.com/file`       | `//base.com/example`           | Input is joined with the base without its file               |
| `example`             | `/base/dir/`            | `/base/dir/example`            | Input is joined with the base's last directory               |
| `example`             | `/base/file`            | `/base/example`                | Input is joined with the base without its file               |
| `example`             | `base/dir/`             | `base/dir/example`             | Input is joined with the base's last directory               |
| `example`             | `base/file`             | `base/example`                 | Input is joined with the base without its file               |
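A few rows of the table above, exercised directly (expected outputs are taken from the table):

```js
import resolve from '@jridgewell/resolve-uri';

resolve('//example.com', 'https://base.com/'); // 'https://example.com/'
resolve('/example', '//base.com/');            // '//base.com/example'
resolve('example', 'https://base.com/dir/');   // 'https://base.com/dir/example'
resolve('example', 'base/file');               // 'base/example'
```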
232
node_modules/@jridgewell/resolve-uri/dist/resolve-uri.mjs
generated
vendored
Normal file
@ -0,0 +1,232 @@
// Matches the scheme of a URL, eg "http://"
const schemeRegex = /^[\w+.-]+:\/\//;
/**
 * Matches the parts of a URL:
 * 1. Scheme, including ":", guaranteed.
 * 2. User/password, including "@", optional.
 * 3. Host, guaranteed.
 * 4. Port, including ":", optional.
 * 5. Path, including "/", optional.
 * 6. Query, including "?", optional.
 * 7. Hash, including "#", optional.
 */
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
/**
 * File URLs are weird. They don't need the regular `//` in the scheme, they may or may not start
 * with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
 *
 * 1. Host, optional.
 * 2. Path, which may include "/", guaranteed.
 * 3. Query, including "?", optional.
 * 4. Hash, including "#", optional.
 */
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
function isAbsoluteUrl(input) {
    return schemeRegex.test(input);
}
function isSchemeRelativeUrl(input) {
    return input.startsWith('//');
}
function isAbsolutePath(input) {
    return input.startsWith('/');
}
function isFileUrl(input) {
    return input.startsWith('file:');
}
function isRelative(input) {
    return /^[.?#]/.test(input);
}
function parseAbsoluteUrl(input) {
    const match = urlRegex.exec(input);
    return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');
}
function parseFileUrl(input) {
    const match = fileRegex.exec(input);
    const path = match[2];
    return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
}
function makeUrl(scheme, user, host, port, path, query, hash) {
    return {
        scheme,
        user,
        host,
        port,
        path,
        query,
        hash,
        type: 7 /* Absolute */,
    };
}
function parseUrl(input) {
    if (isSchemeRelativeUrl(input)) {
        const url = parseAbsoluteUrl('http:' + input);
        url.scheme = '';
        url.type = 6 /* SchemeRelative */;
        return url;
    }
    if (isAbsolutePath(input)) {
        const url = parseAbsoluteUrl('http://foo.com' + input);
        url.scheme = '';
        url.host = '';
        url.type = 5 /* AbsolutePath */;
        return url;
    }
    if (isFileUrl(input))
        return parseFileUrl(input);
    if (isAbsoluteUrl(input))
        return parseAbsoluteUrl(input);
    const url = parseAbsoluteUrl('http://foo.com/' + input);
    url.scheme = '';
    url.host = '';
    url.type = input
        ? input.startsWith('?')
            ? 3 /* Query */
            : input.startsWith('#')
                ? 2 /* Hash */
                : 4 /* RelativePath */
        : 1 /* Empty */;
    return url;
}
function stripPathFilename(path) {
    // If a path ends with a parent directory "..", then it's a relative path with excess parent
    // paths. It's not a file, so we can't strip it.
    if (path.endsWith('/..'))
        return path;
    const index = path.lastIndexOf('/');
    return path.slice(0, index + 1);
}
function mergePaths(url, base) {
    normalizePath(base, base.type);
    // If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
    // path).
    if (url.path === '/') {
        url.path = base.path;
    }
    else {
        // Resolution happens relative to the base path's directory, not the file.
        url.path = stripPathFilename(base.path) + url.path;
    }
}
/**
 * The path can have empty directories "//", unneeded parents "foo/..", or current directory
 * "foo/.". We need to normalize to a standard representation.
 */
function normalizePath(url, type) {
    const rel = type <= 4 /* RelativePath */;
    const pieces = url.path.split('/');
    // We need to preserve the first piece always, so that we output a leading slash. The item at
    // pieces[0] is an empty string.
    let pointer = 1;
    // Positive is the number of real directories we've output, used for popping a parent directory.
    // Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
    let positive = 0;
    // We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
    // generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
    // real directory, we won't need to append, unless the other conditions happen again.
    let addTrailingSlash = false;
    for (let i = 1; i < pieces.length; i++) {
        const piece = pieces[i];
        // An empty directory, could be a trailing slash, or just a double "//" in the path.
        if (!piece) {
            addTrailingSlash = true;
            continue;
        }
        // If we encounter a real directory, then we don't need to append anymore.
        addTrailingSlash = false;
        // A current directory, which we can always drop.
        if (piece === '.')
            continue;
        // A parent directory, we need to see if there are any real directories we can pop. Else, we
        // have an excess of parents, and we'll need to keep the "..".
        if (piece === '..') {
            if (positive) {
                addTrailingSlash = true;
                positive--;
                pointer--;
            }
            else if (rel) {
                // If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
                // URL, protocol relative URL, or an absolute path, we don't need to keep excess.
                pieces[pointer++] = piece;
            }
            continue;
        }
        // We've encountered a real directory. Move it to the next insertion pointer, which accounts for
        // any popped or dropped directories.
        pieces[pointer++] = piece;
        positive++;
    }
    let path = '';
    for (let i = 1; i < pointer; i++) {
        path += '/' + pieces[i];
    }
    if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
        path += '/';
    }
    url.path = path;
}
/**
 * Attempts to resolve `input` URL/path relative to `base`.
 */
function resolve(input, base) {
    if (!input && !base)
        return '';
    const url = parseUrl(input);
    let inputType = url.type;
    if (base && inputType !== 7 /* Absolute */) {
        const baseUrl = parseUrl(base);
        const baseType = baseUrl.type;
        switch (inputType) {
            case 1 /* Empty */:
                url.hash = baseUrl.hash;
            // fall through
            case 2 /* Hash */:
                url.query = baseUrl.query;
            // fall through
            case 3 /* Query */:
            case 4 /* RelativePath */:
                mergePaths(url, baseUrl);
            // fall through
            case 5 /* AbsolutePath */:
                // The host, user, and port are joined, you can't copy one without the others.
                url.user = baseUrl.user;
                url.host = baseUrl.host;
                url.port = baseUrl.port;
            // fall through
            case 6 /* SchemeRelative */:
                // The input doesn't have a scheme at least, so we need to copy at least that over.
                url.scheme = baseUrl.scheme;
        }
        if (baseType > inputType)
            inputType = baseType;
    }
    normalizePath(url, inputType);
    const queryHash = url.query + url.hash;
    switch (inputType) {
        // This is impossible, because of the empty checks at the start of the function.
        // case UrlType.Empty:
        case 2 /* Hash */:
        case 3 /* Query */:
            return queryHash;
        case 4 /* RelativePath */: {
            // The first char is always a "/", and we need it to be relative.
            const path = url.path.slice(1);
            if (!path)
                return queryHash || '.';
            if (isRelative(base || input) && !isRelative(path)) {
                // If base started with a leading ".", or there is no base and input started with a ".",
                // then we need to ensure that the relative path starts with a ".". We don't know if
                // relative starts with a "..", though, so check before prepending.
                return './' + path + queryHash;
            }
            return path + queryHash;
        }
        case 5 /* AbsolutePath */:
            return url.path + queryHash;
        default:
            return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
    }
}

export { resolve as default };
//# sourceMappingURL=resolve-uri.mjs.map
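To make `normalizePath`'s rules concrete: `.` segments are dropped, `..` pops a real directory when one exists, and excess parents survive only for relative inputs. Two examples traced against the code above:

```js
import resolve from '@jridgewell/resolve-uri';

resolve('./foo/../bar', 'https://example.com/dir/'); // 'https://example.com/dir/bar'
resolve('../../x', '.');                             // '../../x' (excess parents kept)
```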
1
node_modules/@jridgewell/resolve-uri/dist/resolve-uri.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
240
node_modules/@jridgewell/resolve-uri/dist/resolve-uri.umd.js
generated
vendored
Normal file
@ -0,0 +1,240 @@
(function (global, factory) {
    typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
    typeof define === 'function' && define.amd ? define(factory) :
    (global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.resolveURI = factory());
})(this, (function () { 'use strict';

    // Matches the scheme of a URL, eg "http://"
    const schemeRegex = /^[\w+.-]+:\/\//;
    /**
     * Matches the parts of a URL:
     * 1. Scheme, including ":", guaranteed.
     * 2. User/password, including "@", optional.
     * 3. Host, guaranteed.
     * 4. Port, including ":", optional.
     * 5. Path, including "/", optional.
     * 6. Query, including "?", optional.
     * 7. Hash, including "#", optional.
     */
    const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
    /**
     * File URLs are weird. They don't need the regular `//` in the scheme, they may or may not start
     * with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
     *
     * 1. Host, optional.
     * 2. Path, which may include "/", guaranteed.
     * 3. Query, including "?", optional.
     * 4. Hash, including "#", optional.
     */
    const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
    function isAbsoluteUrl(input) {
        return schemeRegex.test(input);
    }
    function isSchemeRelativeUrl(input) {
        return input.startsWith('//');
    }
    function isAbsolutePath(input) {
        return input.startsWith('/');
    }
    function isFileUrl(input) {
        return input.startsWith('file:');
    }
    function isRelative(input) {
        return /^[.?#]/.test(input);
    }
    function parseAbsoluteUrl(input) {
        const match = urlRegex.exec(input);
        return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');
    }
    function parseFileUrl(input) {
        const match = fileRegex.exec(input);
        const path = match[2];
        return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
    }
    function makeUrl(scheme, user, host, port, path, query, hash) {
        return {
            scheme,
            user,
            host,
            port,
            path,
            query,
            hash,
            type: 7 /* Absolute */,
        };
    }
    function parseUrl(input) {
        if (isSchemeRelativeUrl(input)) {
            const url = parseAbsoluteUrl('http:' + input);
            url.scheme = '';
            url.type = 6 /* SchemeRelative */;
            return url;
        }
        if (isAbsolutePath(input)) {
            const url = parseAbsoluteUrl('http://foo.com' + input);
            url.scheme = '';
            url.host = '';
            url.type = 5 /* AbsolutePath */;
            return url;
        }
        if (isFileUrl(input))
            return parseFileUrl(input);
        if (isAbsoluteUrl(input))
            return parseAbsoluteUrl(input);
        const url = parseAbsoluteUrl('http://foo.com/' + input);
        url.scheme = '';
        url.host = '';
        url.type = input
            ? input.startsWith('?')
                ? 3 /* Query */
                : input.startsWith('#')
                    ? 2 /* Hash */
                    : 4 /* RelativePath */
            : 1 /* Empty */;
        return url;
    }
    function stripPathFilename(path) {
        // If a path ends with a parent directory "..", then it's a relative path with excess parent
        // paths. It's not a file, so we can't strip it.
        if (path.endsWith('/..'))
            return path;
        const index = path.lastIndexOf('/');
        return path.slice(0, index + 1);
    }
    function mergePaths(url, base) {
        normalizePath(base, base.type);
        // If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
        // path).
        if (url.path === '/') {
            url.path = base.path;
        }
        else {
            // Resolution happens relative to the base path's directory, not the file.
            url.path = stripPathFilename(base.path) + url.path;
        }
    }
    /**
     * The path can have empty directories "//", unneeded parents "foo/..", or current directory
     * "foo/.". We need to normalize to a standard representation.
     */
    function normalizePath(url, type) {
        const rel = type <= 4 /* RelativePath */;
        const pieces = url.path.split('/');
        // We need to preserve the first piece always, so that we output a leading slash. The item at
        // pieces[0] is an empty string.
        let pointer = 1;
        // Positive is the number of real directories we've output, used for popping a parent directory.
        // Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
        let positive = 0;
        // We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
        // generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
        // real directory, we won't need to append, unless the other conditions happen again.
        let addTrailingSlash = false;
        for (let i = 1; i < pieces.length; i++) {
            const piece = pieces[i];
            // An empty directory, could be a trailing slash, or just a double "//" in the path.
            if (!piece) {
                addTrailingSlash = true;
                continue;
            }
            // If we encounter a real directory, then we don't need to append anymore.
            addTrailingSlash = false;
            // A current directory, which we can always drop.
            if (piece === '.')
                continue;
            // A parent directory, we need to see if there are any real directories we can pop. Else, we
            // have an excess of parents, and we'll need to keep the "..".
            if (piece === '..') {
                if (positive) {
                    addTrailingSlash = true;
                    positive--;
                    pointer--;
                }
                else if (rel) {
                    // If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
                    // URL, protocol relative URL, or an absolute path, we don't need to keep excess.
                    pieces[pointer++] = piece;
                }
                continue;
            }
            // We've encountered a real directory. Move it to the next insertion pointer, which accounts for
            // any popped or dropped directories.
            pieces[pointer++] = piece;
            positive++;
        }
        let path = '';
        for (let i = 1; i < pointer; i++) {
            path += '/' + pieces[i];
        }
        if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
            path += '/';
        }
        url.path = path;
    }
    /**
     * Attempts to resolve `input` URL/path relative to `base`.
     */
    function resolve(input, base) {
        if (!input && !base)
            return '';
        const url = parseUrl(input);
        let inputType = url.type;
        if (base && inputType !== 7 /* Absolute */) {
            const baseUrl = parseUrl(base);
            const baseType = baseUrl.type;
            switch (inputType) {
                case 1 /* Empty */:
                    url.hash = baseUrl.hash;
                // fall through
                case 2 /* Hash */:
                    url.query = baseUrl.query;
                // fall through
                case 3 /* Query */:
                case 4 /* RelativePath */:
                    mergePaths(url, baseUrl);
                // fall through
                case 5 /* AbsolutePath */:
                    // The host, user, and port are joined, you can't copy one without the others.
                    url.user = baseUrl.user;
                    url.host = baseUrl.host;
                    url.port = baseUrl.port;
                // fall through
                case 6 /* SchemeRelative */:
                    // The input doesn't have a scheme at least, so we need to copy at least that over.
                    url.scheme = baseUrl.scheme;
            }
            if (baseType > inputType)
                inputType = baseType;
        }
        normalizePath(url, inputType);
        const queryHash = url.query + url.hash;
        switch (inputType) {
            // This is impossible, because of the empty checks at the start of the function.
            // case UrlType.Empty:
            case 2 /* Hash */:
            case 3 /* Query */:
                return queryHash;
            case 4 /* RelativePath */: {
                // The first char is always a "/", and we need it to be relative.
                const path = url.path.slice(1);
                if (!path)
                    return queryHash || '.';
                if (isRelative(base || input) && !isRelative(path)) {
                    // If base started with a leading ".", or there is no base and input started with a ".",
                    // then we need to ensure that the relative path starts with a ".". We don't know if
                    // relative starts with a "..", though, so check before prepending.
|
||||
return './' + path + queryHash;
|
||||
}
|
||||
return path + queryHash;
|
||||
}
|
||||
case 5 /* AbsolutePath */:
|
||||
return url.path + queryHash;
|
||||
default:
|
||||
return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
|
||||
}
|
||||
}
|
||||
|
||||
return resolve;
|
||||
|
||||
}));
|
||||
//# sourceMappingURL=resolve-uri.umd.js.map
|
||||
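
Not part of the vendored file: a brief usage sketch of the `resolve` export above, with invented example URLs. The expected outputs follow the `UrlType` branches shown in the code.

```js
import resolve from '@jridgewell/resolve-uri';

// A relative input merges against the *directory* of the base, because
// mergePaths() strips the base's filename before joining.
resolve('bar.js', 'https://example.com/foo/main.js');
// => 'https://example.com/foo/bar.js'

// An absolute-path input keeps only the base's scheme, user, host, and port.
resolve('/root.js', 'https://example.com/foo/main.js');
// => 'https://example.com/root.js'

// With a relative base, normalizePath() preserves excess ".." parents.
resolve('../../bar.js', 'foo/main.js');
// => '../bar.js'
```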
1
node_modules/@jridgewell/resolve-uri/dist/resolve-uri.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
4
node_modules/@jridgewell/resolve-uri/dist/types/resolve-uri.d.ts
generated
vendored
Normal file
@ -0,0 +1,4 @@
/**
 * Attempts to resolve `input` URL/path relative to `base`.
 */
export default function resolve(input: string, base: string | undefined): string;
73
node_modules/@jridgewell/resolve-uri/package.json
generated
vendored
Normal file
@ -0,0 +1,73 @@
{
  "name": "@jridgewell/resolve-uri",
  "version": "3.1.2",
  "description": "Resolve a URI relative to an optional base URI",
  "keywords": [
    "resolve",
    "uri",
    "url",
    "path"
  ],
  "author": "Justin Ridgewell <justin@ridgewell.name>",
  "license": "MIT",
  "repository": "https://github.com/jridgewell/resolve-uri",
  "main": "dist/resolve-uri.umd.js",
  "module": "dist/resolve-uri.mjs",
  "types": "dist/types/resolve-uri.d.ts",
  "exports": {
    ".": [
      {
        "types": "./dist/types/resolve-uri.d.ts",
        "browser": "./dist/resolve-uri.umd.js",
        "require": "./dist/resolve-uri.umd.js",
        "import": "./dist/resolve-uri.mjs"
      },
      "./dist/resolve-uri.umd.js"
    ],
    "./package.json": "./package.json"
  },
  "files": [
    "dist"
  ],
  "engines": {
    "node": ">=6.0.0"
  },
  "scripts": {
    "prebuild": "rm -rf dist",
    "build": "run-s -n build:*",
    "build:rollup": "rollup -c rollup.config.js",
    "build:ts": "tsc --project tsconfig.build.json",
    "lint": "run-s -n lint:*",
    "lint:prettier": "npm run test:lint:prettier -- --write",
    "lint:ts": "npm run test:lint:ts -- --fix",
    "pretest": "run-s build:rollup",
    "test": "run-s -n test:lint test:only",
    "test:debug": "mocha --inspect-brk",
    "test:lint": "run-s -n test:lint:*",
    "test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
    "test:lint:ts": "eslint '{src,test}/**/*.ts'",
    "test:only": "mocha",
    "test:coverage": "c8 mocha",
    "test:watch": "mocha --watch",
    "prepublishOnly": "npm run preversion",
    "preversion": "run-s test build"
  },
  "devDependencies": {
    "@jridgewell/resolve-uri-latest": "npm:@jridgewell/resolve-uri@*",
    "@rollup/plugin-typescript": "8.3.0",
    "@typescript-eslint/eslint-plugin": "5.10.0",
    "@typescript-eslint/parser": "5.10.0",
    "c8": "7.11.0",
    "eslint": "8.7.0",
    "eslint-config-prettier": "8.3.0",
    "mocha": "9.2.0",
    "npm-run-all": "4.1.5",
    "prettier": "2.5.1",
    "rollup": "2.66.0",
    "typescript": "4.5.5"
  }

,"_resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz"
,"_integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw=="
,"_from": "@jridgewell/resolve-uri@^3.1.0"
}
19
node_modules/@jridgewell/set-array/LICENSE
generated
vendored
Normal file
@ -0,0 +1,19 @@
Copyright 2022 Justin Ridgewell <jridgewell@google.com>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
37
node_modules/@jridgewell/set-array/README.md
generated
vendored
Normal file
@ -0,0 +1,37 @@
# @jridgewell/set-array

> Like a Set, but provides the index of the `key` in the backing array

This is designed to allow synchronizing a second array with the contents of the backing array, like
how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`, and there
are never duplicates.

## Installation

```sh
npm install @jridgewell/set-array
```

## Usage

```js
import { SetArray, get, put, pop } from '@jridgewell/set-array';

const sa = new SetArray();

let index = put(sa, 'first');
assert.strictEqual(index, 0);

index = put(sa, 'second');
assert.strictEqual(index, 1);

assert.deepEqual(sa.array, [ 'first', 'second' ]);

index = get(sa, 'first');
assert.strictEqual(index, 0);

pop(sa);
index = get(sa, 'second');
assert.strictEqual(index, undefined);
assert.deepEqual(sa.array, [ 'first' ]);
```
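
The package also exports a `remove` helper, shown in the bundled dist sources below but not covered by the README above; a sketch of its behavior based on that implementation:

```js
import { SetArray, get, put, remove } from '@jridgewell/set-array';

const sa = new SetArray();
put(sa, 'first');
put(sa, 'second');
put(sa, 'third');

// remove() shifts every later key left and decrements its cached index,
// so lookups stay consistent with the backing array.
remove(sa, 'second');
assert.deepEqual(sa.array, [ 'first', 'third' ]);
assert.strictEqual(get(sa, 'third'), 1);
assert.strictEqual(get(sa, 'second'), undefined);
```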
69
node_modules/@jridgewell/set-array/dist/set-array.mjs
generated
vendored
Normal file
@ -0,0 +1,69 @@
/**
 * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
 * index of the `key` in the backing array.
 *
 * This is designed to allow synchronizing a second array with the contents of the backing array,
 * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
 * and there are never duplicates.
 */
class SetArray {
    constructor() {
        this._indexes = { __proto__: null };
        this.array = [];
    }
}
/**
 * Typescript doesn't allow friend access to private fields, so this just casts the set into a type
 * with public access modifiers.
 */
function cast(set) {
    return set;
}
/**
 * Gets the index associated with `key` in the backing array, if it is already present.
 */
function get(setarr, key) {
    return cast(setarr)._indexes[key];
}
/**
 * Puts `key` into the backing array, if it is not already present. Returns
 * the index of the `key` in the backing array.
 */
function put(setarr, key) {
    // The key may or may not be present. If it is present, it's a number.
    const index = get(setarr, key);
    if (index !== undefined)
        return index;
    const { array, _indexes: indexes } = cast(setarr);
    const length = array.push(key);
    return (indexes[key] = length - 1);
}
/**
 * Pops the last added item out of the SetArray.
 */
function pop(setarr) {
    const { array, _indexes: indexes } = cast(setarr);
    if (array.length === 0)
        return;
    const last = array.pop();
    indexes[last] = undefined;
}
/**
 * Removes the key, if it exists in the set.
 */
function remove(setarr, key) {
    const index = get(setarr, key);
    if (index === undefined)
        return;
    const { array, _indexes: indexes } = cast(setarr);
    for (let i = index + 1; i < array.length; i++) {
        const k = array[i];
        array[i - 1] = k;
        indexes[k]--;
    }
    indexes[key] = undefined;
    array.pop();
}

export { SetArray, get, pop, put, remove };
//# sourceMappingURL=set-array.mjs.map
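
Not part of the package: a short sketch of the synchronization pattern the JSDoc above describes, keeping a hypothetical `sourcesContent` array aligned with the backing array via `put`:

```js
import { SetArray, put } from '@jridgewell/set-array';

const sources = new SetArray();
const sourcesContent = [];

// addSource is a hypothetical helper for illustration only.
function addSource(filename, content) {
    const index = put(sources, filename);
    // put() returns the existing index for a duplicate key, so the
    // parallel array only grows when the source is genuinely new.
    if (index === sourcesContent.length) sourcesContent.push(content);
    return index;
}

addSource('a.js', 'let a;'); // => 0
addSource('b.js', 'let b;'); // => 1
addSource('a.js', 'let a;'); // => 0 again; no duplicate slot
```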
1
node_modules/@jridgewell/set-array/dist/set-array.mjs.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"set-array.mjs","sources":["../src/set-array.ts"],"sourcesContent":["type Key = string | number | symbol;\n\n/**\n * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the\n * index of the `key` in the backing array.\n *\n * This is designed to allow synchronizing a second array with the contents of the backing array,\n * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,\n * and there are never duplicates.\n */\nexport class SetArray<T extends Key = Key> {\n private declare _indexes: Record<T, number | undefined>;\n declare array: readonly T[];\n\n constructor() {\n this._indexes = { __proto__: null } as any;\n this.array = [];\n }\n}\n\ninterface PublicSet<T extends Key> {\n array: T[];\n _indexes: SetArray<T>['_indexes'];\n}\n\n/**\n * Typescript doesn't allow friend access to private fields, so this just casts the set into a type\n * with public access modifiers.\n */\nfunction cast<T extends Key>(set: SetArray<T>): PublicSet<T> {\n return set as any;\n}\n\n/**\n * Gets the index associated with `key` in the backing array, if it is already present.\n */\nexport function get<T extends Key>(setarr: SetArray<T>, key: T): number | undefined {\n return cast(setarr)._indexes[key];\n}\n\n/**\n * Puts `key` into the backing array, if it is not already present. Returns\n * the index of the `key` in the backing array.\n */\nexport function put<T extends Key>(setarr: SetArray<T>, key: T): number {\n // The key may or may not be present. If it is present, it's a number.\n const index = get(setarr, key);\n if (index !== undefined) return index;\n\n const { array, _indexes: indexes } = cast(setarr);\n\n const length = array.push(key);\n return (indexes[key] = length - 1);\n}\n\n/**\n * Pops the last added item out of the SetArray.\n */\nexport function pop<T extends Key>(setarr: SetArray<T>): void {\n const { array, _indexes: indexes } = cast(setarr);\n if (array.length === 0) return;\n\n const last = array.pop()!;\n indexes[last] = undefined;\n}\n\n/**\n * Removes the key, if it exists in the set.\n */\nexport function remove<T extends Key>(setarr: SetArray<T>, key: T): void {\n const index = get(setarr, key);\n if (index === undefined) return;\n\n const { array, _indexes: indexes } = cast(setarr);\n for (let i = index + 1; i < array.length; i++) {\n const k = array[i];\n array[i - 1] = k;\n indexes[k]!--;\n }\n indexes[key] = undefined;\n 
array.pop();\n}\n"],"names":[],"mappings":"AAEA;;;;;;;;MAQa,QAAQ;IAInB;QACE,IAAI,CAAC,QAAQ,GAAG,EAAE,SAAS,EAAE,IAAI,EAAS,CAAC;QAC3C,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;KACjB;CACF;AAOD;;;;AAIA,SAAS,IAAI,CAAgB,GAAgB;IAC3C,OAAO,GAAU,CAAC;AACpB,CAAC;AAED;;;SAGgB,GAAG,CAAgB,MAAmB,EAAE,GAAM;IAC5D,OAAO,IAAI,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;AACpC,CAAC;AAED;;;;SAIgB,GAAG,CAAgB,MAAmB,EAAE,GAAM;;IAE5D,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC/B,IAAI,KAAK,KAAK,SAAS;QAAE,OAAO,KAAK,CAAC;IAEtC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;IAElD,MAAM,MAAM,GAAG,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC/B,QAAQ,OAAO,CAAC,GAAG,CAAC,GAAG,MAAM,GAAG,CAAC,EAAE;AACrC,CAAC;AAED;;;SAGgB,GAAG,CAAgB,MAAmB;IACpD,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;IAClD,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;QAAE,OAAO;IAE/B,MAAM,IAAI,GAAG,KAAK,CAAC,GAAG,EAAG,CAAC;IAC1B,OAAO,CAAC,IAAI,CAAC,GAAG,SAAS,CAAC;AAC5B,CAAC;AAED;;;SAGgB,MAAM,CAAgB,MAAmB,EAAE,GAAM;IAC/D,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC/B,IAAI,KAAK,KAAK,SAAS;QAAE,OAAO;IAEhC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;IAClD,KAAK,IAAI,CAAC,GAAG,KAAK,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QAC7C,MAAM,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;QACnB,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;QACjB,OAAO,CAAC,CAAC,CAAE,EAAE,CAAC;KACf;IACD,OAAO,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC;IACzB,KAAK,CAAC,GAAG,EAAE,CAAC;AACd;;;;"}
83
node_modules/@jridgewell/set-array/dist/set-array.umd.js
generated
vendored
Normal file
@ -0,0 +1,83 @@
(function (global, factory) {
    typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
    typeof define === 'function' && define.amd ? define(['exports'], factory) :
    (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.setArray = {}));
})(this, (function (exports) { 'use strict';

    /**
     * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
     * index of the `key` in the backing array.
     *
     * This is designed to allow synchronizing a second array with the contents of the backing array,
     * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
     * and there are never duplicates.
     */
    class SetArray {
        constructor() {
            this._indexes = { __proto__: null };
            this.array = [];
        }
    }
    /**
     * Typescript doesn't allow friend access to private fields, so this just casts the set into a type
     * with public access modifiers.
     */
    function cast(set) {
        return set;
    }
    /**
     * Gets the index associated with `key` in the backing array, if it is already present.
     */
    function get(setarr, key) {
        return cast(setarr)._indexes[key];
    }
    /**
     * Puts `key` into the backing array, if it is not already present. Returns
     * the index of the `key` in the backing array.
     */
    function put(setarr, key) {
        // The key may or may not be present. If it is present, it's a number.
        const index = get(setarr, key);
        if (index !== undefined)
            return index;
        const { array, _indexes: indexes } = cast(setarr);
        const length = array.push(key);
        return (indexes[key] = length - 1);
    }
    /**
     * Pops the last added item out of the SetArray.
     */
    function pop(setarr) {
        const { array, _indexes: indexes } = cast(setarr);
        if (array.length === 0)
            return;
        const last = array.pop();
        indexes[last] = undefined;
    }
    /**
     * Removes the key, if it exists in the set.
     */
    function remove(setarr, key) {
        const index = get(setarr, key);
        if (index === undefined)
            return;
        const { array, _indexes: indexes } = cast(setarr);
        for (let i = index + 1; i < array.length; i++) {
            const k = array[i];
            array[i - 1] = k;
            indexes[k]--;
        }
        indexes[key] = undefined;
        array.pop();
    }

    exports.SetArray = SetArray;
    exports.get = get;
    exports.pop = pop;
    exports.put = put;
    exports.remove = remove;

    Object.defineProperty(exports, '__esModule', { value: true });

}));
//# sourceMappingURL=set-array.umd.js.map
1
node_modules/@jridgewell/set-array/dist/set-array.umd.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"set-array.umd.js","sources":["../src/set-array.ts"],"sourcesContent":["type Key = string | number | symbol;\n\n/**\n * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the\n * index of the `key` in the backing array.\n *\n * This is designed to allow synchronizing a second array with the contents of the backing array,\n * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,\n * and there are never duplicates.\n */\nexport class SetArray<T extends Key = Key> {\n private declare _indexes: Record<T, number | undefined>;\n declare array: readonly T[];\n\n constructor() {\n this._indexes = { __proto__: null } as any;\n this.array = [];\n }\n}\n\ninterface PublicSet<T extends Key> {\n array: T[];\n _indexes: SetArray<T>['_indexes'];\n}\n\n/**\n * Typescript doesn't allow friend access to private fields, so this just casts the set into a type\n * with public access modifiers.\n */\nfunction cast<T extends Key>(set: SetArray<T>): PublicSet<T> {\n return set as any;\n}\n\n/**\n * Gets the index associated with `key` in the backing array, if it is already present.\n */\nexport function get<T extends Key>(setarr: SetArray<T>, key: T): number | undefined {\n return cast(setarr)._indexes[key];\n}\n\n/**\n * Puts `key` into the backing array, if it is not already present. Returns\n * the index of the `key` in the backing array.\n */\nexport function put<T extends Key>(setarr: SetArray<T>, key: T): number {\n // The key may or may not be present. If it is present, it's a number.\n const index = get(setarr, key);\n if (index !== undefined) return index;\n\n const { array, _indexes: indexes } = cast(setarr);\n\n const length = array.push(key);\n return (indexes[key] = length - 1);\n}\n\n/**\n * Pops the last added item out of the SetArray.\n */\nexport function pop<T extends Key>(setarr: SetArray<T>): void {\n const { array, _indexes: indexes } = cast(setarr);\n if (array.length === 0) return;\n\n const last = array.pop()!;\n indexes[last] = undefined;\n}\n\n/**\n * Removes the key, if it exists in the set.\n */\nexport function remove<T extends Key>(setarr: SetArray<T>, key: T): void {\n const index = get(setarr, key);\n if (index === undefined) return;\n\n const { array, _indexes: indexes } = cast(setarr);\n for (let i = index + 1; i < array.length; i++) {\n const k = array[i];\n array[i - 1] = k;\n indexes[k]!--;\n }\n indexes[key] = undefined;\n 
array.pop();\n}\n"],"names":[],"mappings":";;;;;;IAEA;;;;;;;;UAQa,QAAQ;QAInB;YACE,IAAI,CAAC,QAAQ,GAAG,EAAE,SAAS,EAAE,IAAI,EAAS,CAAC;YAC3C,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;SACjB;KACF;IAOD;;;;IAIA,SAAS,IAAI,CAAgB,GAAgB;QAC3C,OAAO,GAAU,CAAC;IACpB,CAAC;IAED;;;aAGgB,GAAG,CAAgB,MAAmB,EAAE,GAAM;QAC5D,OAAO,IAAI,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;IACpC,CAAC;IAED;;;;aAIgB,GAAG,CAAgB,MAAmB,EAAE,GAAM;;QAE5D,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;QAC/B,IAAI,KAAK,KAAK,SAAS;YAAE,OAAO,KAAK,CAAC;QAEtC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,MAAM,GAAG,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QAC/B,QAAQ,OAAO,CAAC,GAAG,CAAC,GAAG,MAAM,GAAG,CAAC,EAAE;IACrC,CAAC;IAED;;;aAGgB,GAAG,CAAgB,MAAmB;QACpD,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;QAClD,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;YAAE,OAAO;QAE/B,MAAM,IAAI,GAAG,KAAK,CAAC,GAAG,EAAG,CAAC;QAC1B,OAAO,CAAC,IAAI,CAAC,GAAG,SAAS,CAAC;IAC5B,CAAC;IAED;;;aAGgB,MAAM,CAAgB,MAAmB,EAAE,GAAM;QAC/D,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;QAC/B,IAAI,KAAK,KAAK,SAAS;YAAE,OAAO;QAEhC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;QAClD,KAAK,IAAI,CAAC,GAAG,KAAK,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC7C,MAAM,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;YACnB,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;YACjB,OAAO,CAAC,CAAC,CAAE,EAAE,CAAC;SACf;QACD,OAAO,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC;QACzB,KAAK,CAAC,GAAG,EAAE,CAAC;IACd;;;;;;;;;;;;;;"}
32
node_modules/@jridgewell/set-array/dist/types/set-array.d.ts
generated
vendored
Normal file
@ -0,0 +1,32 @@
declare type Key = string | number | symbol;
/**
 * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
 * index of the `key` in the backing array.
 *
 * This is designed to allow synchronizing a second array with the contents of the backing array,
 * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
 * and there are never duplicates.
 */
export declare class SetArray<T extends Key = Key> {
    private _indexes;
    array: readonly T[];
    constructor();
}
/**
 * Gets the index associated with `key` in the backing array, if it is already present.
 */
export declare function get<T extends Key>(setarr: SetArray<T>, key: T): number | undefined;
/**
 * Puts `key` into the backing array, if it is not already present. Returns
 * the index of the `key` in the backing array.
 */
export declare function put<T extends Key>(setarr: SetArray<T>, key: T): number;
/**
 * Pops the last added item out of the SetArray.
 */
export declare function pop<T extends Key>(setarr: SetArray<T>): void;
/**
 * Removes the key, if it exists in the set.
 */
export declare function remove<T extends Key>(setarr: SetArray<T>, key: T): void;
export {};
69
node_modules/@jridgewell/set-array/package.json
generated
vendored
Normal file
@ -0,0 +1,69 @@
{
  "name": "@jridgewell/set-array",
  "version": "1.2.1",
  "description": "Like a Set, but provides the index of the `key` in the backing array",
  "keywords": [],
  "author": "Justin Ridgewell <justin@ridgewell.name>",
  "license": "MIT",
  "repository": "https://github.com/jridgewell/set-array",
  "main": "dist/set-array.umd.js",
  "module": "dist/set-array.mjs",
  "typings": "dist/types/set-array.d.ts",
  "exports": {
    ".": [
      {
        "types": "./dist/types/set-array.d.ts",
        "browser": "./dist/set-array.umd.js",
        "require": "./dist/set-array.umd.js",
        "import": "./dist/set-array.mjs"
      },
      "./dist/set-array.umd.js"
    ],
    "./package.json": "./package.json"
  },
  "files": [
    "dist"
  ],
  "engines": {
    "node": ">=6.0.0"
  },
  "scripts": {
    "prebuild": "rm -rf dist",
    "build": "run-s -n build:*",
    "build:rollup": "rollup -c rollup.config.js",
    "build:ts": "tsc --project tsconfig.build.json",
    "lint": "run-s -n lint:*",
    "lint:prettier": "npm run test:lint:prettier -- --write",
    "lint:ts": "npm run test:lint:ts -- --fix",
    "test": "run-s -n test:lint test:only",
    "test:debug": "mocha --inspect-brk",
    "test:lint": "run-s -n test:lint:*",
    "test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
    "test:lint:ts": "eslint '{src,test}/**/*.ts'",
    "test:only": "mocha",
    "test:coverage": "c8 mocha",
    "test:watch": "mocha --watch",
    "prepublishOnly": "npm run preversion",
    "preversion": "run-s test build"
  },
  "devDependencies": {
    "@rollup/plugin-typescript": "8.3.0",
    "@types/mocha": "9.1.1",
    "@types/node": "17.0.29",
    "@typescript-eslint/eslint-plugin": "5.10.0",
    "@typescript-eslint/parser": "5.10.0",
    "c8": "7.11.0",
    "eslint": "8.7.0",
    "eslint-config-prettier": "8.3.0",
    "mocha": "9.2.0",
    "npm-run-all": "4.1.5",
    "prettier": "2.5.1",
    "rollup": "2.66.0",
    "tsx": "4.7.1",
    "typescript": "4.5.5"
  }

,"_resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz"
,"_integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A=="
,"_from": "@jridgewell/set-array@^1.2.1"
}
19
node_modules/@jridgewell/source-map/LICENSE
generated
vendored
Normal file
@ -0,0 +1,19 @@
Copyright 2019 Justin Ridgewell <jridgewell@google.com>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
184
node_modules/@jridgewell/source-map/README.md
generated
vendored
Normal file
@ -0,0 +1,184 @@
# @jridgewell/source-map

> Packages `@jridgewell/trace-mapping` and `@jridgewell/gen-mapping` into the familiar source-map API

This isn't the full API, but it's the core functionality. This wraps
[@jridgewell/trace-mapping][trace-mapping] and [@jridgewell/gen-mapping][gen-mapping]
implementations.

## Installation

```sh
npm install @jridgewell/source-map
```

## Usage

TODO

### SourceMapConsumer

```typescript
import { SourceMapConsumer } from '@jridgewell/source-map';
const smc = new SourceMapConsumer({
  version: 3,
  names: ['foo'],
  sources: ['input.js'],
  mappings: 'AAAAA',
});
```

#### SourceMapConsumer.fromSourceMap(mapGenerator[, mapUrl])

Transforms a `SourceMapGenerator` into a `SourceMapConsumer`.

```typescript
const smg = new SourceMapGenerator();

const smc = SourceMapConsumer.fromSourceMap(smg);
smc.originalPositionFor({ line: 1, column: 0 });
```

#### SourceMapConsumer.prototype.originalPositionFor(generatedPosition)

```typescript
const smc = new SourceMapConsumer(map);
smc.originalPositionFor({ line: 1, column: 0 });
```

#### SourceMapConsumer.prototype.mappings

```typescript
const smc = new SourceMapConsumer(map);
smc.mappings; // AAAA
```

#### SourceMapConsumer.prototype.allGeneratedPositionsFor(originalPosition)

```typescript
const smc = new SourceMapConsumer(map);
smc.allGeneratedPositionsFor({ line: 1, column: 5, source: "baz.ts" });
// [
//   { line: 2, column: 8 }
// ]
```

#### SourceMapConsumer.prototype.eachMapping(callback[, context[, order]])

> This implementation currently does not support the "order" parameter.
> This function can only iterate in Generated order.

```typescript
const smc = new SourceMapConsumer(map);
smc.eachMapping((mapping) => {
  // { source: 'baz.ts',
  //   generatedLine: 4,
  //   generatedColumn: 5,
  //   originalLine: 4,
  //   originalColumn: 5,
  //   name: null }
});
```

#### SourceMapConsumer.prototype.generatedPositionFor(originalPosition)

```typescript
const smc = new SourceMapConsumer(map);
smc.generatedPositionFor({ line: 1, column: 5, source: "baz.ts" });
// { line: 2, column: 8 }
```

#### SourceMapConsumer.prototype.hasContentsOfAllSources()

```typescript
const smc = new SourceMapConsumer(map);
smc.hasContentsOfAllSources();
// true
```

#### SourceMapConsumer.prototype.sourceContentFor(source[, returnNullOnMissing])

```typescript
const smc = new SourceMapConsumer(map);
smc.sourceContentFor("baz.ts");
// "export default ..."
```

#### SourceMapConsumer.prototype.version

Returns the source map's version.

### SourceMapGenerator

```typescript
import { SourceMapGenerator } from '@jridgewell/source-map';
const smg = new SourceMapGenerator({
  file: 'output.js',
  sourceRoot: 'https://example.com/',
});
```

#### SourceMapGenerator.fromSourceMap(map)

Transforms a `SourceMapConsumer` into a `SourceMapGenerator`.

```typescript
const smc = new SourceMapConsumer();
const smg = SourceMapGenerator.fromSourceMap(smc);
```

#### SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]])

> This method is not implemented yet.

#### SourceMapGenerator.prototype.addMapping(mapping)

```typescript
const smg = new SourceMapGenerator();
smg.addMapping({
  generated: { line: 1, column: 0 },
  source: 'input.js',
  original: { line: 1, column: 0 },
  name: 'foo',
});
```

#### SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent)

```typescript
const smg = new SourceMapGenerator();
smg.setSourceContent('input.js', 'foobar');
```

#### SourceMapGenerator.prototype.toJSON()

```typescript
const smg = new SourceMapGenerator();
smg.toJSON(); // { version: 3, names: [], sources: [], mappings: '' }
```

#### SourceMapGenerator.prototype.toString()

```typescript
const smg = new SourceMapGenerator();
smg.toString(); // "{version:3,names:[],sources:[],mappings:''}"
```

#### SourceMapGenerator.prototype.toDecodedMap()

```typescript
const smg = new SourceMapGenerator();
smg.toDecodedMap(); // { version: 3, names: [], sources: [], mappings: [] }
```

## Known differences with other implementations

This implementation has some differences with `source-map` and `source-map-js`.

- `SourceMapConsumer.prototype.eachMapping()`
  - Does not support the `order` argument
- `SourceMapGenerator.prototype.applySourceMap()`
  - Not implemented

[trace-mapping]: https://github.com/jridgewell/trace-mapping/
[gen-mapping]: https://github.com/jridgewell/gen-mapping/
95
node_modules/@jridgewell/source-map/dist/source-map.cjs
generated
vendored
Normal file
@ -0,0 +1,95 @@
'use strict';

Object.defineProperty(exports, '__esModule', { value: true });

var traceMapping = require('@jridgewell/trace-mapping');
var genMapping = require('@jridgewell/gen-mapping');

class SourceMapConsumer {
    constructor(map, mapUrl) {
        const trace = (this._map = new traceMapping.AnyMap(map, mapUrl));
        this.file = trace.file;
        this.names = trace.names;
        this.sourceRoot = trace.sourceRoot;
        this.sources = trace.resolvedSources;
        this.sourcesContent = trace.sourcesContent;
        this.version = trace.version;
    }
    static fromSourceMap(map, mapUrl) {
        // This is more performant if we receive
        // a @jridgewell/source-map SourceMapGenerator
        if (map.toDecodedMap) {
            return new SourceMapConsumer(map.toDecodedMap(), mapUrl);
        }
        // This is a fallback for `source-map` and `source-map-js`
        return new SourceMapConsumer(map.toJSON(), mapUrl);
    }
    get mappings() {
        return traceMapping.encodedMappings(this._map);
    }
    originalPositionFor(needle) {
        return traceMapping.originalPositionFor(this._map, needle);
    }
    generatedPositionFor(originalPosition) {
        return traceMapping.generatedPositionFor(this._map, originalPosition);
    }
    allGeneratedPositionsFor(originalPosition) {
        return traceMapping.allGeneratedPositionsFor(this._map, originalPosition);
    }
    hasContentsOfAllSources() {
        if (!this.sourcesContent || this.sourcesContent.length !== this.sources.length) {
            return false;
        }
        for (const content of this.sourcesContent) {
            if (content == null) {
                return false;
            }
        }
        return true;
    }
    sourceContentFor(source, nullOnMissing) {
        const sourceContent = traceMapping.sourceContentFor(this._map, source);
        if (sourceContent != null) {
            return sourceContent;
        }
        if (nullOnMissing) {
            return null;
        }
        throw new Error(`"${source}" is not in the SourceMap.`);
    }
    eachMapping(callback, context /*, order?: number*/) {
        // order is ignored as @jridgewell/trace-map doesn't implement it
        traceMapping.eachMapping(this._map, context ? callback.bind(context) : callback);
    }
    destroy() {
        // noop.
    }
}
class SourceMapGenerator {
    constructor(opts) {
        // TODO :: should this be duck-typed ?
        this._map = opts instanceof genMapping.GenMapping ? opts : new genMapping.GenMapping(opts);
    }
    static fromSourceMap(consumer) {
        return new SourceMapGenerator(genMapping.fromMap(consumer));
    }
    addMapping(mapping) {
        genMapping.maybeAddMapping(this._map, mapping);
    }
    setSourceContent(source, content) {
        genMapping.setSourceContent(this._map, source, content);
    }
    toJSON() {
        return genMapping.toEncodedMap(this._map);
    }
    toString() {
        return JSON.stringify(this.toJSON());
    }
    toDecodedMap() {
        return genMapping.toDecodedMap(this._map);
    }
}

exports.SourceMapConsumer = SourceMapConsumer;
exports.SourceMapGenerator = SourceMapGenerator;
//# sourceMappingURL=source-map.cjs.map
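
A sketch of a consumer/generator round trip through the CommonJS wrapper above. The one-segment map is invented for illustration ('AAAA' maps generated line 1, column 0 back to line 1, column 0 of the 0th source); exact return shapes come from @jridgewell/trace-mapping.

```js
const { SourceMapConsumer, SourceMapGenerator } = require('@jridgewell/source-map');

const smc = new SourceMapConsumer({
    version: 3,
    names: [],
    sources: ['input.js'],
    mappings: 'AAAA',
});
smc.originalPositionFor({ line: 1, column: 0 });
// => roughly { source: 'input.js', line: 1, column: 0, name: null }

// fromSourceMap() takes the cheap toDecodedMap() path for this package's
// own generators, and falls back to toJSON() for `source-map`'s.
const smg = SourceMapGenerator.fromSourceMap(smc);
smg.toJSON().mappings; // => 'AAAA'
```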
1
node_modules/@jridgewell/source-map/dist/source-map.cjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
90
node_modules/@jridgewell/source-map/dist/source-map.mjs
generated
vendored
Normal file
@ -0,0 +1,90 @@
import { AnyMap, encodedMappings, originalPositionFor, generatedPositionFor, allGeneratedPositionsFor, sourceContentFor, eachMapping } from '@jridgewell/trace-mapping';
import { GenMapping, fromMap, maybeAddMapping, setSourceContent, toEncodedMap, toDecodedMap } from '@jridgewell/gen-mapping';

class SourceMapConsumer {
    constructor(map, mapUrl) {
        const trace = (this._map = new AnyMap(map, mapUrl));
        this.file = trace.file;
        this.names = trace.names;
        this.sourceRoot = trace.sourceRoot;
        this.sources = trace.resolvedSources;
        this.sourcesContent = trace.sourcesContent;
        this.version = trace.version;
    }
    static fromSourceMap(map, mapUrl) {
        // This is more performant if we receive
        // a @jridgewell/source-map SourceMapGenerator
        if (map.toDecodedMap) {
            return new SourceMapConsumer(map.toDecodedMap(), mapUrl);
        }
        // This is a fallback for `source-map` and `source-map-js`
        return new SourceMapConsumer(map.toJSON(), mapUrl);
    }
    get mappings() {
        return encodedMappings(this._map);
    }
    originalPositionFor(needle) {
        return originalPositionFor(this._map, needle);
    }
    generatedPositionFor(originalPosition) {
        return generatedPositionFor(this._map, originalPosition);
    }
    allGeneratedPositionsFor(originalPosition) {
        return allGeneratedPositionsFor(this._map, originalPosition);
    }
    hasContentsOfAllSources() {
        if (!this.sourcesContent || this.sourcesContent.length !== this.sources.length) {
            return false;
        }
        for (const content of this.sourcesContent) {
            if (content == null) {
                return false;
            }
        }
        return true;
    }
    sourceContentFor(source, nullOnMissing) {
        const sourceContent = sourceContentFor(this._map, source);
        if (sourceContent != null) {
            return sourceContent;
        }
        if (nullOnMissing) {
            return null;
        }
        throw new Error(`"${source}" is not in the SourceMap.`);
    }
    eachMapping(callback, context /*, order?: number*/) {
        // order is ignored as @jridgewell/trace-map doesn't implement it
        eachMapping(this._map, context ? callback.bind(context) : callback);
    }
    destroy() {
        // noop.
    }
}
class SourceMapGenerator {
    constructor(opts) {
        // TODO :: should this be duck-typed ?
        this._map = opts instanceof GenMapping ? opts : new GenMapping(opts);
    }
    static fromSourceMap(consumer) {
        return new SourceMapGenerator(fromMap(consumer));
    }
    addMapping(mapping) {
        maybeAddMapping(this._map, mapping);
    }
    setSourceContent(source, content) {
        setSourceContent(this._map, source, content);
    }
    toJSON() {
        return toEncodedMap(this._map);
    }
    toString() {
        return JSON.stringify(this.toJSON());
    }
    toDecodedMap() {
        return toDecodedMap(this._map);
    }
}

export { SourceMapConsumer, SourceMapGenerator };
//# sourceMappingURL=source-map.mjs.map
1
node_modules/@jridgewell/source-map/dist/source-map.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1242
node_modules/@jridgewell/source-map/dist/source-map.umd.js
generated
vendored
Normal file
File diff suppressed because it is too large
1
node_modules/@jridgewell/source-map/dist/source-map.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
35
node_modules/@jridgewell/source-map/dist/types/source-map.d.ts
generated
vendored
Normal file
@ -0,0 +1,35 @@
import { AnyMap, originalPositionFor, generatedPositionFor, eachMapping } from '@jridgewell/trace-mapping';
import { GenMapping, maybeAddMapping, toDecodedMap, toEncodedMap, setSourceContent } from '@jridgewell/gen-mapping';
import type { TraceMap, SourceMapInput, SectionedSourceMapInput, DecodedSourceMap } from '@jridgewell/trace-mapping';
export type { TraceMap, SourceMapInput, SectionedSourceMapInput, DecodedSourceMap };
import type { Mapping, EncodedSourceMap } from '@jridgewell/gen-mapping';
export type { Mapping, EncodedSourceMap };
export declare class SourceMapConsumer {
    private _map;
    file: TraceMap['file'];
    names: TraceMap['names'];
    sourceRoot: TraceMap['sourceRoot'];
    sources: TraceMap['sources'];
    sourcesContent: TraceMap['sourcesContent'];
    version: TraceMap['version'];
    constructor(map: ConstructorParameters<typeof AnyMap>[0], mapUrl: Parameters<typeof AnyMap>[1]);
    static fromSourceMap(map: SourceMapGenerator, mapUrl: Parameters<typeof AnyMap>[1]): SourceMapConsumer;
    get mappings(): string;
    originalPositionFor(needle: Parameters<typeof originalPositionFor>[1]): ReturnType<typeof originalPositionFor>;
    generatedPositionFor(originalPosition: Parameters<typeof generatedPositionFor>[1]): ReturnType<typeof generatedPositionFor>;
    allGeneratedPositionsFor(originalPosition: Parameters<typeof generatedPositionFor>[1]): ReturnType<typeof generatedPositionFor>[];
    hasContentsOfAllSources(): boolean;
    sourceContentFor(source: string, nullOnMissing?: boolean): string | null;
    eachMapping(callback: Parameters<typeof eachMapping>[1], context?: any): void;
    destroy(): void;
}
export declare class SourceMapGenerator {
    private _map;
    constructor(opts: ConstructorParameters<typeof GenMapping>[0] | GenMapping);
    static fromSourceMap(consumer: SourceMapConsumer): SourceMapGenerator;
    addMapping(mapping: Parameters<typeof maybeAddMapping>[1]): ReturnType<typeof maybeAddMapping>;
    setSourceContent(source: Parameters<typeof setSourceContent>[1], content: Parameters<typeof setSourceContent>[2]): ReturnType<typeof setSourceContent>;
    toJSON(): ReturnType<typeof toEncodedMap>;
    toString(): string;
    toDecodedMap(): ReturnType<typeof toDecodedMap>;
}
75
node_modules/@jridgewell/source-map/package.json
generated
vendored
Normal file
@ -0,0 +1,75 @@
{
  "name": "@jridgewell/source-map",
  "version": "0.3.6",
  "description": "Packages @jridgewell/trace-mapping and @jridgewell/gen-mapping into the familiar source-map API",
  "keywords": [
    "sourcemap",
    "source",
    "map"
  ],
  "author": "Justin Ridgewell <justin@ridgewell.name>",
  "license": "MIT",
  "repository": "https://github.com/jridgewell/source-map",
  "main": "dist/source-map.cjs",
  "module": "dist/source-map.mjs",
  "types": "dist/types/source-map.d.ts",
  "exports": {
    ".": [
      {
        "types": "./dist/types/source-map.d.ts",
        "browser": "./dist/source-map.umd.js",
        "require": "./dist/source-map.cjs",
        "import": "./dist/source-map.mjs"
      },
      "./dist/source-map.cjs"
    ],
    "./package.json": "./package.json"
  },
  "files": [
    "dist"
  ],
  "scripts": {
    "prebuild": "rm -rf dist",
    "build": "run-s -n build:*",
    "build:rollup": "rollup -c rollup.config.js",
    "build:ts": "tsc --project tsconfig.build.json",
    "lint": "run-s -n lint:*",
    "lint:prettier": "npm run test:lint:prettier -- --write",
    "lint:ts": "npm run test:lint:ts -- --fix",
    "test": "run-s -n test:lint test:only",
    "test:debug": "ts-mocha --inspect-brk",
    "test:lint": "run-s -n test:lint:*",
    "test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
    "test:lint:ts": "eslint '{src,test}/**/*.ts'",
    "test:only": "ts-mocha",
    "test:coverage": "c8 --reporter text --reporter html ts-mocha",
    "test:watch": "ts-mocha --watch",
    "prepublishOnly": "npm run preversion",
    "preversion": "run-s test build"
  },
  "devDependencies": {
    "@rollup/plugin-node-resolve": "13.2.1",
    "@rollup/plugin-typescript": "8.3.0",
    "@types/mocha": "9.1.1",
    "@types/node": "17.0.30",
    "@typescript-eslint/eslint-plugin": "5.10.0",
    "@typescript-eslint/parser": "5.10.0",
    "c8": "7.11.0",
    "eslint": "8.7.0",
    "eslint-config-prettier": "8.3.0",
    "mocha": "10.0.0",
    "npm-run-all": "4.1.5",
    "prettier": "2.5.1",
    "rollup": "2.66.0",
    "ts-mocha": "10.0.0",
    "typescript": "4.5.5"
  },
  "dependencies": {
    "@jridgewell/gen-mapping": "^0.3.5",
    "@jridgewell/trace-mapping": "^0.3.25"
  }

,"_resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.6.tgz"
,"_integrity": "sha512-1ZJTZebgqllO79ue2bm3rIGud/bOe0pP5BjSRCRxxYkEZS8STV7zN84UBbiYu7jy+eCKSnVIUgoWWE/tt+shMQ=="
,"_from": "@jridgewell/source-map@^0.3.3"
}
21
node_modules/@jridgewell/sourcemap-codec/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
The MIT License

Copyright (c) 2015 Rich Harris

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
264
node_modules/@jridgewell/sourcemap-codec/README.md
generated
vendored
Normal file
@ -0,0 +1,264 @@
# @jridgewell/sourcemap-codec

Encode/decode the `mappings` property of a [sourcemap](https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit).

## Why?

Sourcemaps are difficult to generate and manipulate, because the `mappings` property – the part that actually links the generated code back to the original source – is encoded using an obscure method called [Variable-length quantity](https://en.wikipedia.org/wiki/Variable-length_quantity). On top of that, each segment in the mapping contains offsets rather than absolute indices, which means that you can't look at a segment in isolation – you have to understand the whole sourcemap.

This package makes the process slightly easier.
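To make the offsets concrete, here is a small example that is not taken from the upstream README; the decoded values follow the VLQ delta rules described above.

```js
import { decode } from '@jridgewell/sourcemap-codec';

// Each field is a delta against the previous segment; only the generated
// column resets at every line boundary ';'.
decode('AAAA;AACA');
// => [
//   [ [ 0, 0, 0, 0 ] ], // segment: source 0, source line 0, column 0
//   [ [ 0, 0, 1, 0 ] ]  // 'C' encodes +1, so this maps source line 1
// ]
```
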
## Installation

```bash
npm install @jridgewell/sourcemap-codec
```

## Usage

```js
import { encode, decode } from '@jridgewell/sourcemap-codec';

var decoded = decode( ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' );

assert.deepEqual( decoded, [
	// the first line (of the generated code) has no mappings,
	// as shown by the starting semi-colon (which separates lines)
	[],

	// the second line contains four (comma-separated) segments
	[
		// segments are encoded as you'd expect:
		// [ generatedCodeColumn, sourceIndex, sourceCodeLine, sourceCodeColumn, nameIndex ]

		// i.e. the first segment begins at column 2, and maps back to the second column
		// of the second line (both zero-based) of the 0th source, and uses the 0th
		// name in the `map.names` array
		[ 2, 0, 2, 2, 0 ],

		// the remaining segments are 4-length rather than 5-length,
		// because they don't map a name
		[ 4, 0, 2, 4 ],
		[ 6, 0, 2, 5 ],
		[ 7, 0, 2, 7 ]
	],

	// the final line contains two segments
	[
		[ 2, 1, 10, 19 ],
		[ 12, 1, 11, 20 ]
	]
]);

var encoded = encode( decoded );
assert.equal( encoded, ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' );
```
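
The decode/encode pair is what makes manipulation practical. A hypothetical edit, not from the upstream README: shift every generated column right by two, e.g. after prepending two characters to each line of the generated file.

```js
import { encode, decode } from '@jridgewell/sourcemap-codec';

const decoded = decode( ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' );
for (const line of decoded) {
	for (const segment of line) {
		segment[0] += 2; // generatedCodeColumn is the first field
	}
}
const shifted = encode( decoded ); // re-encoded VLQ string
```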

## Benchmarks

```
node v20.10.0

amp.js.map - 45120 segments

Decode Memory Usage:
local code 5815135 bytes
@jridgewell/sourcemap-codec 1.4.15 5868160 bytes
sourcemap-codec 5492584 bytes
source-map-0.6.1 13569984 bytes
source-map-0.8.0 6390584 bytes
chrome dev tools 8011136 bytes
Smallest memory usage is sourcemap-codec

Decode speed:
decode: local code x 492 ops/sec ±1.22% (90 runs sampled)
decode: @jridgewell/sourcemap-codec 1.4.15 x 499 ops/sec ±1.16% (89 runs sampled)
decode: sourcemap-codec x 376 ops/sec ±1.66% (89 runs sampled)
decode: source-map-0.6.1 x 34.99 ops/sec ±0.94% (48 runs sampled)
decode: source-map-0.8.0 x 351 ops/sec ±0.07% (95 runs sampled)
chrome dev tools x 165 ops/sec ±0.91% (86 runs sampled)
Fastest is decode: @jridgewell/sourcemap-codec 1.4.15

Encode Memory Usage:
local code 444248 bytes
@jridgewell/sourcemap-codec 1.4.15 623024 bytes
sourcemap-codec 8696280 bytes
source-map-0.6.1 8745176 bytes
source-map-0.8.0 8736624 bytes
Smallest memory usage is local code

Encode speed:
encode: local code x 796 ops/sec ±0.11% (97 runs sampled)
encode: @jridgewell/sourcemap-codec 1.4.15 x 795 ops/sec ±0.25% (98 runs sampled)
encode: sourcemap-codec x 231 ops/sec ±0.83% (86 runs sampled)
encode: source-map-0.6.1 x 166 ops/sec ±0.57% (86 runs sampled)
encode: source-map-0.8.0 x 203 ops/sec ±0.45% (88 runs sampled)
Fastest is encode: local code,encode: @jridgewell/sourcemap-codec 1.4.15


***


babel.min.js.map - 347793 segments

Decode Memory Usage:
local code 35424960 bytes
@jridgewell/sourcemap-codec 1.4.15 35424696 bytes
sourcemap-codec 36033464 bytes
source-map-0.6.1 62253704 bytes
source-map-0.8.0 43843920 bytes
chrome dev tools 45111400 bytes
Smallest memory usage is @jridgewell/sourcemap-codec 1.4.15

Decode speed:
decode: local code x 38.18 ops/sec ±5.44% (52 runs sampled)
decode: @jridgewell/sourcemap-codec 1.4.15 x 38.36 ops/sec ±5.02% (52 runs sampled)
decode: sourcemap-codec x 34.05 ops/sec ±4.45% (47 runs sampled)
decode: source-map-0.6.1 x 4.31 ops/sec ±2.76% (15 runs sampled)
decode: source-map-0.8.0 x 55.60 ops/sec ±0.13% (73 runs sampled)
chrome dev tools x 16.94 ops/sec ±3.78% (46 runs sampled)
Fastest is decode: source-map-0.8.0

Encode Memory Usage:
local code 2606016 bytes
@jridgewell/sourcemap-codec 1.4.15 2626440 bytes
sourcemap-codec 21152576 bytes
source-map-0.6.1 25023928 bytes
source-map-0.8.0 25256448 bytes
Smallest memory usage is local code

Encode speed:
encode: local code x 127 ops/sec ±0.18% (83 runs sampled)
encode: @jridgewell/sourcemap-codec 1.4.15 x 128 ops/sec ±0.26% (83 runs sampled)
encode: sourcemap-codec x 29.31 ops/sec ±2.55% (53 runs sampled)
encode: source-map-0.6.1 x 18.85 ops/sec ±3.19% (36 runs sampled)
encode: source-map-0.8.0 x 19.34 ops/sec ±1.97% (36 runs sampled)
Fastest is encode: @jridgewell/sourcemap-codec 1.4.15


***


preact.js.map - 1992 segments

Decode Memory Usage:
local code 261696 bytes
@jridgewell/sourcemap-codec 1.4.15 244296 bytes
sourcemap-codec 302816 bytes
source-map-0.6.1 939176 bytes
source-map-0.8.0 336 bytes
chrome dev tools 587368 bytes
Smallest memory usage is source-map-0.8.0

Decode speed:
decode: local code x 17,782 ops/sec ±0.32% (97 runs sampled)
decode: @jridgewell/sourcemap-codec 1.4.15 x 17,863 ops/sec ±0.40% (100 runs sampled)
decode: sourcemap-codec x 12,453 ops/sec ±0.27% (101 runs sampled)
decode: source-map-0.6.1 x 1,288 ops/sec ±1.05% (96 runs sampled)
decode: source-map-0.8.0 x 9,289 ops/sec ±0.27% (101 runs sampled)
chrome dev tools x 4,769 ops/sec ±0.18% (100 runs sampled)
Fastest is decode: @jridgewell/sourcemap-codec 1.4.15

Encode Memory Usage:
local code 262944 bytes
@jridgewell/sourcemap-codec 1.4.15 25544 bytes
sourcemap-codec 323048 bytes
source-map-0.6.1 507808 bytes
source-map-0.8.0 507480 bytes
Smallest memory usage is @jridgewell/sourcemap-codec 1.4.15

Encode speed:
encode: local code x 24,207 ops/sec ±0.79% (95 runs sampled)
encode: @jridgewell/sourcemap-codec 1.4.15 x 24,288 ops/sec ±0.48% (96 runs sampled)
encode: sourcemap-codec x 6,761 ops/sec ±0.21% (100 runs sampled)
encode: source-map-0.6.1 x 5,374 ops/sec ±0.17% (99 runs sampled)
encode: source-map-0.8.0 x 5,633 ops/sec ±0.32% (99 runs sampled)
Fastest is encode: @jridgewell/sourcemap-codec 1.4.15,encode: local code


***


react.js.map - 5726 segments

Decode Memory Usage:
local code 678816 bytes
@jridgewell/sourcemap-codec 1.4.15 678816 bytes
sourcemap-codec 816400 bytes
source-map-0.6.1 2288864 bytes
source-map-0.8.0 721360 bytes
chrome dev tools 1012512 bytes
Smallest memory usage is local code

Decode speed:
decode: local code x 6,178 ops/sec ±0.19% (98 runs sampled)
decode: @jridgewell/sourcemap-codec 1.4.15 x 6,261 ops/sec ±0.22% (100 runs sampled)
decode: sourcemap-codec x 4,472 ops/sec ±0.90% (99 runs sampled)
decode: source-map-0.6.1 x 449 ops/sec ±0.31% (95 runs sampled)
decode: source-map-0.8.0 x 3,219 ops/sec ±0.13% (100 runs sampled)
chrome dev tools x 1,743 ops/sec ±0.20% (99 runs sampled)
Fastest is decode: @jridgewell/sourcemap-codec 1.4.15

Encode Memory Usage:
local code 140960 bytes
@jridgewell/sourcemap-codec 1.4.15 159808 bytes
sourcemap-codec 969304 bytes
source-map-0.6.1 930520 bytes
source-map-0.8.0 930248 bytes
Smallest memory usage is local code

Encode speed:
encode: local code x 8,013 ops/sec ±0.19% (100 runs sampled)
encode: @jridgewell/sourcemap-codec 1.4.15 x 7,989 ops/sec ±0.20% (101 runs sampled)
encode: sourcemap-codec x 2,472 ops/sec ±0.21% (99 runs sampled)
encode: source-map-0.6.1 x 2,200 ops/sec ±0.17% (99 runs sampled)
encode: source-map-0.8.0 x 2,220 ops/sec ±0.37% (99 runs sampled)
Fastest is encode: local code


***


vscode.map - 2141001 segments

Decode Memory Usage:
local code 198955264 bytes
@jridgewell/sourcemap-codec 1.4.15 199175352 bytes
sourcemap-codec 199102688 bytes
source-map-0.6.1 386323432 bytes
source-map-0.8.0 244116432 bytes
chrome dev tools 293734280 bytes
Smallest memory usage is local code

Decode speed:
decode: local code x 3.90 ops/sec ±22.21% (15 runs sampled)
decode: @jridgewell/sourcemap-codec 1.4.15 x 3.95 ops/sec ±23.53% (15 runs sampled)
decode: sourcemap-codec x 3.82 ops/sec ±17.94% (14 runs sampled)
decode: source-map-0.6.1 x 0.61 ops/sec ±7.81% (6 runs sampled)
decode: source-map-0.8.0 x 9.54 ops/sec ±0.28% (28 runs sampled)
chrome dev tools x 2.18 ops/sec ±10.58% (10 runs sampled)
|
||||
Fastest is decode: source-map-0.8.0
|
||||
|
||||
Encode Memory Usage:
|
||||
local code 13509880 bytes
|
||||
@jridgewell/sourcemap-codec 1.4.15 13537648 bytes
|
||||
sourcemap-codec 32540104 bytes
|
||||
source-map-0.6.1 127531040 bytes
|
||||
source-map-0.8.0 127535312 bytes
|
||||
Smallest memory usage is local code
|
||||
|
||||
Encode speed:
|
||||
encode: local code x 20.10 ops/sec ±0.19% (38 runs sampled)
|
||||
encode: @jridgewell/sourcemap-codec 1.4.15 x 20.26 ops/sec ±0.32% (38 runs sampled)
|
||||
encode: sourcemap-codec x 5.44 ops/sec ±1.64% (18 runs sampled)
|
||||
encode: source-map-0.6.1 x 2.30 ops/sec ±4.79% (10 runs sampled)
|
||||
encode: source-map-0.8.0 x 2.46 ops/sec ±6.53% (10 runs sampled)
|
||||
Fastest is encode: @jridgewell/sourcemap-codec 1.4.15
|
||||
```
|
||||
|
||||
# License
|
||||
|
||||
MIT
|
||||
424
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs
generated
vendored
Normal file
@ -0,0 +1,424 @@
const comma = ','.charCodeAt(0);
const semicolon = ';'.charCodeAt(0);
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
const intToChar = new Uint8Array(64); // 64 possible chars.
const charToInt = new Uint8Array(128); // z is 122 in ASCII
for (let i = 0; i < chars.length; i++) {
    const c = chars.charCodeAt(i);
    intToChar[i] = c;
    charToInt[c] = i;
}
function decodeInteger(reader, relative) {
    let value = 0;
    let shift = 0;
    let integer = 0;
    do {
        const c = reader.next();
        integer = charToInt[c];
        value |= (integer & 31) << shift;
        shift += 5;
    } while (integer & 32);
    const shouldNegate = value & 1;
    value >>>= 1;
    if (shouldNegate) {
        value = -0x80000000 | -value;
    }
    return relative + value;
}
function encodeInteger(builder, num, relative) {
    let delta = num - relative;
    delta = delta < 0 ? (-delta << 1) | 1 : delta << 1;
    do {
        let clamped = delta & 0b011111;
        delta >>>= 5;
        if (delta > 0)
            clamped |= 0b100000;
        builder.write(intToChar[clamped]);
    } while (delta > 0);
    return num;
}
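// Worked example: encodeInteger(writer, 16, 0) zig-zags 16 to 32, writes the
// low five bits (00000) with the continuation bit set ('g'), then the remaining
// bit (1) without it ('B'), producing "gB". decodeInteger reverses the process:
// a lone 'D' (index 3) un-zig-zags to -1.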
function hasMoreVlq(reader, max) {
    if (reader.pos >= max)
        return false;
    return reader.peek() !== comma;
}

const bufLength = 1024 * 16;
// Provide a fallback for older environments.
const td = typeof TextDecoder !== 'undefined'
    ? /* #__PURE__ */ new TextDecoder()
    : typeof Buffer !== 'undefined'
        ? {
            decode(buf) {
                const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
                return out.toString();
            },
        }
        : {
            decode(buf) {
                let out = '';
                for (let i = 0; i < buf.length; i++) {
                    out += String.fromCharCode(buf[i]);
                }
                return out;
            },
        };
class StringWriter {
    constructor() {
        this.pos = 0;
        this.out = '';
        this.buffer = new Uint8Array(bufLength);
    }
    write(v) {
        const { buffer } = this;
        buffer[this.pos++] = v;
        if (this.pos === bufLength) {
            this.out += td.decode(buffer);
            this.pos = 0;
        }
    }
    flush() {
        const { buffer, out, pos } = this;
        return pos > 0 ? out + td.decode(buffer.subarray(0, pos)) : out;
    }
}
class StringReader {
    constructor(buffer) {
        this.pos = 0;
        this.buffer = buffer;
    }
    next() {
        return this.buffer.charCodeAt(this.pos++);
    }
    peek() {
        return this.buffer.charCodeAt(this.pos);
    }
    indexOf(char) {
        const { buffer, pos } = this;
        const idx = buffer.indexOf(char, pos);
        return idx === -1 ? buffer.length : idx;
    }
}

const EMPTY = [];
function decodeOriginalScopes(input) {
    const { length } = input;
    const reader = new StringReader(input);
    const scopes = [];
    const stack = [];
    let line = 0;
    for (; reader.pos < length; reader.pos++) {
        line = decodeInteger(reader, line);
        const column = decodeInteger(reader, 0);
        if (!hasMoreVlq(reader, length)) {
            const last = stack.pop();
            last[2] = line;
            last[3] = column;
            continue;
        }
        const kind = decodeInteger(reader, 0);
        const fields = decodeInteger(reader, 0);
        const hasName = fields & 0b0001;
        const scope = (hasName ? [line, column, 0, 0, kind, decodeInteger(reader, 0)] : [line, column, 0, 0, kind]);
        let vars = EMPTY;
        if (hasMoreVlq(reader, length)) {
            vars = [];
            do {
                const varsIndex = decodeInteger(reader, 0);
                vars.push(varsIndex);
            } while (hasMoreVlq(reader, length));
        }
        scope.vars = vars;
        scopes.push(scope);
        stack.push(scope);
    }
    return scopes;
}
function encodeOriginalScopes(scopes) {
    const writer = new StringWriter();
    for (let i = 0; i < scopes.length;) {
        i = _encodeOriginalScopes(scopes, i, writer, [0]);
    }
    return writer.flush();
}
function _encodeOriginalScopes(scopes, index, writer, state) {
    const scope = scopes[index];
    const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, 4: kind, vars } = scope;
    if (index > 0)
        writer.write(comma);
    state[0] = encodeInteger(writer, startLine, state[0]);
    encodeInteger(writer, startColumn, 0);
    encodeInteger(writer, kind, 0);
    const fields = scope.length === 6 ? 0b0001 : 0;
    encodeInteger(writer, fields, 0);
    if (scope.length === 6)
        encodeInteger(writer, scope[5], 0);
    for (const v of vars) {
        encodeInteger(writer, v, 0);
    }
    for (index++; index < scopes.length;) {
        const next = scopes[index];
        const { 0: l, 1: c } = next;
        if (l > endLine || (l === endLine && c >= endColumn)) {
            break;
        }
        index = _encodeOriginalScopes(scopes, index, writer, state);
    }
    writer.write(comma);
    state[0] = encodeInteger(writer, endLine, state[0]);
    encodeInteger(writer, endColumn, 0);
    return index;
}
function decodeGeneratedRanges(input) {
    const { length } = input;
    const reader = new StringReader(input);
    const ranges = [];
    const stack = [];
    let genLine = 0;
    let definitionSourcesIndex = 0;
    let definitionScopeIndex = 0;
    let callsiteSourcesIndex = 0;
    let callsiteLine = 0;
    let callsiteColumn = 0;
    let bindingLine = 0;
    let bindingColumn = 0;
    do {
        const semi = reader.indexOf(';');
        let genColumn = 0;
        for (; reader.pos < semi; reader.pos++) {
            genColumn = decodeInteger(reader, genColumn);
            if (!hasMoreVlq(reader, semi)) {
                const last = stack.pop();
                last[2] = genLine;
                last[3] = genColumn;
                continue;
            }
            const fields = decodeInteger(reader, 0);
            const hasDefinition = fields & 0b0001;
            const hasCallsite = fields & 0b0010;
            const hasScope = fields & 0b0100;
            let callsite = null;
            let bindings = EMPTY;
            let range;
            if (hasDefinition) {
                const defSourcesIndex = decodeInteger(reader, definitionSourcesIndex);
                definitionScopeIndex = decodeInteger(reader, definitionSourcesIndex === defSourcesIndex ? definitionScopeIndex : 0);
                definitionSourcesIndex = defSourcesIndex;
                range = [genLine, genColumn, 0, 0, defSourcesIndex, definitionScopeIndex];
            }
            else {
                range = [genLine, genColumn, 0, 0];
            }
            range.isScope = !!hasScope;
            if (hasCallsite) {
                const prevCsi = callsiteSourcesIndex;
                const prevLine = callsiteLine;
                callsiteSourcesIndex = decodeInteger(reader, callsiteSourcesIndex);
                const sameSource = prevCsi === callsiteSourcesIndex;
                callsiteLine = decodeInteger(reader, sameSource ? callsiteLine : 0);
                callsiteColumn = decodeInteger(reader, sameSource && prevLine === callsiteLine ? callsiteColumn : 0);
                callsite = [callsiteSourcesIndex, callsiteLine, callsiteColumn];
            }
            range.callsite = callsite;
            if (hasMoreVlq(reader, semi)) {
                bindings = [];
                do {
                    bindingLine = genLine;
                    bindingColumn = genColumn;
                    const expressionsCount = decodeInteger(reader, 0);
                    let expressionRanges;
                    if (expressionsCount < -1) {
                        expressionRanges = [[decodeInteger(reader, 0)]];
                        for (let i = -1; i > expressionsCount; i--) {
                            const prevBl = bindingLine;
                            bindingLine = decodeInteger(reader, bindingLine);
                            bindingColumn = decodeInteger(reader, bindingLine === prevBl ? bindingColumn : 0);
                            const expression = decodeInteger(reader, 0);
                            expressionRanges.push([expression, bindingLine, bindingColumn]);
                        }
                    }
                    else {
                        expressionRanges = [[expressionsCount]];
                    }
                    bindings.push(expressionRanges);
                } while (hasMoreVlq(reader, semi));
            }
            range.bindings = bindings;
            ranges.push(range);
            stack.push(range);
        }
        genLine++;
        reader.pos = semi + 1;
    } while (reader.pos < length);
    return ranges;
}
function encodeGeneratedRanges(ranges) {
    if (ranges.length === 0)
        return '';
    const writer = new StringWriter();
    for (let i = 0; i < ranges.length;) {
        i = _encodeGeneratedRanges(ranges, i, writer, [0, 0, 0, 0, 0, 0, 0]);
    }
    return writer.flush();
}
function _encodeGeneratedRanges(ranges, index, writer, state) {
    const range = ranges[index];
    const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, isScope, callsite, bindings, } = range;
    if (state[0] < startLine) {
        catchupLine(writer, state[0], startLine);
        state[0] = startLine;
        state[1] = 0;
    }
    else if (index > 0) {
        writer.write(comma);
    }
    state[1] = encodeInteger(writer, range[1], state[1]);
    const fields = (range.length === 6 ? 0b0001 : 0) | (callsite ? 0b0010 : 0) | (isScope ? 0b0100 : 0);
    encodeInteger(writer, fields, 0);
    if (range.length === 6) {
        const { 4: sourcesIndex, 5: scopesIndex } = range;
        if (sourcesIndex !== state[2]) {
            state[3] = 0;
        }
        state[2] = encodeInteger(writer, sourcesIndex, state[2]);
        state[3] = encodeInteger(writer, scopesIndex, state[3]);
    }
    if (callsite) {
        const { 0: sourcesIndex, 1: callLine, 2: callColumn } = range.callsite;
        if (sourcesIndex !== state[4]) {
            state[5] = 0;
            state[6] = 0;
        }
        else if (callLine !== state[5]) {
            state[6] = 0;
        }
        state[4] = encodeInteger(writer, sourcesIndex, state[4]);
        state[5] = encodeInteger(writer, callLine, state[5]);
        state[6] = encodeInteger(writer, callColumn, state[6]);
    }
    if (bindings) {
        for (const binding of bindings) {
            if (binding.length > 1)
                encodeInteger(writer, -binding.length, 0);
            const expression = binding[0][0];
            encodeInteger(writer, expression, 0);
            let bindingStartLine = startLine;
            let bindingStartColumn = startColumn;
            for (let i = 1; i < binding.length; i++) {
                const expRange = binding[i];
                bindingStartLine = encodeInteger(writer, expRange[1], bindingStartLine);
                bindingStartColumn = encodeInteger(writer, expRange[2], bindingStartColumn);
                encodeInteger(writer, expRange[0], 0);
            }
        }
    }
    for (index++; index < ranges.length;) {
        const next = ranges[index];
        const { 0: l, 1: c } = next;
        if (l > endLine || (l === endLine && c >= endColumn)) {
            break;
        }
        index = _encodeGeneratedRanges(ranges, index, writer, state);
    }
    if (state[0] < endLine) {
        catchupLine(writer, state[0], endLine);
        state[0] = endLine;
        state[1] = 0;
    }
    else {
        writer.write(comma);
    }
    state[1] = encodeInteger(writer, endColumn, state[1]);
    return index;
}
function catchupLine(writer, lastLine, line) {
    do {
        writer.write(semicolon);
    } while (++lastLine < line);
}

function decode(mappings) {
    const { length } = mappings;
    const reader = new StringReader(mappings);
    const decoded = [];
    let genColumn = 0;
    let sourcesIndex = 0;
    let sourceLine = 0;
    let sourceColumn = 0;
    let namesIndex = 0;
    do {
        const semi = reader.indexOf(';');
        const line = [];
        let sorted = true;
        let lastCol = 0;
        genColumn = 0;
        while (reader.pos < semi) {
            let seg;
            genColumn = decodeInteger(reader, genColumn);
            if (genColumn < lastCol)
                sorted = false;
            lastCol = genColumn;
            if (hasMoreVlq(reader, semi)) {
                sourcesIndex = decodeInteger(reader, sourcesIndex);
                sourceLine = decodeInteger(reader, sourceLine);
                sourceColumn = decodeInteger(reader, sourceColumn);
                if (hasMoreVlq(reader, semi)) {
                    namesIndex = decodeInteger(reader, namesIndex);
                    seg = [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex];
                }
                else {
                    seg = [genColumn, sourcesIndex, sourceLine, sourceColumn];
                }
            }
            else {
                seg = [genColumn];
            }
            line.push(seg);
            reader.pos++;
        }
        if (!sorted)
            sort(line);
        decoded.push(line);
        reader.pos = semi + 1;
    } while (reader.pos <= length);
    return decoded;
}
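// Worked example: decode('AAAA;;CACA') returns [[[0, 0, 0, 0]], [], [[1, 0, 1, 0]]],
// i.e. a segment at column 0 of line 0, an empty line 1, and a segment at
// column 1 of line 2 pointing at source 0, line 1, column 0.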
function sort(line) {
    line.sort(sortComparator);
}
function sortComparator(a, b) {
    return a[0] - b[0];
}
function encode(decoded) {
    const writer = new StringWriter();
    let sourcesIndex = 0;
    let sourceLine = 0;
    let sourceColumn = 0;
    let namesIndex = 0;
    for (let i = 0; i < decoded.length; i++) {
        const line = decoded[i];
        if (i > 0)
            writer.write(semicolon);
        if (line.length === 0)
            continue;
        let genColumn = 0;
        for (let j = 0; j < line.length; j++) {
            const segment = line[j];
            if (j > 0)
                writer.write(comma);
            genColumn = encodeInteger(writer, segment[0], genColumn);
            if (segment.length === 1)
                continue;
            sourcesIndex = encodeInteger(writer, segment[1], sourcesIndex);
            sourceLine = encodeInteger(writer, segment[2], sourceLine);
            sourceColumn = encodeInteger(writer, segment[3], sourceColumn);
            if (segment.length === 4)
                continue;
            namesIndex = encodeInteger(writer, segment[4], namesIndex);
        }
    }
    return writer.flush();
}
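// Note: encode(decode(mappings)) yields an equivalent mappings string. Because
// decode re-sorts any unsorted lines, the output is canonical rather than
// guaranteed byte-identical to the input.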

export { decode, decodeGeneratedRanges, decodeOriginalScopes, encode, encodeGeneratedRanges, encodeOriginalScopes };
//# sourceMappingURL=sourcemap-codec.mjs.map
1
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
439
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js
generated
vendored
Normal file
@ -0,0 +1,439 @@
(function (global, factory) {
    typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
    typeof define === 'function' && define.amd ? define(['exports'], factory) :
    (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.sourcemapCodec = {}));
})(this, (function (exports) { 'use strict';

    const comma = ','.charCodeAt(0);
    const semicolon = ';'.charCodeAt(0);
    const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
    const intToChar = new Uint8Array(64); // 64 possible chars.
    const charToInt = new Uint8Array(128); // z is 122 in ASCII
    for (let i = 0; i < chars.length; i++) {
        const c = chars.charCodeAt(i);
        intToChar[i] = c;
        charToInt[c] = i;
    }
    function decodeInteger(reader, relative) {
        let value = 0;
        let shift = 0;
        let integer = 0;
        do {
            const c = reader.next();
            integer = charToInt[c];
            value |= (integer & 31) << shift;
            shift += 5;
        } while (integer & 32);
        const shouldNegate = value & 1;
        value >>>= 1;
        if (shouldNegate) {
            value = -0x80000000 | -value;
        }
        return relative + value;
    }
    function encodeInteger(builder, num, relative) {
        let delta = num - relative;
        delta = delta < 0 ? (-delta << 1) | 1 : delta << 1;
        do {
            let clamped = delta & 0b011111;
            delta >>>= 5;
            if (delta > 0)
                clamped |= 0b100000;
            builder.write(intToChar[clamped]);
        } while (delta > 0);
        return num;
    }
    function hasMoreVlq(reader, max) {
        if (reader.pos >= max)
            return false;
        return reader.peek() !== comma;
    }

    const bufLength = 1024 * 16;
    // Provide a fallback for older environments.
    const td = typeof TextDecoder !== 'undefined'
        ? /* #__PURE__ */ new TextDecoder()
        : typeof Buffer !== 'undefined'
            ? {
                decode(buf) {
                    const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
                    return out.toString();
                },
            }
            : {
                decode(buf) {
                    let out = '';
                    for (let i = 0; i < buf.length; i++) {
                        out += String.fromCharCode(buf[i]);
                    }
                    return out;
                },
            };
    class StringWriter {
        constructor() {
            this.pos = 0;
            this.out = '';
            this.buffer = new Uint8Array(bufLength);
        }
        write(v) {
            const { buffer } = this;
            buffer[this.pos++] = v;
            if (this.pos === bufLength) {
                this.out += td.decode(buffer);
                this.pos = 0;
            }
        }
        flush() {
            const { buffer, out, pos } = this;
            return pos > 0 ? out + td.decode(buffer.subarray(0, pos)) : out;
        }
    }
    class StringReader {
        constructor(buffer) {
            this.pos = 0;
            this.buffer = buffer;
        }
        next() {
            return this.buffer.charCodeAt(this.pos++);
        }
        peek() {
            return this.buffer.charCodeAt(this.pos);
        }
        indexOf(char) {
            const { buffer, pos } = this;
            const idx = buffer.indexOf(char, pos);
            return idx === -1 ? buffer.length : idx;
        }
    }

    const EMPTY = [];
    function decodeOriginalScopes(input) {
        const { length } = input;
        const reader = new StringReader(input);
        const scopes = [];
        const stack = [];
        let line = 0;
        for (; reader.pos < length; reader.pos++) {
            line = decodeInteger(reader, line);
            const column = decodeInteger(reader, 0);
            if (!hasMoreVlq(reader, length)) {
                const last = stack.pop();
                last[2] = line;
                last[3] = column;
                continue;
            }
            const kind = decodeInteger(reader, 0);
            const fields = decodeInteger(reader, 0);
            const hasName = fields & 0b0001;
            const scope = (hasName ? [line, column, 0, 0, kind, decodeInteger(reader, 0)] : [line, column, 0, 0, kind]);
            let vars = EMPTY;
            if (hasMoreVlq(reader, length)) {
                vars = [];
                do {
                    const varsIndex = decodeInteger(reader, 0);
                    vars.push(varsIndex);
                } while (hasMoreVlq(reader, length));
            }
            scope.vars = vars;
            scopes.push(scope);
            stack.push(scope);
        }
        return scopes;
    }
    function encodeOriginalScopes(scopes) {
        const writer = new StringWriter();
        for (let i = 0; i < scopes.length;) {
            i = _encodeOriginalScopes(scopes, i, writer, [0]);
        }
        return writer.flush();
    }
    function _encodeOriginalScopes(scopes, index, writer, state) {
        const scope = scopes[index];
        const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, 4: kind, vars } = scope;
        if (index > 0)
            writer.write(comma);
        state[0] = encodeInteger(writer, startLine, state[0]);
        encodeInteger(writer, startColumn, 0);
        encodeInteger(writer, kind, 0);
        const fields = scope.length === 6 ? 0b0001 : 0;
        encodeInteger(writer, fields, 0);
        if (scope.length === 6)
            encodeInteger(writer, scope[5], 0);
        for (const v of vars) {
            encodeInteger(writer, v, 0);
        }
        for (index++; index < scopes.length;) {
            const next = scopes[index];
            const { 0: l, 1: c } = next;
            if (l > endLine || (l === endLine && c >= endColumn)) {
                break;
            }
            index = _encodeOriginalScopes(scopes, index, writer, state);
        }
        writer.write(comma);
        state[0] = encodeInteger(writer, endLine, state[0]);
        encodeInteger(writer, endColumn, 0);
        return index;
    }
    function decodeGeneratedRanges(input) {
        const { length } = input;
        const reader = new StringReader(input);
        const ranges = [];
        const stack = [];
        let genLine = 0;
        let definitionSourcesIndex = 0;
        let definitionScopeIndex = 0;
        let callsiteSourcesIndex = 0;
        let callsiteLine = 0;
        let callsiteColumn = 0;
        let bindingLine = 0;
        let bindingColumn = 0;
        do {
            const semi = reader.indexOf(';');
            let genColumn = 0;
            for (; reader.pos < semi; reader.pos++) {
                genColumn = decodeInteger(reader, genColumn);
                if (!hasMoreVlq(reader, semi)) {
                    const last = stack.pop();
                    last[2] = genLine;
                    last[3] = genColumn;
                    continue;
                }
                const fields = decodeInteger(reader, 0);
                const hasDefinition = fields & 0b0001;
                const hasCallsite = fields & 0b0010;
                const hasScope = fields & 0b0100;
                let callsite = null;
                let bindings = EMPTY;
                let range;
                if (hasDefinition) {
                    const defSourcesIndex = decodeInteger(reader, definitionSourcesIndex);
                    definitionScopeIndex = decodeInteger(reader, definitionSourcesIndex === defSourcesIndex ? definitionScopeIndex : 0);
                    definitionSourcesIndex = defSourcesIndex;
                    range = [genLine, genColumn, 0, 0, defSourcesIndex, definitionScopeIndex];
                }
                else {
                    range = [genLine, genColumn, 0, 0];
                }
                range.isScope = !!hasScope;
                if (hasCallsite) {
                    const prevCsi = callsiteSourcesIndex;
                    const prevLine = callsiteLine;
                    callsiteSourcesIndex = decodeInteger(reader, callsiteSourcesIndex);
                    const sameSource = prevCsi === callsiteSourcesIndex;
                    callsiteLine = decodeInteger(reader, sameSource ? callsiteLine : 0);
                    callsiteColumn = decodeInteger(reader, sameSource && prevLine === callsiteLine ? callsiteColumn : 0);
                    callsite = [callsiteSourcesIndex, callsiteLine, callsiteColumn];
                }
                range.callsite = callsite;
                if (hasMoreVlq(reader, semi)) {
                    bindings = [];
                    do {
                        bindingLine = genLine;
                        bindingColumn = genColumn;
                        const expressionsCount = decodeInteger(reader, 0);
                        let expressionRanges;
                        if (expressionsCount < -1) {
                            expressionRanges = [[decodeInteger(reader, 0)]];
                            for (let i = -1; i > expressionsCount; i--) {
                                const prevBl = bindingLine;
                                bindingLine = decodeInteger(reader, bindingLine);
                                bindingColumn = decodeInteger(reader, bindingLine === prevBl ? bindingColumn : 0);
                                const expression = decodeInteger(reader, 0);
                                expressionRanges.push([expression, bindingLine, bindingColumn]);
                            }
                        }
                        else {
                            expressionRanges = [[expressionsCount]];
                        }
                        bindings.push(expressionRanges);
                    } while (hasMoreVlq(reader, semi));
                }
                range.bindings = bindings;
                ranges.push(range);
                stack.push(range);
            }
            genLine++;
            reader.pos = semi + 1;
        } while (reader.pos < length);
        return ranges;
    }
    function encodeGeneratedRanges(ranges) {
        if (ranges.length === 0)
            return '';
        const writer = new StringWriter();
        for (let i = 0; i < ranges.length;) {
            i = _encodeGeneratedRanges(ranges, i, writer, [0, 0, 0, 0, 0, 0, 0]);
        }
        return writer.flush();
    }
    function _encodeGeneratedRanges(ranges, index, writer, state) {
        const range = ranges[index];
        const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, isScope, callsite, bindings, } = range;
        if (state[0] < startLine) {
            catchupLine(writer, state[0], startLine);
            state[0] = startLine;
            state[1] = 0;
        }
        else if (index > 0) {
            writer.write(comma);
        }
        state[1] = encodeInteger(writer, range[1], state[1]);
        const fields = (range.length === 6 ? 0b0001 : 0) | (callsite ? 0b0010 : 0) | (isScope ? 0b0100 : 0);
        encodeInteger(writer, fields, 0);
        if (range.length === 6) {
            const { 4: sourcesIndex, 5: scopesIndex } = range;
            if (sourcesIndex !== state[2]) {
                state[3] = 0;
            }
            state[2] = encodeInteger(writer, sourcesIndex, state[2]);
            state[3] = encodeInteger(writer, scopesIndex, state[3]);
        }
        if (callsite) {
            const { 0: sourcesIndex, 1: callLine, 2: callColumn } = range.callsite;
            if (sourcesIndex !== state[4]) {
                state[5] = 0;
                state[6] = 0;
            }
            else if (callLine !== state[5]) {
                state[6] = 0;
            }
            state[4] = encodeInteger(writer, sourcesIndex, state[4]);
            state[5] = encodeInteger(writer, callLine, state[5]);
            state[6] = encodeInteger(writer, callColumn, state[6]);
        }
        if (bindings) {
            for (const binding of bindings) {
                if (binding.length > 1)
                    encodeInteger(writer, -binding.length, 0);
                const expression = binding[0][0];
                encodeInteger(writer, expression, 0);
                let bindingStartLine = startLine;
                let bindingStartColumn = startColumn;
                for (let i = 1; i < binding.length; i++) {
                    const expRange = binding[i];
                    bindingStartLine = encodeInteger(writer, expRange[1], bindingStartLine);
                    bindingStartColumn = encodeInteger(writer, expRange[2], bindingStartColumn);
                    encodeInteger(writer, expRange[0], 0);
                }
            }
        }
        for (index++; index < ranges.length;) {
            const next = ranges[index];
            const { 0: l, 1: c } = next;
            if (l > endLine || (l === endLine && c >= endColumn)) {
                break;
            }
            index = _encodeGeneratedRanges(ranges, index, writer, state);
        }
        if (state[0] < endLine) {
            catchupLine(writer, state[0], endLine);
            state[0] = endLine;
            state[1] = 0;
        }
        else {
            writer.write(comma);
        }
        state[1] = encodeInteger(writer, endColumn, state[1]);
        return index;
    }
    function catchupLine(writer, lastLine, line) {
        do {
            writer.write(semicolon);
        } while (++lastLine < line);
    }

    function decode(mappings) {
        const { length } = mappings;
        const reader = new StringReader(mappings);
        const decoded = [];
        let genColumn = 0;
        let sourcesIndex = 0;
        let sourceLine = 0;
        let sourceColumn = 0;
        let namesIndex = 0;
        do {
            const semi = reader.indexOf(';');
            const line = [];
            let sorted = true;
            let lastCol = 0;
            genColumn = 0;
            while (reader.pos < semi) {
                let seg;
                genColumn = decodeInteger(reader, genColumn);
                if (genColumn < lastCol)
                    sorted = false;
                lastCol = genColumn;
                if (hasMoreVlq(reader, semi)) {
                    sourcesIndex = decodeInteger(reader, sourcesIndex);
                    sourceLine = decodeInteger(reader, sourceLine);
                    sourceColumn = decodeInteger(reader, sourceColumn);
                    if (hasMoreVlq(reader, semi)) {
                        namesIndex = decodeInteger(reader, namesIndex);
                        seg = [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex];
                    }
                    else {
                        seg = [genColumn, sourcesIndex, sourceLine, sourceColumn];
                    }
                }
                else {
                    seg = [genColumn];
                }
                line.push(seg);
                reader.pos++;
            }
            if (!sorted)
                sort(line);
            decoded.push(line);
            reader.pos = semi + 1;
        } while (reader.pos <= length);
        return decoded;
    }
    function sort(line) {
        line.sort(sortComparator);
    }
    function sortComparator(a, b) {
        return a[0] - b[0];
    }
    function encode(decoded) {
        const writer = new StringWriter();
        let sourcesIndex = 0;
        let sourceLine = 0;
        let sourceColumn = 0;
        let namesIndex = 0;
        for (let i = 0; i < decoded.length; i++) {
            const line = decoded[i];
            if (i > 0)
                writer.write(semicolon);
            if (line.length === 0)
                continue;
            let genColumn = 0;
            for (let j = 0; j < line.length; j++) {
                const segment = line[j];
                if (j > 0)
                    writer.write(comma);
                genColumn = encodeInteger(writer, segment[0], genColumn);
                if (segment.length === 1)
                    continue;
                sourcesIndex = encodeInteger(writer, segment[1], sourcesIndex);
                sourceLine = encodeInteger(writer, segment[2], sourceLine);
                sourceColumn = encodeInteger(writer, segment[3], sourceColumn);
                if (segment.length === 4)
                    continue;
                namesIndex = encodeInteger(writer, segment[4], namesIndex);
            }
        }
        return writer.flush();
    }

    exports.decode = decode;
    exports.decodeGeneratedRanges = decodeGeneratedRanges;
    exports.decodeOriginalScopes = decodeOriginalScopes;
    exports.encode = encode;
    exports.encodeGeneratedRanges = encodeGeneratedRanges;
    exports.encodeOriginalScopes = encodeOriginalScopes;

    Object.defineProperty(exports, '__esModule', { value: true });

}));
//# sourceMappingURL=sourcemap-codec.umd.js.map
1
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
49
node_modules/@jridgewell/sourcemap-codec/dist/types/scopes.d.ts
generated
vendored
Normal file
@ -0,0 +1,49 @@
declare type Line = number;
declare type Column = number;
declare type Kind = number;
declare type Name = number;
declare type Var = number;
declare type SourcesIndex = number;
declare type ScopesIndex = number;
declare type Mix<A, B, O> = (A & O) | (B & O);
export declare type OriginalScope = Mix<[
    Line,
    Column,
    Line,
    Column,
    Kind
], [
    Line,
    Column,
    Line,
    Column,
    Kind,
    Name
], {
    vars: Var[];
}>;
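// i.e. an OriginalScope is [startLine, startColumn, endLine, endColumn, kind],
// optionally followed by a name index, with a `vars` array attached either way.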
export declare type GeneratedRange = Mix<[
    Line,
    Column,
    Line,
    Column
], [
    Line,
    Column,
    Line,
    Column,
    SourcesIndex,
    ScopesIndex
], {
    callsite: CallSite | null;
    bindings: Binding[];
    isScope: boolean;
}>;
export declare type CallSite = [SourcesIndex, Line, Column];
declare type Binding = BindingExpressionRange[];
export declare type BindingExpressionRange = [Name] | [Name, Line, Column];
export declare function decodeOriginalScopes(input: string): OriginalScope[];
export declare function encodeOriginalScopes(scopes: OriginalScope[]): string;
export declare function decodeGeneratedRanges(input: string): GeneratedRange[];
export declare function encodeGeneratedRanges(ranges: GeneratedRange[]): string;
export {};
8
node_modules/@jridgewell/sourcemap-codec/dist/types/sourcemap-codec.d.ts
generated
vendored
Normal file
@ -0,0 +1,8 @@
export { decodeOriginalScopes, encodeOriginalScopes, decodeGeneratedRanges, encodeGeneratedRanges, } from './scopes';
export type { OriginalScope, GeneratedRange, CallSite, BindingExpressionRange } from './scopes';
export declare type SourceMapSegment = [number] | [number, number, number, number] | [number, number, number, number, number];
export declare type SourceMapLine = SourceMapSegment[];
export declare type SourceMapMappings = SourceMapLine[];
export declare function decode(mappings: string): SourceMapMappings;
export declare function encode(decoded: SourceMapMappings): string;
export declare function encode(decoded: Readonly<SourceMapMappings>): string;
15
node_modules/@jridgewell/sourcemap-codec/dist/types/strings.d.ts
generated
vendored
Normal file
@ -0,0 +1,15 @@
export declare class StringWriter {
    pos: number;
    private out;
    private buffer;
    write(v: number): void;
    flush(): string;
}
export declare class StringReader {
    pos: number;
    private buffer;
    constructor(buffer: string);
    next(): number;
    peek(): number;
    indexOf(char: string): number;
}
6
node_modules/@jridgewell/sourcemap-codec/dist/types/vlq.d.ts
generated
vendored
Normal file
@ -0,0 +1,6 @@
import type { StringReader, StringWriter } from './strings';
export declare const comma: number;
export declare const semicolon: number;
export declare function decodeInteger(reader: StringReader, relative: number): number;
export declare function encodeInteger(builder: StringWriter, num: number, relative: number): number;
export declare function hasMoreVlq(reader: StringReader, max: number): boolean;
79
node_modules/@jridgewell/sourcemap-codec/package.json
generated
vendored
Normal file
@ -0,0 +1,79 @@
{
  "name": "@jridgewell/sourcemap-codec",
  "version": "1.5.0",
  "description": "Encode/decode sourcemap mappings",
  "keywords": [
    "sourcemap",
    "vlq"
  ],
  "main": "dist/sourcemap-codec.umd.js",
  "module": "dist/sourcemap-codec.mjs",
  "types": "dist/types/sourcemap-codec.d.ts",
  "files": [
    "dist"
  ],
  "exports": {
    ".": [
      {
        "types": "./dist/types/sourcemap-codec.d.ts",
        "browser": "./dist/sourcemap-codec.umd.js",
        "require": "./dist/sourcemap-codec.umd.js",
        "import": "./dist/sourcemap-codec.mjs"
      },
      "./dist/sourcemap-codec.umd.js"
    ],
    "./package.json": "./package.json"
  },
  "scripts": {
    "benchmark": "run-s build:rollup benchmark:*",
    "benchmark:install": "cd benchmark && npm install",
    "benchmark:only": "node --expose-gc benchmark/index.js",
    "build": "run-s -n build:*",
    "build:rollup": "rollup -c rollup.config.js",
    "build:ts": "tsc --project tsconfig.build.json",
    "lint": "run-s -n lint:*",
    "lint:prettier": "npm run test:lint:prettier -- --write",
    "lint:ts": "npm run test:lint:ts -- --fix",
    "prebuild": "rm -rf dist",
    "prepublishOnly": "npm run preversion",
    "preversion": "run-s test build",
    "test": "run-s -n test:lint test:only",
    "test:debug": "mocha --inspect-brk",
    "test:lint": "run-s -n test:lint:*",
    "test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
    "test:lint:ts": "eslint '{src,test}/**/*.ts'",
    "test:only": "mocha",
    "test:coverage": "c8 mocha",
    "test:watch": "mocha --watch"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/jridgewell/sourcemap-codec.git"
  },
  "author": "Rich Harris",
  "license": "MIT",
  "devDependencies": {
    "@rollup/plugin-typescript": "8.3.0",
    "@types/mocha": "10.0.6",
    "@types/node": "17.0.15",
    "@typescript-eslint/eslint-plugin": "5.10.0",
    "@typescript-eslint/parser": "5.10.0",
    "benchmark": "2.1.4",
    "c8": "7.11.2",
    "eslint": "8.7.0",
    "eslint-config-prettier": "8.3.0",
    "mocha": "9.2.0",
    "npm-run-all": "4.1.5",
    "prettier": "2.5.1",
    "rollup": "2.64.0",
    "source-map": "0.6.1",
    "source-map-js": "1.0.2",
    "sourcemap-codec": "1.4.8",
    "tsx": "4.7.1",
    "typescript": "4.5.4"
  }

,"_resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz"
,"_integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ=="
,"_from": "@jridgewell/sourcemap-codec@^1.4.10"
}
19
node_modules/@jridgewell/trace-mapping/LICENSE
generated
vendored
Normal file
@ -0,0 +1,19 @@
Copyright 2022 Justin Ridgewell <justin@ridgewell.name>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
257
node_modules/@jridgewell/trace-mapping/README.md
generated
vendored
Normal file
@ -0,0 +1,257 @@
# @jridgewell/trace-mapping

> Trace the original position through a source map

`trace-mapping` allows you to take the line and column of an output file and trace it to the
original location in the source file through a source map.

You may already be familiar with the [`source-map`][source-map] package's `SourceMapConsumer`. This
provides the same `originalPositionFor` and `generatedPositionFor` API, without requiring WASM.

## Installation

```sh
npm install @jridgewell/trace-mapping
```

## Usage

```typescript
import {
  TraceMap,
  originalPositionFor,
  generatedPositionFor,
  sourceContentFor,
  isIgnored,
} from '@jridgewell/trace-mapping';

const tracer = new TraceMap({
  version: 3,
  sources: ['input.js'],
  sourcesContent: ['content of input.js'],
  names: ['foo'],
  mappings: 'KAyCIA',
  ignoreList: [],
});

// Lines start at line 1, columns at column 0.
const traced = originalPositionFor(tracer, { line: 1, column: 5 });
assert.deepEqual(traced, {
  source: 'input.js',
  line: 42,
  column: 4,
  name: 'foo',
});

const content = sourceContentFor(tracer, traced.source);
assert.strictEqual(content, 'content of input.js');

const generated = generatedPositionFor(tracer, {
  source: 'input.js',
  line: 42,
  column: 4,
});
assert.deepEqual(generated, {
  line: 1,
  column: 5,
});

const ignored = isIgnored(tracer, 'input.js');
assert.equal(ignored, false);
```

We also provide a lower level API to get the actual segment that matches our line and column. Unlike
`originalPositionFor`, `traceSegment` uses a 0-base for `line`:

```typescript
import { traceSegment } from '@jridgewell/trace-mapping';

// line is 0-base.
const traced = traceSegment(tracer, /* line */ 0, /* column */ 5);

// Segments are [outputColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
// Again, line is 0-base and so is sourceLine
assert.deepEqual(traced, [5, 0, 41, 4, 0]);
```

### SectionedSourceMaps

The sourcemap spec defines a special `sections` field that's designed to handle concatenation of
output code with associated sourcemaps. This type of sourcemap is rarely used (no major build tool
produces it), but if you are hand coding a concatenation you may need it. We provide an `AnyMap`
helper that can receive either a regular sourcemap or a `SectionedSourceMap` and returns a
`TraceMap` instance:

```typescript
import { AnyMap } from '@jridgewell/trace-mapping';
const fooOutput = 'foo';
const barOutput = 'bar';
const output = [fooOutput, barOutput].join('\n');

const sectioned = new AnyMap({
  version: 3,
  sections: [
    {
      // 0-base line and column
      offset: { line: 0, column: 0 },
      // fooOutput's sourcemap
      map: {
        version: 3,
        sources: ['foo.js'],
        names: ['foo'],
        mappings: 'AAAAA',
      },
    },
    {
      // barOutput's sourcemap will not affect the first line, only the second
      offset: { line: 1, column: 0 },
      map: {
        version: 3,
        sources: ['bar.js'],
        names: ['bar'],
        mappings: 'AAAAA',
      },
    },
  ],
});

const traced = originalPositionFor(sectioned, {
  line: 2,
  column: 0,
});

assert.deepEqual(traced, {
  source: 'bar.js',
  line: 1,
  column: 0,
  name: 'bar',
});
```

## Benchmarks

```
node v18.0.0

amp.js.map - 45120 segments

Memory Usage:
trace-mapping decoded 562400 bytes
trace-mapping encoded 5706544 bytes
source-map-js 10717664 bytes
source-map-0.6.1 17446384 bytes
source-map-0.8.0 9701757 bytes
Smallest memory usage is trace-mapping decoded

Init speed:
trace-mapping: decoded JSON input x 180 ops/sec ±0.34% (85 runs sampled)
trace-mapping: encoded JSON input x 364 ops/sec ±1.77% (89 runs sampled)
trace-mapping: decoded Object input x 3,116 ops/sec ±0.50% (96 runs sampled)
trace-mapping: encoded Object input x 410 ops/sec ±2.62% (85 runs sampled)
source-map-js: encoded Object input x 84.23 ops/sec ±0.91% (73 runs sampled)
source-map-0.6.1: encoded Object input x 37.21 ops/sec ±2.08% (51 runs sampled)
Fastest is trace-mapping: decoded Object input

Trace speed:
trace-mapping: decoded originalPositionFor x 3,952,212 ops/sec ±0.17% (98 runs sampled)
trace-mapping: encoded originalPositionFor x 3,487,468 ops/sec ±1.58% (90 runs sampled)
source-map-js: encoded originalPositionFor x 827,730 ops/sec ±0.78% (97 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 748,991 ops/sec ±0.53% (94 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 2,532,894 ops/sec ±0.57% (95 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor


***


babel.min.js.map - 347793 segments

Memory Usage:
trace-mapping decoded 89832 bytes
trace-mapping encoded 35474640 bytes
source-map-js 51257176 bytes
source-map-0.6.1 63515664 bytes
source-map-0.8.0 42933752 bytes
Smallest memory usage is trace-mapping decoded

Init speed:
trace-mapping: decoded JSON input x 15.41 ops/sec ±8.65% (34 runs sampled)
trace-mapping: encoded JSON input x 28.20 ops/sec ±12.87% (42 runs sampled)
trace-mapping: decoded Object input x 964 ops/sec ±0.36% (99 runs sampled)
trace-mapping: encoded Object input x 31.77 ops/sec ±13.79% (45 runs sampled)
source-map-js: encoded Object input x 6.45 ops/sec ±5.16% (21 runs sampled)
source-map-0.6.1: encoded Object input x 4.07 ops/sec ±5.24% (15 runs sampled)
Fastest is trace-mapping: decoded Object input

Trace speed:
trace-mapping: decoded originalPositionFor x 7,183,038 ops/sec ±0.58% (95 runs sampled)
trace-mapping: encoded originalPositionFor x 5,192,185 ops/sec ±0.41% (100 runs sampled)
source-map-js: encoded originalPositionFor x 4,259,489 ops/sec ±0.79% (94 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 3,742,629 ops/sec ±0.71% (95 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 6,270,211 ops/sec ±0.64% (94 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor


***


preact.js.map - 1992 segments

Memory Usage:
trace-mapping decoded 37128 bytes
trace-mapping encoded 247280 bytes
source-map-js 1143536 bytes
source-map-0.6.1 1290992 bytes
source-map-0.8.0 96544 bytes
Smallest memory usage is trace-mapping decoded

Init speed:
trace-mapping: decoded JSON input x 3,483 ops/sec ±0.30% (98 runs sampled)
trace-mapping: encoded JSON input x 6,092 ops/sec ±0.18% (97 runs sampled)
trace-mapping: decoded Object input x 249,076 ops/sec ±0.24% (98 runs sampled)
trace-mapping: encoded Object input x 14,555 ops/sec ±0.48% (100 runs sampled)
source-map-js: encoded Object input x 2,447 ops/sec ±0.36% (99 runs sampled)
source-map-0.6.1: encoded Object input x 1,201 ops/sec ±0.57% (96 runs sampled)
Fastest is trace-mapping: decoded Object input

Trace speed:
trace-mapping: decoded originalPositionFor x 7,620,192 ops/sec ±0.09% (99 runs sampled)
trace-mapping: encoded originalPositionFor x 6,872,554 ops/sec ±0.30% (97 runs sampled)
source-map-js: encoded originalPositionFor x 2,489,570 ops/sec ±0.35% (94 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 1,698,633 ops/sec ±0.28% (98 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 4,015,644 ops/sec ±0.22% (98 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor


***


react.js.map - 5726 segments

Memory Usage:
trace-mapping decoded 16176 bytes
trace-mapping encoded 681552 bytes
source-map-js 2418352 bytes
source-map-0.6.1 2443672 bytes
source-map-0.8.0 111768 bytes
Smallest memory usage is trace-mapping decoded

Init speed:
trace-mapping: decoded JSON input x 1,720 ops/sec ±0.34% (98 runs sampled)
trace-mapping: encoded JSON input x 4,406 ops/sec ±0.35% (100 runs sampled)
trace-mapping: decoded Object input x 92,122 ops/sec ±0.10% (99 runs sampled)
trace-mapping: encoded Object input x 5,385 ops/sec ±0.37% (99 runs sampled)
source-map-js: encoded Object input x 794 ops/sec ±0.40% (98 runs sampled)
source-map-0.6.1: encoded Object input x 416 ops/sec ±0.54% (91 runs sampled)
Fastest is trace-mapping: decoded Object input

Trace speed:
trace-mapping: decoded originalPositionFor x 32,759,519 ops/sec ±0.33% (100 runs sampled)
trace-mapping: encoded originalPositionFor x 31,116,306 ops/sec ±0.33% (97 runs sampled)
source-map-js: encoded originalPositionFor x 17,458,435 ops/sec ±0.44% (97 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 12,687,097 ops/sec ±0.43% (95 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 23,538,275 ops/sec ±0.38% (95 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor
```

[source-map]: https://www.npmjs.com/package/source-map
580
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs
generated
vendored
Normal file
@ -0,0 +1,580 @@
import { encode, decode } from '@jridgewell/sourcemap-codec';
import resolveUri from '@jridgewell/resolve-uri';

function resolve(input, base) {
    // The base is always treated as a directory, if it's not empty.
    // https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
    // https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
    if (base && !base.endsWith('/'))
        base += '/';
    return resolveUri(input, base);
}

/**
 * Removes everything after the last "/", but leaves the slash.
 */
function stripFilename(path) {
    if (!path)
        return '';
    const index = path.lastIndexOf('/');
    return path.slice(0, index + 1);
}
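// e.g. stripFilename('dir/sub/file.js') returns 'dir/sub/', and a path with no
// slash at all ('file.js') returns ''.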

const COLUMN = 0;
const SOURCES_INDEX = 1;
const SOURCE_LINE = 2;
const SOURCE_COLUMN = 3;
const NAMES_INDEX = 4;
const REV_GENERATED_LINE = 1;
const REV_GENERATED_COLUMN = 2;

function maybeSort(mappings, owned) {
    const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
    if (unsortedIndex === mappings.length)
        return mappings;
    // If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
    // not, we do not want to modify the consumer's input array.
    if (!owned)
        mappings = mappings.slice();
    for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
        mappings[i] = sortSegments(mappings[i], owned);
    }
    return mappings;
}
function nextUnsortedSegmentLine(mappings, start) {
    for (let i = start; i < mappings.length; i++) {
        if (!isSorted(mappings[i]))
            return i;
    }
    return mappings.length;
}
function isSorted(line) {
    for (let j = 1; j < line.length; j++) {
        if (line[j][COLUMN] < line[j - 1][COLUMN]) {
            return false;
        }
    }
    return true;
}
function sortSegments(line, owned) {
    if (!owned)
        line = line.slice();
    return line.sort(sortComparator);
}
function sortComparator(a, b) {
    return a[COLUMN] - b[COLUMN];
}

let found = false;
/**
 * A binary search implementation that returns the index if a match is found.
 * If no match is found, then the left-index (the index associated with the item that comes just
 * before the desired index) is returned. To maintain proper sort order, a splice would happen at
 * the next index:
 *
 * ```js
 * const array = [1, 3];
 * const needle = 2;
 * const index = binarySearch(array, needle, (item, needle) => item - needle);
 *
 * assert.equal(index, 0);
 * array.splice(index + 1, 0, needle);
 * assert.deepEqual(array, [1, 2, 3]);
 * ```
 */
function binarySearch(haystack, needle, low, high) {
    while (low <= high) {
        const mid = low + ((high - low) >> 1);
        const cmp = haystack[mid][COLUMN] - needle;
        if (cmp === 0) {
            found = true;
            return mid;
        }
        if (cmp < 0) {
            low = mid + 1;
        }
        else {
            high = mid - 1;
        }
    }
    found = false;
    return low - 1;
}
function upperBound(haystack, needle, index) {
    for (let i = index + 1; i < haystack.length; index = i++) {
        if (haystack[i][COLUMN] !== needle)
            break;
    }
    return index;
}
function lowerBound(haystack, needle, index) {
    for (let i = index - 1; i >= 0; index = i--) {
        if (haystack[i][COLUMN] !== needle)
            break;
    }
    return index;
}
function memoizedState() {
    return {
        lastKey: -1,
        lastNeedle: -1,
        lastIndex: -1,
    };
}
/**
 * This overly complicated beast is just to record the last tested line/column and the resulting
 * index, allowing us to skip a few tests if mappings are monotonically increasing.
 */
function memoizedBinarySearch(haystack, needle, state, key) {
    const { lastKey, lastNeedle, lastIndex } = state;
    let low = 0;
    let high = haystack.length - 1;
    if (key === lastKey) {
        if (needle === lastNeedle) {
            found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
            return lastIndex;
        }
        if (needle >= lastNeedle) {
            // lastIndex may be -1 if the previous needle was not found.
            low = lastIndex === -1 ? 0 : lastIndex;
        }
        else {
            high = lastIndex;
        }
    }
    state.lastKey = key;
    state.lastNeedle = needle;
    return (state.lastIndex = binarySearch(haystack, needle, low, high));
}
|
||||
|
||||
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
|
||||
// of generated line/column.
|
||||
function buildBySources(decoded, memos) {
|
||||
const sources = memos.map(buildNullArray);
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const line = decoded[i];
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
if (seg.length === 1)
|
||||
continue;
|
||||
const sourceIndex = seg[SOURCES_INDEX];
|
||||
const sourceLine = seg[SOURCE_LINE];
|
||||
const sourceColumn = seg[SOURCE_COLUMN];
|
||||
const originalSource = sources[sourceIndex];
|
||||
const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
|
||||
const memo = memos[sourceIndex];
|
||||
// The binary search either found a match, or it found the left-index just before where the
|
||||
// segment should go. Either way, we want to insert after that. And there may be multiple
|
||||
// generated segments associated with an original location, so there may need to move several
|
||||
// indexes before we find where we need to insert.
|
||||
let index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
|
||||
memo.lastIndex = ++index;
|
||||
insert(originalLine, index, [sourceColumn, i, seg[COLUMN]]);
|
||||
}
|
||||
}
|
||||
return sources;
|
||||
}
|
||||
function insert(array, index, value) {
|
||||
for (let i = array.length; i > index; i--) {
|
||||
array[i] = array[i - 1];
|
||||
}
|
||||
array[index] = value;
|
||||
}
|
||||
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
|
||||
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
|
||||
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
|
||||
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
|
||||
// order when iterating with for-in.
|
||||
function buildNullArray() {
|
||||
return { __proto__: null };
|
||||
}
|
||||
|
||||
const AnyMap = function (map, mapUrl) {
|
||||
const parsed = parse(map);
|
||||
if (!('sections' in parsed)) {
|
||||
return new TraceMap(parsed, mapUrl);
|
||||
}
|
||||
const mappings = [];
|
||||
const sources = [];
|
||||
const sourcesContent = [];
|
||||
const names = [];
|
||||
const ignoreList = [];
|
||||
recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, ignoreList, 0, 0, Infinity, Infinity);
|
||||
const joined = {
|
||||
version: 3,
|
||||
file: parsed.file,
|
||||
names,
|
||||
sources,
|
||||
sourcesContent,
|
||||
mappings,
|
||||
ignoreList,
|
||||
};
|
||||
return presortedDecodedMap(joined);
|
||||
};
|
||||
function parse(map) {
|
||||
return typeof map === 'string' ? JSON.parse(map) : map;
|
||||
}
|
||||
function recurse(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
||||
const { sections } = input;
|
||||
for (let i = 0; i < sections.length; i++) {
|
||||
const { map, offset } = sections[i];
|
||||
let sl = stopLine;
|
||||
let sc = stopColumn;
|
||||
if (i + 1 < sections.length) {
|
||||
const nextOffset = sections[i + 1].offset;
|
||||
sl = Math.min(stopLine, lineOffset + nextOffset.line);
|
||||
if (sl === stopLine) {
|
||||
sc = Math.min(stopColumn, columnOffset + nextOffset.column);
|
||||
}
|
||||
else if (sl < stopLine) {
|
||||
sc = columnOffset + nextOffset.column;
|
||||
}
|
||||
}
|
||||
addSection(map, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
|
||||
}
|
||||
}
|
||||
function addSection(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
||||
const parsed = parse(input);
|
||||
if ('sections' in parsed)
|
||||
return recurse(...arguments);
|
||||
const map = new TraceMap(parsed, mapUrl);
|
||||
const sourcesOffset = sources.length;
|
||||
const namesOffset = names.length;
|
||||
const decoded = decodedMappings(map);
|
||||
const { resolvedSources, sourcesContent: contents, ignoreList: ignores } = map;
|
||||
append(sources, resolvedSources);
|
||||
append(names, map.names);
|
||||
if (contents)
|
||||
append(sourcesContent, contents);
|
||||
else
|
||||
for (let i = 0; i < resolvedSources.length; i++)
|
||||
sourcesContent.push(null);
|
||||
if (ignores)
|
||||
for (let i = 0; i < ignores.length; i++)
|
||||
ignoreList.push(ignores[i] + sourcesOffset);
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const lineI = lineOffset + i;
|
||||
// We can only add so many lines before we step into the range that the next section's map
|
||||
// controls. When we get to the last line, then we'll start checking the segments to see if
|
||||
// they've crossed into the column range. But it may not have any columns that overstep, so we
|
||||
// still need to check that we don't overstep lines, too.
|
||||
if (lineI > stopLine)
|
||||
return;
|
||||
// The out line may already exist in mappings (if we're continuing the line started by a
|
||||
// previous section). Or, we may have jumped ahead several lines to start this section.
|
||||
const out = getLine(mappings, lineI);
|
||||
// On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
|
||||
// map can be multiple lines), it doesn't.
|
||||
const cOffset = i === 0 ? columnOffset : 0;
|
||||
const line = decoded[i];
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
const column = cOffset + seg[COLUMN];
|
||||
// If this segment steps into the column range that the next section's map controls, we need
|
||||
// to stop early.
|
||||
if (lineI === stopLine && column >= stopColumn)
|
||||
return;
|
||||
if (seg.length === 1) {
|
||||
out.push([column]);
|
||||
continue;
|
||||
}
|
||||
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
|
||||
const sourceLine = seg[SOURCE_LINE];
|
||||
const sourceColumn = seg[SOURCE_COLUMN];
|
||||
out.push(seg.length === 4
|
||||
? [column, sourcesIndex, sourceLine, sourceColumn]
|
||||
: [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
|
||||
}
|
||||
}
|
||||
}
|
||||
function append(arr, other) {
|
||||
for (let i = 0; i < other.length; i++)
|
||||
arr.push(other[i]);
|
||||
}
|
||||
function getLine(arr, index) {
|
||||
for (let i = arr.length; i <= index; i++)
|
||||
arr[i] = [];
|
||||
return arr[index];
|
||||
}
|
||||
|
||||
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
|
||||
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
|
||||
const LEAST_UPPER_BOUND = -1;
|
||||
const GREATEST_LOWER_BOUND = 1;
|
||||
class TraceMap {
|
||||
constructor(map, mapUrl) {
|
||||
const isString = typeof map === 'string';
|
||||
if (!isString && map._decodedMemo)
|
||||
return map;
|
||||
const parsed = (isString ? JSON.parse(map) : map);
|
||||
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
|
||||
this.version = version;
|
||||
this.file = file;
|
||||
this.names = names || [];
|
||||
this.sourceRoot = sourceRoot;
|
||||
this.sources = sources;
|
||||
this.sourcesContent = sourcesContent;
|
||||
this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || undefined;
|
||||
const from = resolve(sourceRoot || '', stripFilename(mapUrl));
|
||||
this.resolvedSources = sources.map((s) => resolve(s || '', from));
|
||||
const { mappings } = parsed;
|
||||
if (typeof mappings === 'string') {
|
||||
this._encoded = mappings;
|
||||
this._decoded = undefined;
|
||||
}
|
||||
else {
|
||||
this._encoded = undefined;
|
||||
this._decoded = maybeSort(mappings, isString);
|
||||
}
|
||||
this._decodedMemo = memoizedState();
|
||||
this._bySources = undefined;
|
||||
this._bySourceMemos = undefined;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
|
||||
* with public access modifiers.
|
||||
*/
|
||||
function cast(map) {
|
||||
return map;
|
||||
}
|
||||
/**
|
||||
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
|
||||
*/
|
||||
function encodedMappings(map) {
|
||||
var _a;
|
||||
var _b;
|
||||
return ((_a = (_b = cast(map))._encoded) !== null && _a !== void 0 ? _a : (_b._encoded = encode(cast(map)._decoded)));
|
||||
}
|
||||
/**
|
||||
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
|
||||
*/
|
||||
function decodedMappings(map) {
|
||||
var _a;
|
||||
return ((_a = cast(map))._decoded || (_a._decoded = decode(cast(map)._encoded)));
|
||||
}
|
||||
/**
|
||||
* A low-level API to find the segment associated with a generated line/column (think, from a
|
||||
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
|
||||
*/
|
||||
function traceSegment(map, line, column) {
|
||||
const decoded = decodedMappings(map);
|
||||
// It's common for parent source maps to have pointers to lines that have no
|
||||
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||
if (line >= decoded.length)
|
||||
return null;
|
||||
const segments = decoded[line];
|
||||
const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, GREATEST_LOWER_BOUND);
|
||||
return index === -1 ? null : segments[index];
|
||||
}
|
||||
/**
|
||||
* A higher-level API to find the source/line/column associated with a generated line/column
|
||||
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
|
||||
* `source-map` library.
|
||||
*/
|
||||
function originalPositionFor(map, needle) {
|
||||
let { line, column, bias } = needle;
|
||||
line--;
|
||||
if (line < 0)
|
||||
throw new Error(LINE_GTR_ZERO);
|
||||
if (column < 0)
|
||||
throw new Error(COL_GTR_EQ_ZERO);
|
||||
const decoded = decodedMappings(map);
|
||||
// It's common for parent source maps to have pointers to lines that have no
|
||||
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||
if (line >= decoded.length)
|
||||
return OMapping(null, null, null, null);
|
||||
const segments = decoded[line];
|
||||
const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
|
||||
if (index === -1)
|
||||
return OMapping(null, null, null, null);
|
||||
const segment = segments[index];
|
||||
if (segment.length === 1)
|
||||
return OMapping(null, null, null, null);
|
||||
const { names, resolvedSources } = map;
|
||||
return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
|
||||
}
|
||||
/**
|
||||
* Finds the generated line/column position of the provided source/line/column source position.
|
||||
*/
|
||||
function generatedPositionFor(map, needle) {
|
||||
const { source, line, column, bias } = needle;
|
||||
return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);
|
||||
}
|
||||
/**
|
||||
* Finds all generated line/column positions of the provided source/line/column source position.
|
||||
*/
|
||||
function allGeneratedPositionsFor(map, needle) {
|
||||
const { source, line, column, bias } = needle;
|
||||
// SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit.
|
||||
return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true);
|
||||
}
|
||||
/**
|
||||
* Iterates each mapping in generated position order.
|
||||
*/
|
||||
function eachMapping(map, cb) {
|
||||
const decoded = decodedMappings(map);
|
||||
const { names, resolvedSources } = map;
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const line = decoded[i];
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
const generatedLine = i + 1;
|
||||
const generatedColumn = seg[0];
|
||||
let source = null;
|
||||
let originalLine = null;
|
||||
let originalColumn = null;
|
||||
let name = null;
|
||||
if (seg.length !== 1) {
|
||||
source = resolvedSources[seg[1]];
|
||||
originalLine = seg[2] + 1;
|
||||
originalColumn = seg[3];
|
||||
}
|
||||
if (seg.length === 5)
|
||||
name = names[seg[4]];
|
||||
cb({
|
||||
generatedLine,
|
||||
generatedColumn,
|
||||
source,
|
||||
originalLine,
|
||||
originalColumn,
|
||||
name,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
function sourceIndex(map, source) {
|
||||
const { sources, resolvedSources } = map;
|
||||
let index = sources.indexOf(source);
|
||||
if (index === -1)
|
||||
index = resolvedSources.indexOf(source);
|
||||
return index;
|
||||
}
|
||||
/**
|
||||
* Retrieves the source content for a particular source, if its found. Returns null if not.
|
||||
*/
|
||||
function sourceContentFor(map, source) {
|
||||
const { sourcesContent } = map;
|
||||
if (sourcesContent == null)
|
||||
return null;
|
||||
const index = sourceIndex(map, source);
|
||||
return index === -1 ? null : sourcesContent[index];
|
||||
}
|
||||
/**
|
||||
* Determines if the source is marked to ignore by the source map.
|
||||
*/
|
||||
function isIgnored(map, source) {
|
||||
const { ignoreList } = map;
|
||||
if (ignoreList == null)
|
||||
return false;
|
||||
const index = sourceIndex(map, source);
|
||||
return index === -1 ? false : ignoreList.includes(index);
|
||||
}
|
||||
/**
|
||||
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
|
||||
* maps.
|
||||
*/
|
||||
function presortedDecodedMap(map, mapUrl) {
|
||||
const tracer = new TraceMap(clone(map, []), mapUrl);
|
||||
cast(tracer)._decoded = map.mappings;
|
||||
return tracer;
|
||||
}
|
||||
/**
|
||||
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
function decodedMap(map) {
|
||||
return clone(map, decodedMappings(map));
|
||||
}
|
||||
/**
|
||||
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
function encodedMap(map) {
|
||||
return clone(map, encodedMappings(map));
|
||||
}
|
||||
function clone(map, mappings) {
|
||||
return {
|
||||
version: map.version,
|
||||
file: map.file,
|
||||
names: map.names,
|
||||
sourceRoot: map.sourceRoot,
|
||||
sources: map.sources,
|
||||
sourcesContent: map.sourcesContent,
|
||||
mappings,
|
||||
ignoreList: map.ignoreList || map.x_google_ignoreList,
|
||||
};
|
||||
}
|
||||
function OMapping(source, line, column, name) {
|
||||
return { source, line, column, name };
|
||||
}
|
||||
function GMapping(line, column) {
|
||||
return { line, column };
|
||||
}
|
||||
function traceSegmentInternal(segments, memo, line, column, bias) {
|
||||
let index = memoizedBinarySearch(segments, column, memo, line);
|
||||
if (found) {
|
||||
index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
|
||||
}
|
||||
else if (bias === LEAST_UPPER_BOUND)
|
||||
index++;
|
||||
if (index === -1 || index === segments.length)
|
||||
return -1;
|
||||
return index;
|
||||
}
|
||||
function sliceGeneratedPositions(segments, memo, line, column, bias) {
|
||||
let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
|
||||
// We ignored the bias when tracing the segment so that we're guarnateed to find the first (in
|
||||
// insertion order) segment that matched. Even if we did respect the bias when tracing, we would
|
||||
// still need to call `lowerBound()` to find the first segment, which is slower than just looking
|
||||
// for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the
|
||||
// binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to
|
||||
// match LEAST_UPPER_BOUND.
|
||||
if (!found && bias === LEAST_UPPER_BOUND)
|
||||
min++;
|
||||
if (min === -1 || min === segments.length)
|
||||
return [];
|
||||
// We may have found the segment that started at an earlier column. If this is the case, then we
|
||||
// need to slice all generated segments that match _that_ column, because all such segments span
|
||||
// to our desired column.
|
||||
const matchedColumn = found ? column : segments[min][COLUMN];
|
||||
// The binary search is not guaranteed to find the lower bound when a match wasn't found.
|
||||
if (!found)
|
||||
min = lowerBound(segments, matchedColumn, min);
|
||||
const max = upperBound(segments, matchedColumn, min);
|
||||
const result = [];
|
||||
for (; min <= max; min++) {
|
||||
const segment = segments[min];
|
||||
result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
function generatedPosition(map, source, line, column, bias, all) {
|
||||
var _a;
|
||||
line--;
|
||||
if (line < 0)
|
||||
throw new Error(LINE_GTR_ZERO);
|
||||
if (column < 0)
|
||||
throw new Error(COL_GTR_EQ_ZERO);
|
||||
const { sources, resolvedSources } = map;
|
||||
let sourceIndex = sources.indexOf(source);
|
||||
if (sourceIndex === -1)
|
||||
sourceIndex = resolvedSources.indexOf(source);
|
||||
if (sourceIndex === -1)
|
||||
return all ? [] : GMapping(null, null);
|
||||
const generated = ((_a = cast(map))._bySources || (_a._bySources = buildBySources(decodedMappings(map), (cast(map)._bySourceMemos = sources.map(memoizedState)))));
|
||||
const segments = generated[sourceIndex][line];
|
||||
if (segments == null)
|
||||
return all ? [] : GMapping(null, null);
|
||||
const memo = cast(map)._bySourceMemos[sourceIndex];
|
||||
if (all)
|
||||
return sliceGeneratedPositions(segments, memo, line, column, bias);
|
||||
const index = traceSegmentInternal(segments, memo, line, column, bias);
|
||||
if (index === -1)
|
||||
return GMapping(null, null);
|
||||
const segment = segments[index];
|
||||
return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
|
||||
}
|
||||
|
||||
export { AnyMap, GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap, allGeneratedPositionsFor, decodedMap, decodedMappings, eachMapping, encodedMap, encodedMappings, generatedPositionFor, isIgnored, originalPositionFor, presortedDecodedMap, sourceContentFor, traceSegment };
|
||||
//# sourceMappingURL=trace-mapping.mjs.map
|
||||
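For orientation, the ESM build above is consumed roughly like this. A minimal sketch, assuming an invented one-segment source map; it uses only names from the export list at the bottom of trace-mapping.mjs:

import { TraceMap, originalPositionFor, generatedPositionFor } from '@jridgewell/trace-mapping';

// An invented one-segment map: generated line 1, column 0 maps to
// line 1, column 0 of input.js ('AAAA' is the VLQ for [0, 0, 0, 0]).
const tracer = new TraceMap({
    version: 3,
    sources: ['input.js'],
    names: [],
    mappings: 'AAAA',
});

// `line` is 1-based, `column` is 0-based (legacy `source-map` behavior).
originalPositionFor(tracer, { line: 1, column: 0 });
// => { source: 'input.js', line: 1, column: 0, name: null }

generatedPositionFor(tracer, { source: 'input.js', line: 1, column: 0 });
// => { line: 1, column: 0 }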
1
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
600
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js
generated
vendored
Normal file
@ -0,0 +1,600 @@
(function (global, factory) {
    typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@jridgewell/sourcemap-codec'), require('@jridgewell/resolve-uri')) :
    typeof define === 'function' && define.amd ? define(['exports', '@jridgewell/sourcemap-codec', '@jridgewell/resolve-uri'], factory) :
    (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.traceMapping = {}, global.sourcemapCodec, global.resolveURI));
})(this, (function (exports, sourcemapCodec, resolveUri) { 'use strict';

    function resolve(input, base) {
        // The base is always treated as a directory, if it's not empty.
        // https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
        // https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
        if (base && !base.endsWith('/'))
            base += '/';
        return resolveUri(input, base);
    }

    /**
     * Removes everything after the last "/", but leaves the slash.
     */
    function stripFilename(path) {
        if (!path)
            return '';
        const index = path.lastIndexOf('/');
        return path.slice(0, index + 1);
    }

    const COLUMN = 0;
    const SOURCES_INDEX = 1;
    const SOURCE_LINE = 2;
    const SOURCE_COLUMN = 3;
    const NAMES_INDEX = 4;
    const REV_GENERATED_LINE = 1;
    const REV_GENERATED_COLUMN = 2;

    function maybeSort(mappings, owned) {
        const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
        if (unsortedIndex === mappings.length)
            return mappings;
        // If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
        // not, we do not want to modify the consumer's input array.
        if (!owned)
            mappings = mappings.slice();
        for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
            mappings[i] = sortSegments(mappings[i], owned);
        }
        return mappings;
    }
    function nextUnsortedSegmentLine(mappings, start) {
        for (let i = start; i < mappings.length; i++) {
            if (!isSorted(mappings[i]))
                return i;
        }
        return mappings.length;
    }
    function isSorted(line) {
        for (let j = 1; j < line.length; j++) {
            if (line[j][COLUMN] < line[j - 1][COLUMN]) {
                return false;
            }
        }
        return true;
    }
    function sortSegments(line, owned) {
        if (!owned)
            line = line.slice();
        return line.sort(sortComparator);
    }
    function sortComparator(a, b) {
        return a[COLUMN] - b[COLUMN];
    }

    let found = false;
    /**
     * A binary search implementation that returns the index if a match is found.
     * If no match is found, then the left-index (the index associated with the item that comes just
     * before the desired index) is returned. To maintain proper sort order, a splice would happen at
     * the next index:
     *
     * ```js
     * const array = [1, 3];
     * const needle = 2;
     * const index = binarySearch(array, needle, (item, needle) => item - needle);
     *
     * assert.equal(index, 0);
     * array.splice(index + 1, 0, needle);
     * assert.deepEqual(array, [1, 2, 3]);
     * ```
     */
    function binarySearch(haystack, needle, low, high) {
        while (low <= high) {
            const mid = low + ((high - low) >> 1);
            const cmp = haystack[mid][COLUMN] - needle;
            if (cmp === 0) {
                found = true;
                return mid;
            }
            if (cmp < 0) {
                low = mid + 1;
            }
            else {
                high = mid - 1;
            }
        }
        found = false;
        return low - 1;
    }
    function upperBound(haystack, needle, index) {
        for (let i = index + 1; i < haystack.length; index = i++) {
            if (haystack[i][COLUMN] !== needle)
                break;
        }
        return index;
    }
    function lowerBound(haystack, needle, index) {
        for (let i = index - 1; i >= 0; index = i--) {
            if (haystack[i][COLUMN] !== needle)
                break;
        }
        return index;
    }
    function memoizedState() {
        return {
            lastKey: -1,
            lastNeedle: -1,
            lastIndex: -1,
        };
    }
    /**
     * This overly complicated beast is just to record the last tested line/column and the resulting
     * index, allowing us to skip a few tests if mappings are monotonically increasing.
     */
    function memoizedBinarySearch(haystack, needle, state, key) {
        const { lastKey, lastNeedle, lastIndex } = state;
        let low = 0;
        let high = haystack.length - 1;
        if (key === lastKey) {
            if (needle === lastNeedle) {
                found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
                return lastIndex;
            }
            if (needle >= lastNeedle) {
                // lastIndex may be -1 if the previous needle was not found.
                low = lastIndex === -1 ? 0 : lastIndex;
            }
            else {
                high = lastIndex;
            }
        }
        state.lastKey = key;
        state.lastNeedle = needle;
        return (state.lastIndex = binarySearch(haystack, needle, low, high));
    }

    // Rebuilds the original source files, with mappings that are ordered by source line/column instead
    // of generated line/column.
    function buildBySources(decoded, memos) {
        const sources = memos.map(buildNullArray);
        for (let i = 0; i < decoded.length; i++) {
            const line = decoded[i];
            for (let j = 0; j < line.length; j++) {
                const seg = line[j];
                if (seg.length === 1)
                    continue;
                const sourceIndex = seg[SOURCES_INDEX];
                const sourceLine = seg[SOURCE_LINE];
                const sourceColumn = seg[SOURCE_COLUMN];
                const originalSource = sources[sourceIndex];
                const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
                const memo = memos[sourceIndex];
                // The binary search either found a match, or it found the left-index just before where the
                // segment should go. Either way, we want to insert after that. And there may be multiple
                // generated segments associated with an original location, so we may need to move several
                // indexes before we find where we need to insert.
                let index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
                memo.lastIndex = ++index;
                insert(originalLine, index, [sourceColumn, i, seg[COLUMN]]);
            }
        }
        return sources;
    }
    function insert(array, index, value) {
        for (let i = array.length; i > index; i--) {
            array[i] = array[i - 1];
        }
        array[index] = value;
    }
    // Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
    // a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
    // Numeric properties on objects are magically sorted in ascending order by the engine regardless of
    // the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
    // order when iterating with for-in.
    function buildNullArray() {
        return { __proto__: null };
    }

    const AnyMap = function (map, mapUrl) {
        const parsed = parse(map);
        if (!('sections' in parsed)) {
            return new TraceMap(parsed, mapUrl);
        }
        const mappings = [];
        const sources = [];
        const sourcesContent = [];
        const names = [];
        const ignoreList = [];
        recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, ignoreList, 0, 0, Infinity, Infinity);
        const joined = {
            version: 3,
            file: parsed.file,
            names,
            sources,
            sourcesContent,
            mappings,
            ignoreList,
        };
        return presortedDecodedMap(joined);
    };
    function parse(map) {
        return typeof map === 'string' ? JSON.parse(map) : map;
    }
    function recurse(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
        const { sections } = input;
        for (let i = 0; i < sections.length; i++) {
            const { map, offset } = sections[i];
            let sl = stopLine;
            let sc = stopColumn;
            if (i + 1 < sections.length) {
                const nextOffset = sections[i + 1].offset;
                sl = Math.min(stopLine, lineOffset + nextOffset.line);
                if (sl === stopLine) {
                    sc = Math.min(stopColumn, columnOffset + nextOffset.column);
                }
                else if (sl < stopLine) {
                    sc = columnOffset + nextOffset.column;
                }
            }
            addSection(map, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
        }
    }
    function addSection(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
        const parsed = parse(input);
        if ('sections' in parsed)
            return recurse(...arguments);
        const map = new TraceMap(parsed, mapUrl);
        const sourcesOffset = sources.length;
        const namesOffset = names.length;
        const decoded = decodedMappings(map);
        const { resolvedSources, sourcesContent: contents, ignoreList: ignores } = map;
        append(sources, resolvedSources);
        append(names, map.names);
        if (contents)
            append(sourcesContent, contents);
        else
            for (let i = 0; i < resolvedSources.length; i++)
                sourcesContent.push(null);
        if (ignores)
            for (let i = 0; i < ignores.length; i++)
                ignoreList.push(ignores[i] + sourcesOffset);
        for (let i = 0; i < decoded.length; i++) {
            const lineI = lineOffset + i;
            // We can only add so many lines before we step into the range that the next section's map
            // controls. When we get to the last line, then we'll start checking the segments to see if
            // they've crossed into the column range. But it may not have any columns that overstep, so we
            // still need to check that we don't overstep lines, too.
            if (lineI > stopLine)
                return;
            // The out line may already exist in mappings (if we're continuing the line started by a
            // previous section). Or, we may have jumped ahead several lines to start this section.
            const out = getLine(mappings, lineI);
            // On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
            // map can be multiple lines), it doesn't.
            const cOffset = i === 0 ? columnOffset : 0;
            const line = decoded[i];
            for (let j = 0; j < line.length; j++) {
                const seg = line[j];
                const column = cOffset + seg[COLUMN];
                // If this segment steps into the column range that the next section's map controls, we need
                // to stop early.
                if (lineI === stopLine && column >= stopColumn)
                    return;
                if (seg.length === 1) {
                    out.push([column]);
                    continue;
                }
                const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
                const sourceLine = seg[SOURCE_LINE];
                const sourceColumn = seg[SOURCE_COLUMN];
                out.push(seg.length === 4
                    ? [column, sourcesIndex, sourceLine, sourceColumn]
                    : [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
            }
        }
    }
    function append(arr, other) {
        for (let i = 0; i < other.length; i++)
            arr.push(other[i]);
    }
    function getLine(arr, index) {
        for (let i = arr.length; i <= index; i++)
            arr[i] = [];
        return arr[index];
    }

    const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
    const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
    const LEAST_UPPER_BOUND = -1;
    const GREATEST_LOWER_BOUND = 1;
    class TraceMap {
        constructor(map, mapUrl) {
            const isString = typeof map === 'string';
            if (!isString && map._decodedMemo)
                return map;
            const parsed = (isString ? JSON.parse(map) : map);
            const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
            this.version = version;
            this.file = file;
            this.names = names || [];
            this.sourceRoot = sourceRoot;
            this.sources = sources;
            this.sourcesContent = sourcesContent;
            this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || undefined;
            const from = resolve(sourceRoot || '', stripFilename(mapUrl));
            this.resolvedSources = sources.map((s) => resolve(s || '', from));
            const { mappings } = parsed;
            if (typeof mappings === 'string') {
                this._encoded = mappings;
                this._decoded = undefined;
            }
            else {
                this._encoded = undefined;
                this._decoded = maybeSort(mappings, isString);
            }
            this._decodedMemo = memoizedState();
            this._bySources = undefined;
            this._bySourceMemos = undefined;
        }
    }
    /**
     * Typescript doesn't allow friend access to private fields, so this just casts the map into a type
     * with public access modifiers.
     */
    function cast(map) {
        return map;
    }
    /**
     * Returns the encoded (VLQ string) form of the SourceMap's mappings field.
     */
    function encodedMappings(map) {
        var _a;
        var _b;
        return ((_a = (_b = cast(map))._encoded) !== null && _a !== void 0 ? _a : (_b._encoded = sourcemapCodec.encode(cast(map)._decoded)));
    }
    /**
     * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
     */
    function decodedMappings(map) {
        var _a;
        return ((_a = cast(map))._decoded || (_a._decoded = sourcemapCodec.decode(cast(map)._encoded)));
    }
    /**
     * A low-level API to find the segment associated with a generated line/column (think, from a
     * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
     */
    function traceSegment(map, line, column) {
        const decoded = decodedMappings(map);
        // It's common for parent source maps to have pointers to lines that have no
        // mapping (like a "//# sourceMappingURL=") at the end of the child file.
        if (line >= decoded.length)
            return null;
        const segments = decoded[line];
        const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, GREATEST_LOWER_BOUND);
        return index === -1 ? null : segments[index];
    }
    /**
     * A higher-level API to find the source/line/column associated with a generated line/column
     * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
     * `source-map` library.
     */
    function originalPositionFor(map, needle) {
        let { line, column, bias } = needle;
        line--;
        if (line < 0)
            throw new Error(LINE_GTR_ZERO);
        if (column < 0)
            throw new Error(COL_GTR_EQ_ZERO);
        const decoded = decodedMappings(map);
        // It's common for parent source maps to have pointers to lines that have no
        // mapping (like a "//# sourceMappingURL=") at the end of the child file.
        if (line >= decoded.length)
            return OMapping(null, null, null, null);
        const segments = decoded[line];
        const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
        if (index === -1)
            return OMapping(null, null, null, null);
        const segment = segments[index];
        if (segment.length === 1)
            return OMapping(null, null, null, null);
        const { names, resolvedSources } = map;
        return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
    }
    /**
     * Finds the generated line/column position of the provided source/line/column source position.
     */
    function generatedPositionFor(map, needle) {
        const { source, line, column, bias } = needle;
        return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);
    }
    /**
     * Finds all generated line/column positions of the provided source/line/column source position.
     */
    function allGeneratedPositionsFor(map, needle) {
        const { source, line, column, bias } = needle;
        // SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit.
        return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true);
    }
    /**
     * Iterates each mapping in generated position order.
     */
    function eachMapping(map, cb) {
        const decoded = decodedMappings(map);
        const { names, resolvedSources } = map;
        for (let i = 0; i < decoded.length; i++) {
            const line = decoded[i];
            for (let j = 0; j < line.length; j++) {
                const seg = line[j];
                const generatedLine = i + 1;
                const generatedColumn = seg[0];
                let source = null;
                let originalLine = null;
                let originalColumn = null;
                let name = null;
                if (seg.length !== 1) {
                    source = resolvedSources[seg[1]];
                    originalLine = seg[2] + 1;
                    originalColumn = seg[3];
                }
                if (seg.length === 5)
                    name = names[seg[4]];
                cb({
                    generatedLine,
                    generatedColumn,
                    source,
                    originalLine,
                    originalColumn,
                    name,
                });
            }
        }
    }
    function sourceIndex(map, source) {
        const { sources, resolvedSources } = map;
        let index = sources.indexOf(source);
        if (index === -1)
            index = resolvedSources.indexOf(source);
        return index;
    }
    /**
     * Retrieves the source content for a particular source, if it's found. Returns null if not.
     */
    function sourceContentFor(map, source) {
        const { sourcesContent } = map;
        if (sourcesContent == null)
            return null;
        const index = sourceIndex(map, source);
        return index === -1 ? null : sourcesContent[index];
    }
    /**
     * Determines if the source is marked to ignore by the source map.
     */
    function isIgnored(map, source) {
        const { ignoreList } = map;
        if (ignoreList == null)
            return false;
        const index = sourceIndex(map, source);
        return index === -1 ? false : ignoreList.includes(index);
    }
    /**
     * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
     * maps.
     */
    function presortedDecodedMap(map, mapUrl) {
        const tracer = new TraceMap(clone(map, []), mapUrl);
        cast(tracer)._decoded = map.mappings;
        return tracer;
    }
    /**
     * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
     * a sourcemap, or to JSON.stringify.
     */
    function decodedMap(map) {
        return clone(map, decodedMappings(map));
    }
    /**
     * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
     * a sourcemap, or to JSON.stringify.
     */
    function encodedMap(map) {
        return clone(map, encodedMappings(map));
    }
    function clone(map, mappings) {
        return {
            version: map.version,
            file: map.file,
            names: map.names,
            sourceRoot: map.sourceRoot,
            sources: map.sources,
            sourcesContent: map.sourcesContent,
            mappings,
            ignoreList: map.ignoreList || map.x_google_ignoreList,
        };
    }
    function OMapping(source, line, column, name) {
        return { source, line, column, name };
    }
    function GMapping(line, column) {
        return { line, column };
    }
    function traceSegmentInternal(segments, memo, line, column, bias) {
        let index = memoizedBinarySearch(segments, column, memo, line);
        if (found) {
            index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
        }
        else if (bias === LEAST_UPPER_BOUND)
            index++;
        if (index === -1 || index === segments.length)
            return -1;
        return index;
    }
    function sliceGeneratedPositions(segments, memo, line, column, bias) {
        let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
        // We ignored the bias when tracing the segment so that we're guaranteed to find the first (in
        // insertion order) segment that matched. Even if we did respect the bias when tracing, we would
        // still need to call `lowerBound()` to find the first segment, which is slower than just looking
        // for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the
        // binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to
        // match LEAST_UPPER_BOUND.
        if (!found && bias === LEAST_UPPER_BOUND)
            min++;
        if (min === -1 || min === segments.length)
            return [];
        // We may have found the segment that started at an earlier column. If this is the case, then we
        // need to slice all generated segments that match _that_ column, because all such segments span
        // to our desired column.
        const matchedColumn = found ? column : segments[min][COLUMN];
        // The binary search is not guaranteed to find the lower bound when a match wasn't found.
        if (!found)
            min = lowerBound(segments, matchedColumn, min);
        const max = upperBound(segments, matchedColumn, min);
        const result = [];
        for (; min <= max; min++) {
            const segment = segments[min];
            result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
        }
        return result;
    }
    function generatedPosition(map, source, line, column, bias, all) {
        var _a;
        line--;
        if (line < 0)
            throw new Error(LINE_GTR_ZERO);
        if (column < 0)
            throw new Error(COL_GTR_EQ_ZERO);
        const { sources, resolvedSources } = map;
        let sourceIndex = sources.indexOf(source);
        if (sourceIndex === -1)
            sourceIndex = resolvedSources.indexOf(source);
        if (sourceIndex === -1)
            return all ? [] : GMapping(null, null);
        const generated = ((_a = cast(map))._bySources || (_a._bySources = buildBySources(decodedMappings(map), (cast(map)._bySourceMemos = sources.map(memoizedState)))));
        const segments = generated[sourceIndex][line];
        if (segments == null)
            return all ? [] : GMapping(null, null);
        const memo = cast(map)._bySourceMemos[sourceIndex];
        if (all)
            return sliceGeneratedPositions(segments, memo, line, column, bias);
        const index = traceSegmentInternal(segments, memo, line, column, bias);
        if (index === -1)
            return GMapping(null, null);
        const segment = segments[index];
        return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
    }

    exports.AnyMap = AnyMap;
    exports.GREATEST_LOWER_BOUND = GREATEST_LOWER_BOUND;
    exports.LEAST_UPPER_BOUND = LEAST_UPPER_BOUND;
    exports.TraceMap = TraceMap;
    exports.allGeneratedPositionsFor = allGeneratedPositionsFor;
    exports.decodedMap = decodedMap;
    exports.decodedMappings = decodedMappings;
    exports.eachMapping = eachMapping;
    exports.encodedMap = encodedMap;
    exports.encodedMappings = encodedMappings;
    exports.generatedPositionFor = generatedPositionFor;
    exports.isIgnored = isIgnored;
    exports.originalPositionFor = originalPositionFor;
    exports.presortedDecodedMap = presortedDecodedMap;
    exports.sourceContentFor = sourceContentFor;
    exports.traceSegment = traceSegment;

}));
//# sourceMappingURL=trace-mapping.umd.js.map
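The UMD wrapper above makes the same API available to CommonJS consumers. A minimal sketch, with the map literal again invented for illustration:

const { TraceMap, eachMapping } = require('@jridgewell/trace-mapping');

const tracer = new TraceMap({
    version: 3,
    sources: ['input.js'],
    names: [],
    mappings: 'AAAA',
});

// Visits mappings in generated order; unmapped fields arrive as null.
eachMapping(tracer, (m) => {
    console.log(m.generatedLine, m.generatedColumn, m.source, m.originalLine);
});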
1
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
8
node_modules/@jridgewell/trace-mapping/dist/types/any-map.d.ts
generated
vendored
Normal file
@ -0,0 +1,8 @@
import { TraceMap } from './trace-mapping';
import type { SectionedSourceMapInput } from './types';
type AnyMap = {
    new (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
    (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
};
export declare const AnyMap: AnyMap;
export {};
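The AnyMap declaration accepts either a flat map or a sectioned map. A sketch of the sectioned form, with invented offsets and an invented child map:

import { AnyMap, originalPositionFor } from '@jridgewell/trace-mapping';

// Each section splices a child map in at a generated line/column offset.
const tracer = AnyMap({
    version: 3,
    sections: [
        {
            offset: { line: 0, column: 0 },
            map: { version: 3, sources: ['a.js'], names: [], mappings: 'AAAA' },
        },
    ],
});

originalPositionFor(tracer, { line: 1, column: 0 }).source; // => 'a.js'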
32
node_modules/@jridgewell/trace-mapping/dist/types/binary-search.d.ts
generated
vendored
Normal file
@ -0,0 +1,32 @@
import type { SourceMapSegment, ReverseSegment } from './sourcemap-segment';
export type MemoState = {
    lastKey: number;
    lastNeedle: number;
    lastIndex: number;
};
export declare let found: boolean;
/**
 * A binary search implementation that returns the index if a match is found.
 * If no match is found, then the left-index (the index associated with the item that comes just
 * before the desired index) is returned. To maintain proper sort order, a splice would happen at
 * the next index:
 *
 * ```js
 * const array = [1, 3];
 * const needle = 2;
 * const index = binarySearch(array, needle, (item, needle) => item - needle);
 *
 * assert.equal(index, 0);
 * array.splice(index + 1, 0, needle);
 * assert.deepEqual(array, [1, 2, 3]);
 * ```
 */
export declare function binarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, low: number, high: number): number;
export declare function upperBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
export declare function lowerBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
export declare function memoizedState(): MemoState;
/**
 * This overly complicated beast is just to record the last tested line/column and the resulting
 * index, allowing us to skip a few tests if mappings are monotonically increasing.
 */
export declare function memoizedBinarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, state: MemoState, key: number): number;
7
node_modules/@jridgewell/trace-mapping/dist/types/by-source.d.ts
generated
vendored
Normal file
@ -0,0 +1,7 @@
import type { ReverseSegment, SourceMapSegment } from './sourcemap-segment';
import type { MemoState } from './binary-search';
export type Source = {
    __proto__: null;
    [line: number]: Exclude<ReverseSegment, [number]>[];
};
export default function buildBySources(decoded: readonly SourceMapSegment[][], memos: MemoState[]): Source[];
1
node_modules/@jridgewell/trace-mapping/dist/types/resolve.d.ts
generated
vendored
Normal file
@ -0,0 +1 @@
export default function resolve(input: string, base: string | undefined): string;
2
node_modules/@jridgewell/trace-mapping/dist/types/sort.d.ts
generated
vendored
Normal file
@ -0,0 +1,2 @@
import type { SourceMapSegment } from './sourcemap-segment';
export default function maybeSort(mappings: SourceMapSegment[][], owned: boolean): SourceMapSegment[][];
16
node_modules/@jridgewell/trace-mapping/dist/types/sourcemap-segment.d.ts
generated
vendored
Normal file
@ -0,0 +1,16 @@
type GeneratedColumn = number;
type SourcesIndex = number;
type SourceLine = number;
type SourceColumn = number;
type NamesIndex = number;
type GeneratedLine = number;
export type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
export type ReverseSegment = [SourceColumn, GeneratedLine, GeneratedColumn];
export declare const COLUMN = 0;
export declare const SOURCES_INDEX = 1;
export declare const SOURCE_LINE = 2;
export declare const SOURCE_COLUMN = 3;
export declare const NAMES_INDEX = 4;
export declare const REV_GENERATED_LINE = 1;
export declare const REV_GENERATED_COLUMN = 2;
export {};
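These segment tuples are exactly what decodedMappings returns for each generated line. An illustrative decoded line (values invented) showing all three shapes of the SourceMapSegment union:

// One decoded generated line holding all three tuple shapes:
const line = [
    [0],             // 1-tuple: generated column 0 maps to nothing
    [4, 0, 1, 2],    // 4-tuple: column 4 -> sources[0], source line 1, source column 2
    [9, 0, 1, 8, 3], // 5-tuple: the same, plus names[3]
];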
4
node_modules/@jridgewell/trace-mapping/dist/types/strip-filename.d.ts
generated
vendored
Normal file
@ -0,0 +1,4 @@
/**
 * Removes everything after the last "/", but leaves the slash.
 */
export default function stripFilename(path: string | undefined | null): string;
79
node_modules/@jridgewell/trace-mapping/dist/types/trace-mapping.d.ts
generated
vendored
Normal file
@ -0,0 +1,79 @@
import type { SourceMapSegment } from './sourcemap-segment';
import type { SourceMapV3, DecodedSourceMap, EncodedSourceMap, InvalidOriginalMapping, OriginalMapping, InvalidGeneratedMapping, GeneratedMapping, SourceMapInput, Needle, SourceNeedle, SourceMap, EachMapping } from './types';
export type { SourceMapSegment } from './sourcemap-segment';
export type { SourceMap, DecodedSourceMap, EncodedSourceMap, Section, SectionedSourceMap, SourceMapV3, Bias, EachMapping, GeneratedMapping, InvalidGeneratedMapping, InvalidOriginalMapping, Needle, OriginalMapping, OriginalMapping as Mapping, SectionedSourceMapInput, SourceMapInput, SourceNeedle, XInput, EncodedSourceMapXInput, DecodedSourceMapXInput, SectionedSourceMapXInput, SectionXInput, } from './types';
export declare const LEAST_UPPER_BOUND = -1;
export declare const GREATEST_LOWER_BOUND = 1;
export { AnyMap } from './any-map';
export declare class TraceMap implements SourceMap {
    version: SourceMapV3['version'];
    file: SourceMapV3['file'];
    names: SourceMapV3['names'];
    sourceRoot: SourceMapV3['sourceRoot'];
    sources: SourceMapV3['sources'];
    sourcesContent: SourceMapV3['sourcesContent'];
    ignoreList: SourceMapV3['ignoreList'];
    resolvedSources: string[];
    private _encoded;
    private _decoded;
    private _decodedMemo;
    private _bySources;
    private _bySourceMemos;
    constructor(map: SourceMapInput, mapUrl?: string | null);
}
/**
 * Returns the encoded (VLQ string) form of the SourceMap's mappings field.
 */
export declare function encodedMappings(map: TraceMap): EncodedSourceMap['mappings'];
/**
 * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
 */
export declare function decodedMappings(map: TraceMap): Readonly<DecodedSourceMap['mappings']>;
/**
 * A low-level API to find the segment associated with a generated line/column (think, from a
 * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
 */
export declare function traceSegment(map: TraceMap, line: number, column: number): Readonly<SourceMapSegment> | null;
/**
 * A higher-level API to find the source/line/column associated with a generated line/column
 * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
 * `source-map` library.
 */
export declare function originalPositionFor(map: TraceMap, needle: Needle): OriginalMapping | InvalidOriginalMapping;
/**
 * Finds the generated line/column position of the provided source/line/column source position.
 */
export declare function generatedPositionFor(map: TraceMap, needle: SourceNeedle): GeneratedMapping | InvalidGeneratedMapping;
/**
 * Finds all generated line/column positions of the provided source/line/column source position.
 */
export declare function allGeneratedPositionsFor(map: TraceMap, needle: SourceNeedle): GeneratedMapping[];
/**
 * Iterates each mapping in generated position order.
 */
export declare function eachMapping(map: TraceMap, cb: (mapping: EachMapping) => void): void;
/**
 * Retrieves the source content for a particular source, if it's found. Returns null if not.
 */
export declare function sourceContentFor(map: TraceMap, source: string): string | null;
/**
 * Determines if the source is marked to ignore by the source map.
 */
export declare function isIgnored(map: TraceMap, source: string): boolean;
/**
 * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
 * maps.
 */
export declare function presortedDecodedMap(map: DecodedSourceMap, mapUrl?: string): TraceMap;
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export declare function decodedMap(map: TraceMap): Omit<DecodedSourceMap, 'mappings'> & {
    mappings: readonly SourceMapSegment[][];
};
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export declare function encodedMap(map: TraceMap): EncodedSourceMap;
99
node_modules/@jridgewell/trace-mapping/dist/types/types.d.ts
generated
vendored
Normal file
@ -0,0 +1,99 @@
import type { SourceMapSegment } from './sourcemap-segment';
import type { GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap } from './trace-mapping';
export interface SourceMapV3 {
    file?: string | null;
    names: string[];
    sourceRoot?: string;
    sources: (string | null)[];
    sourcesContent?: (string | null)[];
    version: 3;
    ignoreList?: number[];
}
export interface EncodedSourceMap extends SourceMapV3 {
    mappings: string;
}
export interface DecodedSourceMap extends SourceMapV3 {
    mappings: SourceMapSegment[][];
}
export interface Section {
    offset: {
        line: number;
        column: number;
    };
    map: EncodedSourceMap | DecodedSourceMap | SectionedSourceMap;
}
export interface SectionedSourceMap {
    file?: string | null;
    sections: Section[];
    version: 3;
}
export type OriginalMapping = {
    source: string | null;
    line: number;
    column: number;
    name: string | null;
};
export type InvalidOriginalMapping = {
    source: null;
    line: null;
    column: null;
    name: null;
};
export type GeneratedMapping = {
    line: number;
    column: number;
};
export type InvalidGeneratedMapping = {
    line: null;
    column: null;
};
export type Bias = typeof GREATEST_LOWER_BOUND | typeof LEAST_UPPER_BOUND;
export type XInput = {
    x_google_ignoreList?: SourceMapV3['ignoreList'];
};
export type EncodedSourceMapXInput = EncodedSourceMap & XInput;
export type DecodedSourceMapXInput = DecodedSourceMap & XInput;
export type SectionedSourceMapXInput = Omit<SectionedSourceMap, 'sections'> & {
    sections: SectionXInput[];
};
export type SectionXInput = Omit<Section, 'map'> & {
    map: SectionedSourceMapInput;
};
export type SourceMapInput = string | EncodedSourceMapXInput | DecodedSourceMapXInput | TraceMap;
export type SectionedSourceMapInput = SourceMapInput | SectionedSourceMapXInput;
export type Needle = {
    line: number;
    column: number;
    bias?: Bias;
};
export type SourceNeedle = {
    source: string;
    line: number;
    column: number;
    bias?: Bias;
};
export type EachMapping = {
    generatedLine: number;
    generatedColumn: number;
    source: null;
    originalLine: null;
    originalColumn: null;
    name: null;
} | {
    generatedLine: number;
    generatedColumn: number;
    source: string | null;
    originalLine: number;
    originalColumn: number;
    name: string | null;
};
export declare abstract class SourceMap {
    version: SourceMapV3['version'];
    file: SourceMapV3['file'];
    names: SourceMapV3['names'];
    sourceRoot: SourceMapV3['sourceRoot'];
    sources: SourceMapV3['sources'];
    sourcesContent: SourceMapV3['sourcesContent'];
    resolvedSources: SourceMapV3['sources'];
    ignoreList: SourceMapV3['ignoreList'];
}
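The `OriginalMapping | InvalidOriginalMapping` unions above are shaped so a single null check narrows the whole result. A sketch under the assumption that these type names are re-exported from the package root, as the main declaration file suggests; `mustTrace` is a hypothetical helper:

```ts
import { TraceMap, originalPositionFor } from '@jridgewell/trace-mapping';
import type { Needle, OriginalMapping } from '@jridgewell/trace-mapping';

// InvalidOriginalMapping types every field as null, so checking `line`
// alone narrows the union to OriginalMapping (under strictNullChecks).
function mustTrace(map: TraceMap, needle: Needle): OriginalMapping {
  const pos = originalPositionFor(map, needle);
  if (pos.line === null) {
    throw new Error(`no mapping at ${needle.line}:${needle.column}`);
  }
  return pos;
}
```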
81
node_modules/@jridgewell/trace-mapping/package.json
generated
vendored
Normal file
@ -0,0 +1,81 @@
{
  "name": "@jridgewell/trace-mapping",
  "version": "0.3.25",
  "description": "Trace the original position through a source map",
  "keywords": [
    "source",
    "map"
  ],
  "main": "dist/trace-mapping.umd.js",
  "module": "dist/trace-mapping.mjs",
  "types": "dist/types/trace-mapping.d.ts",
  "files": [
    "dist"
  ],
  "exports": {
    ".": [
      {
        "types": "./dist/types/trace-mapping.d.ts",
        "browser": "./dist/trace-mapping.umd.js",
        "require": "./dist/trace-mapping.umd.js",
        "import": "./dist/trace-mapping.mjs"
      },
      "./dist/trace-mapping.umd.js"
    ],
    "./package.json": "./package.json"
  },
  "author": "Justin Ridgewell <justin@ridgewell.name>",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/jridgewell/trace-mapping.git"
  },
  "license": "MIT",
  "scripts": {
    "benchmark": "run-s build:rollup benchmark:*",
    "benchmark:install": "cd benchmark && npm install",
    "benchmark:only": "node --expose-gc benchmark/index.mjs",
    "build": "run-s -n build:*",
    "build:rollup": "rollup -c rollup.config.mjs",
    "build:ts": "tsc --project tsconfig.build.json",
    "lint": "run-s -n lint:*",
    "lint:prettier": "npm run test:lint:prettier -- --write",
    "lint:ts": "npm run test:lint:ts -- --fix",
    "prebuild": "rm -rf dist",
    "prepublishOnly": "npm run preversion",
    "preversion": "run-s test build",
    "test": "run-s -n test:lint test:only",
    "test:debug": "mocha --inspect-brk",
    "test:lint": "run-s -n test:lint:*",
    "test:lint:prettier": "prettier --check '{src,test}/**/*.ts' '**/*.md'",
    "test:lint:ts": "eslint '{src,test}/**/*.ts'",
    "test:only": "c8 mocha",
    "test:watch": "mocha --watch"
  },
  "devDependencies": {
    "@rollup/plugin-typescript": "11.1.6",
    "@types/mocha": "10.0.6",
    "@types/node": "20.11.20",
    "@typescript-eslint/eslint-plugin": "6.18.1",
    "@typescript-eslint/parser": "6.18.1",
    "benchmark": "2.1.4",
    "c8": "9.0.0",
    "esbuild": "0.19.11",
    "eslint": "8.56.0",
    "eslint-config-prettier": "9.1.0",
    "eslint-plugin-no-only-tests": "3.1.0",
    "mocha": "10.3.0",
    "npm-run-all": "4.1.5",
    "prettier": "3.1.1",
    "rollup": "4.9.4",
    "tsx": "4.7.0",
    "typescript": "5.3.3"
  },
  "dependencies": {
    "@jridgewell/resolve-uri": "^3.1.0",
    "@jridgewell/sourcemap-codec": "^1.4.14"
  }

,"_resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz"
,"_integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ=="
,"_from": "@jridgewell/trace-mapping@^0.3.25"
}
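The conditional `exports` map above decides which build a consumer receives. A brief sketch of the standard Node resolution behavior it triggers; the behavior described is Node's documented condition matching, nothing specific to this package beyond the paths:

```ts
// ESM: matches the "import" condition -> dist/trace-mapping.mjs
import { TraceMap } from '@jridgewell/trace-mapping';

// CJS: would match the "require" condition -> dist/trace-mapping.umd.js
// const { TraceMap } = require('@jridgewell/trace-mapping');

// TypeScript resolves the "types" condition -> dist/types/trace-mapping.d.ts;
// "./package.json" is the only other subpath consumers may import.
```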
21
node_modules/@types/eslint-scope/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
MIT License

Copyright (c) Microsoft Corporation.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE
90
node_modules/@types/eslint-scope/README.md
generated
vendored
Normal file
@ -0,0 +1,90 @@
# Installation
> `npm install --save @types/eslint-scope`

# Summary
This package contains type definitions for eslint-scope (https://github.com/eslint/eslint-scope).

# Details
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/eslint-scope.
## [index.d.ts](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/eslint-scope/index.d.ts)
````ts
import * as eslint from "eslint";
import * as estree from "estree";

export const version: string;

export class ScopeManager implements eslint.Scope.ScopeManager {
    scopes: Scope[];
    globalScope: Scope;
    acquire(node: {}, inner?: boolean): Scope | null;
    getDeclaredVariables(node: {}): Variable[];
}

export class Scope implements eslint.Scope.Scope {
    type:
        | "block"
        | "catch"
        | "class"
        | "for"
        | "function"
        | "function-expression-name"
        | "global"
        | "module"
        | "switch"
        | "with"
        | "TDZ";
    isStrict: boolean;
    upper: Scope | null;
    childScopes: Scope[];
    variableScope: Scope;
    block: estree.Node;
    variables: Variable[];
    set: Map<string, Variable>;
    references: Reference[];
    through: Reference[];
    functionExpressionScope: boolean;
}

export class Variable implements eslint.Scope.Variable {
    name: string;
    scope: Scope;
    identifiers: estree.Identifier[];
    references: Reference[];
    defs: eslint.Scope.Definition[];
}

export class Reference implements eslint.Scope.Reference {
    identifier: estree.Identifier;
    from: Scope;
    resolved: Variable | null;
    writeExpr: estree.Node | null;
    init: boolean;

    isWrite(): boolean;
    isRead(): boolean;
    isWriteOnly(): boolean;
    isReadOnly(): boolean;
    isReadWrite(): boolean;
}

export interface AnalysisOptions {
    optimistic?: boolean;
    directive?: boolean;
    ignoreEval?: boolean;
    nodejsScope?: boolean;
    impliedStrict?: boolean;
    fallback?: string | ((node: {}) => string[]);
    sourceType?: "script" | "module";
    ecmaVersion?: number;
}

export function analyze(ast: {}, options?: AnalysisOptions): ScopeManager;

````

### Additional Details
* Last updated: Mon, 06 Nov 2023 22:41:05 GMT
* Dependencies: [@types/eslint](https://npmjs.com/package/@types/eslint), [@types/estree](https://npmjs.com/package/@types/estree)

# Credits
These definitions were written by [Toru Nagashima](https://github.com/mysticatea).
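The README stops at the declarations. A minimal usage sketch, assuming `espree` (ESLint's parser) is installed alongside; the parser choice and the sample source string are assumptions, not part of these typings:

```ts
import * as espree from "espree";
import * as eslintScope from "eslint-scope";

// Parse first; eslint-scope analyzes an ESTree AST it is handed.
const ast = espree.parse("let x = 1; function f() { return x; }", {
  ecmaVersion: 2020,
  range: true, // scope analysis relies on node ranges
});

const scopeManager = eslintScope.analyze(ast, {
  ecmaVersion: 2020,
  sourceType: "script",
});

// The global scope declares `x` and `f`; f's function scope is a child.
for (const variable of scopeManager.globalScope.variables) {
  console.log(variable.name, "referenced", variable.references.length, "time(s)");
}
```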
71
node_modules/@types/eslint-scope/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,71 @@
import * as eslint from "eslint";
import * as estree from "estree";

export const version: string;

export class ScopeManager implements eslint.Scope.ScopeManager {
    scopes: Scope[];
    globalScope: Scope;
    acquire(node: {}, inner?: boolean): Scope | null;
    getDeclaredVariables(node: {}): Variable[];
}

export class Scope implements eslint.Scope.Scope {
    type:
        | "block"
        | "catch"
        | "class"
        | "for"
        | "function"
        | "function-expression-name"
        | "global"
        | "module"
        | "switch"
        | "with"
        | "TDZ";
    isStrict: boolean;
    upper: Scope | null;
    childScopes: Scope[];
    variableScope: Scope;
    block: estree.Node;
    variables: Variable[];
    set: Map<string, Variable>;
    references: Reference[];
    through: Reference[];
    functionExpressionScope: boolean;
}

export class Variable implements eslint.Scope.Variable {
    name: string;
    scope: Scope;
    identifiers: estree.Identifier[];
    references: Reference[];
    defs: eslint.Scope.Definition[];
}

export class Reference implements eslint.Scope.Reference {
    identifier: estree.Identifier;
    from: Scope;
    resolved: Variable | null;
    writeExpr: estree.Node | null;
    init: boolean;

    isWrite(): boolean;
    isRead(): boolean;
    isWriteOnly(): boolean;
    isReadOnly(): boolean;
    isReadWrite(): boolean;
}

export interface AnalysisOptions {
    optimistic?: boolean;
    directive?: boolean;
    ignoreEval?: boolean;
    nodejsScope?: boolean;
    impliedStrict?: boolean;
    fallback?: string | ((node: {}) => string[]);
    sourceType?: "script" | "module";
    ecmaVersion?: number;
}

export function analyze(ast: {}, options?: AnalysisOptions): ScopeManager;
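Since `index.d.ts` mirrors the README block above, one more sketch of how the `Scope` shape composes into a tree; `dumpScopes` is a hypothetical helper, not part of the package:

```ts
import type { Scope } from "eslint-scope";

// Hypothetical helper: depth-first walk over the scope tree, printing each
// scope's type and the variables it declares (per the shapes declared above).
function dumpScopes(scope: Scope, depth = 0): void {
  const names = scope.variables.map((v) => v.name).join(", ");
  console.log(`${"  ".repeat(depth)}${scope.type}: [${names}]`);
  for (const child of scope.childScopes) {
    dumpScopes(child, depth + 1);
  }
}
```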
32
node_modules/@types/eslint-scope/package.json
generated
vendored
Normal file
@ -0,0 +1,32 @@
{
    "name": "@types/eslint-scope",
    "version": "3.7.7",
    "description": "TypeScript definitions for eslint-scope",
    "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/eslint-scope",
    "license": "MIT",
    "contributors": [
        {
            "name": "Toru Nagashima",
            "githubUsername": "mysticatea",
            "url": "https://github.com/mysticatea"
        }
    ],
    "main": "",
    "types": "index.d.ts",
    "repository": {
        "type": "git",
        "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git",
        "directory": "types/eslint-scope"
    },
    "scripts": {},
    "dependencies": {
        "@types/eslint": "*",
        "@types/estree": "*"
    },
    "typesPublisherContentHash": "49eee35b78c19e2c83bc96ce190c7a88329006f876dd7f1fb378c1e8034fc8f2",
    "typeScriptVersion": "4.5"

,"_resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.7.tgz"
,"_integrity": "sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg=="
,"_from": "@types/eslint-scope@^3.7.7"
}
Some files were not shown because too many files have changed in this diff.