Website Structure
This commit is contained in:
parent
62812f2090
commit
71f0676a62
22365 changed files with 4265753 additions and 791 deletions
2
Frontend-Learner/node_modules/eslint-plugin-regexp/dist/utils/string-literal-parser/index.d.ts
generated
vendored
Normal file
2
Frontend-Learner/node_modules/eslint-plugin-regexp/dist/utils/string-literal-parser/index.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
// Public entry point of the string-literal-parser utility:
// re-exports the parser functions and the token type declarations.
export * from "./parser";
export * from "./tokens";
|
||||
18
Frontend-Learner/node_modules/eslint-plugin-regexp/dist/utils/string-literal-parser/index.js
generated
vendored
Normal file
18
Frontend-Learner/node_modules/eslint-plugin-regexp/dist/utils/string-literal-parser/index.js
generated
vendored
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
"use strict";
// TypeScript-compiler-emitted CommonJS interop helpers (generated output; do not hand-edit).
// __createBinding re-exports property `k` of module `m` on `o` under the name `k2`.
// When Object.create is available it installs a live getter (so late-bound exports
// stay in sync); otherwise it falls back to a plain one-time copy.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    // Re-wrap in a fresh getter unless the source descriptor is already a suitable accessor.
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// __exportStar copies every named (non-default) export of `m` onto `exports`,
// skipping names already defined there.
var __exportStar = (this && this.__exportStar) || function(m, exports) {
    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
// Flatten ./parser and ./tokens into this module's public surface.
__exportStar(require("./parser"), exports);
__exportStar(require("./tokens"), exports);
|
||||
17
Frontend-Learner/node_modules/eslint-plugin-regexp/dist/utils/string-literal-parser/parser.d.ts
generated
vendored
Normal file
17
Frontend-Learner/node_modules/eslint-plugin-regexp/dist/utils/string-literal-parser/parser.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
import type { Token } from "./tokens";
/** A parsed string literal: its tokens, decoded value, and [start, end) source offsets. */
export type StringLiteral = {
    tokens: Token[];
    value: string;
    range: [number, number];
};
/** Accepted ECMAScript versions, as edition numbers (3..12) or year forms (2015..2021). */
export type EcmaVersion = 3 | 5 | 6 | 2015 | 7 | 2016 | 8 | 2017 | 9 | 2018 | 10 | 2019 | 11 | 2020 | 12 | 2021;
/**
 * Parse a complete string literal (including its quote characters) from `source`.
 * `option.start`/`option.end` bound the scan; `option.ecmaVersion` selects which
 * escape sequences are legal.
 */
export declare function parseStringLiteral(source: string, option?: {
    start?: number;
    end?: number;
    ecmaVersion?: EcmaVersion;
}): StringLiteral;
/**
 * Lazily tokenize string content (no surrounding quotes) from `source`,
 * yielding one token per character or escape sequence.
 */
export declare function parseStringTokens(source: string, option?: {
    start?: number;
    end?: number;
    ecmaVersion?: EcmaVersion;
}): Generator<Token>;
|
||||
39
Frontend-Learner/node_modules/eslint-plugin-regexp/dist/utils/string-literal-parser/parser.js
generated
vendored
Normal file
39
Frontend-Learner/node_modules/eslint-plugin-regexp/dist/utils/string-literal-parser/parser.js
generated
vendored
Normal file
|
|
@ -0,0 +1,39 @@
|
|||
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// Public API of this module.
exports.parseStringLiteral = parseStringLiteral;
exports.parseStringTokens = parseStringTokens;
const tokenizer_1 = require("./tokenizer");
|
||||
/**
 * Parse a complete string literal (quotes included) starting at `option.start`.
 * Returns its tokens, the lazily decoded value, and the [start, end) source range.
 */
function parseStringLiteral(source, option) {
    const startIndex = option?.start ?? 0;
    // The code point at the start position is the opening quote character.
    const quote = source.codePointAt(startIndex);
    const rawVersion = option?.ecmaVersion ?? Infinity;
    // Edition numbers 6..11 are normalized to their year form (6 -> 2015, ...).
    const normalizedVersion =
        rawVersion >= 6 && rawVersion < 2015 ? rawVersion + 2009 : rawVersion;
    const tokenizer = new tokenizer_1.Tokenizer(source, {
        start: startIndex + 1, // skip past the opening quote
        end: option?.end,
        ecmaVersion: normalizedVersion,
    });
    const tokens = [...tokenizer.parseTokens(quote)];
    return {
        tokens,
        // Decoded string value is recomputed on each access.
        get value() {
            return tokens.map((token) => token.value).join("");
        },
        range: [startIndex, tokenizer.pos],
    };
}
|
||||
/**
 * Lazily tokenize string-literal content (no surrounding quotes expected),
 * starting at `option.start` and stopping at `option.end` or end of input.
 */
function* parseStringTokens(source, option) {
    const begin = option?.start ?? 0;
    const rawVersion = option?.ecmaVersion ?? Infinity;
    // Edition numbers 6..11 are normalized to their year form (6 -> 2015, ...).
    const normalizedVersion =
        rawVersion >= 6 && rawVersion < 2015 ? rawVersion + 2009 : rawVersion;
    const tokenizer = new tokenizer_1.Tokenizer(source, {
        start: begin,
        end: option?.end,
        ecmaVersion: normalizedVersion,
    });
    yield* tokenizer.parseTokens();
}
|
||||
16
Frontend-Learner/node_modules/eslint-plugin-regexp/dist/utils/string-literal-parser/tokenizer.d.ts
generated
vendored
Normal file
16
Frontend-Learner/node_modules/eslint-plugin-regexp/dist/utils/string-literal-parser/tokenizer.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
import type { Token } from "./tokens";
/** Low-level scanner for the contents of a string literal or template string. */
export declare class Tokenizer {
    private readonly source;
    /** Current index into `source`; advances as tokens are produced. */
    pos: number;
    private readonly end;
    private readonly ecmaVersion;
    constructor(source: string, options: {
        start: number;
        end?: number;
        ecmaVersion: number;
    });
    /**
     * Yield tokens until the `quote` code point or the end bound is reached.
     * Passing the backtick code point enables template-string rules.
     */
    parseTokens(quote?: number): Generator<Token>;
    private readEscape;
    private readUnicode;
    private readHex;
}
|
||||
196
Frontend-Learner/node_modules/eslint-plugin-regexp/dist/utils/string-literal-parser/tokenizer.js
generated
vendored
Normal file
196
Frontend-Learner/node_modules/eslint-plugin-regexp/dist/utils/string-literal-parser/tokenizer.js
generated
vendored
Normal file
|
|
@ -0,0 +1,196 @@
|
|||
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Tokenizer = void 0;
// Code points used while scanning, precomputed once at module load.
const CP_BACK_SLASH = "\\".codePointAt(0);
const CP_BACKTICK = "`".codePointAt(0);
const CP_CR = "\r".codePointAt(0);
const CP_LF = "\n".codePointAt(0);
const CP_OPENING_BRACE = "{".codePointAt(0);
// Range anchors for hex-digit decoding ('a'-'f', 'A'-'F').
const CP_a = "a".codePointAt(0);
const CP_A = "A".codePointAt(0);
// Single-character escape names (\n, \r, \t, \b, \v, \f) and escape introducers (\u, \x).
const CP_n = "n".codePointAt(0);
const CP_r = "r".codePointAt(0);
const CP_t = "t".codePointAt(0);
const CP_b = "b".codePointAt(0);
const CP_v = "v".codePointAt(0);
const CP_f = "f".codePointAt(0);
const CP_u = "u".codePointAt(0);
const CP_x = "x".codePointAt(0);
// Digit boundaries for octal ('0'-'7') and decimal ('8', '9') handling.
const CP_0 = "0".codePointAt(0);
const CP_7 = "7".codePointAt(0);
const CP_8 = "8".codePointAt(0);
const CP_9 = "9".codePointAt(0);
|
||||
// Scanner over the body of a string literal / template string. `pos` always
// points at the next unread UTF-16 code unit of `source`.
class Tokenizer {
    constructor(source, options) {
        var _a;
        this.source = source;
        this.pos = options.start;
        // null end means "scan to the end of `source`".
        this.end = (_a = options.end) !== null && _a !== void 0 ? _a : null;
        this.ecmaVersion = options.ecmaVersion;
    }
    // Yields CharacterToken / EscapeToken values until `quote` (the closing
    // quote's code point) or the end bound is reached. Passing a backtick
    // enables template-string rules (literal newlines allowed, octal escapes rejected).
    *parseTokens(quote) {
        var _a;
        const inTemplate = quote === CP_BACKTICK;
        const endIndex = (_a = this.end) !== null && _a !== void 0 ? _a : this.source.length;
        while (this.pos < endIndex) {
            const start = this.pos;
            const cp = this.source.codePointAt(start);
            if (cp == null) {
                throw new Error("Unterminated string constant");
            }
            // Advance by 1 or 2 code units depending on whether cp is astral.
            this.pos = inc(start, cp);
            if (cp === quote)
                break;
            if (cp === CP_BACK_SLASH) {
                const { value, kind } = this.readEscape(inTemplate);
                yield {
                    type: "EscapeToken",
                    kind,
                    value,
                    range: [start, this.pos],
                };
            }
            else if (cp === CP_CR || cp === CP_LF) {
                if (inTemplate) {
                    // CRLF inside a template is normalized to a single "\n" token.
                    if (cp === CP_CR &&
                        this.source.codePointAt(this.pos) === CP_LF) {
                        this.pos++;
                    }
                    yield {
                        type: "CharacterToken",
                        value: "\n",
                        range: [start, this.pos],
                    };
                }
                else {
                    // Bare line terminators are illegal in ordinary string literals.
                    throw new Error("Unterminated string constant");
                }
            }
            else {
                // NOTE(review): ES2019's "JSON superset" change made U+2028/U+2029
                // *legal* in string literals, so rejecting them when ecmaVersion >= 2019
                // looks inverted (expected `< 2019`) — verify against upstream before relying on this.
                if (this.ecmaVersion >= 2019 &&
                    (cp === 0x2028 || cp === 0x2029) &&
                    !inTemplate) {
                    throw new Error("Unterminated string constant");
                }
                yield {
                    type: "CharacterToken",
                    value: String.fromCodePoint(cp),
                    range: [start, this.pos],
                };
            }
        }
    }
    // Decodes one escape sequence; on entry `pos` is just past the backslash.
    // Returns the cooked value plus a classification kind.
    readEscape(inTemplate) {
        const cp = this.source.codePointAt(this.pos);
        if (cp == null) {
            // Backslash at end of input.
            throw new Error("Invalid or unexpected token");
        }
        this.pos = inc(this.pos, cp);
        switch (cp) {
            case CP_n:
                return { value: "\n", kind: "special" };
            case CP_r:
                return { value: "\r", kind: "special" };
            case CP_t:
                return { value: "\t", kind: "special" };
            case CP_b:
                return { value: "\b", kind: "special" };
            case CP_v:
                return { value: "\v", kind: "special" };
            case CP_f:
                return { value: "\f", kind: "special" };
            case CP_CR:
                // Escaped CRLF: swallow the LF, then fall through to the
                // line-continuation case below (fallthrough is intentional).
                if (this.source.codePointAt(this.pos) === CP_LF) {
                    this.pos++;
                }
            case CP_LF:
                // Line continuation contributes nothing to the string value.
                return { value: "", kind: "eol" };
            case CP_x:
                return {
                    value: String.fromCodePoint(this.readHex(2)),
                    kind: "hex",
                };
            case CP_u:
                return {
                    value: String.fromCodePoint(this.readUnicode()),
                    kind: "unicode",
                };
            default:
                if (CP_0 <= cp && cp <= CP_7) {
                    // Legacy octal escape: grab up to 3 octal digits starting at the
                    // digit just consumed (pos - 1). Values above 255 are trimmed to
                    // the 2-digit prefix, then pos is advanced past the extra digits.
                    let octalStr = /^[0-7]+/u.exec(this.source.slice(this.pos - 1, this.pos + 2))[0];
                    let octal = parseInt(octalStr, 8);
                    if (octal > 255) {
                        octalStr = octalStr.slice(0, -1);
                        octal = parseInt(octalStr, 8);
                    }
                    this.pos += octalStr.length - 1;
                    const nextCp = this.source.codePointAt(this.pos);
                    // Templates reject octal escapes, except a lone \0 not followed by a digit.
                    if ((octalStr !== "0" ||
                        nextCp === CP_8 ||
                        nextCp === CP_9) &&
                        inTemplate) {
                        throw new Error("Octal literal in template string");
                    }
                    return {
                        value: String.fromCodePoint(octal),
                        kind: "octal",
                    };
                }
                // Any other escaped character stands for itself (e.g. \" or \\).
                return {
                    value: String.fromCodePoint(cp),
                    kind: "char",
                };
        }
    }
    // Decodes the payload of \uXXXX or \u{...}; on entry `pos` is just past the 'u'.
    readUnicode() {
        const cp = this.source.codePointAt(this.pos);
        if (cp === CP_OPENING_BRACE) {
            // \u{...} form requires ES2015+.
            if (this.ecmaVersion < 2015) {
                throw new Error(`Unexpected character '${String.fromCodePoint(cp)}'`);
            }
            this.pos++;
            const endIndex = this.source.indexOf("}", this.pos);
            if (endIndex < 0) {
                throw new Error("Invalid Unicode escape sequence");
            }
            const code = this.readHex(endIndex - this.pos);
            // Step over the closing brace.
            this.pos++;
            if (code > 0x10ffff) {
                throw new Error("Code point out of bounds");
            }
            return code;
        }
        // Plain \uXXXX form: exactly four hex digits.
        return this.readHex(4);
    }
    // Reads exactly `length` hex digits at `pos` and returns their value.
    // Accepts 0-9, A-F and a-f; the `val >= 16` guard rejects characters past
    // 'f'/'F' that the range anchors alone would let through.
    readHex(length) {
        let total = 0;
        for (let i = 0; i < length; i++, this.pos++) {
            const cp = this.source.codePointAt(this.pos);
            if (cp == null) {
                throw new Error(`Invalid hexadecimal escape sequence`);
            }
            let val;
            if (CP_a <= cp) {
                val = cp - CP_a + 10;
            }
            else if (CP_A <= cp) {
                val = cp - CP_A + 10;
            }
            else if (CP_0 <= cp && cp <= CP_9) {
                val = cp - CP_0;
            }
            else {
                throw new Error(`Invalid hexadecimal escape sequence`);
            }
            if (val >= 16) {
                throw new Error(`Invalid hexadecimal escape sequence`);
            }
            total = total * 16 + val;
        }
        return total;
    }
}
exports.Tokenizer = Tokenizer;
|
||||
/**
 * Advance a source index past the code point `cp`.
 * Astral code points (>= U+10000) occupy two UTF-16 code units, so they
 * move the index by 2; everything else moves it by 1.
 */
function inc(pos, cp) {
    if (cp >= 0x10000) {
        return pos + 2;
    }
    return pos + 1;
}
|
||||
13
Frontend-Learner/node_modules/eslint-plugin-regexp/dist/utils/string-literal-parser/tokens.d.ts
generated
vendored
Normal file
13
Frontend-Learner/node_modules/eslint-plugin-regexp/dist/utils/string-literal-parser/tokens.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
/** Fields shared by every token produced by the string-literal tokenizer. */
export interface BaseToken {
    type: string;
    /** The decoded (cooked) text this token contributes to the string value. */
    value: string;
    /** [start, end) offsets of the raw token text within the source. */
    range: [number, number];
}
export type Token = CharacterToken | EscapeToken;
/** A literal character (or a normalized template-string line break). */
export interface CharacterToken extends BaseToken {
    type: "CharacterToken";
}
/** A backslash escape sequence, classified by `kind`. */
export interface EscapeToken extends BaseToken {
    type: "EscapeToken";
    kind: "special" | "eol" | "unicode" | "hex" | "octal" | "char";
}
|
||||
2
Frontend-Learner/node_modules/eslint-plugin-regexp/dist/utils/string-literal-parser/tokens.js
generated
vendored
Normal file
2
Frontend-Learner/node_modules/eslint-plugin-regexp/dist/utils/string-literal-parser/tokens.js
generated
vendored
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
"use strict";
// Type-only module: its declarations are erased at compile time, so the
// runtime file contains nothing but the ES-module marker.
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
Loading…
Add table
Add a link
Reference in a new issue