Website Structure
This commit is contained in:
parent
62812f2090
commit
71f0676a62
22365 changed files with 4265753 additions and 791 deletions
276
Frontend-Learner/node_modules/oxc-parser/src-js/raw-transfer/common.js
generated
vendored
Normal file
276
Frontend-Learner/node_modules/oxc-parser/src-js/raw-transfer/common.js
generated
vendored
Normal file
|
|
@ -0,0 +1,276 @@
|
|||
import os from "node:os";
|
||||
import { BUFFER_ALIGN, BUFFER_SIZE, IS_TS_FLAG_POS } from "../../generated/constants.js";
|
||||
import {
|
||||
getBufferOffset,
|
||||
parseRaw as parseRawBinding,
|
||||
parseRawSync as parseRawSyncBinding,
|
||||
} from "../bindings.js";
|
||||
import { rawTransferSupported } from "./supported.js";
|
||||
|
||||
// Raw transfer requires a 64-bit little-endian platform and a modern runtime.
//
// Note: This module is lazy-loaded only when user calls `parseSync` or `parseAsync` with
// `experimentalRawTransfer` or `experimentalLazy` options, or calls `experimentalGetLazyVisitor`,
// so this check only fires when raw transfer is actually requested.
if (!rawTransferSupported()) {
  const message =
    "`experimentalRawTransfer` and `experimentalLazy` options are not supported " +
    "on 32-bit or big-endian systems, versions of NodeJS prior to v22.0.0, " +
    "versions of Deno prior to v2.0.0, or other runtimes";
  throw new Error(message);
}
|
||||
|
||||
/**
 * Parse JS/TS source synchronously on current thread using raw transfer.
 *
 * The buffer returned by Rust is converted to a JS object with the provided `convert` function.
 * This is the logic shared by both `parseSyncRaw` and `parseSyncLazy`.
 *
 * @param {string} filename - Filename
 * @param {string} sourceText - Source text of file
 * @param {Object} options - Parsing options
 * @param {function} convert - Function to convert the buffer returned from Rust into a JS object
 * @returns {Object} - The return value of `convert`
 */
export function parseSyncRawImpl(filename, sourceText, options, convert) {
  // Copy source into a (possibly cached) buffer, parse on Rust side, then convert to JS.
  const prepared = prepareRaw(sourceText);
  parseRawSyncBinding(filename, prepared.buffer, prepared.sourceByteLen, options);
  return convert(prepared.buffer, sourceText, prepared.sourceByteLen, options);
}
|
||||
|
||||
// User should not schedule more async tasks than there are available CPUs, as it hurts performance,
// but it's a common mistake in async JS code to do exactly that.
//
// That anti-pattern looks like this when applied to Oxc:
//
// ```js
// const asts = await Promise.all(
//   files.map(
//     async (filename) => {
//       const sourceText = await fs.readFile(filename, 'utf8');
//       const ast = await oxc.parseAsync(filename, sourceText);
//       return ast;
//     }
//   )
// );
// ```
//
// In most cases, that'd just result in a bit of degraded performance, and higher memory use because
// of loading sources into memory prematurely.
//
// However, raw transfer uses a 6 GiB buffer for each parsing operation (see `createBuffer` below).
// Most of the memory pages in those buffers are never touched, so this does not consume a huge amount
// of physical memory, but it does still consume virtual memory.
//
// If we allowed creating a large number of 6 GiB buffers simultaneously, it would quickly consume
// virtual memory space and risk memory exhaustion. The code above would exhaust all of bottom half
// (heap) of 48-bit virtual memory space if `files.length >= 21_845`. This is not a number which
// is unrealistic in real world code.
//
// To guard against this possibility, we implement a simple queue.
// No more than `os.availableParallelism()` files can be parsed simultaneously, and any further calls to
// `parseAsyncRaw` will be put in a queue, to execute once other tasks complete.
//
// Fallback to `os.cpus().length` on versions of NodeJS prior to v18.14.0, which do not support
// `os.availableParallelism`.

// Number of CPU cores currently free to run a parsing task (see `parseAsyncRawImpl` for
// the exact decrement/increment protocol, which deliberately skips updates for queued tasks).
let availableCores = os.availableParallelism ? os.availableParallelism() : os.cpus().length;
// FIFO queue of `resolve` callbacks for tasks waiting for a free core.
const queue = [];
|
||||
|
||||
/**
 * Parse JS/TS source asynchronously using raw transfer.
 *
 * Convert the buffer returned by Rust to a JS object with provided `convert` function.
 *
 * Queues up parsing operations if more calls than number of CPU cores (see above).
 *
 * This function contains logic shared by both `parseAsyncRaw` and `parseAsyncLazy`.
 *
 * @param {string} filename - Filename
 * @param {string} sourceText - Source text of file
 * @param {Object} options - Parsing options
 * @param {function} convert - Function to convert the buffer returned from Rust into a JS object
 * @returns {Object} - The return value of `convert`
 */
export async function parseAsyncRawImpl(filename, sourceText, options, convert) {
  // Wait for a free CPU core if all CPUs are currently busy.
  //
  // Note: `availableCores` is NOT decremented if have to wait in the queue first,
  // and NOT incremented when parsing completes and it runs next task in the queue.
  //
  // This is to avoid a race condition if `parseAsyncRaw` is called during the microtick in between
  // `resolve` being called below, and the promise resolving here. In that case the new task could
  // start running, and then the promise resolves, and the queued task also starts running.
  // We'd then have `availableParallelism() + 1` tasks running simultaneously. Potentially, this could
  // happen repeatedly, with the number of tasks running simultaneously ever-increasing.
  if (availableCores === 0) {
    // All CPU cores are busy. Put this task in queue and wait for capacity to become available.
    await new Promise((resolve) => {
      queue.push(resolve);
    });
  } else {
    // A CPU core is available. Mark core as busy, and run parsing now.
    availableCores--;
  }

  try {
    // Parse
    const { buffer, sourceByteLen } = prepareRaw(sourceText);
    await parseRawBinding(filename, buffer, sourceByteLen, options);
    return convert(buffer, sourceText, sourceByteLen, options);
  } finally {
    // Free the CPU core. This MUST happen even if parsing or conversion throws —
    // otherwise the core slot would leak, and queued tasks would be stranded forever.
    if (queue.length > 0) {
      // Some further tasks waiting in queue. Run the next one.
      // Do not increment `availableCores` (see above).
      const resolve = queue.shift();
      resolve();
    } else {
      // No tasks waiting in queue. This CPU is now free.
      availableCores++;
    }
  }
}
|
||||
|
||||
// Total allocation size: buffer size plus alignment slack, so an aligned
// `BUFFER_SIZE` slice can always be carved out (see `createBuffer` below).
const ARRAY_BUFFER_SIZE = BUFFER_SIZE + BUFFER_ALIGN;
// `TextEncoder.encodeInto` cannot write into a `Uint8Array` larger than 1 GiB (see `prepareRaw`).
const ONE_GIB = 1 << 30;
|
||||
|
||||
// We keep a cache of buffers for raw transfer, so we can reuse them as much as possible.
//
// When processing multiple files, it's ideal if can reuse an existing buffer, as it's more likely to
// be warm in CPU cache, it avoids allocations, and it saves work for the garbage collector.
//
// However, we also don't want to keep a load of large buffers around indefinitely using up memory,
// if they're not going to be used again.
//
// We have no knowledge of what pattern over time user may process files in (could be lots in quick
// succession, or more occasionally in a long-running process). So we try to use flexible caching
// strategy which is adaptable to many usage patterns.
//
// We use a 2-tier cache.
// Tier 1 uses strong references, tier 2 uses weak references.
//
// When parsing is complete and the buffer is no longer in use, push it to `buffers` (tier 1 cache).
// Set a timer to clear the cache when no activity for 10 seconds.
//
// When the timer expires, move all the buffers from tier 1 cache into `oldBuffers` (tier 2).
// They are stored there as `WeakRef`s, so the garbage collector is free to reclaim them.
//
// On the next call to `parseSync` or `parseAsync`, promote any buffers in tier 2 cache which were not
// already garbage collected back into tier 1 cache. This is on assumption that parsing one file
// indicates parsing as a whole is an ongoing process, and there will likely be further calls to
// `parseSync` / `parseAsync` in future.
//
// The weak tier 2 cache is because V8 does not necessarily free memory as soon as it's able to be
// freed. We don't want to block it from freeing memory, but if it's not done that yet, there's no
// point creating a new buffer, when one already exists.

// Inactivity period after which tier 1 cache is downgraded to tier 2.
const CLEAR_BUFFERS_TIMEOUT = 10_000; // 10 seconds
// Tier 1 cache: strong references to free buffers.
const buffers = [];
// Tier 2 cache: `WeakRef`s to buffers the GC is allowed to reclaim.
const oldBuffers = [];
// Handle of the pending "clear cache" timer, or `null` if none is scheduled.
let clearBuffersTimeout = null;

// Shared encoder for writing source text into buffers as UTF-8 (see `prepareRaw`).
const textEncoder = new TextEncoder();
|
||||
|
||||
/**
 * Get a buffer (from cache if possible), and copy source text into it.
 *
 * @param {string} sourceText - Source text of file
 * @returns {Object} - Object of form `{ buffer, sourceByteLen }`.
 *   - `buffer`: `Uint8Array` containing the AST in raw form.
 *   - `sourceByteLen`: Length of source text in UTF-8 bytes
 *     (which may not be equal to `sourceText.length` if source contains non-ASCII characters).
 */
export function prepareRaw(sourceText) {
  // Parsing activity: cancel any pending timeout for clearing the buffer cache
  if (clearBuffersTimeout !== null) {
    clearTimeout(clearBuffersTimeout);
    clearBuffersTimeout = null;
  }

  // Promote any tier-2 (weakly-held) buffers which have not yet been garbage collected
  // back into the tier-1 cache
  if (oldBuffers.length > 0) {
    const revivedBuffers = oldBuffers
      .map((weakRef) => weakRef.deref())
      .filter((buf) => buf !== undefined);
    oldBuffers.length = 0;
    if (revivedBuffers.length > 0) buffers.unshift(...revivedBuffers);
  }

  // Take a cached buffer if one exists, otherwise allocate a new one
  const buffer = buffers.length > 0 ? buffers.pop() : createBuffer();

  // Write source into start of buffer.
  // `TextEncoder` cannot write into a `Uint8Array` larger than 1 GiB,
  // so create a 1 GiB view into the buffer to write into.
  const sourceBuffer = new Uint8Array(buffer.buffer, buffer.byteOffset, ONE_GIB);
  const { read, written: sourceByteLen } = textEncoder.encodeInto(sourceText, sourceBuffer);
  if (read !== sourceText.length) throw new Error("Failed to write source text into buffer");

  return { buffer, sourceByteLen };
}
|
||||
|
||||
/**
 * Get if AST should be parsed as JS or TS.
 * Rust side sets a `bool` in this position in buffer which is `true` (non-zero) if TS.
 *
 * @param {Uint8Array} buffer - Buffer containing AST in raw form
 * @returns {boolean} - `true` if AST is JS, `false` if TS
 */
export function isJsAst(buffer) {
  const isTs = buffer[IS_TS_FLAG_POS] !== 0;
  return !isTs;
}
|
||||
|
||||
/**
 * Return buffer to cache, to be reused.
 * (Re)starts the inactivity timer which eventually downgrades cached buffers.
 *
 * @param {Uint8Array} buffer - Buffer
 * @returns {undefined}
 */
export function returnBufferToCache(buffer) {
  buffers.push(buffer);

  // Restart the inactivity timer.
  // `unref` so this timer doesn't keep the process alive on its own.
  if (clearBuffersTimeout !== null) clearTimeout(clearBuffersTimeout);
  const timeout = setTimeout(clearBuffersCache, CLEAR_BUFFERS_TIMEOUT);
  timeout.unref();
  clearBuffersTimeout = timeout;
}
|
||||
|
||||
/**
 * Downgrade buffers in tier 1 cache (`buffers`) to tier 2 (`oldBuffers`)
 * so they can be garbage collected.
 *
 * @returns {undefined}
 */
function clearBuffersCache() {
  // Timer has fired, so no timer is pending any more
  clearBuffersTimeout = null;

  // Re-wrap every strongly-held buffer in a `WeakRef`, then empty tier 1
  oldBuffers.push(...buffers.map((buffer) => new WeakRef(buffer)));
  buffers.length = 0;
}
|
||||
|
||||
/**
 * Create a `Uint8Array` which is `BUFFER_SIZE` (2 GiB) in size, with its start aligned on 4 GiB.
 *
 * Achieve this by creating a 6 GiB `ArrayBuffer`, getting the offset within it that's aligned to 4 GiB,
 * chopping off that number of bytes from the start, and shortening to 2 GiB.
 *
 * It's always possible to obtain a 2 GiB slice aligned on 4 GiB within a 6 GiB buffer,
 * no matter how the 6 GiB buffer is aligned.
 *
 * Note: On systems with virtual memory, this only consumes 6 GiB of *virtual* memory.
 * It does not consume physical memory until data is actually written to the `Uint8Array`.
 * Physical memory consumed corresponds to the quantity of data actually written.
 *
 * @returns {Uint8Array} - Buffer
 */
function createBuffer() {
  const backingBuffer = new ArrayBuffer(ARRAY_BUFFER_SIZE);
  const alignedOffset = getBufferOffset(new Uint8Array(backingBuffer));
  const buffer = new Uint8Array(backingBuffer, alignedOffset, BUFFER_SIZE);
  // Attach extra typed-array views over the same memory, so consumers can read
  // the buffer as `u32`s or `f64`s without re-creating views each time.
  buffer.uint32 = new Uint32Array(backingBuffer, alignedOffset, BUFFER_SIZE / 4);
  buffer.float64 = new Float64Array(backingBuffer, alignedOffset, BUFFER_SIZE / 8);
  return buffer;
}
|
||||
119
Frontend-Learner/node_modules/oxc-parser/src-js/raw-transfer/eager.js
generated
vendored
Normal file
119
Frontend-Learner/node_modules/oxc-parser/src-js/raw-transfer/eager.js
generated
vendored
Normal file
|
|
@ -0,0 +1,119 @@
|
|||
import { createRequire } from "node:module";
|
||||
import { isJsAst, parseAsyncRawImpl, parseSyncRawImpl, returnBufferToCache } from "./common.js";
|
||||
|
||||
const require = createRequire(import.meta.url);
|
||||
|
||||
/**
 * Parse JS/TS source synchronously on current thread, using raw transfer to speed up deserialization.
 *
 * @param {string} filename - Filename
 * @param {string} sourceText - Source text of file
 * @param {Object} options - Parsing options
 * @returns {Object} - Object with property getters for `program`, `module`, `comments`, and `errors`
 */
export function parseSyncRaw(filename, sourceText, options) {
  // Delegate to the shared implementation, converting the buffer with the eager deserializer.
  return parseSyncRawImpl(filename, sourceText, options, deserialize);
}
|
||||
|
||||
/**
 * Parse JS/TS source asynchronously, using raw transfer to speed up deserialization.
 *
 * Note that not all of the workload can happen on a separate thread.
 * Parsing on Rust side does happen in a separate thread, but deserialization of the AST to JS objects
 * has to happen on current thread. This synchronous deserialization work typically outweighs
 * the asynchronous parsing by a factor of around 3.
 *
 * i.e. the majority of the workload cannot be parallelized by using this method.
 *
 * Generally `parseSyncRaw` is preferable to use as it does not have the overhead of spawning a thread.
 * If you need to parallelize parsing multiple files, it is recommended to use worker threads.
 *
 * @param {string} filename - Filename
 * @param {string} sourceText - Source text of file
 * @param {Object} options - Parsing options
 * @returns {Object} - Object with property getters for `program`, `module`, `comments`, and `errors`
 */
export function parse(filename, sourceText, options) {
  // Delegate to the shared implementation, converting the buffer with the eager deserializer.
  return parseAsyncRawImpl(filename, sourceText, options, deserialize);
}
|
||||
|
||||
// Deserializers are large files, so lazy-loaded.
// `deserialize` functions are stored in this array once loaded.
// Index into these arrays is a bitmap: `isJs * 1 + range * 2 + experimentalParent * 4`.
const deserializers = new Array(8).fill(null);
// Module names, in the same bitmap order as `deserializers`.
const deserializerNames = [
  "ts",
  "js",
  "ts_range",
  "js_range",
  "ts_parent",
  "js_parent",
  "ts_range_parent",
  "js_range_parent",
];
|
||||
|
||||
/**
 * Deserialize whole AST from buffer.
 *
 * @param {Uint8Array} buffer - Buffer containing AST in raw form
 * @param {string} sourceText - Source for the file
 * @param {number} sourceByteLen - Length of source text in UTF-8 bytes
 * @param {Object} options - Parsing options
 * @returns {Object} - Object with property getters for `program`, `module`, `comments`, and `errors`
 */
function deserialize(buffer, sourceText, sourceByteLen, options) {
  const isJs = isJsAst(buffer),
    range = !!options.range,
    parent = !!options.experimentalParent;

  // Lazy load deserializer, and deserialize buffer to JS objects.
  // Index is a bitmap matching the order of `deserializerNames`:
  // bit 0 = JS (vs TS), bit 1 = `range` option, bit 2 = `experimentalParent` option.
  const deserializerIndex = +isJs | (+range << 1) | (+parent << 2);
  let deserializeThis = deserializers[deserializerIndex];
  if (deserializeThis === null) {
    // First use of this variant: load and cache it
    deserializeThis = deserializers[deserializerIndex] = require(
      `../../generated/deserialize/${deserializerNames[deserializerIndex]}.js`,
    ).deserialize;
  }

  const data = deserializeThis(buffer, sourceText, sourceByteLen);

  // Add a line comment for hashbang if JS.
  // Do not add comment if TS, to match `@typescript-eslint/parser`.
  // See https://github.com/oxc-project/oxc/blob/ea784f5f082e4c53c98afde9bf983afd0b95e44e/napi/parser/src/lib.rs#L106-L130
  if (isJs) {
    const { hashbang } = data.program;
    if (hashbang !== null) {
      // Include `range` property on the synthetic comment only when `range` option is set,
      // to match the shape of the other comments
      data.comments.unshift(
        range
          ? {
            type: "Line",
            value: hashbang.value,
            start: hashbang.start,
            end: hashbang.end,
            range: hashbang.range,
          }
          : { type: "Line", value: hashbang.value, start: hashbang.start, end: hashbang.end },
      );
    }
  }

  // Return buffer to cache, to be reused
  returnBufferToCache(buffer);

  // We cannot lazily deserialize in the getters, because the buffer might be re-used to parse
  // another file before the getter is called. So `data` is fully deserialized above,
  // and these getters just read from it.
  return {
    get program() {
      return data.program;
    },
    get module() {
      return data.module;
    },
    get comments() {
      return data.comments;
    },
    get errors() {
      return data.errors;
    },
  };
}
|
||||
11
Frontend-Learner/node_modules/oxc-parser/src-js/raw-transfer/lazy-common.js
generated
vendored
Normal file
11
Frontend-Learner/node_modules/oxc-parser/src-js/raw-transfer/lazy-common.js
generated
vendored
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
// Unique token which is not exposed publicly.
// Used to prevent user calling class constructors: internal callers pass an object
// carrying this token, and constructors reject any call without it (see `constructorError`).
export const TOKEN = {};
|
||||
|
||||
/**
 * Throw error when restricted class constructor is called by user code.
 * @throws {Error}
 */
export function constructorError() {
  const message = "Constructor is for internal use only";
  throw new Error(message);
}
|
||||
153
Frontend-Learner/node_modules/oxc-parser/src-js/raw-transfer/lazy.js
generated
vendored
Normal file
153
Frontend-Learner/node_modules/oxc-parser/src-js/raw-transfer/lazy.js
generated
vendored
Normal file
|
|
@ -0,0 +1,153 @@
|
|||
import { DATA_POINTER_POS_32, PROGRAM_OFFSET } from "../../generated/constants.js";
|
||||
import { RawTransferData } from "../../generated/lazy/constructors.js";
|
||||
import { walkProgram } from "../../generated/lazy/walk.js";
|
||||
import { parseAsyncRawImpl, parseSyncRawImpl, returnBufferToCache } from "./common.js";
|
||||
import { TOKEN } from "./lazy-common.js";
|
||||
import { getVisitorsArr } from "./visitor.js";
|
||||
export { Visitor } from "./visitor.js";
|
||||
|
||||
/**
 * Parse JS/TS source synchronously on current thread.
 *
 * The data in buffer is not deserialized. Is deserialized to JS objects lazily, when accessing the
 * properties of objects.
 *
 * e.g. `program` in returned object is an instance of `Program` class, with getters for `start`, `end`,
 * `body` etc.
 *
 * Returned object contains a `visit` function which can be used to visit the AST with a `Visitor`
 * (`Visitor` class can be obtained by calling `experimentalGetLazyVisitor()`).
 *
 * Returned object contains a `dispose` method. When finished with this AST, it's advisable to call
 * `dispose`, to return the buffer to the cache, so it can be reused.
 * Garbage collector should do this anyway at some point, but on an unpredictable schedule,
 * so it's preferable to call `dispose` manually, to ensure the buffer can be reused immediately.
 *
 * @param {string} filename - Filename
 * @param {string} sourceText - Source text of file
 * @param {Object} options - Parsing options
 * @returns {Object} - Object with property getters for `program`, `module`, `comments`, and `errors`,
 *   and `dispose` and `visit` methods
 */
export function parseSyncLazy(filename, sourceText, options) {
  // Delegate to the shared implementation, wrapping the buffer with lazy accessors.
  return parseSyncRawImpl(filename, sourceText, options, construct);
}
|
||||
|
||||
/**
 * Parse JS/TS source asynchronously on a separate thread.
 *
 * The data in buffer is not deserialized. Is deserialized to JS objects lazily, when accessing the
 * properties of objects.
 *
 * e.g. `program` in returned object is an instance of `Program` class, with getters for `start`, `end`,
 * `body` etc.
 *
 * Because this function does not deserialize the AST, unlike `parse`, very little work happens
 * on current thread in this function. Deserialization work only occurs when properties of the objects
 * are accessed.
 *
 * Returned object contains a `visit` function which can be used to visit the AST with a `Visitor`
 * (`Visitor` class can be obtained by calling `experimentalGetLazyVisitor()`).
 *
 * Returned object contains a `dispose` method. When finished with this AST, it's advisable to call
 * `dispose`, to return the buffer to the cache, so it can be reused.
 * Garbage collector should do this anyway at some point, but on an unpredictable schedule,
 * so it's preferable to call `dispose` manually, to ensure the buffer can be reused immediately.
 *
 * @param {string} filename - Filename
 * @param {string} sourceText - Source text of file
 * @param {Object} options - Parsing options
 * @returns {Object} - Object with property getters for `program`, `module`, `comments`, and `errors`,
 *   and `dispose` and `visit` methods
 */
export function parse(filename, sourceText, options) {
  // Delegate to the shared implementation, wrapping the buffer with lazy accessors.
  return parseAsyncRawImpl(filename, sourceText, options, construct);
}
|
||||
|
||||
// Registry for buffers which are held by lazily-deserialized ASTs.
// Returns buffer to cache when the `ast` wrapper is garbage collected.
//
// Check for existence of `FinalizationRegistry`, to avoid errors on old versions of NodeJS
// which don't support it. e.g. Prettier supports NodeJS v14.
// Raw transfer is disabled on NodeJS before v22, so it doesn't matter if this is `null` on old NodeJS
// - it'll never be accessed in that case.
const bufferRecycleRegistry =
  typeof FinalizationRegistry === "undefined"
    ? null
    : new FinalizationRegistry(returnBufferToCache);
|
||||
|
||||
/**
 * Get an object with getters which lazy deserialize AST and other data from buffer.
 *
 * Object also includes `dispose` and `visit` functions.
 *
 * @param {Uint8Array} buffer - Buffer containing AST in raw form
 * @param {string} sourceText - Source for the file
 * @param {number} sourceByteLen - Length of source text in UTF-8 bytes
 * @param {Object} _options - Parsing options (unused here)
 * @returns {Object} - Object with property getters for `program`, `module`, `comments`, and `errors`,
 *   and `dispose` and `visit` methods
 */
function construct(buffer, sourceText, sourceByteLen, _options) {
  // Create AST object.
  // UTF-8 byte length equal to UTF-16 code unit count implies source is all-ASCII.
  const sourceIsAscii = sourceText.length === sourceByteLen;
  const ast = { buffer, sourceText, sourceByteLen, sourceIsAscii, nodes: new Map(), token: TOKEN };

  // Register `ast` with the recycle registry so buffer is returned to cache
  // when `ast` is garbage collected. `ast` also serves as the unregister token,
  // so `dispose` can cancel this (see `dispose`).
  bufferRecycleRegistry.register(ast, buffer, ast);

  // Get root data class instance, positioned at the data pointer stored in the buffer
  const rawDataPos = buffer.uint32[DATA_POINTER_POS_32];
  const data = new RawTransferData(rawDataPos, ast);

  return {
    get program() {
      return data.program;
    },
    get module() {
      return data.module;
    },
    get comments() {
      return data.comments;
    },
    get errors() {
      return data.errors;
    },
    // Return the buffer to the cache early, without waiting for GC
    dispose: dispose.bind(null, ast),
    // Walk the AST with a `Visitor` (see `getVisitorsArr`)
    visit(visitor) {
      walkProgram(rawDataPos + PROGRAM_OFFSET, ast, getVisitorsArr(visitor));
    },
  };
}
|
||||
|
||||
/**
 * Dispose of this AST.
 *
 * After calling this method, trying to read any nodes from this AST may cause an error.
 *
 * Buffer is returned to the cache to be reused.
 *
 * The buffer would be returned to the cache anyway, once all nodes of the AST are garbage collected,
 * but calling `dispose` is preferable, as it will happen immediately.
 * Otherwise, garbage collector may take time to collect the `ast` object, and new buffers may be created
 * in the meantime, when we could have reused this one.
 *
 * @param {Object} ast - AST object containing buffer etc
 * @returns {undefined}
 */
function dispose(ast) {
  // Return buffer to cache, to be reused
  const { buffer } = ast;
  returnBufferToCache(buffer);

  // Sever connection between `ast` and the buffer, and clear other contents of `ast`,
  // so they can be garbage collected
  ast.buffer = null;
  ast.sourceText = null;
  ast.nodes = null;

  // Remove `ast` from recycling register.
  // When `ast` is garbage collected, there's no longer any action to be taken.
  bufferRecycleRegistry.unregister(ast);
}
|
||||
365
Frontend-Learner/node_modules/oxc-parser/src-js/raw-transfer/node-array.js
generated
vendored
Normal file
365
Frontend-Learner/node_modules/oxc-parser/src-js/raw-transfer/node-array.js
generated
vendored
Normal file
|
|
@ -0,0 +1,365 @@
|
|||
import { constructorError, TOKEN } from "./lazy-common.js";
|
||||
|
||||
// Internal symbol to get `NodeArray` from a proxy wrapping a `NodeArray`.
//
// Methods of `NodeArray` are called with `this` being the proxy, rather than the `NodeArray` itself.
// They can "unwrap" the proxy by getting `this[ARRAY]`, and the `get` proxy trap will return
// the actual `NodeArray`.
//
// This symbol is not exported, and it is not actually defined on `NodeArray`s, so user cannot obtain it
// via `Object.getOwnPropertySymbols` or `Reflect.ownKeys`. Therefore user code cannot unwrap the proxy.
const ARRAY = Symbol();

// Functions to get internal properties of a `NodeArray`. Initialized in class static block below.
// (One declaration per line, per `let` best practice.)
let getInternalFromProxy;
let getLength;
let getElement;
|
||||
|
||||
/**
|
||||
* An array of AST nodes where elements are deserialized lazily upon access.
|
||||
*
|
||||
* Extends `Array` to make `Array.isArray` return `true` for a `NodeArray`.
|
||||
*
|
||||
* TODO: Other methods could maybe be more optimal, avoiding going via proxy multiple times
|
||||
* e.g. `some`, `indexOf`.
|
||||
*/
|
||||
export class NodeArray extends Array {
|
||||
#internal;
|
||||
|
||||
/**
|
||||
* Create a `NodeArray`.
|
||||
*
|
||||
* Constructor does not actually return a `NodeArray`, but one wrapped in a `Proxy`.
|
||||
* The proxy intercepts accesses to elements and lazily deserializes them,
|
||||
* and blocks mutation of elements or `length` property.
|
||||
*
|
||||
* @class
|
||||
* @param {number} pos - Buffer position of first element
|
||||
* @param {number} length - Number of elements
|
||||
* @param {number} stride - Element size in bytes
|
||||
* @param {Function} construct - Function to deserialize element
|
||||
* @param {Object} ast - AST object
|
||||
* @returns {Proxy<NodeArray>} - `NodeArray` wrapped in a `Proxy`
|
||||
*/
|
||||
constructor(pos, length, stride, construct, ast) {
|
||||
if (ast?.token !== TOKEN) constructorError();
|
||||
|
||||
super();
|
||||
this.#internal = { pos, length, ast, stride, construct };
|
||||
return new Proxy(this, PROXY_HANDLERS);
|
||||
}
|
||||
|
||||
// Allow `arr.filter`, `arr.map` etc.
|
||||
static [Symbol.species] = Array;
|
||||
|
||||
// Override `values` method with a more efficient one that avoids going via proxy for every iteration.
|
||||
// TODO: Benchmark to check that this is actually faster.
|
||||
values() {
|
||||
return new NodeArrayValuesIterator(this);
|
||||
}
|
||||
|
||||
// Override `keys` method with a more efficient one that avoids going via proxy for every iteration.
|
||||
// TODO: Benchmark to check that this is actually faster.
|
||||
keys() {
|
||||
return new NodeArrayKeysIterator(this);
|
||||
}
|
||||
|
||||
// Override `entries` method with a more efficient one that avoids going via proxy for every iteration.
|
||||
// TODO: Benchmark to check that this is actually faster.
|
||||
entries() {
|
||||
return new NodeArrayEntriesIterator(this);
|
||||
}
|
||||
|
||||
// This method is overwritten with reference to `values` method below.
|
||||
// Defining dummy method here to prevent the later assignment altering the shape of class prototype.
|
||||
[Symbol.iterator]() {}
|
||||
|
||||
/**
|
||||
* Override `slice` method to return a `NodeArray`.
|
||||
*
|
||||
* @this {NodeArray}
|
||||
* @param {*} start - Start of slice
|
||||
* @param {*} end - End of slice
|
||||
* @returns {NodeArray} - `NodeArray` containing slice of this one
|
||||
*/
|
||||
slice(start, end) {
|
||||
const internal = this[ARRAY].#internal,
|
||||
{ length } = internal;
|
||||
|
||||
start = toInt(start);
|
||||
if (start < 0) {
|
||||
start = length + start;
|
||||
if (start < 0) start = 0;
|
||||
}
|
||||
|
||||
if (end === void 0) {
|
||||
end = length;
|
||||
} else {
|
||||
end = toInt(end);
|
||||
if (end < 0) {
|
||||
end += length;
|
||||
if (end < 0) end = 0;
|
||||
} else if (end > length) {
|
||||
end = length;
|
||||
}
|
||||
}
|
||||
|
||||
let sliceLength = end - start;
|
||||
if (sliceLength <= 0 || start >= length) {
|
||||
start = 0;
|
||||
sliceLength = 0;
|
||||
}
|
||||
|
||||
const { stride } = internal;
|
||||
return new NodeArray(
|
||||
internal.pos + start * stride,
|
||||
sliceLength,
|
||||
stride,
|
||||
internal.construct,
|
||||
internal.ast,
|
||||
);
|
||||
}
|
||||
|
||||
// Make `console.log` deserialize all elements.
|
||||
[Symbol.for("nodejs.util.inspect.custom")]() {
|
||||
const values = [...this.values()];
|
||||
Object.setPrototypeOf(values, DebugNodeArray.prototype);
|
||||
return values;
|
||||
}
|
||||
|
||||
static {
  // These accessors are assigned to module-level variables (declared elsewhere
  // in the file) from inside a `static` block, so that code outside the class
  // can read the `#internal` private field.

  /**
   * Get internal properties of `NodeArray`, given a proxy wrapping a `NodeArray`.
   * @param {Proxy} proxy - Proxy wrapping `NodeArray` object
   * @returns {Object} - Internal properties object
   */
  getInternalFromProxy = (proxy) => proxy[ARRAY].#internal;

  /**
   * Get length of `NodeArray`.
   * @param {NodeArray} arr - `NodeArray` object
   * @returns {number} - Array length
   */
  getLength = (arr) => arr.#internal.length;

  /**
   * Get element of `NodeArray` at index `index`.
   * Lazily deserializes the element via the stored `construct` function.
   * @param {NodeArray} arr - `NodeArray` object
   * @param {number} index - Index of element to get
   * @returns {*} - Element at index `index`, or `undefined` if out of bounds
   */
  getElement = (arr, index) => {
    const internal = arr.#internal;
    if (index >= internal.length) return void 0;
    // `(0, fn)(...)` call form avoids passing `internal` as `this`
    return (0, internal.construct)(internal.pos + index * internal.stride, internal.ast);
  };
}
|
||||
}
|
||||
|
||||
// Make `NodeArray` iterable (`for (const node of arr) {}`) by reusing the
// `values` method as its default iterator.
// oxlint-disable-next-line typescript/unbound-method
NodeArray.prototype[Symbol.iterator] = NodeArray.prototype.values;
|
||||
|
||||
/**
 * Iterator over values of a `NodeArray`.
 * Returned by the `values` method, and also used as the iterator for
 * `for (const node of nodeArray) {}`.
 */
class NodeArrayValuesIterator {
  #internal;

  constructor(proxy) {
    // Snapshot the array's internals; iteration advances `pos` by `stride`
    // until it reaches `endPos`.
    const { pos, length, stride, ast, construct } = getInternalFromProxy(proxy);
    this.#internal = { pos, endPos: pos + length * stride, ast, construct, stride };
  }

  next() {
    const internal = this.#internal;
    const { pos } = internal;
    if (pos === internal.endPos) return { done: true, value: null };
    internal.pos = pos + internal.stride;
    // `(0, fn)(...)` call form avoids passing `internal` as `this`
    return { done: false, value: (0, internal.construct)(pos, internal.ast) };
  }

  [Symbol.iterator]() {
    return this;
  }
}
|
||||
|
||||
/**
 * Iterator over keys (indexes) of a `NodeArray`. Returned by `keys` method.
 */
class NodeArrayKeysIterator {
  #internal;

  constructor(proxy) {
    // Only the length is needed; keys are just the indexes `0..length-1`.
    this.#internal = { index: 0, length: getInternalFromProxy(proxy).length };
  }

  next() {
    const internal = this.#internal;
    if (internal.index === internal.length) return { done: true, value: null };
    return { done: false, value: internal.index++ };
  }

  [Symbol.iterator]() {
    return this;
  }
}
|
||||
|
||||
/**
 * Iterator over `[index, value]` entries of a `NodeArray`.
 * Returned by `entries` method.
 */
class NodeArrayEntriesIterator {
  #internal;

  constructor(proxy) {
    const { length, pos, ast, construct, stride } = getInternalFromProxy(proxy);
    this.#internal = { index: 0, length, pos, ast, construct, stride };
  }

  next() {
    const internal = this.#internal;
    const { index } = internal;
    if (index === internal.length) return { done: true, value: null };
    internal.index = index + 1;
    // Deserialize the element lazily; `(0, fn)(...)` avoids passing `internal` as `this`
    const node = (0, internal.construct)(internal.pos + index * internal.stride, internal.ast);
    return { done: false, value: [index, node] };
  }

  [Symbol.iterator]() {
    return this;
  }
}
|
||||
|
||||
// Class used for `[Symbol.for('nodejs.util.inspect.custom')]` method (for `console.log`).
// Extends `Array` so Node's inspector formats instances like an array; the class
// expression is named `NodeArray`, presumably so output reads `NodeArray [ ... ]`.
const DebugNodeArray = class NodeArray extends Array {};
|
||||
|
||||
// Proxy handlers.
//
// Every `NodeArray` returned to user is wrapped in a `Proxy`, using these handlers.
// They lazily deserialize array elements upon access, and block mutation of array elements / `length`.
const PROXY_HANDLERS = {
  // Return `true` for indexes which are in bounds.
  // e.g. `'0' in arr`.
  has(arr, key) {
    const index = toIndex(key);
    if (index !== null) return index < getLength(arr);
    // Not an index key - defer to normal lookup (methods, symbols, etc.)
    return Reflect.has(arr, key);
  },

  // Get elements and length.
  get(arr, key) {
    // Methods of `NodeArray` are called with `this` being the proxy, rather than the `NodeArray` itself.
    // They can "unwrap" the proxy by getting `this[ARRAY]`.
    if (key === ARRAY) return arr;
    if (key === "length") return getLength(arr);
    const index = toIndex(key);
    if (index !== null) return getElement(arr, index);

    return Reflect.get(arr, key);
  },

  // Get descriptors for elements and length.
  // Needed so e.g. `Object.keys(arr)` / spread see the virtual elements.
  getOwnPropertyDescriptor(arr, key) {
    if (key === "length") {
      // Cannot return `writable: false` unfortunately
      // (Proxy invariants require it to match the target's real descriptor)
      return { value: getLength(arr), writable: true, enumerable: false, configurable: false };
    }

    const index = toIndex(key);
    if (index !== null) {
      const value = getElement(arr, index);
      // Out-of-bounds index: no such property
      if (value === void 0) return void 0;
      // Cannot return `configurable: false` unfortunately
      return { value, writable: false, enumerable: true, configurable: true };
    }

    return Reflect.getOwnPropertyDescriptor(arr, key);
  },

  // Prevent setting `length` or entries.
  // Catches:
  // * `Object.defineProperty(arr, 0, {value: null})`.
  // * `arr[1] = null`.
  // * `arr.length = 0`.
  // * `Object.defineProperty(arr, 'length', {value: 0})`.
  // * Other operations which mutate entries e.g. `arr.push(123)`.
  defineProperty(arr, key, descriptor) {
    if (key === "length" || toIndex(key) !== null) return false;
    return Reflect.defineProperty(arr, key, descriptor);
  },

  // Prevent deleting entries.
  deleteProperty(arr, key) {
    // Note: `Reflect.deleteProperty(arr, 'length')` already returns `false`
    if (toIndex(key) !== null) return false;
    return Reflect.deleteProperty(arr, key);
  },

  // Get keys, including element indexes.
  // Index keys come first (as strings), followed by the target's own keys.
  ownKeys(arr) {
    const keys = [],
      length = getLength(arr);
    for (let i = 0; i < length; i++) {
      keys.push(i + "");
    }
    keys.push(...Reflect.ownKeys(arr));
    return keys;
  },
};
|
||||
|
||||
/**
 * Convert key to array index, if it is a valid array index.
 *
 * Only strings comprising a plain integer are valid indexes.
 * e.g. `"-1"`, `"01"`, `"0xFF"`, `"1e1"`, `"1 "` are not valid indexes.
 * Integers >= 4294967295 (2^32 - 1) are not valid indexes.
 *
 * @param {string|Symbol} key - Key used for property lookup.
 * @returns {number|null} - `key` converted to integer, if it's a valid array index, otherwise `null`.
 */
function toIndex(key) {
  // Only string keys can be indexes (symbols never are)
  if (typeof key !== "string") return null;
  // `"0"` is handled specially because the regex rejects leading zeros
  if (key === "0") return 0;
  if (!INDEX_REGEX.test(key)) return null;
  const index = Number(key);
  return index < 4294967295 ? index : null;
}
|
||||
|
||||
// Matches non-zero integer strings with no leading zeros.
// (`"0"` itself is special-cased in `toIndex`.)
const INDEX_REGEX = /^[1-9]\d*$/;
|
||||
|
||||
/**
 * Convert value to integer.
 * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number#integer_conversion
 *
 * @param {*} value - Value to convert to integer.
 * @returns {number} - Integer
 */
function toInt(value) {
  const int = Math.trunc(Number(value));
  // `NaN` maps to 0; the `=== 0` comparison also normalizes -0 to 0
  return int === 0 || Number.isNaN(int) ? 0 : int;
}
|
||||
52
Frontend-Learner/node_modules/oxc-parser/src-js/raw-transfer/supported.js
generated
vendored
Normal file
52
Frontend-Learner/node_modules/oxc-parser/src-js/raw-transfer/supported.js
generated
vendored
Normal file
|
|
@ -0,0 +1,52 @@
|
|||
import { rawTransferSupported as rawTransferSupportedBinding } from "../bindings.js";
|
||||
|
||||
// Cached result of the support check: `null` = not yet computed, otherwise a boolean.
let rawTransferIsSupported = null;
|
||||
|
||||
/**
 * Returns `true` if the `experimentalRawTransfer` option is supported.
 *
 * Raw transfer is only supported on 64-bit little-endian systems,
 * and NodeJS >= v22.0.0 or Deno >= v2.0.0.
 *
 * Versions of NodeJS prior to v22.0.0 do not support creating an `ArrayBuffer` larger than 4 GiB.
 * Bun (as at v1.2.4) also does not support creating an `ArrayBuffer` larger than 4 GiB.
 * Support on Deno v1 is unknown and it's EOL, so treating Deno before v2.0.0 as unsupported.
 *
 * No easy way of determining pointer width (64 bit or 32 bit) in JS,
 * so call a function on Rust side to find out.
 *
 * @returns {boolean} - `true` if raw transfer is supported on this platform
 */
export function rawTransferSupported() {
  // Result cannot change during the process's lifetime, so compute once and cache
  rawTransferIsSupported ??= rawTransferRuntimeSupported() && rawTransferSupportedBinding();
  return rawTransferIsSupported;
}
|
||||
|
||||
// Checks copied from:
// https://github.com/unjs/std-env/blob/ab15595debec9e9115a9c1d31bc7597a8e71dbfd/src/runtimes.ts
// MIT license: https://github.com/unjs/std-env/blob/ab15595debec9e9115a9c1d31bc7597a8e71dbfd/LICENCE
function rawTransferRuntimeSupported() {
  let g;
  try {
    g = globalThis;
  } catch {
    return false;
  }

  // Bun is unsupported (cannot create `ArrayBuffer` > 4 GiB as at v1.2.4)
  if (g.Bun || g.process?.versions?.bun) return false;

  // Deno: supported from v2.0.0
  if (g.Deno) {
    const versionMatch = Deno.version?.deno?.match(/^(\d+)\./);
    return !!versionMatch && Number(versionMatch[1]) >= 2;
  }

  // NodeJS: supported from v22.0.0. Any other runtime is unsupported.
  if (g.process?.release?.name !== "node") return false;
  const versionMatch = process.version?.match(/^v(\d+)\./);
  return !!versionMatch && Number(versionMatch[1]) >= 22;
}
|
||||
127
Frontend-Learner/node_modules/oxc-parser/src-js/raw-transfer/visitor.js
generated
vendored
Normal file
127
Frontend-Learner/node_modules/oxc-parser/src-js/raw-transfer/visitor.js
generated
vendored
Normal file
|
|
@ -0,0 +1,127 @@
|
|||
import {
|
||||
LEAF_NODE_TYPES_COUNT,
|
||||
NODE_TYPE_IDS_MAP,
|
||||
NODE_TYPES_COUNT,
|
||||
} from "../../generated/lazy/type_ids.js";
|
||||
|
||||
// Getter for private `#visitorsArr` property of `Visitor` class.
// Initialized in the class's static block below, then re-exported as `getVisitorsArr`.
let getVisitorsArrTemp;
|
||||
|
||||
/**
 * Visitor class, used to visit an AST.
 */
export class Visitor {
  // Array of visit functions / `{ enter, exit }` pairs, indexed by node type ID.
  // Built once in the constructor by `createVisitorsArr`.
  #visitorsArr;

  /**
   * Create `Visitor`.
   *
   * Provide an object where keys are names of AST nodes you want to visit,
   * and values are visitor functions which receive AST node objects of that type.
   *
   * Keys can also be postfixed with `:exit` to visit when exiting the node, rather than entering.
   *
   * ```js
   * const visitor = new Visitor({
   *   BinaryExpression(binExpr) {
   *     // Do stuff when entering a `BinaryExpression`
   *   },
   *   'BinaryExpression:exit'(binExpr) {
   *     // Do stuff when exiting a `BinaryExpression`
   *   },
   * });
   * ```
   *
   * @class
   * @param {Object} visitor - Object defining visit functions for AST nodes
   * @returns {Visitor}
   * @throws {Error} - If `visitor` is not an object, a value is not a function,
   *   or a key is not a known node type (validated in `createVisitorsArr`)
   */
  constructor(visitor) {
    this.#visitorsArr = createVisitorsArr(visitor);
  }

  static {
    // Expose read access to the `#visitorsArr` private field to module-level code
    getVisitorsArrTemp = (visitor) => visitor.#visitorsArr;
  }
}
|
||||
|
||||
// Export the getter under its final name (it is assigned inside `Visitor`'s static block above).
export const getVisitorsArr = getVisitorsArrTemp;
|
||||
|
||||
/**
 * Create array of visitors, keyed by node type ID.
 *
 * Each element of array is one of:
 *
 * * No visitor for this type = `null`.
 * * Visitor for leaf node = visit function.
 * * Visitor for non-leaf node = object of form `{ enter, exit }`,
 *   where each property is either a visitor function or `null`.
 *
 * @param {Object} visitor - Visitors object from user
 * @returns {Array<Object|Function|null>} - Array of visitors
 * @throws {Error} - If `visitor` is not an object, a value is not a function,
 *   or a key is not a known node type
 */
function createVisitorsArr(visitor) {
  if (typeof visitor !== "object" || visitor === null) {
    throw new Error("`visitor` must be an object");
  }

  // One slot per node type, all initially empty
  const visitorsArr = new Array(NODE_TYPES_COUNT).fill(null);

  // Populate visitors array from provided object
  for (const [key, visitFn] of Object.entries(visitor)) {
    if (typeof visitFn !== "function") {
      throw new Error(`'${key}' property of \`visitor\` object is not a function`);
    }

    // `Foo:exit` registers an exit visitor for node type `Foo`
    const isExit = key.endsWith(":exit");
    const typeName = isExit ? key.slice(0, -5) : key;

    const typeId = NODE_TYPE_IDS_MAP.get(typeName);
    if (typeId === void 0) throw new Error(`Unknown node type '${typeName}' in \`visitor\` object`);

    if (typeId < LEAF_NODE_TYPES_COUNT) {
      // Leaf node. Store just 1 function; if both enter and exit visitors are
      // provided, combine them so enter runs before exit.
      const existing = visitorsArr[typeId];
      if (existing === null) {
        visitorsArr[typeId] = visitFn;
      } else {
        visitorsArr[typeId] = isExit
          ? combineVisitFunctions(existing, visitFn)
          : combineVisitFunctions(visitFn, existing);
      }
    } else {
      // Non-leaf node. Store `{ enter, exit }` pair.
      let enterExit = visitorsArr[typeId];
      if (enterExit === null) {
        enterExit = visitorsArr[typeId] = { enter: null, exit: null };
      }
      if (isExit) enterExit.exit = visitFn;
      else enterExit.enter = visitFn;
    }
  }

  return visitorsArr;
}
|
||||
|
||||
/**
 * Combine 2 visitor functions into 1 which calls them in order.
 *
 * @param {function} visit1 - 1st visitor function (called first)
 * @param {function} visit2 - 2nd visitor function (called second)
 * @returns {function} - Combined visitor function
 */
function combineVisitFunctions(visit1, visit2) {
  return (node) => {
    visit1(node);
    visit2(node);
  };
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue