Website Structure

This commit is contained in:
supalerk-ar66 2026-01-13 10:46:40 +07:00
parent 62812f2090
commit 71f0676a62
22365 changed files with 4265753 additions and 791 deletions

View file

@ -0,0 +1,597 @@
// prettier-ignore
/* eslint-disable */
// @ts-nocheck
/* auto-generated by NAPI-RS */
import { createRequire } from 'node:module'
const require = createRequire(import.meta.url)
const __dirname = new URL('.', import.meta.url).pathname
const { readFileSync } = require('node:fs')
// Resolved native binding module; stays `null` until one of the loaders below succeeds.
let nativeBinding = null
// Every failed load attempt is recorded here and surfaced together if nothing loads.
const loadErrors = []
/**
 * Detect whether the current Linux system uses musl libc.
 * Probes in order of cost: filesystem, process report, then spawning `ldd`.
 * Always `false` on non-Linux platforms.
 */
const isMusl = () => {
  if (process.platform !== 'linux') {
    return false
  }
  let detected = isMuslFromFilesystem()
  if (detected === null) {
    detected = isMuslFromReport()
  }
  if (detected === null) {
    detected = isMuslFromChildProcess()
  }
  return detected
}
// True when a shared-object path looks like a musl libc artifact.
const isFileMusl = (f) => ['libc.musl-', 'ld-musl-'].some((needle) => f.includes(needle))
/**
 * Check libc flavour by reading `/usr/bin/ldd` and looking for "musl".
 * Returns `null` (unknown) when the file cannot be read.
 */
const isMuslFromFilesystem = () => {
  try {
    const ldd = readFileSync('/usr/bin/ldd', 'utf-8')
    return ldd.includes('musl')
  } catch {
    return null
  }
}
/**
 * Check libc flavour via Node's diagnostic report.
 * Returns `false` when the report shows a glibc runtime, `true` when any
 * shared object looks musl-ish, and `null` when no report is available.
 */
const isMuslFromReport = () => {
  if (typeof process.report?.getReport !== 'function') {
    return null
  }
  // Skip network interface enumeration — it can be slow and is not needed here.
  process.report.excludeNetwork = true
  const report = process.report.getReport()
  if (!report) {
    return null
  }
  if (report.header && report.header.glibcVersionRuntime) {
    return false
  }
  return Array.isArray(report.sharedObjects) && report.sharedObjects.some(isFileMusl)
}
/**
 * Last-resort libc check: run `ldd --version` and inspect its output.
 */
const isMuslFromChildProcess = () => {
  try {
    const versionOutput = require('child_process').execSync('ldd --version', { encoding: 'utf8' })
    return versionOutput.includes('musl')
  } catch {
    // If we reach this case, we don't know if the system is musl or not, so is better to just fallback to false
    return false
  }
}
// Version every `@oxc-parser/binding-*` package must match when
// `NAPI_RS_ENFORCE_VERSION_CHECK` is enabled.
const EXPECTED_BINDING_VERSION = '0.102.0'

/**
 * Try to load the native binding for a single target triple.
 *
 * Attempts the local `./parser.<triple>.node` file first, then the published
 * `@oxc-parser/binding-<triple>` package. When the package loads, its version
 * is checked against `EXPECTED_BINDING_VERSION` if the
 * `NAPI_RS_ENFORCE_VERSION_CHECK` env var is set to anything other than '0'.
 *
 * NOTE(review): the generated code spelled out every `require()` with a
 * literal string; template-literal paths are identical at runtime but are
 * less friendly to bundlers that statically analyze `require` — confirm
 * bundler support before shipping this refactor.
 *
 * Every failure is pushed onto `loadErrors`.
 *
 * @param {string} triple - Platform-arch(-abi) triple, e.g. 'linux-x64-gnu'
 * @returns {Object|undefined} - The loaded binding, or `undefined` if both attempts fail
 */
function loadTarget(triple) {
  try {
    return require(`./parser.${triple}.node`)
  } catch (e) {
    loadErrors.push(e)
  }
  try {
    const pkgName = `@oxc-parser/binding-${triple}`
    const binding = require(pkgName)
    const bindingPackageVersion = require(`${pkgName}/package.json`).version
    if (
      bindingPackageVersion !== EXPECTED_BINDING_VERSION &&
      process.env.NAPI_RS_ENFORCE_VERSION_CHECK &&
      process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0'
    ) {
      throw new Error(`Native binding package version mismatch, expected ${EXPECTED_BINDING_VERSION} but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
    }
    return binding
  } catch (e) {
    loadErrors.push(e)
  }
  return undefined
}

/**
 * Load the native binding matching the current platform, architecture and
 * (on Linux) libc flavour.
 *
 * `NAPI_RS_NATIVE_LIBRARY_PATH` overrides all detection with an explicit path.
 * Unsupported platform/arch combinations record an Error in `loadErrors`.
 *
 * @returns {Object|undefined} - The loaded binding, or `undefined` if nothing loaded
 */
function requireNative() {
  const { platform, arch, env } = process
  // Explicit override: load exactly the library the user points at.
  if (env.NAPI_RS_NATIVE_LIBRARY_PATH) {
    try {
      return require(env.NAPI_RS_NATIVE_LIBRARY_PATH)
    } catch (err) {
      loadErrors.push(err)
    }
    return undefined
  }
  switch (platform) {
    case 'android':
      if (arch === 'arm64') return loadTarget('android-arm64')
      if (arch === 'arm') return loadTarget('android-arm-eabi')
      loadErrors.push(new Error(`Unsupported architecture on Android ${arch}`))
      return undefined
    case 'win32':
      if (arch === 'x64') {
        // Shared-library builds of Node link with the GNU toolchain; plain Node uses MSVC.
        const vars = process.config?.variables
        const isGnu = vars?.shlib_suffix === 'dll.a' || vars?.node_target_type === 'shared_library'
        return loadTarget(isGnu ? 'win32-x64-gnu' : 'win32-x64-msvc')
      }
      if (arch === 'ia32') return loadTarget('win32-ia32-msvc')
      if (arch === 'arm64') return loadTarget('win32-arm64-msvc')
      loadErrors.push(new Error(`Unsupported architecture on Windows: ${arch}`))
      return undefined
    case 'darwin': {
      // Prefer the universal binary; fall back to the arch-specific one.
      const universal = loadTarget('darwin-universal')
      if (universal) return universal
      if (arch === 'x64') return loadTarget('darwin-x64')
      if (arch === 'arm64') return loadTarget('darwin-arm64')
      loadErrors.push(new Error(`Unsupported architecture on macOS: ${arch}`))
      return undefined
    }
    case 'freebsd':
      if (arch === 'x64') return loadTarget('freebsd-x64')
      if (arch === 'arm64') return loadTarget('freebsd-arm64')
      loadErrors.push(new Error(`Unsupported architecture on FreeBSD: ${arch}`))
      return undefined
    case 'linux': {
      const musl = isMusl()
      const libc = musl ? 'musl' : 'gnu'
      if (arch === 'x64') return loadTarget(`linux-x64-${libc}`)
      if (arch === 'arm64') return loadTarget(`linux-arm64-${libc}`)
      if (arch === 'arm') return loadTarget(musl ? 'linux-arm-musleabihf' : 'linux-arm-gnueabihf')
      if (arch === 'loong64') return loadTarget(`linux-loong64-${libc}`)
      if (arch === 'riscv64') return loadTarget(`linux-riscv64-${libc}`)
      if (arch === 'ppc64') return loadTarget('linux-ppc64-gnu')
      if (arch === 's390x') return loadTarget('linux-s390x-gnu')
      loadErrors.push(new Error(`Unsupported architecture on Linux: ${arch}`))
      return undefined
    }
    case 'openharmony':
      if (arch === 'arm64') return loadTarget('openharmony-arm64')
      if (arch === 'x64') return loadTarget('openharmony-x64')
      if (arch === 'arm') return loadTarget('openharmony-arm')
      loadErrors.push(new Error(`Unsupported architecture on OpenHarmony: ${arch}`))
      return undefined
    default:
      loadErrors.push(new Error(`Unsupported OS: ${platform}, architecture: ${arch}`))
      return undefined
  }
}
// Try the native (.node) load path first.
nativeBinding = requireNative()
// Fall back to the WASI build when nothing native loaded — or unconditionally
// retry via WASI when `NAPI_RS_FORCE_WASI` is set.
if (!nativeBinding || process.env.NAPI_RS_FORCE_WASI) {
  let wasiBinding = null
  let wasiBindingError = null
  try {
    wasiBinding = require('./parser.wasi.cjs')
    nativeBinding = wasiBinding
  } catch (err) {
    // Only record the error when WASI was explicitly requested;
    // otherwise this is a silent best-effort fallback.
    if (process.env.NAPI_RS_FORCE_WASI) {
      wasiBindingError = err
    }
  }
  if (!nativeBinding) {
    try {
      wasiBinding = require('@oxc-parser/binding-wasm32-wasi')
      nativeBinding = wasiBinding
    } catch (err) {
      if (process.env.NAPI_RS_FORCE_WASI) {
        // Reaching here with FORCE_WASI set means the local `.cjs` attempt
        // already failed, so `wasiBindingError` is non-null.
        wasiBindingError.cause = err
        loadErrors.push(err)
      }
    }
  }
  // `NAPI_RS_FORCE_WASI=error` turns a missing WASI binding into a hard failure,
  // even if a native binding was found.
  if (process.env.NAPI_RS_FORCE_WASI === 'error' && !wasiBinding) {
    const error = new Error('WASI binding not found and NAPI_RS_FORCE_WASI is set to error')
    error.cause = wasiBindingError
    throw error
  }
}
// Runtimes exposing `process.versions.webcontainer` (browser-hosted Node
// containers) get a dedicated JS fallback module.
if (!nativeBinding && globalThis.process?.versions?.["webcontainer"]) {
  try {
    nativeBinding = require('./webcontainer-fallback.cjs');
  } catch (err) {
    loadErrors.push(err)
  }
}
if (!nativeBinding) {
  if (loadErrors.length > 0) {
    throw new Error(
      `Cannot find native binding. ` +
      `npm has a bug related to optional dependencies (https://github.com/npm/cli/issues/4828). ` +
      'Please try `npm i` again after removing both package-lock.json and node_modules directory.',
      {
        // Chain every collected error together via `cause` so no detail is lost.
        cause: loadErrors.reduce((err, cur) => {
          cur.cause = err
          return cur
        }),
      },
    )
  }
  throw new Error(`Failed to load native binding`)
}
// Re-export the binding's public API as named ESM exports.
const { Severity, ParseResult, ExportExportNameKind, ExportImportNameKind, ExportLocalNameKind, ImportNameKind, parse, parseSync, rawTransferSupported } = nativeBinding
export { Severity }
export { ParseResult }
export { ExportExportNameKind }
export { ExportImportNameKind }
export { ExportLocalNameKind }
export { ImportNameKind }
export { parse }
export { parseSync }
export { rawTransferSupported }
// Raw-transfer entry points, consumed by the lazy-deserialization modules.
const { getBufferOffset, parseRaw, parseRawSync } = nativeBinding
export { getBufferOffset, parseRaw, parseRawSync }

View file

@ -0,0 +1,295 @@
/* auto-generated by NAPI-RS */
/* eslint-disable */
import type { Program } from "@oxc-project/types";
import type { VisitorObject } from "../generated/visit/visitor.d.ts";
export * from "@oxc-project/types";
export { VisitorObject };
/** Map from AST node type name to the names of its visitable child properties. */
export const visitorKeys: Record<string, string[]>;
/** Walks a `Program` AST, invoking the callbacks of the supplied visitor object. */
export class Visitor {
  constructor(visitor: VisitorObject);
  visit(program: Program): void;
}
/** A comment collected during parsing. */
export interface Comment {
  type: 'Line' | 'Block'
  value: string
  start: number
  end: number
}
/** A labeled source span attached to an `OxcError`. */
export interface ErrorLabel {
  message: string | null
  start: number
  end: number
}
/** A diagnostic produced by the parser. */
export interface OxcError {
  severity: Severity
  message: string
  labels: Array<ErrorLabel>
  helpMessage: string | null
  codeframe: string | null
}
/** Severity level of an `OxcError`. */
export declare const enum Severity {
  Error = 'Error',
  Warning = 'Warning',
  Advice = 'Advice'
}
/** Result of parsing a file; properties are exposed as getters. */
export declare class ParseResult {
  get program(): import("@oxc-project/types").Program
  get module(): EcmaScriptModule
  get comments(): Array<Comment>
  get errors(): Array<OxcError>
}
/** A dynamic `import(...)` expression found in the source. */
export interface DynamicImport {
  start: number
  end: number
  moduleRequest: Span
}
/** Static module information collected while parsing. */
export interface EcmaScriptModule {
  /**
   * Has ESM syntax.
   *
   * i.e. `import` and `export` statements, and `import.meta`.
   *
   * Dynamic imports `import('foo')` are ignored since they can be used in non-ESM files.
   */
  hasModuleSyntax: boolean
  /** Import statements. */
  staticImports: Array<StaticImport>
  /** Export statements. */
  staticExports: Array<StaticExport>
  /** Dynamic import expressions. */
  dynamicImports: Array<DynamicImport>
  /** Span positions of `import.meta`. */
  importMetas: Array<Span>
}
/** How an exported binding is named in the `export` clause. */
export interface ExportExportName {
  kind: ExportExportNameKind
  name: string | null
  start: number | null
  end: number | null
}
export declare const enum ExportExportNameKind {
  /** `export { name }` */
  Name = 'Name',
  /** `export default expression` */
  Default = 'Default',
  /** `export * from "mod"` */
  None = 'None'
}
/** How a re-export refers to the name in the source module. */
export interface ExportImportName {
  kind: ExportImportNameKind
  name: string | null
  start: number | null
  end: number | null
}
export declare const enum ExportImportNameKind {
  /** `export { name }` */
  Name = 'Name',
  /** `export * as ns from "mod"` */
  All = 'All',
  /** `export * from "mod"` */
  AllButDefault = 'AllButDefault',
  /** Does not have a specifier. */
  None = 'None'
}
/** The local (in-module) name of an exported binding, if any. */
export interface ExportLocalName {
  kind: ExportLocalNameKind
  name: string | null
  start: number | null
  end: number | null
}
export declare const enum ExportLocalNameKind {
  /** `export { name }` */
  Name = 'Name',
  /** `export default expression` */
  Default = 'Default',
  /**
   * If the exported value is not locally accessible from within the module.
   * `export default function () {}`
   */
  None = 'None'
}
/** The name an import binding refers to in the source module. */
export interface ImportName {
  kind: ImportNameKind
  name: string | null
  start: number | null
  end: number | null
}
export declare const enum ImportNameKind {
  /** `import { x } from "mod"` */
  Name = 'Name',
  /** `import * as ns from "mod"` */
  NamespaceObject = 'NamespaceObject',
  /** `import defaultExport from "mod"` */
  Default = 'Default'
}
/**
 * Parse asynchronously.
 *
 * Note: This function can be slower than `parseSync` due to the overhead of spawning a thread.
 */
export declare function parse(filename: string, sourceText: string, options?: ParserOptions | undefined | null): Promise<ParseResult>
/** Options controlling how source text is parsed. */
export interface ParserOptions {
  /** Treat the source text as `js`, `jsx`, `ts`, `tsx` or `dts`. */
  lang?: 'js' | 'jsx' | 'ts' | 'tsx' | 'dts'
  /** Treat the source text as `script` or `module` code. */
  sourceType?: 'script' | 'module' | 'unambiguous' | undefined
  /**
   * Return an AST which includes TypeScript-related properties, or excludes them.
   *
   * `'js'` is default for JS / JSX files.
   * `'ts'` is default for TS / TSX files.
   * The type of the file is determined from `lang` option, or extension of provided `filename`.
   */
  astType?: 'js' | 'ts'
  /**
   * Controls whether the `range` property is included on AST nodes.
   * The `range` property is a `[number, number]` which indicates the start/end offsets
   * of the node in the file contents.
   *
   * @default false
   */
  range?: boolean
  /**
   * Emit `ParenthesizedExpression` and `TSParenthesizedType` in AST.
   *
   * If this option is true, parenthesized expressions are represented by
   * (non-standard) `ParenthesizedExpression` and `TSParenthesizedType` nodes that
   * have a single `expression` property containing the expression inside parentheses.
   *
   * @default true
   */
  preserveParens?: boolean
  /**
   * Produce semantic errors with an additional AST pass.
   * Semantic errors depend on symbols and scopes, which the parser itself does not construct.
   * This adds a small performance overhead.
   *
   * @default false
   */
  showSemanticErrors?: boolean
}
/** Parse synchronously. */
export declare function parseSync(filename: string, sourceText: string, options?: ParserOptions | undefined | null): ParseResult
/** Returns `true` if raw transfer is supported on this platform. */
export declare function rawTransferSupported(): boolean
/** A half-open `[start, end)` byte-offset range into the source text. */
export interface Span {
  start: number
  end: number
}
/** A static `export` statement and its individual export entries. */
export interface StaticExport {
  start: number
  end: number
  entries: Array<StaticExportEntry>
}
/** One exported binding within a static export statement. */
export interface StaticExportEntry {
  start: number
  end: number
  moduleRequest: ValueSpan | null
  /** The name under which the desired binding is exported by the module. */
  importName: ExportImportName
  /** The name used to export this binding by this module. */
  exportName: ExportExportName
  /** The name that is used to locally access the exported value from within the importing module. */
  localName: ExportLocalName
  /**
   * Whether the export is a TypeScript `export type`.
   *
   * Examples:
   *
   * ```ts
   * export type * from 'mod';
   * export type * as ns from 'mod';
   * export type { foo };
   * export { type foo };
   * export type { foo } from 'mod';
   * ```
   */
  isType: boolean
}
/** A static `import` statement and its individual import entries. */
export interface StaticImport {
  /** Start of import statement. */
  start: number
  /** End of import statement. */
  end: number
  /**
   * Import source.
   *
   * ```js
   * import { foo } from "mod";
   * //                   ^^^
   * ```
   */
  moduleRequest: ValueSpan
  /**
   * Import specifiers.
   *
   * Empty for `import "mod"`.
   */
  entries: Array<StaticImportEntry>
}
/** One imported binding within a static import statement. */
export interface StaticImportEntry {
  /**
   * The name under which the desired binding is exported by the module.
   *
   * ```js
   * import { foo } from "mod";
   * //       ^^^
   * import { foo as bar } from "mod";
   * //       ^^^
   * ```
   */
  importName: ImportName
  /**
   * The name that is used to locally access the imported value from within the importing module.
   * ```js
   * import { foo } from "mod";
   * //       ^^^
   * import { foo as bar } from "mod";
   * //              ^^^
   * ```
   */
  localName: ValueSpan
  /**
   * Whether this binding is for a TypeScript type-only import.
   *
   * `true` for the following imports:
   * ```ts
   * import type { foo } from "mod";
   * import { type foo } from "mod";
   * ```
   */
  isType: boolean
}
/** A source span together with the string value it covers. */
export interface ValueSpan {
  value: string
  start: number
  end: number
}

View file

@ -0,0 +1,108 @@
import { createRequire } from "node:module";
import { parse as parseBinding, parseSync as parseSyncBinding } from "./bindings.js";
import { wrap } from "./wrap.js";
export { default as visitorKeys } from "../generated/visit/keys.js";
export { Visitor } from "./visit/index.js";
export {
ExportExportNameKind,
ExportImportNameKind,
ExportLocalNameKind,
ImportNameKind,
ParseResult,
Severity,
} from "./bindings.js";
export { rawTransferSupported } from "./raw-transfer/supported.js";
// CommonJS `require` for lazily pulling in the raw-transfer modules below.
const require = createRequire(import.meta.url);
// Lazily loaded as needed.
// `parseSyncRaw` / `parseSyncLazy` double as "already loaded?" flags for their
// respective modules (`null` = not yet loaded).
let parseSyncRaw = null,
  parseRaw,
  parseSyncLazy = null,
  parseLazy,
  LazyVisitor;
/**
 * Lazy-load code related to raw transfer.
 * No-op after the first successful load.
 * @returns {undefined}
 */
function loadRawTransfer() {
  if (parseSyncRaw !== null) return;
  const eager = require("./raw-transfer/eager.js");
  parseSyncRaw = eager.parseSyncRaw;
  parseRaw = eager.parse;
}
/**
 * Lazy-load code related to raw transfer lazy deserialization.
 * No-op after the first successful load.
 * @returns {undefined}
 */
function loadRawTransferLazy() {
  if (parseSyncLazy !== null) return;
  const lazy = require("./raw-transfer/lazy.js");
  parseSyncLazy = lazy.parseSyncLazy;
  parseLazy = lazy.parse;
  LazyVisitor = lazy.Visitor;
}
/**
 * Parse JS/TS source synchronously on current thread.
 *
 * @param {string} filename - Filename
 * @param {string} sourceText - Source text of file
 * @param {Object|undefined} options - Parsing options
 * @returns {Object} - Object with property getters for `program`, `module`, `comments`, and `errors`
 * @throws {Error} - If `experimentalRawTransfer` or `experimentalLazy` option is enabled,
 *   and raw transfer is not supported on this platform
 */
export function parseSync(filename, sourceText, options) {
  // The experimental raw-transfer paths bypass the default binding + wrap path.
  if (options?.experimentalRawTransfer) {
    loadRawTransfer();
    return parseSyncRaw(filename, sourceText, options);
  }
  if (options?.experimentalLazy) {
    loadRawTransferLazy();
    return parseSyncLazy(filename, sourceText, options);
  }
  const result = parseSyncBinding(filename, sourceText, options);
  return wrap(result);
}
/**
 * Parse JS/TS source asynchronously on a separate thread.
 *
 * Note that not all of the workload can happen on a separate thread.
 * Parsing on Rust side does happen in a separate thread, but deserialization of the AST to JS objects
 * has to happen on current thread. This synchronous deserialization work typically outweighs
 * the asynchronous parsing by a factor of between 3 and 20.
 *
 * i.e. the majority of the workload cannot be parallelized by using this method.
 *
 * Generally `parseSync` is preferable to use as it does not have the overhead of spawning a thread.
 * If you need to parallelize parsing multiple files, it is recommended to use worker threads.
 *
 * @param {string} filename - Filename
 * @param {string} sourceText - Source text of file
 * @param {Object|undefined} options - Parsing options
 * @returns {Object} - Object with property getters for `program`, `module`, `comments`, and `errors`
 * @throws {Error} - If `experimentalRawTransfer` or `experimentalLazy` option is enabled,
 *   and raw transfer is not supported on this platform
 */
export async function parse(filename, sourceText, options) {
  // The experimental raw-transfer paths bypass the default binding + wrap path.
  if (options?.experimentalRawTransfer) {
    loadRawTransfer();
    return parseRaw(filename, sourceText, options);
  }
  if (options?.experimentalLazy) {
    loadRawTransferLazy();
    return parseLazy(filename, sourceText, options);
  }
  const result = await parseBinding(filename, sourceText, options);
  return wrap(result);
}
/**
 * Get `Visitor` class to construct visitors with.
 * Triggers the lazy load of the raw-transfer lazy-deserialization module,
 * which defines the class.
 * @returns {function} - `Visitor` class
 */
export function experimentalGetLazyVisitor() {
  loadRawTransferLazy();
  return LazyVisitor;
}

View file

@ -0,0 +1,276 @@
import os from "node:os";
import { BUFFER_ALIGN, BUFFER_SIZE, IS_TS_FLAG_POS } from "../../generated/constants.js";
import {
getBufferOffset,
parseRaw as parseRawBinding,
parseRawSync as parseRawSyncBinding,
} from "../bindings.js";
import { rawTransferSupported } from "./supported.js";
// Throw an error if running on a platform which raw transfer doesn't support.
// Throwing at module load time means every entry point of this module is guarded at once.
//
// Note: This module is lazy-loaded only when user calls `parseSync` or `parseAsync` with
// `experimentalRawTransfer` or `experimentalLazy` options, or calls `experimentalGetLazyVisitor`.
if (!rawTransferSupported()) {
  throw new Error(
    "`experimentalRawTransfer` and `experimentalLazy` options are not supported " +
      "on 32-bit or big-endian systems, versions of NodeJS prior to v22.0.0, " +
      "versions of Deno prior to v2.0.0, or other runtimes",
  );
}
/**
 * Parse JS/TS source synchronously on current thread using raw transfer.
 *
 * Writes the source into a (possibly cached) buffer, has Rust parse into that same buffer,
 * then converts the raw result to a JS object with the provided `convert` function.
 *
 * This function contains logic shared by both `parseSyncRaw` and `parseSyncLazy`.
 *
 * @param {string} filename - Filename
 * @param {string} sourceText - Source text of file
 * @param {Object} options - Parsing options
 * @param {function} convert - Function to convert the buffer returned from Rust into a JS object
 * @returns {Object} - The return value of `convert`
 */
export function parseSyncRawImpl(filename, sourceText, options, convert) {
  const prepared = prepareRaw(sourceText);
  parseRawSyncBinding(filename, prepared.buffer, prepared.sourceByteLen, options);
  return convert(prepared.buffer, sourceText, prepared.sourceByteLen, options);
}
// User should not schedule more async tasks than there are available CPUs, as it hurts performance,
// but it's a common mistake in async JS code to do exactly that.
//
// That anti-pattern looks like this when applied to Oxc:
//
// ```js
// const asts = await Promise.all(
// files.map(
// async (filename) => {
// const sourceText = await fs.readFile(filename, 'utf8');
// const ast = await oxc.parseAsync(filename, sourceText);
// return ast;
// }
// )
// );
// ```
//
// In most cases, that'd just result in a bit of degraded performance, and higher memory use because
// of loading sources into memory prematurely.
//
// However, raw transfer uses a 6 GiB buffer for each parsing operation.
// Most of the memory pages in those buffers are never touched, so this does not consume a huge amount
// of physical memory, but it does still consume virtual memory.
//
// If we allowed creating a large number of 6 GiB buffers simultaneously, it would quickly consume
// virtual memory space and risk memory exhaustion. The code above would exhaust all of bottom half
// (heap) of 48-bit virtual memory space if `files.length >= 21_845`. This is not a number which
// is unrealistic in real world code.
//
// To guard against this possibility, we implement a simple queue.
// No more than `os.availableParallelism()` files can be parsed simultaneously, and any further calls to
// `parseAsyncRaw` will be put in a queue, to execute once other tasks complete.
//
// Fallback to `os.cpus().length` on versions of NodeJS prior to v18.14.0, which do not support
// `os.availableParallelism`.
// Number of parse tasks allowed to run concurrently (see strategy comment above)
let availableCores = os.availableParallelism ? os.availableParallelism() : os.cpus().length;
// `resolve` callbacks of tasks waiting for a free core (FIFO order)
const queue = [];
/**
 * Parse JS/TS source asynchronously using raw transfer.
 *
 * Convert the buffer returned by Rust to a JS object with provided `convert` function.
 *
 * Queues up parsing operations if more calls than number of CPU cores (see above).
 *
 * This function contains logic shared by both `parseAsyncRaw` and `parseAsyncLazy`.
 *
 * @param {string} filename - Filename
 * @param {string} sourceText - Source text of file
 * @param {Object} options - Parsing options
 * @param {function} convert - Function to convert the buffer returned from Rust into a JS object
 * @returns {Promise<Object>} - The return value of `convert`
 */
export async function parseAsyncRawImpl(filename, sourceText, options, convert) {
  // Wait for a free CPU core if all CPUs are currently busy.
  //
  // Note: `availableCores` is NOT decremented if have to wait in the queue first,
  // and NOT incremented when parsing completes and it runs next task in the queue.
  //
  // This is to avoid a race condition if `parseAsyncRaw` is called during the microtick in between
  // `resolve` being called below, and the promise resolving here. In that case the new task could
  // start running, and then the promise resolves, and the queued task also starts running.
  // We'd then have `availableParallelism() + 1` tasks running simultaneously. Potentially, this could
  // happen repeatedly, with the number of tasks running simultaneously ever-increasing.
  if (availableCores === 0) {
    // All CPU cores are busy. Put this task in queue and wait for capacity to become available.
    await new Promise((resolve, _) => {
      queue.push(resolve);
    });
  } else {
    // A CPU core is available. Mark core as busy, and run parsing now.
    availableCores--;
  }
  // Parse. Rust parses on a separate thread; conversion below runs on current thread.
  const { buffer, sourceByteLen } = prepareRaw(sourceText);
  await parseRawBinding(filename, buffer, sourceByteLen, options);
  const data = convert(buffer, sourceText, sourceByteLen, options);
  // Free the CPU core
  if (queue.length > 0) {
    // Some further tasks waiting in queue. Run the next one.
    // Do not increment `availableCores` (see above).
    const resolve = queue.shift();
    resolve();
  } else {
    // No tasks waiting in queue. This CPU is now free.
    availableCores++;
  }
  return data;
}
// Backing allocation is `BUFFER_ALIGN` bytes larger than the buffer itself,
// so an aligned `BUFFER_SIZE` slice can always be found within it (see `createBuffer` below)
const ARRAY_BUFFER_SIZE = BUFFER_SIZE + BUFFER_ALIGN;
const ONE_GIB = 1 << 30;
// We keep a cache of buffers for raw transfer, so we can reuse them as much as possible.
//
// When processing multiple files, it's ideal if can reuse an existing buffer, as it's more likely to
// be warm in CPU cache, it avoids allocations, and it saves work for the garbage collector.
//
// However, we also don't want to keep a load of large buffers around indefinitely using up memory,
// if they're not going to be used again.
//
// We have no knowledge of what pattern over time user may process files in (could be lots in quick
// succession, or more occasionally in a long-running process). So we try to use flexible caching
// strategy which is adaptable to many usage patterns.
//
// We use a 2-tier cache.
// Tier 1 uses strong references, tier 2 uses weak references.
//
// When parsing is complete and the buffer is no longer in use, push it to `buffers` (tier 1 cache).
// Set a timer to clear the cache when no activity for 10 seconds.
//
// When the timer expires, move all the buffers from tier 1 cache into `oldBuffers` (tier 2).
// They are stored there as `WeakRef`s, so the garbage collector is free to reclaim them.
//
// On the next call to `parseSync` or `parseAsync`, promote any buffers in tier 2 cache which were not
// already garbage collected back into tier 1 cache. This is on assumption that parsing one file
// indicates parsing as a whole is an ongoing process, and there will likely be further calls to
// `parseSync` / `parseAsync` in future.
//
// The weak tier 2 cache is because V8 does not necessarily free memory as soon as it's able to be
// freed. We don't want to block it from freeing memory, but if it's not done that yet, there's no
// point creating a new buffer, when one already exists.
const CLEAR_BUFFERS_TIMEOUT = 10_000; // 10 seconds
// Tier 1 cache (strong refs) and tier 2 cache (`WeakRef`s) — see strategy comment above
const buffers = [],
  oldBuffers = [];
// Handle of the inactivity timer which downgrades tier 1 cache to tier 2; `null` when no timer pending
let clearBuffersTimeout = null;
const textEncoder = new TextEncoder();
/**
 * Get a buffer (from cache if possible), and copy source text into it.
 *
 * @param {string} sourceText - Source text of file
 * @returns {Object} - Object of form `{ buffer, sourceByteLen }`.
 *   - `buffer`: `Uint8Array` with source text written at its start.
 *     (Rust side later writes the AST in raw form into this same buffer.)
 *   - `sourceByteLen`: Length of source text in UTF-8 bytes
 *     (which may not be equal to `sourceText.length` if source contains non-ASCII characters).
 * @throws {Error} - If source text could not be fully encoded into the buffer
 */
export function prepareRaw(sourceText) {
  // Cancel timeout for clearing buffers — activity means cache should stay warm
  if (clearBuffersTimeout !== null) {
    clearTimeout(clearBuffersTimeout);
    clearBuffersTimeout = null;
  }
  // Revive any discarded buffers which have not yet been garbage collected
  if (oldBuffers.length > 0) {
    const revivedBuffers = [];
    for (let oldBuffer of oldBuffers) {
      // `deref()` returns `undefined` if the buffer was already garbage collected
      oldBuffer = oldBuffer.deref();
      if (oldBuffer !== undefined) revivedBuffers.push(oldBuffer);
    }
    oldBuffers.length = 0;
    if (revivedBuffers.length > 0) buffers.unshift(...revivedBuffers);
  }
  // Reuse existing buffer, or create a new one
  const buffer = buffers.length > 0 ? buffers.pop() : createBuffer();
  // Write source into start of buffer.
  // `TextEncoder` cannot write into a `Uint8Array` larger than 1 GiB,
  // so create a view into buffer of this size to write into.
  const sourceBuffer = new Uint8Array(buffer.buffer, buffer.byteOffset, ONE_GIB);
  const { read, written: sourceByteLen } = textEncoder.encodeInto(sourceText, sourceBuffer);
  // `read < sourceText.length` means encoding stopped early — source didn't fit in the view
  if (read !== sourceText.length) throw new Error("Failed to write source text into buffer");
  return { buffer, sourceByteLen };
}
/**
 * Get if AST should be parsed as JS or TS.
 * Rust side sets a `bool` in this position in buffer which is `true` (non-zero) if TS.
 *
 * @param {Uint8Array} buffer - Buffer containing AST in raw form
 * @returns {boolean} - `true` if AST is JS, `false` if TS
 */
export function isJsAst(buffer) {
  return buffer[IS_TS_FLAG_POS] === 0;
}
/**
 * Return buffer to the tier 1 cache, to be reused.
 *
 * Also (re)starts the inactivity timer which downgrades cached buffers
 * to the weakly-held tier 2 cache after `CLEAR_BUFFERS_TIMEOUT` ms.
 *
 * @param {Uint8Array} buffer - Buffer
 * @returns {undefined}
 */
export function returnBufferToCache(buffer) {
  // Restart the inactivity timer
  if (clearBuffersTimeout !== null) clearTimeout(clearBuffersTimeout);
  buffers.push(buffer);
  clearBuffersTimeout = setTimeout(clearBuffersCache, CLEAR_BUFFERS_TIMEOUT);
  // Don't let the pending timer keep the process alive
  clearBuffersTimeout.unref();
}
/**
 * Downgrade buffers in tier 1 cache (`buffers`) to tier 2 (`oldBuffers`).
 *
 * Each buffer is wrapped in a `WeakRef`, so the garbage collector is free to reclaim it.
 *
 * @returns {undefined}
 */
function clearBuffersCache() {
  clearBuffersTimeout = null;
  oldBuffers.push(...buffers.map((buffer) => new WeakRef(buffer)));
  buffers.length = 0;
}
/**
 * Create a `Uint8Array` which is 2 GiB in size, with its start aligned on 4 GiB.
 *
 * Achieve this by creating a 6 GiB `ArrayBuffer`, getting the offset within it that's aligned
 * to 4 GiB (computed on Rust side via `getBufferOffset`), chopping off that number of bytes
 * from the start, and shortening to 2 GiB.
 *
 * It's always possible to obtain a 2 GiB slice aligned on 4 GiB within a 6 GiB buffer,
 * no matter how the 6 GiB buffer is aligned.
 *
 * Note: On systems with virtual memory, this only consumes 6 GiB of *virtual* memory.
 * It does not consume physical memory until data is actually written to the `Uint8Array`.
 * Physical memory consumed corresponds to the quantity of data actually written.
 *
 * @returns {Uint8Array} - Buffer, with extra `uint32` and `float64` typed-array views attached
 */
function createBuffer() {
  const backing = new ArrayBuffer(ARRAY_BUFFER_SIZE);
  // Rust side reports how far into `backing` the 4 GiB-aligned region starts
  const alignedOffset = getBufferOffset(new Uint8Array(backing));
  const view = new Uint8Array(backing, alignedOffset, BUFFER_SIZE);
  // Attach wider typed-array views over the same memory, for reading 4-byte / 8-byte values
  view.uint32 = new Uint32Array(backing, alignedOffset, BUFFER_SIZE / 4);
  view.float64 = new Float64Array(backing, alignedOffset, BUFFER_SIZE / 8);
  return view;
}

View file

@ -0,0 +1,119 @@
import { createRequire } from "node:module";
import { isJsAst, parseAsyncRawImpl, parseSyncRawImpl, returnBufferToCache } from "./common.js";
const require = createRequire(import.meta.url);
/**
 * Parse JS/TS source synchronously on current thread, using raw transfer to speed up deserialization.
 *
 * @param {string} filename - Filename
 * @param {string} sourceText - Source text of file
 * @param {Object} options - Parsing options
 * @returns {Object} - Object with property getters for `program`, `module`, `comments`, and `errors`
 */
export function parseSyncRaw(filename, sourceText, options) {
  // Eagerly deserializes the whole AST via `deserialize` (below)
  return parseSyncRawImpl(filename, sourceText, options, deserialize);
}
/**
 * Parse JS/TS source asynchronously, using raw transfer to speed up deserialization.
 *
 * Note that not all of the workload can happen on a separate thread.
 * Parsing on Rust side does happen in a separate thread, but deserialization of the AST to JS objects
 * has to happen on current thread. This synchronous deserialization work typically outweighs
 * the asynchronous parsing by a factor of around 3.
 *
 * i.e. the majority of the workload cannot be parallelized by using this method.
 *
 * Generally `parseSyncRaw` is preferable to use as it does not have the overhead of spawning a thread.
 * If you need to parallelize parsing multiple files, it is recommended to use worker threads.
 *
 * @param {string} filename - Filename
 * @param {string} sourceText - Source text of file
 * @param {Object} options - Parsing options
 * @returns {Promise<Object>} - Object with property getters for `program`, `module`, `comments`, and `errors`
 */
export function parse(filename, sourceText, options) {
  return parseAsyncRawImpl(filename, sourceText, options, deserialize);
}
// Deserializers are large files, so lazy-loaded.
// `deserialize` functions are stored in this array once loaded.
// Index into these arrays is `isJs * 1 + range * 2 + experimentalParent * 4`.
const deserializers = [null, null, null, null, null, null, null, null];
// Generated module names, ordered to match the index formula above:
// even index = TS, odd = JS; +2 = with `range`; +4 = with `parent`.
const deserializerNames = [
  "ts",
  "js",
  "ts_range",
  "js_range",
  "ts_parent",
  "js_parent",
  "ts_range_parent",
  "js_range_parent",
];
/**
 * Deserialize whole AST from buffer.
 *
 * @param {Uint8Array} buffer - Buffer containing AST in raw form
 * @param {string} sourceText - Source for the file
 * @param {number} sourceByteLen - Length of source text in UTF-8 bytes
 * @param {Object} options - Parsing options
 * @returns {Object} - Object with property getters for `program`, `module`, `comments`, and `errors`
 */
function deserialize(buffer, sourceText, sourceByteLen, options) {
  const isJs = isJsAst(buffer),
    range = !!options.range,
    parent = !!options.experimentalParent;
  // Lazy load deserializer, and deserialize buffer to JS objects.
  // Index calculation must match the ordering of `deserializerNames` above.
  const deserializerIndex = +isJs | (+range << 1) | (+parent << 2);
  let deserializeThis = deserializers[deserializerIndex];
  if (deserializeThis === null) {
    deserializeThis = deserializers[deserializerIndex] = require(
      `../../generated/deserialize/${deserializerNames[deserializerIndex]}.js`,
    ).deserialize;
  }
  const data = deserializeThis(buffer, sourceText, sourceByteLen);
  // Add a line comment for hashbang if JS.
  // Do not add comment if TS, to match `@typescript-eslint/parser`.
  // See https://github.com/oxc-project/oxc/blob/ea784f5f082e4c53c98afde9bf983afd0b95e44e/napi/parser/src/lib.rs#L106-L130
  if (isJs) {
    const { hashbang } = data.program;
    if (hashbang !== null) {
      data.comments.unshift(
        range
          ? {
              type: "Line",
              value: hashbang.value,
              start: hashbang.start,
              end: hashbang.end,
              range: hashbang.range,
            }
          : { type: "Line", value: hashbang.value, start: hashbang.start, end: hashbang.end },
      );
    }
  }
  // Return buffer to cache, to be reused
  returnBufferToCache(buffer);
  // We cannot lazily deserialize in the getters, because the buffer might be re-used to parse
  // another file before the getter is called
  return {
    get program() {
      return data.program;
    },
    get module() {
      return data.module;
    },
    get comments() {
      return data.comments;
    },
    get errors() {
      return data.errors;
    },
  };
}

View file

@ -0,0 +1,11 @@
// Unique token which is not exposed publicly.
// Internal code passes it to restricted class constructors to prove the call is not from user code.
export const TOKEN = {};

/**
 * Throw error when restricted class constructor is called by user code.
 *
 * @returns {never}
 * @throws {Error} - Always
 */
export function constructorError() {
  throw new Error("Constructor is for internal use only");
}

View file

@ -0,0 +1,153 @@
import { DATA_POINTER_POS_32, PROGRAM_OFFSET } from "../../generated/constants.js";
import { RawTransferData } from "../../generated/lazy/constructors.js";
import { walkProgram } from "../../generated/lazy/walk.js";
import { parseAsyncRawImpl, parseSyncRawImpl, returnBufferToCache } from "./common.js";
import { TOKEN } from "./lazy-common.js";
import { getVisitorsArr } from "./visitor.js";
export { Visitor } from "./visitor.js";
/**
 * Parse JS/TS source synchronously on current thread.
 *
 * The data in buffer is not deserialized. Is deserialized to JS objects lazily, when accessing the
 * properties of objects.
 *
 * e.g. `program` in returned object is an instance of `Program` class, with getters for `start`, `end`,
 * `body` etc.
 *
 * Returned object contains a `visit` function which can be used to visit the AST with a `Visitor`
 * (`Visitor` class can be obtained by calling `experimentalGetLazyVisitor()`).
 *
 * Returned object contains a `dispose` method. When finished with this AST, it's advisable to call
 * `dispose`, to return the buffer to the cache, so it can be reused.
 * Garbage collector should do this anyway at some point, but on an unpredictable schedule,
 * so it's preferable to call `dispose` manually, to ensure the buffer can be reused immediately.
 *
 * @param {string} filename - Filename
 * @param {string} sourceText - Source text of file
 * @param {Object} options - Parsing options
 * @returns {Object} - Object with property getters for `program`, `module`, `comments`, and `errors`,
 *   and `dispose` and `visit` methods
 */
export function parseSyncLazy(filename, sourceText, options) {
  // `construct` wraps the raw buffer in lazily-deserializing objects (below)
  return parseSyncRawImpl(filename, sourceText, options, construct);
}
/**
 * Parse JS/TS source asynchronously on a separate thread.
 *
 * The data in buffer is not deserialized. Is deserialized to JS objects lazily, when accessing the
 * properties of objects.
 *
 * e.g. `program` in returned object is an instance of `Program` class, with getters for `start`, `end`,
 * `body` etc.
 *
 * Because this function does not deserialize the AST, unlike `parse`, very little work happens
 * on current thread in this function. Deserialization work only occurs when properties of the objects
 * are accessed.
 *
 * Returned object contains a `visit` function which can be used to visit the AST with a `Visitor`
 * (`Visitor` class can be obtained by calling `experimentalGetLazyVisitor()`).
 *
 * Returned object contains a `dispose` method. When finished with this AST, it's advisable to call
 * `dispose`, to return the buffer to the cache, so it can be reused.
 * Garbage collector should do this anyway at some point, but on an unpredictable schedule,
 * so it's preferable to call `dispose` manually, to ensure the buffer can be reused immediately.
 *
 * @param {string} filename - Filename
 * @param {string} sourceText - Source text of file
 * @param {Object} options - Parsing options
 * @returns {Promise<Object>} - Object with property getters for `program`, `module`, `comments`,
 *   and `errors`, and `dispose` and `visit` methods
 */
export function parse(filename, sourceText, options) {
  return parseAsyncRawImpl(filename, sourceText, options, construct);
}
// Registry for buffers which are held by lazily-deserialized ASTs.
// Returns buffer to cache when the `ast` wrapper is garbage collected.
//
// Check for existence of `FinalizationRegistry`, to avoid errors on old versions of NodeJS
// which don't support it. e.g. Prettier supports NodeJS v14.
// Raw transfer is disabled on NodeJS before v22, so it doesn't matter if this is `null` on old NodeJS
// - it'll never be accessed in that case.
const bufferRecycleRegistry =
  typeof FinalizationRegistry === "undefined"
    ? null
    : new FinalizationRegistry(returnBufferToCache);
/**
 * Get an object with getters which lazy deserialize AST and other data from buffer.
 *
 * Object also includes `dispose` and `visit` functions.
 *
 * @param {Uint8Array} buffer - Buffer containing AST in raw form
 * @param {string} sourceText - Source for the file
 * @param {number} sourceByteLen - Length of source text in UTF-8 bytes
 * @param {Object} _options - Parsing options
 * @returns {Object} - Object with property getters for `program`, `module`, `comments`, and `errors`,
 *   and `dispose` and `visit` methods
 */
function construct(buffer, sourceText, sourceByteLen, _options) {
  // Create AST object.
  // UTF-8 byte length equal to UTF-16 length implies every char is ASCII (1 byte each).
  const sourceIsAscii = sourceText.length === sourceByteLen;
  const ast = { buffer, sourceText, sourceByteLen, sourceIsAscii, nodes: new Map(), token: TOKEN };
  // Register `ast` with the recycle registry so buffer is returned to cache
  // when `ast` is garbage collected. `ast` is also the unregister token, used by `dispose`.
  bufferRecycleRegistry.register(ast, buffer, ast);
  // Get root data class instance. Rust writes the root data position into the buffer.
  const rawDataPos = buffer.uint32[DATA_POINTER_POS_32];
  const data = new RawTransferData(rawDataPos, ast);
  return {
    get program() {
      return data.program;
    },
    get module() {
      return data.module;
    },
    get comments() {
      return data.comments;
    },
    get errors() {
      return data.errors;
    },
    dispose: dispose.bind(null, ast),
    visit(visitor) {
      walkProgram(rawDataPos + PROGRAM_OFFSET, ast, getVisitorsArr(visitor));
    },
  };
}
/**
 * Dispose of this AST.
 *
 * After calling this method, trying to read any nodes from this AST may cause an error.
 *
 * Buffer is returned to the cache to be reused.
 *
 * The buffer would be returned to the cache anyway, once all nodes of the AST are garbage collected,
 * but calling `dispose` is preferable, as it will happen immediately.
 * Otherwise, garbage collector may take time to collect the `ast` object, and new buffers may be created
 * in the meantime, when we could have reused this one.
 *
 * @param {Object} ast - AST object containing buffer etc
 * @returns {undefined}
 */
function dispose(ast) {
  // Return buffer to cache, to be reused
  returnBufferToCache(ast.buffer);
  // Remove connection between `ast` and the buffer
  ast.buffer = null;
  // Clear other contents of `ast`, so they can be garbage collected
  ast.sourceText = null;
  ast.nodes = null;
  // Remove `ast` from recycling register, so the buffer is not returned to the cache a second time
  // when `ast` is garbage collected. (`ast` was registered with itself as unregister token.)
  bufferRecycleRegistry.unregister(ast);
}

View file

@ -0,0 +1,365 @@
import { constructorError, TOKEN } from "./lazy-common.js";
// Internal symbol to get `NodeArray` from a proxy wrapping a `NodeArray`.
//
// Methods of `NodeArray` are called with `this` being the proxy, rather than the `NodeArray` itself.
// They can "unwrap" the proxy by getting `this[ARRAY]`, and the `get` proxy trap will return
// the actual `NodeArray`.
//
// This symbol is not exported, and it is not actually defined on `NodeArray`s, so user cannot obtain it
// via `Object.getOwnPropertySymbols` or `Reflect.ownKeys`. Therefore user code cannot unwrap the proxy.
const ARRAY = Symbol();
// Functions to get internal properties of a `NodeArray`. Initialized in class static block below.
// (Static block has access to the class's private `#internal` field.)
let getInternalFromProxy, getLength, getElement;
/**
 * An array of AST nodes where elements are deserialized lazily upon access.
 *
 * Extends `Array` to make `Array.isArray` return `true` for a `NodeArray`.
 *
 * TODO: Other methods could maybe be more optimal, avoiding going via proxy multiple times
 * e.g. `some`, `indexOf`.
 */
export class NodeArray extends Array {
  // Internal state object: `{ pos, length, ast, stride, construct }`.
  // Accessed from outside the class only via the accessor functions set up in the static block.
  #internal;
  /**
   * Create a `NodeArray`.
   *
   * Constructor does not actually return a `NodeArray`, but one wrapped in a `Proxy`.
   * The proxy intercepts accesses to elements and lazily deserializes them,
   * and blocks mutation of elements or `length` property.
   *
   * @class
   * @param {number} pos - Buffer position of first element
   * @param {number} length - Number of elements
   * @param {number} stride - Element size in bytes
   * @param {Function} construct - Function to deserialize element
   * @param {Object} ast - AST object
   * @returns {Proxy<NodeArray>} - `NodeArray` wrapped in a `Proxy`
   * @throws {Error} - If called from user code (i.e. without the internal `TOKEN`)
   */
  constructor(pos, length, stride, construct, ast) {
    if (ast?.token !== TOKEN) constructorError();
    super();
    this.#internal = { pos, length, ast, stride, construct };
    return new Proxy(this, PROXY_HANDLERS);
  }
  // Allow `arr.filter`, `arr.map` etc.
  static [Symbol.species] = Array;
  // Override `values` method with a more efficient one that avoids going via proxy for every iteration.
  // TODO: Benchmark to check that this is actually faster.
  values() {
    return new NodeArrayValuesIterator(this);
  }
  // Override `keys` method with a more efficient one that avoids going via proxy for every iteration.
  // TODO: Benchmark to check that this is actually faster.
  keys() {
    return new NodeArrayKeysIterator(this);
  }
  // Override `entries` method with a more efficient one that avoids going via proxy for every iteration.
  // TODO: Benchmark to check that this is actually faster.
  entries() {
    return new NodeArrayEntriesIterator(this);
  }
  // This method is overwritten with reference to `values` method below.
  // Defining dummy method here to prevent the later assignment altering the shape of class prototype.
  [Symbol.iterator]() {}
  /**
   * Override `slice` method to return a `NodeArray`.
   *
   * @this {NodeArray}
   * @param {*} start - Start of slice
   * @param {*} end - End of slice
   * @returns {NodeArray} - `NodeArray` containing slice of this one
   */
  slice(start, end) {
    const internal = this[ARRAY].#internal,
      { length } = internal;
    // Normalize `start`: negative counts back from the end, clamped to 0
    start = toInt(start);
    if (start < 0) {
      start = length + start;
      if (start < 0) start = 0;
    }
    // Normalize `end`: defaults to array end; negative counts back from the end; clamped to length
    if (end === void 0) {
      end = length;
    } else {
      end = toInt(end);
      if (end < 0) {
        end += length;
        if (end < 0) end = 0;
      } else if (end > length) {
        end = length;
      }
    }
    // Empty or out-of-bounds slice becomes a zero-length `NodeArray` at position 0
    let sliceLength = end - start;
    if (sliceLength <= 0 || start >= length) {
      start = 0;
      sliceLength = 0;
    }
    const { stride } = internal;
    return new NodeArray(
      internal.pos + start * stride,
      sliceLength,
      stride,
      internal.construct,
      internal.ast,
    );
  }
  // Make `console.log` deserialize all elements.
  [Symbol.for("nodejs.util.inspect.custom")]() {
    const values = [...this.values()];
    Object.setPrototypeOf(values, DebugNodeArray.prototype);
    return values;
  }
  static {
    /**
     * Get internal properties of `NodeArray`, given a proxy wrapping a `NodeArray`.
     * @param {Proxy} proxy - Proxy wrapping `NodeArray` object
     * @returns {Object} - Internal properties object
     */
    getInternalFromProxy = (proxy) => proxy[ARRAY].#internal;
    /**
     * Get length of `NodeArray`.
     * @param {NodeArray} arr - `NodeArray` object
     * @returns {number} - Array length
     */
    getLength = (arr) => arr.#internal.length;
    /**
     * Get element of `NodeArray` at index `index`.
     *
     * @param {NodeArray} arr - `NodeArray` object
     * @param {number} index - Index of element to get
     * @returns {*} - Element at index `index`, or `undefined` if out of bounds
     */
    getElement = (arr, index) => {
      const internal = arr.#internal;
      if (index >= internal.length) return void 0;
      // `(0, fn)(...)` call form avoids passing `internal` as `this`
      return (0, internal.construct)(internal.pos + index * internal.stride, internal.ast);
    };
  }
}
// Make `for...of` iteration use the optimized `values` iterator
// oxlint-disable-next-line typescript/unbound-method
NodeArray.prototype[Symbol.iterator] = NodeArray.prototype.values;
/**
 * Iterator over values of a `NodeArray`.
 * Returned by `values` method, and also used as iterator for `for (const node of nodeArray) {}`.
 */
class NodeArrayValuesIterator {
  // `{ pos, endPos, ast, construct, stride }` — current buffer position and iteration bounds
  #internal;
  constructor(proxy) {
    const internal = getInternalFromProxy(proxy),
      { pos, stride } = internal;
    this.#internal = {
      pos,
      endPos: pos + internal.length * stride,
      ast: internal.ast,
      construct: internal.construct,
      stride,
    };
  }
  // Deserialize element at current buffer position, then advance position by `stride`
  next() {
    const internal = this.#internal,
      { pos } = internal;
    if (pos === internal.endPos) return { done: true, value: null };
    internal.pos = pos + internal.stride;
    return { done: false, value: (0, internal.construct)(pos, internal.ast) };
  }
  [Symbol.iterator]() {
    return this;
  }
}
/**
 * Iterator over keys of a `NodeArray`. Returned by `keys` method.
 */
class NodeArrayKeysIterator {
  // `{ index, length }` — current index and iteration bound
  #internal;
  constructor(proxy) {
    const internal = getInternalFromProxy(proxy);
    this.#internal = { index: 0, length: internal.length };
  }
  // Yield indexes 0 to length-1; no deserialization needed for keys
  next() {
    const internal = this.#internal,
      { index } = internal;
    if (index === internal.length) return { done: true, value: null };
    internal.index = index + 1;
    return { done: false, value: index };
  }
  [Symbol.iterator]() {
    return this;
  }
}
/**
 * Iterator over entries (`[index, value]` pairs) of a `NodeArray`. Returned by `entries` method.
 */
class NodeArrayEntriesIterator {
  // `{ index, length, pos, ast, construct, stride }` — current index and deserialization state
  #internal;
  constructor(proxy) {
    const internal = getInternalFromProxy(proxy);
    this.#internal = {
      index: 0,
      length: internal.length,
      pos: internal.pos,
      ast: internal.ast,
      construct: internal.construct,
      stride: internal.stride,
    };
  }
  // Yield `[index, element]`, deserializing the element lazily
  next() {
    const internal = this.#internal,
      { index } = internal;
    if (index === internal.length) return { done: true, value: null };
    internal.index = index + 1;
    return {
      done: false,
      value: [index, (0, internal.construct)(internal.pos + index * internal.stride, internal.ast)],
    };
  }
  [Symbol.iterator]() {
    return this;
  }
}
// Class used for `[Symbol.for('nodejs.util.inspect.custom')]` method (for `console.log`).
// The class expression is named `NodeArray` so inspected output displays under that name.
const DebugNodeArray = class NodeArray extends Array {};
// Proxy handlers.
//
// Every `NodeArray` returned to user is wrapped in a `Proxy`, using these handlers.
// They lazily deserialize array elements upon access, and block mutation of array elements / `length`.
//
// Note: Trap return values are constrained by the Proxy invariants — the underlying `NodeArray`
// target is an empty `Array`, so some descriptors cannot be reported as non-writable/non-configurable.
const PROXY_HANDLERS = {
  // Return `true` for indexes which are in bounds.
  // e.g. `'0' in arr`.
  has(arr, key) {
    const index = toIndex(key);
    if (index !== null) return index < getLength(arr);
    return Reflect.has(arr, key);
  },
  // Get elements and length.
  get(arr, key) {
    // Methods of `NodeArray` are called with `this` being the proxy, rather than the `NodeArray` itself.
    // They can "unwrap" the proxy by getting `this[ARRAY]`.
    if (key === ARRAY) return arr;
    if (key === "length") return getLength(arr);
    const index = toIndex(key);
    if (index !== null) return getElement(arr, index);
    return Reflect.get(arr, key);
  },
  // Get descriptors for elements and length.
  getOwnPropertyDescriptor(arr, key) {
    if (key === "length") {
      // Cannot return `writable: false` unfortunately
      return { value: getLength(arr), writable: true, enumerable: false, configurable: false };
    }
    const index = toIndex(key);
    if (index !== null) {
      const value = getElement(arr, index);
      if (value === void 0) return void 0;
      // Cannot return `configurable: false` unfortunately
      return { value, writable: false, enumerable: true, configurable: true };
    }
    return Reflect.getOwnPropertyDescriptor(arr, key);
  },
  // Prevent setting `length` or entries.
  // Catches:
  // * `Object.defineProperty(arr, 0, {value: null})`.
  // * `arr[1] = null`.
  // * `arr.length = 0`.
  // * `Object.defineProperty(arr, 'length', {value: 0})`.
  // * Other operations which mutate entries e.g. `arr.push(123)`.
  defineProperty(arr, key, descriptor) {
    if (key === "length" || toIndex(key) !== null) return false;
    return Reflect.defineProperty(arr, key, descriptor);
  },
  // Prevent deleting entries.
  deleteProperty(arr, key) {
    // Note: `Reflect.deleteProperty(arr, 'length')` already returns `false`
    if (toIndex(key) !== null) return false;
    return Reflect.deleteProperty(arr, key);
  },
  // Get keys, including element indexes.
  ownKeys(arr) {
    // Index keys are not own properties of the (empty) target array, so add them explicitly.
    // `Reflect.ownKeys` contributes the target's real keys (e.g. `length`).
    const keys = [],
      length = getLength(arr);
    for (let i = 0; i < length; i++) {
      keys.push(i + "");
    }
    keys.push(...Reflect.ownKeys(arr));
    return keys;
  },
};
/**
 * Convert key to array index, if it is a valid array index.
 *
 * Only strings comprising a plain integer are valid indexes.
 * e.g. `"-1"`, `"01"`, `"0xFF"`, `"1e1"`, `"1 "` are not valid indexes.
 * Integers >= 4294967295 (2^32 - 1) are not valid indexes.
 *
 * @param {string|Symbol} key - Key used for property lookup.
 * @returns {number|null} - `key` converted to integer, if it's a valid array index, otherwise `null`.
 */
function toIndex(key) {
  if (typeof key !== "string") return null;
  // "0" is valid but rejected by the regex (no leading zeros allowed), so special-case it
  if (key === "0") return 0;
  if (!INDEX_REGEX.test(key)) return null;
  const index = Number(key);
  return index < 4294967295 ? index : null;
}
// Non-zero integer with no leading zeros, sign, decimal point, or exponent
const INDEX_REGEX = /^[1-9]\d*$/;
/**
 * Convert value to integer, following standard JS integer conversion.
 * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number#integer_conversion
 *
 * `NaN` and `-0` both become `0`. Infinities pass through unchanged.
 *
 * @param {*} value - Value to convert to integer.
 * @returns {number} - Integer
 */
function toInt(value) {
  const int = Math.trunc(+value);
  // `int === 0` also matches -0, normalizing it to +0
  return Number.isNaN(int) || int === 0 ? 0 : int;
}

View file

@ -0,0 +1,52 @@
import { rawTransferSupported as rawTransferSupportedBinding } from "../bindings.js";
// Memoized result of `rawTransferSupported()`; `null` = not yet computed
let rawTransferIsSupported = null;
/**
 * Returns `true` if `experimentalRawTransfer` option is supported.
 *
 * Raw transfer is only supported on 64-bit little-endian systems,
 * and NodeJS >= v22.0.0 or Deno >= v2.0.0.
 *
 * Versions of NodeJS prior to v22.0.0 do not support creating an `ArrayBuffer` larger than 4 GiB.
 * Bun (as at v1.2.4) also does not support creating an `ArrayBuffer` larger than 4 GiB.
 * Support on Deno v1 is unknown and it's EOL, so treating Deno before v2.0.0 as unsupported.
 *
 * No easy way of determining pointer width (64 bit or 32 bit) in JS,
 * so call a function on Rust side to find out.
 *
 * @returns {boolean} - `true` if raw transfer is supported on this platform
 */
export function rawTransferSupported() {
  // Compute once and memoize — the answer cannot change during the process's lifetime
  rawTransferIsSupported ??= rawTransferRuntimeSupported() && rawTransferSupportedBinding();
  return rawTransferIsSupported;
}
// Checks copied from:
// https://github.com/unjs/std-env/blob/ab15595debec9e9115a9c1d31bc7597a8e71dbfd/src/runtimes.ts
// MIT license: https://github.com/unjs/std-env/blob/ab15595debec9e9115a9c1d31bc7597a8e71dbfd/LICENCE
/**
 * Determine whether the current JS runtime can support raw transfer.
 * Bun: never. Deno: v2+. NodeJS: v22+. Anything else: no.
 *
 * @returns {boolean} - `true` if the runtime qualifies
 */
function rawTransferRuntimeSupported() {
  let globalObj;
  try {
    globalObj = globalThis;
  } catch {
    return false;
  }

  // Bun cannot create `ArrayBuffer`s larger than 4 GiB (as at v1.2.4)
  if (globalObj.Bun || globalObj.process?.versions?.bun) return false;

  // Deno is supported from v2 onwards
  if (globalObj.Deno) {
    const denoMatch = Deno.version?.deno?.match(/^(\d+)\./);
    return !!denoMatch && Number(denoMatch[1]) >= 2;
  }

  // NodeJS is supported from v22 onwards
  if (globalObj.process?.release?.name !== "node") return false;
  const nodeMatch = process.version?.match(/^v(\d+)\./);
  return !!nodeMatch && Number(nodeMatch[1]) >= 22;
}

View file

@ -0,0 +1,127 @@
import {
LEAF_NODE_TYPES_COUNT,
NODE_TYPE_IDS_MAP,
NODE_TYPES_COUNT,
} from "../../generated/lazy/type_ids.js";
// Getter for private `#visitorsArr` property of `Visitor` class. Initialized in class body below.
// (The `static` block inside the class can read the private field; assigning the accessor
// to this module-level variable lets it be exported without exposing the field itself.)
let getVisitorsArrTemp;
/**
 * Visitor class, used to visit an AST.
 */
export class Visitor {
  // Array of visitors keyed by node type ID - see `createVisitorsArr` for its layout
  #visitorsArr;
  /**
   * Create `Visitor`.
   *
   * Provide an object where keys are names of AST nodes you want to visit,
   * and values are visitor functions which receive AST node objects of that type.
   *
   * Keys can also be postfixed with `:exit` to visit when exiting the node, rather than entering.
   *
   * ```js
   * const visitor = new Visitor({
   *   BinaryExpression(binExpr) {
   *     // Do stuff when entering a `BinaryExpression`
   *   },
   *   'BinaryExpression:exit'(binExpr) {
   *     // Do stuff when exiting a `BinaryExpression`
   *   },
   * });
   * ```
   *
   * @class
   * @param {Object} visitor - Object defining visit functions for AST nodes
   * @returns {Visitor}
   * @throws {Error} If `visitor` is not an object, a property is not a function,
   *   or a key does not name a known AST node type (validated in `createVisitorsArr`)
   */
  constructor(visitor) {
    this.#visitorsArr = createVisitorsArr(visitor);
  }
  static {
    // Capture an accessor for the private field while we're inside the class body
    getVisitorsArrTemp = (visitor) => visitor.#visitorsArr;
  }
}
// Export the accessor under its permanent name
export const getVisitorsArr = getVisitorsArrTemp;
/**
 * Create array of visitors, keyed by node type ID.
 *
 * Each element of array is one of:
 *
 * * No visitor for this type = `null`.
 * * Visitor for leaf node = visit function.
 * * Visitor for non-leaf node = object of form `{ enter, exit }`,
 *   where each property is either a visitor function or `null`.
 *
 * @param {Object} visitor - Visitors object from user
 * @returns {Array<Object|Function|null>} - Array of visitors
 * @throws {Error} If `visitor` is not an object, a property is not a function,
 *   or a key does not name a known AST node type
 */
function createVisitorsArr(visitor) {
  if (visitor === null || typeof visitor !== "object") {
    throw new Error("`visitor` must be an object");
  }

  // Start with a `null` slot for every node type.
  // Built with `push` so V8 keeps the array "packed", not "holey".
  const visitorsArr = [];
  for (let remaining = NODE_TYPES_COUNT; remaining > 0; remaining--) {
    visitorsArr.push(null);
  }

  // Fill in slots from the provided object
  for (const key of Object.keys(visitor)) {
    const visitFn = visitor[key];
    if (typeof visitFn !== "function") {
      throw new Error(`'${key}' property of \`visitor\` object is not a function`);
    }

    // Strip `:exit` suffix to get the AST type name
    const isExit = key.endsWith(":exit");
    const name = isExit ? key.slice(0, -5) : key;

    const typeId = NODE_TYPE_IDS_MAP.get(name);
    if (typeId === undefined) {
      throw new Error(`Unknown node type '${name}' in \`visitor\` object`);
    }

    if (typeId < LEAF_NODE_TYPES_COUNT) {
      // Leaf node: store a single function. If both enter and exit visitors are
      // provided, combine them with enter running first.
      const existingVisitFn = visitorsArr[typeId];
      if (existingVisitFn === null) {
        visitorsArr[typeId] = visitFn;
      } else {
        visitorsArr[typeId] = isExit
          ? combineVisitFunctions(existingVisitFn, visitFn)
          : combineVisitFunctions(visitFn, existingVisitFn);
      }
    } else {
      // Non-leaf node: store `{ enter, exit }` pair
      let enterExit = visitorsArr[typeId];
      if (enterExit === null) {
        enterExit = visitorsArr[typeId] = { enter: null, exit: null };
      }
      if (isExit) {
        enterExit.exit = visitFn;
      } else {
        enterExit.enter = visitFn;
      }
    }
  }

  return visitorsArr;
}
/**
 * Combine 2 visitor functions into 1, calling them in order.
 *
 * @param {function} visit1 - Visitor function called first
 * @param {function} visit2 - Visitor function called second
 * @returns {function} - Function which calls `visit1` then `visit2` with its argument
 */
function combineVisitFunctions(visit1, visit2) {
  return (node) => {
    visit1(node);
    visit2(node);
  };
}

View file

@ -0,0 +1,41 @@
import { createRequire } from "node:module";
// Walk/compile functions, lazy-loaded on first `Visitor` construction
let walkProgram = null;
let addVisitorToCompiled, createCompiledVisitor, finalizeCompiledVisitor;

/**
 * Visitor class for traversing AST.
 */
export class Visitor {
  // Compiled visitor array, or `null` if the visitor object visited no AST types
  #compiledVisitor = null;

  /**
   * Create `Visitor`, compiling the provided visitor object.
   * @param {Object} visitor - Object defining visit functions for AST nodes
   */
  constructor(visitor) {
    if (walkProgram === null) loadWalkAndCompileFns();

    const compiled = createCompiledVisitor();
    addVisitorToCompiled(visitor);
    // Only keep the compiled visitor if it actually visits something
    if (finalizeCompiledVisitor()) this.#compiledVisitor = compiled;
  }

  /**
   * Visit AST.
   * @param program - The AST to visit.
   * @returns {undefined}
   */
  visit(program) {
    const compiled = this.#compiledVisitor;
    if (compiled !== null) walkProgram(program, compiled);
  }
}

// Load walk + compile functions. Done lazily so the (large) generated walker
// is only loaded if a `Visitor` is actually constructed.
function loadWalkAndCompileFns() {
  const require = createRequire(import.meta.url);
  ({ walkProgram } = require("../../generated/visit/walk.js"));
  ({
    addVisitorToCompiled,
    createCompiledVisitor,
    finalizeCompiledVisitor,
  } = require("./visitor.js"));
}

View file

@ -0,0 +1,406 @@
// Functions to compile 1 or more visitor objects into a single compiled visitor.
//
// # Visitor objects
//
// Visitor objects which are generated by rules' `create` functions have keys being either:
// * Name of an AST type. or
// * Name of an AST type postfixed with `:exit`.
//
// Each property value must be a function that handles that AST node.
//
// e.g.:
//
// ```
// {
// BinaryExpression(node) {
// // Do stuff on enter
// },
// 'BinaryExpression:exit'(node) {
// // Do stuff on exit
// },
// }
// ```
//
// # Compiled visitor
//
// Compiled visitor is an array with `NODE_TYPES_COUNT` length, keyed by the ID of the node type.
// `NODE_TYPE_IDS_MAP` maps from type name to ID.
//
// Each element of compiled array is one of:
// * No visitor for this type = `null`.
// * Visitor for leaf node = visit function.
// * Visitor for non-leaf node = object of form `{ enter, exit }`,
// where each property is either a visitor function or `null`.
//
// e.g.:
//
// ```
// [
// // Leaf nodes
// function(node) { /* do stuff */ },
// // ...
//
// // Non-leaf nodes
// {
// enter: function(node) { /* do stuff */ },
// exit: null,
// },
// // ...
// ]
// ```
//
// # Object reuse
//
// No more than 1 compiled visitor exists at any time, so we reuse a single array `compiledVisitor`,
// rather than creating a new array for each file being linted.
//
// To compile visitors, call:
// * `createCompiledVisitor` (or `initCompiledVisitor`, which resets the existing array) once.
// * `addVisitorToCompiled` with each visitor object.
// * `finalizeCompiledVisitor` once.
//
// After this sequence of calls, `compiledVisitor` is ready to be used to walk the AST.
//
// We also recycle:
//
// * `{ enter, exit }` objects which are stored in compiled visitor.
// * Temporary arrays used to store multiple visit functions, which are merged into a single function
// in `finalizeCompiledVisitor`.
//
// The aim is to reduce pressure on the garbage collector. All these recycled objects are long-lived
// and will graduate to "old space", which leaves as much capacity as possible in "new space"
// for objects created by user code in visitors. If ephemeral user-created objects all fit in new space,
// it will avoid full GC runs, which should greatly improve performance.
import {
LEAF_NODE_TYPES_COUNT,
NODE_TYPE_IDS_MAP,
NODE_TYPES_COUNT,
} from "../../generated/visit/type_ids.js";
const { isArray } = Array;

// Compiled visitor used for visiting each file; the same array is reused per file.
//
// Built with `.push()` so V8 treats the array as "packed" (linear array),
// not "holey" (hash map). Lookups in this array are a very hot path during
// AST visitation, and holey arrays are much slower.
// https://v8.dev/blog/elements-kinds
let compiledVisitor;

/**
 * Allocate a fresh compiled visitor array with a `null` slot per node type,
 * and make it the current `compiledVisitor`.
 *
 * @returns {Array} - The new compiled visitor array
 */
export function createCompiledVisitor() {
  const fresh = [];
  for (let remaining = NODE_TYPES_COUNT; remaining > 0; remaining--) {
    fresh.push(null);
  }
  compiledVisitor = fresh;
  return fresh;
}
// Arrays containing type IDs of types which have multiple visit functions defined for them.
//
// Pre-filled with `0` up to the maximum size they could ever need, then truncated, so:
// 1. These arrays never need to grow.
// 2. V8 treats these arrays as "PACKED_SMI_ELEMENTS".
const mergedLeafVisitorTypeIds = [];
const mergedEnterVisitorTypeIds = [];
const mergedExitVisitorTypeIds = [];

prefillSmiArray(mergedLeafVisitorTypeIds, LEAF_NODE_TYPES_COUNT);
prefillSmiArray(mergedEnterVisitorTypeIds, NODE_TYPES_COUNT - LEAF_NODE_TYPES_COUNT);
prefillSmiArray(mergedExitVisitorTypeIds, NODE_TYPES_COUNT - LEAF_NODE_TYPES_COUNT);

// Push `count` zeros into `arr` then truncate, leaving capacity allocated
// and the elements kind set to small integers.
function prefillSmiArray(arr, count) {
  for (let i = 0; i < count; i++) arr.push(0);
  arr.length = 0;
}

// `true` if `addVisitor` has been called with a visitor which visits at least one AST type
let hasActiveVisitors = false;
// Enter+exit object cache.
//
// `compiledVisitor` may contain many `{ enter, exit }` objects.
// This cache reuses those objects across all visitor compilations.
//
// `enterExitObjectCacheNextIndex` is the index of the first unused object in the cache;
// it may equal `enterExitObjectCache.length` when all cached objects are in use.
const enterExitObjectCache = [];
let enterExitObjectCacheNextIndex = 0;

// Take an `{ enter, exit }` object from the cache, or allocate (and cache) a new one.
function getEnterExitObject() {
  if (enterExitObjectCacheNextIndex === enterExitObjectCache.length) {
    const fresh = { enter: null, exit: null };
    enterExitObjectCache.push(fresh);
    enterExitObjectCacheNextIndex++;
    return fresh;
  }
  return enterExitObjectCache[enterExitObjectCacheNextIndex++];
}
// Visit function arrays cache.
//
// During compilation, many arrays may be used temporarily to store multiple visit functions
// for the same AST type. The functions in each array are merged into a single function in
// `finalizeCompiledVisitor`, after which these arrays aren't used again.
//
// This cache reuses those arrays across visitor compilations.
//
// `visitFnArrayCacheNextIndex` is the index of the first unused array in the cache;
// it may equal `visitFnArrayCache.length` when all cached arrays are in use.
const visitFnArrayCache = [];
let visitFnArrayCacheNextIndex = 0;

// Get an array containing `visit1` and `visit2`, recycling a cached (empty) array if possible.
function createVisitFnArray(visit1, visit2) {
  if (visitFnArrayCacheNextIndex === visitFnArrayCache.length) {
    const fresh = [visit1, visit2];
    visitFnArrayCache.push(fresh);
    visitFnArrayCacheNextIndex++;
    return fresh;
  }
  const recycled = visitFnArrayCache[visitFnArrayCacheNextIndex++];
  recycled.push(visit1, visit2);
  return recycled;
}
/**
 * Initialize compiled visitor, ready for calls to `addVisitor`.
 *
 * Clears all entries left over from the previous compilation,
 * and recycles the `{ enter, exit }` objects used last time.
 */
export function initCompiledVisitor() {
  // Reset `compiledVisitor` array after previous compilation.
  // `fill` keeps the array packed; its length is always `NODE_TYPES_COUNT`.
  compiledVisitor.fill(null);

  // Reset enter+exit objects which were used in previous compilation
  for (let i = 0; i < enterExitObjectCacheNextIndex; i++) {
    const enterExit = enterExitObjectCache[i];
    enterExit.enter = enterExit.exit = null;
  }
  enterExitObjectCacheNextIndex = 0;
}
/**
 * Add a visitor to compiled visitor.
 *
 * Merges the visitor's visit functions into the shared `compiledVisitor` array.
 * Leaf node types store a single visit function (or a temporary array of functions,
 * merged into one in `finalizeCompiledVisitor`); non-leaf types store an
 * `{ enter, exit }` object whose properties follow the same single-fn/array scheme.
 *
 * @param visitor - Visitor object
 * @throws {TypeError} If `visitor` is not an object, or one of its properties is not a function
 * @throws {Error} If a key does not name a known AST node type
 */
export function addVisitorToCompiled(visitor) {
  if (visitor === null || typeof visitor !== "object")
    throw new TypeError("Visitor must be an object");
  // Exit if is empty visitor
  const keys = Object.keys(visitor),
    keysLen = keys.length;
  if (keysLen === 0) return;
  hasActiveVisitors = true;
  // Populate visitors array from provided object
  for (let i = 0; i < keysLen; i++) {
    let name = keys[i];
    const visitFn = visitor[name];
    if (typeof visitFn !== "function") {
      throw new TypeError(`'${name}' property of visitor object is not a function`);
    }
    // Strip `:exit` suffix (5 chars) to get the AST type name
    const isExit = name.endsWith(":exit");
    if (isExit) name = name.slice(0, -5);
    const typeId = NODE_TYPE_IDS_MAP.get(name);
    if (typeId === void 0) throw new Error(`Unknown node type '${name}' in visitor object`);
    const existing = compiledVisitor[typeId];
    if (typeId < LEAF_NODE_TYPES_COUNT) {
      // Leaf node - store just 1 function, not enter+exit pair
      if (existing === null) {
        compiledVisitor[typeId] = visitFn;
      } else if (isArray(existing)) {
        // 2+ visit functions already registered for this type
        if (isExit) {
          existing.push(visitFn);
        } else {
          // Insert before last in array in case last was enter visit function from the current rule,
          // to ensure enter is called before exit.
          // It could also be either an enter or exit visitor function for another rule, but the order
          // rules are called in doesn't matter. We only need to make sure that a rule's exit visitor
          // isn't called before enter visitor *for that same rule*.
          existing.splice(existing.length - 1, 0, visitFn);
        }
      } else {
        // 2nd visit function for this type - convert to array, recording the type ID
        // so the array is merged into 1 function in `finalizeCompiledVisitor`.
        // Same as above, enter visitor is put to front of list to make sure enter is called before exit
        compiledVisitor[typeId] = isExit
          ? createVisitFnArray(existing, visitFn)
          : createVisitFnArray(visitFn, existing);
        mergedLeafVisitorTypeIds.push(typeId);
      }
    } else {
      // Not leaf node - store enter+exit pair
      if (existing === null) {
        const enterExit = (compiledVisitor[typeId] = getEnterExitObject());
        if (isExit) {
          enterExit.exit = visitFn;
        } else {
          enterExit.enter = visitFn;
        }
      } else if (isExit) {
        const { exit } = existing;
        if (exit === null) {
          existing.exit = visitFn;
        } else if (isArray(exit)) {
          exit.push(visitFn);
        } else {
          // 2nd exit function - convert to array, merged in `finalizeCompiledVisitor`
          existing.exit = createVisitFnArray(exit, visitFn);
          mergedExitVisitorTypeIds.push(typeId);
        }
      } else {
        const { enter } = existing;
        if (enter === null) {
          existing.enter = visitFn;
        } else if (isArray(enter)) {
          enter.push(visitFn);
        } else {
          // 2nd enter function - convert to array, merged in `finalizeCompiledVisitor`
          existing.enter = createVisitFnArray(enter, visitFn);
          mergedEnterVisitorTypeIds.push(typeId);
        }
      }
    }
  }
}
/**
 * Finalize compiled visitor.
 *
 * After calling this function, `compiledVisitor` is ready to be used to walk the AST.
 *
 * @returns {boolean} - `true` if compiled visitor visits at least 1 AST type
 */
export function finalizeCompiledVisitor() {
  if (!hasActiveVisitors) return false;

  // Merge visit functions for node types which have multiple visitors from different rules,
  // or enter+exit functions for leaf nodes.
  // Each type ID appears at most once per array, so iteration order doesn't matter.
  for (const typeId of mergedLeafVisitorTypeIds) {
    compiledVisitor[typeId] = mergeVisitFns(compiledVisitor[typeId]);
  }
  for (const typeId of mergedEnterVisitorTypeIds) {
    const enterExit = compiledVisitor[typeId];
    enterExit.enter = mergeVisitFns(enterExit.enter);
  }
  for (const typeId of mergedExitVisitorTypeIds) {
    const enterExit = compiledVisitor[typeId];
    enterExit.exit = mergeVisitFns(enterExit.exit);
  }

  // Reset state, ready for next time
  mergedLeafVisitorTypeIds.length = 0;
  mergedEnterVisitorTypeIds.length = 0;
  mergedExitVisitorTypeIds.length = 0;
  // Note: Visit function arrays have been emptied in `mergeVisitFns`, so all arrays in
  // `visitFnArrayCache` are now empty and ready for reuse. We just need to reset the index.
  visitFnArrayCacheNextIndex = 0;
  hasActiveVisitors = false;
  return true;
}
/**
 * Merge array of visit functions into a single function, which calls each of input functions in turn.
 *
 * The array passed is cleared (length set to 0), so the array can be reused.
 *
 * The merged function is statically defined and does not contain a loop, to hopefully allow
 * JS engine to heavily optimize it.
 *
 * `mergers` contains pre-defined functions to merge up to 5 visit functions.
 * Merger functions for merging more than 5 visit functions are created dynamically on demand.
 *
 * @param visitFns - Array of visit functions
 * @returns Function which calls all of `visitFns` in turn.
 */
function mergeVisitFns(visitFns) {
  const count = visitFns.length;

  // Extend `mergers` with `null` placeholders so index `count` exists
  // (pushing keeps the array packed)
  while (mergers.length <= count) {
    mergers.push(null);
  }

  // Get or lazily create the merger for `count` functions
  let merger = mergers[count];
  if (merger === null) {
    merger = mergers[count] = createMerger(count);
  }

  const mergedFn = merger(...visitFns);

  // Empty `visitFns` array, so it can be reused
  visitFns.length = 0;
  return mergedFn;
}
/**
 * Create a merger function that merges `fnCount` functions.
 *
 * The generated merger takes `fnCount` visit functions as arguments and returns
 * a single function which calls each of them in turn with its `node` argument.
 *
 * @param fnCount - Number of functions to be merged
 * @returns Function to merge `fnCount` functions
 */
function createMerger(fnCount) {
  const paramNames = [];
  const calls = [];
  for (let i = 1; i <= fnCount; i++) {
    paramNames.push(`visit${i}`);
    calls.push(`visit${i}(node);`);
  }
  // oxlint-disable-next-line typescript/no-implied-eval
  return new Function(...paramNames, `return node=>{${calls.join("")}}`);
}
// Pre-defined mergers for merging up to 5 functions.
// Defined statically (rather than via `createMerger`) so the common cases
// don't require `new Function`. Larger counts are generated on demand.
const mergers = [
  null, // No merger for 0 functions
  null, // No merger for 1 function
  (fn1, fn2) => (node) => {
    fn1(node);
    fn2(node);
  },
  (fn1, fn2, fn3) => (node) => {
    fn1(node);
    fn2(node);
    fn3(node);
  },
  (fn1, fn2, fn3, fn4) => (node) => {
    fn1(node);
    fn2(node);
    fn3(node);
    fn4(node);
  },
  (fn1, fn2, fn3, fn4, fn5) => (node) => {
    fn1(node);
    fn2(node);
    fn3(node);
    fn4(node);
    fn5(node);
  },
];

View file

@ -0,0 +1,11 @@
export * from "@oxc-parser/binding-wasm32-wasi";
import * as bindings from "@oxc-parser/binding-wasm32-wasi";
import { wrap } from "./wrap.js";
/**
 * Parse asynchronously via the WASI binding, wrapping the result with lazy getters.
 * @param {...*} args - Arguments forwarded to the binding's `parse`
 * @returns {Promise<Object>} - Wrapped parse result
 */
export async function parse(...args) {
  const result = await bindings.parse(...args);
  return wrap(result);
}
/**
 * Parse synchronously via the WASI binding, wrapping the result with lazy getters.
 * @param {string} filename - Name of the file being parsed
 * @param {string} sourceText - Source text to parse
 * @param {Object} [options] - Parser options
 * @returns {Object} - Wrapped parse result
 */
export function parseSync(filename, sourceText, options) {
  const result = bindings.parseSync(filename, sourceText, options);
  return wrap(result);
}

View file

@ -0,0 +1,21 @@
// Fallback loader for environments (e.g. WebContainer) where the platform-specific
// binding was not installed as an optional dependency. Downloads the WASM binding
// package into a version-keyed /tmp directory on first use, then loads it from there.
const fs = require("node:fs");
const childProcess = require("node:child_process");
// Resolve `oxc-parser`'s own version so the matching binding version is fetched
const pkg = JSON.parse(fs.readFileSync(require.resolve("oxc-parser/package.json"), "utf-8"));
const { version } = pkg;
// Cache directory keyed by version, so upgrading `oxc-parser` triggers a fresh download
const baseDir = `/tmp/oxc-parser-${version}`;
const bindingEntry = `${baseDir}/node_modules/@oxc-parser/binding-wasm32-wasi/parser.wasi.cjs`;
if (!fs.existsSync(bindingEntry)) {
  // Remove any stale/partial install before downloading
  fs.rmSync(baseDir, { recursive: true, force: true });
  fs.mkdirSync(baseDir, { recursive: true });
  const bindingPkg = `@oxc-parser/binding-wasm32-wasi@${version}`;
  // oxlint-disable-next-line no-console
  console.log(`[oxc-parser] Downloading ${bindingPkg} on WebContainer...`);
  // Install via pnpm into the cache directory; inherit stdio so progress is visible
  childProcess.execFileSync("pnpm", ["i", bindingPkg], {
    cwd: baseDir,
    stdio: "inherit",
  });
}
module.exports = require(bindingEntry);

View file

@ -0,0 +1,57 @@
/**
 * Wrap a raw parse result in an object with lazy getters,
 * so each part is only materialized (and cached) on first access.
 *
 * @param {Object} result - Raw result from the binding
 * @returns {Object} - Object with lazy `program`, `module`, `comments`, `errors` getters
 */
export function wrap(result) {
  let program;
  let module;
  let comments;
  let errors;
  return {
    get program() {
      // `program` arrives as a JSON string; parse (and fix up literals) once
      if (!program) program = jsonParseAst(result.program);
      return program;
    },
    get module() {
      if (!module) ({ module } = result);
      return module;
    },
    get comments() {
      if (!comments) ({ comments } = result);
      return comments;
    },
    get errors() {
      if (!errors) ({ errors } = result);
      return errors;
    },
  };
}
// Used by `napi/playground/scripts/patch.js`.
//
// Set `value` field of `Literal`s which are `BigInt`s or `RegExp`s.
//
// Returned JSON contains an array `fixes` with paths to these nodes
// e.g. for `123n; foo(/xyz/)`, `fixes` will be
// `[["body", 0, "expression"], ["body", 1, "expression", "arguments", 2]]`.
//
// Walk down the AST to these nodes and alter them.
// Compiling the list of fixes on Rust side avoids having to do a full AST traversal on JS side
// to locate the likely very few `Literal`s which need fixing.
export function jsonParseAst(programJson) {
  const { node: program, fixes } = JSON.parse(programJson);
  fixes.forEach((fixPath) => applyFix(program, fixPath));
  return program;
}
// Follow `fixPath` down from `program` to a `Literal` node and set its `value`:
// `BigInt` literals get a `BigInt` value; otherwise the node is assumed to be
// a `RegExp` literal and gets a `RegExp` value (left unset if construction fails).
function applyFix(program, fixPath) {
  const node = fixPath.reduce((current, key) => current[key], program);
  if (node.bigint) {
    node.value = BigInt(node.bigint);
  } else {
    try {
      node.value = RegExp(node.regex.pattern, node.regex.flags);
    } catch {
      // Invalid regexp, or valid regexp using syntax not supported by this version of NodeJS
    }
  }
}