diff --git a/extractors/cds/tools/cds-extractor.ts b/extractors/cds/tools/cds-extractor.ts index 673370724..14e655679 100644 --- a/extractors/cds/tools/cds-extractor.ts +++ b/extractors/cds/tools/cds-extractor.ts @@ -115,6 +115,7 @@ try { new Set([ ...globSync(join(sourceRoot, '**/*.cds'), { ignore: ['**/node_modules/**', '**/.git/**'], + windowsPathsNoEscape: true, }), ]), ); diff --git a/extractors/cds/tools/dist/cds-extractor.bundle.js b/extractors/cds/tools/dist/cds-extractor.bundle.js index 9514f9639..2d7c9e62d 100644 --- a/extractors/cds/tools/dist/cds-extractor.bundle.js +++ b/extractors/cds/tools/dist/cds-extractor.bundle.js @@ -3581,8 +3581,8 @@ function testCdsCommand(validatedCommand, sourceRoot2, silent = false) { } // src/cds/compiler/compile.ts -var import_child_process3 = require("child_process"); -var import_path4 = require("path"); +var import_child_process4 = require("child_process"); +var import_path6 = require("path"); // src/cds/compiler/version.ts var import_child_process2 = require("child_process"); @@ -3618,6123 +3618,6358 @@ function getCdsVersion(cdsCommand, cacheDir) { } } -// src/cds/compiler/compile.ts -function parseCommandForSpawn(commandString) { - const parts = commandString.trim().split(/\s+/); - const executable = parts[0]; - const baseArgs = parts.slice(1); - return { executable, baseArgs }; +// src/environment.ts +var import_child_process3 = require("child_process"); +var import_fs5 = require("fs"); +var import_os = require("os"); +var import_path5 = require("path"); + +// src/paths-ignore.ts +var import_fs4 = require("fs"); +var import_path4 = require("path"); + +// node_modules/js-yaml/dist/js-yaml.mjs +function isNothing(subject) { + return typeof subject === "undefined" || subject === null; } -function determineCompilationTargets(project, sourceRoot2) { - const projectAbsolutePath = (0, import_path4.join)(sourceRoot2, project.projectDir); - const rootCdsFiles = project.cdsFiles.filter((file) => (0, import_path4.dirname)((0, 
import_path4.join)(sourceRoot2, file)) === projectAbsolutePath).map((file) => (0, import_path4.basename)(file)); - if (rootCdsFiles.includes("index.cds")) { - return ["index.cds"]; - } - const capDirectories = ["db", "srv", "app"]; - const existingCapDirs = capDirectories.filter((dir) => dirExists((0, import_path4.join)(projectAbsolutePath, dir))); - if (existingCapDirs.length > 0) { - return existingCapDirs; - } - if (rootCdsFiles.length > 0) { - return rootCdsFiles; - } - return project.cdsFiles.map((file) => (0, import_path4.relative)(projectAbsolutePath, (0, import_path4.join)(sourceRoot2, file))); +function isObject(subject) { + return typeof subject === "object" && subject !== null; } -function compileCdsToJson(cdsFilePath, sourceRoot2, cdsCommand, cacheDir, projectMap, projectDir) { - try { - const resolvedCdsFilePath = (0, import_path4.resolve)(cdsFilePath); - if (!fileExists(resolvedCdsFilePath)) { - throw new Error(`Expected CDS file '${resolvedCdsFilePath}' does not exist.`); - } - const cdsVersion = getCdsVersion(cdsCommand, cacheDir); - const versionInfo = cdsVersion ? `with CDS v${cdsVersion}` : ""; - const projectBaseDir = (0, import_path4.join)(sourceRoot2, projectDir); - const spawnOptions = createSpawnOptions(projectBaseDir, cdsCommand, cacheDir); - if (!projectMap || !projectDir || !projectMap.has(projectDir)) { - throw new Error( - `Project directory '${projectDir}' not found in projectMap. 
Ensure the project is properly initialized.` - ); +function toArray(sequence) { + if (Array.isArray(sequence)) return sequence; + else if (isNothing(sequence)) return []; + return [sequence]; +} +function extend(target, source) { + var index, length, key, sourceKeys; + if (source) { + sourceKeys = Object.keys(source); + for (index = 0, length = sourceKeys.length; index < length; index += 1) { + key = sourceKeys[index]; + target[key] = source[key]; } - const project = projectMap.get(projectDir); - return compileProject(sourceRoot2, projectDir, cdsCommand, spawnOptions, versionInfo, project); - } catch (error) { - return { success: false, message: String(error) }; } + return target; } -function compileProject(sourceRoot2, projectDir, cdsCommand, spawnOptions, versionInfo, project) { - cdsExtractorLog("info", `Compiling CDS project '${projectDir}' using ${versionInfo}...`); - const compilationTargets = determineCompilationTargets(project, sourceRoot2); - if (compilationTargets.length === 0) { - throw new Error( - `Project directory '${projectDir}' does not contain any CDS files and cannot be compiled` - ); +function repeat(string, count) { + var result = "", cycle; + for (cycle = 0; cycle < count; cycle += 1) { + result += string; } - const projectJsonOutPath = (0, import_path4.join)(sourceRoot2, projectDir, modelCdsJsonFile); - const compileArgs = [ - "compile", - ...compilationTargets, - "--to", - "json", - "--dest", - modelCdsJsonFile, - "--locations", - "--log-level", - "warn" - ]; - cdsExtractorLog("info", `Compiling CDS project targets: ${compilationTargets.join(", ")}`); - cdsExtractorLog( - "info", - `Running compilation task for CDS project '${projectDir}': command='${cdsCommand}' args='${JSON.stringify(compileArgs)}'` - ); - const { executable, baseArgs } = parseCommandForSpawn(cdsCommand); - const allArgs = [...baseArgs, ...compileArgs]; - const result = (0, import_child_process3.spawnSync)(executable, allArgs, spawnOptions); - if (result.error) { - 
cdsExtractorLog("error", `SpawnSync error: ${result.error.message}`); - throw new Error(`Error executing CDS compiler: ${result.error.message}`); + return result; +} +function isNegativeZero(number) { + return number === 0 && Number.NEGATIVE_INFINITY === 1 / number; +} +var isNothing_1 = isNothing; +var isObject_1 = isObject; +var toArray_1 = toArray; +var repeat_1 = repeat; +var isNegativeZero_1 = isNegativeZero; +var extend_1 = extend; +var common = { + isNothing: isNothing_1, + isObject: isObject_1, + toArray: toArray_1, + repeat: repeat_1, + isNegativeZero: isNegativeZero_1, + extend: extend_1 +}; +function formatError(exception2, compact) { + var where = "", message = exception2.reason || "(unknown reason)"; + if (!exception2.mark) return message; + if (exception2.mark.name) { + where += 'in "' + exception2.mark.name + '" '; } - if (result.stderr && result.stderr.length > 0) { - cdsExtractorLog("warn", `CDS stderr output: ${result.stderr.toString()}`); + where += "(" + (exception2.mark.line + 1) + ":" + (exception2.mark.column + 1) + ")"; + if (!compact && exception2.mark.snippet) { + where += "\n\n" + exception2.mark.snippet; } - if (result.status !== 0) { - cdsExtractorLog("error", `CDS command failed with status ${result.status}`); - cdsExtractorLog( - "error", - `Command: ${cdsCommand} ${compileArgs.map((arg) => arg.includes(" ") ? `"${arg}"` : arg).join(" ")}` - ); - cdsExtractorLog("error", `Stdout: ${result.stdout?.toString() || "No stdout"}`); - cdsExtractorLog("error", `Stderr: ${result.stderr?.toString() || "No stderr"}`); - throw new Error( - `Could not compile the CAP project ${projectDir}. 
-Reported error(s): -\`\`\` -${result.stderr?.toString() || "Unknown error"} -\`\`\`` - ); + return message + " " + where; +} +function YAMLException$1(reason, mark) { + Error.call(this); + this.name = "YAMLException"; + this.reason = reason; + this.mark = mark; + this.message = formatError(this, false); + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } else { + this.stack = new Error().stack || ""; } - if (!fileExists(projectJsonOutPath) && !dirExists(projectJsonOutPath)) { - throw new Error( - `CAP project '${projectDir}' was not compiled to JSON. This is likely because the project structure is invalid.` - ); +} +YAMLException$1.prototype = Object.create(Error.prototype); +YAMLException$1.prototype.constructor = YAMLException$1; +YAMLException$1.prototype.toString = function toString(compact) { + return this.name + ": " + formatError(this, compact); +}; +var exception = YAMLException$1; +function getLine(buffer, lineStart, lineEnd, position, maxLineLength) { + var head = ""; + var tail = ""; + var maxHalfLength = Math.floor(maxLineLength / 2) - 1; + if (position - lineStart > maxHalfLength) { + head = " ... 
"; + lineStart = position - maxHalfLength + head.length; } - if (dirExists(projectJsonOutPath)) { - cdsExtractorLog( - "info", - `CDS compiler generated JSON to output directory: ${projectJsonOutPath}` - ); - recursivelyRenameJsonFiles(projectJsonOutPath); - } else { - cdsExtractorLog("info", `CDS compiler generated JSON to file: ${projectJsonOutPath}`); + if (lineEnd - position > maxHalfLength) { + tail = " ..."; + lineEnd = position + maxHalfLength - tail.length; } - normalizeLocationPathsInFile(projectJsonOutPath); return { - success: true, - outputPath: projectJsonOutPath, - compiledAsProject: true, - message: "Project was compiled using project-aware compilation" + str: head + buffer.slice(lineStart, lineEnd).replace(/\t/g, "\u2192") + tail, + pos: position - lineStart + head.length + // relative position }; } -function createSpawnOptions(projectBaseDir, cdsCommand, cacheDir) { - const spawnOptions = { - cwd: projectBaseDir, - // CRITICAL: Always use project base directory as cwd to ensure correct path generation - shell: false, - // Use shell=false to ensure proper argument handling for paths with spaces - stdio: "pipe", - env: { ...process.env } - }; - const binPathNative = `node_modules${import_path4.sep}.bin${import_path4.sep}`; - const binPathPosix = "node_modules/.bin/"; - const isDirectBinary = cdsCommand.includes(binPathNative) || cdsCommand.includes(binPathPosix); - if (cacheDir && !isDirectBinary) { - const nodePath = (0, import_path4.join)(cacheDir, "node_modules"); - spawnOptions.env = { - ...process.env, - NODE_PATH: `${nodePath}${import_path4.delimiter}${process.env.NODE_PATH ?? 
""}`, - PATH: `${(0, import_path4.join)(nodePath, ".bin")}${import_path4.delimiter}${process.env.PATH}`, - // Add NPM configuration to ensure dependencies are resolved from the cache directory - npm_config_prefix: cacheDir, - // Ensure we don't pick up global CDS installations that might conflict - npm_config_global: "false", - // Clear any existing CDS environment variables that might interfere - CDS_HOME: cacheDir - }; - } else if (isDirectBinary) { - const cleanEnv = { ...process.env }; - delete cleanEnv.NODE_PATH; - delete cleanEnv.npm_config_prefix; - delete cleanEnv.npm_config_global; - delete cleanEnv.CDS_HOME; - spawnOptions.env = cleanEnv; - } - return spawnOptions; +function padStart(string, max) { + return common.repeat(" ", max - string.length) + string; } - -// src/cds/compiler/validator.ts -var import_fs4 = require("fs"); -var import_path5 = require("path"); -function identifyTasksRequiringRetry(dependencyGraph2) { - const tasksRequiringRetry = /* @__PURE__ */ new Map(); - for (const [projectDir, project] of dependencyGraph2.projects.entries()) { - const failedTasks = []; - for (const task of project.compilationTasks) { - if (task.retryInfo?.hasBeenRetried) { - continue; - } - const validationResult2 = validateTaskOutputs(task, dependencyGraph2.sourceRootDir); - if (!validationResult2.isValid) { - failedTasks.push(task); - cdsExtractorLog( - "info", - `Task ${task.id} requires retry: ${validationResult2.validFileCount}/${validationResult2.expectedFileCount} output files valid (status: ${task.status})` - ); - if (task.status === "success") { - cdsExtractorLog( - "warn", - `Task ${task.id} was marked as successful but output files are missing or invalid - updating status to failed` - ); - task.status = "failed"; - } - } - } - if (failedTasks.length > 0) { - tasksRequiringRetry.set(projectDir, failedTasks); - } - } - if (tasksRequiringRetry.size > 0) { - const totalFailedTasks = Array.from(tasksRequiringRetry.values()).reduce( - (sum, tasks) => sum + 
tasks.length, - 0 +function makeSnippet(mark, options) { + options = Object.create(options || null); + if (!mark.buffer) return null; + if (!options.maxLength) options.maxLength = 79; + if (typeof options.indent !== "number") options.indent = 1; + if (typeof options.linesBefore !== "number") options.linesBefore = 3; + if (typeof options.linesAfter !== "number") options.linesAfter = 2; + var re2 = /\r?\n|\r|\0/g; + var lineStarts = [0]; + var lineEnds = []; + var match2; + var foundLineNo = -1; + while (match2 = re2.exec(mark.buffer)) { + lineEnds.push(match2.index); + lineStarts.push(match2.index + match2[0].length); + if (mark.position <= match2.index && foundLineNo < 0) { + foundLineNo = lineStarts.length - 2; + } + } + if (foundLineNo < 0) foundLineNo = lineStarts.length - 1; + var result = "", i, line; + var lineNoLength = Math.min(mark.line + options.linesAfter, lineEnds.length).toString().length; + var maxLineLength = options.maxLength - (options.indent + lineNoLength + 3); + for (i = 1; i <= options.linesBefore; i++) { + if (foundLineNo - i < 0) break; + line = getLine( + mark.buffer, + lineStarts[foundLineNo - i], + lineEnds[foundLineNo - i], + mark.position - (lineStarts[foundLineNo] - lineStarts[foundLineNo - i]), + maxLineLength ); - cdsExtractorLog( - "info", - `Identified ${totalFailedTasks} task(s) requiring retry across ${tasksRequiringRetry.size} project(s)` + result = common.repeat(" ", options.indent) + padStart((mark.line - i + 1).toString(), lineNoLength) + " | " + line.str + "\n" + result; + } + line = getLine(mark.buffer, lineStarts[foundLineNo], lineEnds[foundLineNo], mark.position, maxLineLength); + result += common.repeat(" ", options.indent) + padStart((mark.line + 1).toString(), lineNoLength) + " | " + line.str + "\n"; + result += common.repeat("-", options.indent + lineNoLength + 3 + line.pos) + "^\n"; + for (i = 1; i <= options.linesAfter; i++) { + if (foundLineNo + i >= lineEnds.length) break; + line = getLine( + mark.buffer, + 
lineStarts[foundLineNo + i], + lineEnds[foundLineNo + i], + mark.position - (lineStarts[foundLineNo] - lineStarts[foundLineNo + i]), + maxLineLength ); + result += common.repeat(" ", options.indent) + padStart((mark.line + i + 1).toString(), lineNoLength) + " | " + line.str + "\n"; } - return tasksRequiringRetry; + return result.replace(/\n$/, ""); } -function updateCdsDependencyGraphStatus(dependencyGraph2, sourceRootDir) { - let successfulTasks = 0; - let failedTasks = 0; - let tasksSuccessfullyRetried = 0; - for (const project of dependencyGraph2.projects.values()) { - for (const task of project.compilationTasks) { - const validationResult2 = validateTaskOutputs(task, sourceRootDir); - const isValid = validationResult2.isValid; - if (isValid) { - task.status = "success"; - successfulTasks++; - if (task.retryInfo?.hasBeenRetried) { - tasksSuccessfullyRetried++; - } - } else { - task.status = "failed"; - failedTasks++; - } - } +var snippet = makeSnippet; +var TYPE_CONSTRUCTOR_OPTIONS = [ + "kind", + "multi", + "resolve", + "construct", + "instanceOf", + "predicate", + "represent", + "representName", + "defaultStyle", + "styleAliases" +]; +var YAML_NODE_KINDS = [ + "scalar", + "sequence", + "mapping" +]; +function compileStyleAliases(map2) { + var result = {}; + if (map2 !== null) { + Object.keys(map2).forEach(function(style) { + map2[style].forEach(function(alias) { + result[String(alias)] = style; + }); + }); } - dependencyGraph2.statusSummary.successfulCompilations = successfulTasks; - dependencyGraph2.statusSummary.failedCompilations = failedTasks; - dependencyGraph2.retryStatus.totalTasksSuccessfullyRetried = tasksSuccessfullyRetried; - dependencyGraph2.retryStatus.totalTasksRequiringRetry = failedTasks; - return { - tasksValidated: successfulTasks + failedTasks, - successfulTasks, - failedTasks, - tasksSuccessfullyRetried - }; + return result; } -function validateOutputFile(filePath) { - const result = { - isValid: false, - filePath, - exists: false +function 
Type$1(tag, options) { + options = options || {}; + Object.keys(options).forEach(function(name) { + if (TYPE_CONSTRUCTOR_OPTIONS.indexOf(name) === -1) { + throw new exception('Unknown option "' + name + '" is met in definition of "' + tag + '" YAML type.'); + } + }); + this.options = options; + this.tag = tag; + this.kind = options["kind"] || null; + this.resolve = options["resolve"] || function() { + return true; }; - if (!fileExists(filePath)) { - result.error = "File does not exist"; - return result; + this.construct = options["construct"] || function(data) { + return data; + }; + this.instanceOf = options["instanceOf"] || null; + this.predicate = options["predicate"] || null; + this.represent = options["represent"] || null; + this.representName = options["representName"] || null; + this.defaultStyle = options["defaultStyle"] || null; + this.multi = options["multi"] || false; + this.styleAliases = compileStyleAliases(options["styleAliases"] || null); + if (YAML_NODE_KINDS.indexOf(this.kind) === -1) { + throw new exception('Unknown kind "' + this.kind + '" is specified for "' + tag + '" YAML type.'); } - result.exists = true; - if (filePath.endsWith(".cds.json") || filePath.endsWith(".json")) { - try { - const content = (0, import_fs4.readFileSync)(filePath, "utf8"); - if (!content.trim()) { - result.error = "File is empty"; - return result; - } - const parsed = JSON.parse(content); - if (typeof parsed !== "object" || parsed === null) { - result.error = "File does not contain a valid JSON object"; - return result; +} +var type = Type$1; +function compileList(schema2, name) { + var result = []; + schema2[name].forEach(function(currentType) { + var newIndex = result.length; + result.forEach(function(previousType, previousIndex) { + if (previousType.tag === currentType.tag && previousType.kind === currentType.kind && previousType.multi === currentType.multi) { + newIndex = previousIndex; } - result.hasValidJson = true; - result.isValid = true; - } catch (error) { - 
result.error = `Invalid JSON content: ${String(error)}`; - return result; + }); + result[newIndex] = currentType; + }); + return result; +} +function compileMap() { + var result = { + scalar: {}, + sequence: {}, + mapping: {}, + fallback: {}, + multi: { + scalar: [], + sequence: [], + mapping: [], + fallback: [] } - } else { - result.isValid = true; + }, index, length; + function collectType(type2) { + if (type2.multi) { + result.multi[type2.kind].push(type2); + result.multi["fallback"].push(type2); + } else { + result[type2.kind][type2.tag] = result["fallback"][type2.tag] = type2; + } + } + for (index = 0, length = arguments.length; index < length; index += 1) { + arguments[index].forEach(collectType); } return result; } -function validateTaskOutputs(task, sourceRoot2) { - const fileResults = []; - const expectedOutput = task.expectedOutputFile; - const absolutePath = (0, import_path5.isAbsolute)(expectedOutput) ? expectedOutput : (0, import_path5.join)(sourceRoot2, expectedOutput); - const fileResult = validateOutputFile(absolutePath); - fileResults.push(fileResult); - const validFileCount = fileResults.filter((r) => r.isValid).length; - const expectedFileCount = 1; - const isValid = validFileCount === expectedFileCount && expectedFileCount > 0; - return { - isValid, - task, - fileResults, - validFileCount, - expectedFileCount - }; +function Schema$1(definition) { + return this.extend(definition); } - -// src/diagnostics.ts -var import_child_process4 = require("child_process"); -var import_path6 = require("path"); -function convertToRelativePath(filePath, sourceRoot2) { - if (!filePath || typeof filePath !== "string" || !sourceRoot2 || typeof sourceRoot2 !== "string") { - return "."; +Schema$1.prototype.extend = function extend2(definition) { + var implicit = []; + var explicit = []; + if (definition instanceof type) { + explicit.push(definition); + } else if (Array.isArray(definition)) { + explicit = explicit.concat(definition); + } else if (definition && 
(Array.isArray(definition.implicit) || Array.isArray(definition.explicit))) { + if (definition.implicit) implicit = implicit.concat(definition.implicit); + if (definition.explicit) explicit = explicit.concat(definition.explicit); + } else { + throw new exception("Schema.extend argument should be a Type, [ Type ], or a schema definition ({ implicit: [...], explicit: [...] })"); } - try { - const resolvedSourceRoot = (0, import_path6.resolve)(sourceRoot2); - const resolvedFilePath = (0, import_path6.isAbsolute)(filePath) ? (0, import_path6.resolve)(filePath) : (0, import_path6.resolve)(resolvedSourceRoot, filePath); - if (resolvedFilePath === resolvedSourceRoot) { - return "."; + implicit.forEach(function(type$1) { + if (!(type$1 instanceof type)) { + throw new exception("Specified list of YAML types (or a single Type object) contains a non-Type object."); } - const relativePath = (0, import_path6.relative)(resolvedSourceRoot, resolvedFilePath); - if (relativePath.startsWith("..")) { - return "."; + if (type$1.loadKind && type$1.loadKind !== "scalar") { + throw new exception("There is a non-scalar type in the implicit list of a schema. Implicit resolving of such types is not supported."); } - return relativePath; - } catch { - return "."; - } -} -function addDiagnostic(filePath, message, codeqlExePath2, sourceId, sourceName, severity, logPrefix, sourceRoot2) { - const finalFilePath = sourceRoot2 ? convertToRelativePath(filePath, sourceRoot2) : (0, import_path6.resolve)(filePath); - let finalMessage = message; - if (sourceRoot2 && finalFilePath === "." && filePath !== sourceRoot2) { - const resolvedSourceRoot = (0, import_path6.resolve)(sourceRoot2); - const resolvedFilePath = (0, import_path6.isAbsolute)(filePath) ? 
(0, import_path6.resolve)(filePath) : (0, import_path6.resolve)(resolvedSourceRoot, filePath); - if (resolvedFilePath !== resolvedSourceRoot) { - finalMessage = `${message} - -**Note**: The file \`${filePath}\` is located outside the scanned source directory and cannot be linked directly in this diagnostic. This diagnostic is associated with the repository root instead.`; + if (type$1.multi) { + throw new exception("There is a multi type in the implicit list of a schema. Multi tags can only be listed as explicit."); } + }); + explicit.forEach(function(type$1) { + if (!(type$1 instanceof type)) { + throw new exception("Specified list of YAML types (or a single Type object) contains a non-Type object."); + } + }); + var result = Object.create(Schema$1.prototype); + result.implicit = (this.implicit || []).concat(implicit); + result.explicit = (this.explicit || []).concat(explicit); + result.compiledImplicit = compileList(result, "implicit"); + result.compiledExplicit = compileList(result, "explicit"); + result.compiledTypeMap = compileMap(result.compiledImplicit, result.compiledExplicit); + return result; +}; +var schema = Schema$1; +var str = new type("tag:yaml.org,2002:str", { + kind: "scalar", + construct: function(data) { + return data !== null ? data : ""; } - try { - (0, import_child_process4.execFileSync)(codeqlExePath2, [ - "database", - "add-diagnostic", - "--extractor-name=cds", - "--ready-for-status-page", - `--source-id=${sourceId}`, - `--source-name=${sourceName}`, - `--severity=${severity}`, - `--markdown-message=${finalMessage}`, - `--file-path=${finalFilePath}`, - "--", - `${process.env.CODEQL_EXTRACTOR_CDS_WIP_DATABASE ?? 
""}` - ]); - cdsExtractorLog("info", `Added ${severity} diagnostic for ${logPrefix}: ${filePath}`); - return true; - } catch (err) { - cdsExtractorLog( - "error", - `Failed to add ${severity} diagnostic for ${logPrefix}=${filePath} : ${String(err)}` - ); - return false; - } -} -function addCdsIndexerDiagnostic(projectDir, errorMessage, codeqlExePath2, sourceRoot2) { - return addDiagnostic( - projectDir, - errorMessage, - codeqlExePath2, - "cds/indexer-failure", - "Failure running @sap/cds-indexer for a SAP CAP CDS project", - "warning" /* Warning */, - "project directory", - sourceRoot2 - ); +}); +var seq = new type("tag:yaml.org,2002:seq", { + kind: "sequence", + construct: function(data) { + return data !== null ? data : []; + } +}); +var map = new type("tag:yaml.org,2002:map", { + kind: "mapping", + construct: function(data) { + return data !== null ? data : {}; + } +}); +var failsafe = new schema({ + explicit: [ + str, + seq, + map + ] +}); +function resolveYamlNull(data) { + if (data === null) return true; + var max = data.length; + return max === 1 && data === "~" || max === 4 && (data === "null" || data === "Null" || data === "NULL"); } -function addCompilationDiagnostic(cdsFilePath, errorMessage, codeqlExePath2, sourceRoot2) { - return addDiagnostic( - cdsFilePath, - errorMessage, - codeqlExePath2, - "cds/compilation-failure", - "Failure to compile one or more SAP CAP CDS files", - "error" /* Error */, - "source file", - sourceRoot2 - ); +function constructYamlNull() { + return null; } -function addDependencyGraphDiagnostic(sourceRoot2, errorMessage, codeqlExePath2) { - return addDiagnostic( - sourceRoot2, - errorMessage, - codeqlExePath2, - "cds/dependency-graph-failure", - "CDS project dependency graph build failure", - "error" /* Error */, - "source root", - sourceRoot2 - ); +function isNull(object) { + return object === null; } -function addDependencyInstallationDiagnostic(sourceRoot2, errorMessage, codeqlExePath2) { - return addDiagnostic( - 
sourceRoot2, - errorMessage, - codeqlExePath2, - "cds/dependency-installation-failure", - "CDS dependency installation failure", - "error" /* Error */, - "source root", - sourceRoot2 - ); +var _null = new type("tag:yaml.org,2002:null", { + kind: "scalar", + resolve: resolveYamlNull, + construct: constructYamlNull, + predicate: isNull, + represent: { + canonical: function() { + return "~"; + }, + lowercase: function() { + return "null"; + }, + uppercase: function() { + return "NULL"; + }, + camelcase: function() { + return "Null"; + }, + empty: function() { + return ""; + } + }, + defaultStyle: "lowercase" +}); +function resolveYamlBoolean(data) { + if (data === null) return false; + var max = data.length; + return max === 4 && (data === "true" || data === "True" || data === "TRUE") || max === 5 && (data === "false" || data === "False" || data === "FALSE"); } -function addEnvironmentSetupDiagnostic(sourceRoot2, errorMessage, codeqlExePath2) { - const contextFile = sourceRoot2; - return addDiagnostic( - contextFile, - errorMessage, - codeqlExePath2, - "cds/environment-setup-failure", - "CDS extractor environment setup failure", - "error" /* Error */, - "source root", - sourceRoot2 - ); +function constructYamlBoolean(data) { + return data === "true" || data === "True" || data === "TRUE"; } -function addJavaScriptExtractorDiagnostic(filePath, errorMessage, codeqlExePath2, sourceRoot2) { - return addDiagnostic( - filePath, - errorMessage, - codeqlExePath2, - "cds/js-extractor-failure", - "Failure in JavaScript extractor for SAP CAP CDS files", - "error" /* Error */, - "extraction file", - sourceRoot2 - ); +function isBoolean(object) { + return Object.prototype.toString.call(object) === "[object Boolean]"; } -function addNoCdsProjectsDiagnostic(sourceRoot2, message, codeqlExePath2) { - return addDiagnostic( - sourceRoot2, - message, - codeqlExePath2, - "cds/no-cds-projects", - "No CDS projects detected in source", - "warning" /* Warning */, - "source root", - sourceRoot2 
- ); +var bool = new type("tag:yaml.org,2002:bool", { + kind: "scalar", + resolve: resolveYamlBoolean, + construct: constructYamlBoolean, + predicate: isBoolean, + represent: { + lowercase: function(object) { + return object ? "true" : "false"; + }, + uppercase: function(object) { + return object ? "TRUE" : "FALSE"; + }, + camelcase: function(object) { + return object ? "True" : "False"; + } + }, + defaultStyle: "lowercase" +}); +function isHexCode(c) { + return 48 <= c && c <= 57 || 65 <= c && c <= 70 || 97 <= c && c <= 102; } - -// src/packageManager/cacheInstaller.ts -var import_child_process6 = require("child_process"); -var import_crypto = require("crypto"); -var import_fs5 = require("fs"); -var import_path7 = require("path"); - -// src/packageManager/versionResolver.ts -var import_child_process5 = require("child_process"); -var availableVersionsCache = /* @__PURE__ */ new Map(); -var cacheStats = { - hits: 0, - misses: 0, - get hitRate() { - const total = this.hits + this.misses; - return total > 0 ? 
(this.hits / total * 100).toFixed(1) : "0.0"; - } -}; -function checkVersionCompatibility(cdsVersion, cdsDkVersion) { - if (cdsVersion === "latest" || cdsDkVersion === "latest") { - return { isCompatible: true }; - } - const parsedCds = parseSemanticVersion(cdsVersion); - const parsedCdsDk = parseSemanticVersion(cdsDkVersion); - if (!parsedCds || !parsedCdsDk) { - return { - isCompatible: false, - warning: "Unable to parse version numbers for compatibility check" - }; - } - const majorVersionsMatch = parsedCds.major === parsedCdsDk.major; - const minorVersionsMatch = parsedCds.minor === parsedCdsDk.minor; - if (!majorVersionsMatch) { - return { - isCompatible: false, - warning: `Major version mismatch: @sap/cds ${cdsVersion} and @sap/cds-dk ${cdsDkVersion} may not be compatible` - }; - } - if (!minorVersionsMatch) { - return { - isCompatible: true, - warning: `Minor version difference: @sap/cds ${cdsVersion} and @sap/cds-dk ${cdsDkVersion} - consider aligning versions for best compatibility` - }; - } - return { isCompatible: true }; +function isOctCode(c) { + return 48 <= c && c <= 55; } -function compareVersions(a, b) { - if (a.major !== b.major) return a.major - b.major; - if (a.minor !== b.minor) return a.minor - b.minor; - if (a.patch !== b.patch) return a.patch - b.patch; - if (a.prerelease && !b.prerelease) return -1; - if (!a.prerelease && b.prerelease) return 1; - if (a.prerelease && b.prerelease) { - return a.prerelease.localeCompare(b.prerelease); - } - return 0; +function isDecCode(c) { + return 48 <= c && c <= 57; } -function findBestAvailableVersion(availableVersions, requiredVersion) { - const parsedVersions = availableVersions.map((v2) => parseSemanticVersion(v2)).filter((v2) => v2 !== null); - if (parsedVersions.length === 0) { - return null; - } - const satisfyingVersions = parsedVersions.filter((v2) => satisfiesRange(v2, requiredVersion)); - if (satisfyingVersions.length > 0) { - satisfyingVersions.sort((a, b) => compareVersions(b, a)); - return 
satisfyingVersions[0].original; +function resolveYamlInteger(data) { + if (data === null) return false; + var max = data.length, index = 0, hasDigits = false, ch; + if (!max) return false; + ch = data[index]; + if (ch === "-" || ch === "+") { + ch = data[++index]; } - parsedVersions.sort((a, b) => compareVersions(b, a)); - return parsedVersions[0].original; -} -function getAvailableVersions(packageName) { - if (availableVersionsCache.has(packageName)) { - cacheStats.hits++; - return availableVersionsCache.get(packageName); + if (ch === "0") { + if (index + 1 === max) return true; + ch = data[++index]; + if (ch === "b") { + index++; + for (; index < max; index++) { + ch = data[index]; + if (ch === "_") continue; + if (ch !== "0" && ch !== "1") return false; + hasDigits = true; + } + return hasDigits && ch !== "_"; + } + if (ch === "x") { + index++; + for (; index < max; index++) { + ch = data[index]; + if (ch === "_") continue; + if (!isHexCode(data.charCodeAt(index))) return false; + hasDigits = true; + } + return hasDigits && ch !== "_"; + } + if (ch === "o") { + index++; + for (; index < max; index++) { + ch = data[index]; + if (ch === "_") continue; + if (!isOctCode(data.charCodeAt(index))) return false; + hasDigits = true; + } + return hasDigits && ch !== "_"; + } } - cacheStats.misses++; - try { - const output = (0, import_child_process5.execSync)(`npm view ${packageName} versions --json`, { - encoding: "utf8", - timeout: 3e4 - // 30 second timeout - }); - const versions = JSON.parse(output); - let versionArray = []; - if (Array.isArray(versions)) { - versionArray = versions.filter((v2) => typeof v2 === "string"); - } else if (typeof versions === "string") { - versionArray = [versions]; + if (ch === "_") return false; + for (; index < max; index++) { + ch = data[index]; + if (ch === "_") continue; + if (!isDecCode(data.charCodeAt(index))) { + return false; } - availableVersionsCache.set(packageName, versionArray); - return versionArray; - } catch (error) { - 
cdsExtractorLog("warn", `Failed to fetch versions for ${packageName}: ${String(error)}`); - availableVersionsCache.set(packageName, []); - return []; + hasDigits = true; } + if (!hasDigits || ch === "_") return false; + return true; } -function parseSemanticVersion(version) { - if (version === "latest") { - return { - major: 999, - minor: 999, - patch: 999, - original: version - }; - } - const cleanVersion = version.replace(/^[\^~>=<]+/, ""); - const semverRegex = /^(\d+)\.(\d+)\.(\d+)(?:-([a-zA-Z0-9.-]+))?(?:\+([a-zA-Z0-9.-]+))?$/; - const match2 = cleanVersion.match(semverRegex); - if (!match2) { - return null; +function constructYamlInteger(data) { + var value = data, sign = 1, ch; + if (value.indexOf("_") !== -1) { + value = value.replace(/_/g, ""); } - return { - major: parseInt(match2[1], 10), - minor: parseInt(match2[2], 10), - patch: parseInt(match2[3], 10), - prerelease: match2[4], - build: match2[5], - original: version - }; -} -function isSatisfyingVersion(resolvedVersion, requestedVersion) { - if (resolvedVersion === requestedVersion || requestedVersion === "latest") { - return true; + ch = value[0]; + if (ch === "-" || ch === "+") { + if (ch === "-") sign = -1; + value = value.slice(1); + ch = value[0]; } - const parsedResolved = parseSemanticVersion(resolvedVersion); - if (!parsedResolved) { - return false; + if (value === "0") return 0; + if (ch === "0") { + if (value[1] === "b") return sign * parseInt(value.slice(2), 2); + if (value[1] === "x") return sign * parseInt(value.slice(2), 16); + if (value[1] === "o") return sign * parseInt(value.slice(2), 8); } - return satisfiesRange(parsedResolved, requestedVersion); + return sign * parseInt(value, 10); } -function resolveCdsVersions2(cdsVersion, cdsDkVersion) { - const cdsVersions = getAvailableVersions("@sap/cds"); - const cdsDkVersions = getAvailableVersions("@sap/cds-dk"); - const resolvedCdsVersion = findBestAvailableVersion(cdsVersions, cdsVersion); - const resolvedCdsDkVersion = 
findBestAvailableVersion(cdsDkVersions, cdsDkVersion); - const cdsExactMatch = resolvedCdsVersion === cdsVersion || cdsVersion === "latest" && resolvedCdsVersion !== null; - const cdsDkExactMatch = resolvedCdsDkVersion === cdsDkVersion || cdsDkVersion === "latest" && resolvedCdsDkVersion !== null; - const cdsSatisfiesRange = resolvedCdsVersion ? isSatisfyingVersion(resolvedCdsVersion, cdsVersion) : false; - const cdsDkSatisfiesRange = resolvedCdsDkVersion ? isSatisfyingVersion(resolvedCdsDkVersion, cdsDkVersion) : false; - const isFallback = !cdsSatisfiesRange || !cdsDkSatisfiesRange; - let warning; - if (resolvedCdsVersion && resolvedCdsDkVersion) { - const compatibility = checkVersionCompatibility(resolvedCdsVersion, resolvedCdsDkVersion); - const shouldShowWarning = isFallback || !cdsExactMatch || !cdsDkExactMatch || compatibility.warning && !compatibility.isCompatible; - if (compatibility.warning && shouldShowWarning) { - warning = compatibility.warning; - } - } - return { - resolvedCdsVersion, - resolvedCdsDkVersion, - cdsExactMatch, - cdsDkExactMatch, - warning, - isFallback - }; +function isInteger(object) { + return Object.prototype.toString.call(object) === "[object Number]" && (object % 1 === 0 && !common.isNegativeZero(object)); } -function satisfiesRange(version, range2) { - if (range2 === "latest") { - return true; +var int = new type("tag:yaml.org,2002:int", { + kind: "scalar", + resolve: resolveYamlInteger, + construct: constructYamlInteger, + predicate: isInteger, + represent: { + binary: function(obj) { + return obj >= 0 ? "0b" + obj.toString(2) : "-0b" + obj.toString(2).slice(1); + }, + octal: function(obj) { + return obj >= 0 ? "0o" + obj.toString(8) : "-0o" + obj.toString(8).slice(1); + }, + decimal: function(obj) { + return obj.toString(10); + }, + /* eslint-disable max-len */ + hexadecimal: function(obj) { + return obj >= 0 ? 
"0x" + obj.toString(16).toUpperCase() : "-0x" + obj.toString(16).toUpperCase().slice(1); + } + }, + defaultStyle: "decimal", + styleAliases: { + binary: [2, "bin"], + octal: [8, "oct"], + decimal: [10, "dec"], + hexadecimal: [16, "hex"] } - const rangeVersion = parseSemanticVersion(range2); - if (!rangeVersion) { +}); +var YAML_FLOAT_PATTERN = new RegExp( + // 2.5e4, 2.5 and integers + "^(?:[-+]?(?:[0-9][0-9_]*)(?:\\.[0-9_]*)?(?:[eE][-+]?[0-9]+)?|\\.[0-9_]+(?:[eE][-+]?[0-9]+)?|[-+]?\\.(?:inf|Inf|INF)|\\.(?:nan|NaN|NAN))$" +); +function resolveYamlFloat(data) { + if (data === null) return false; + if (!YAML_FLOAT_PATTERN.test(data) || // Quick hack to not allow integers end with `_` + // Probably should update regexp & check speed + data[data.length - 1] === "_") { return false; } - if (range2.startsWith("^")) { - return version.major === rangeVersion.major && compareVersions(version, rangeVersion) >= 0; - } else if (range2.startsWith("~")) { - return version.major === rangeVersion.major && version.minor === rangeVersion.minor && compareVersions(version, rangeVersion) >= 0; - } else if (range2.startsWith(">=")) { - return compareVersions(version, rangeVersion) >= 0; - } else if (range2.startsWith(">")) { - return compareVersions(version, rangeVersion) > 0; - } else if (range2.startsWith("<=")) { - return compareVersions(version, rangeVersion) <= 0; - } else if (range2.startsWith("<")) { - return compareVersions(version, rangeVersion) < 0; - } else { - return compareVersions(version, rangeVersion) === 0; - } + return true; } - -// src/packageManager/cacheInstaller.ts -var cacheSubDirName = ".cds-extractor-cache"; -function addDependencyVersionWarning(packageJsonPath, warningMessage, codeqlExePath2) { - try { - (0, import_child_process6.execFileSync)(codeqlExePath2, [ - "database", - "add-diagnostic", - "--extractor-name=cds", - "--ready-for-status-page", - "--source-id=cds/dependency-version-fallback", - "--source-name=Using fallback versions for SAP CAP CDS 
dependencies", - `--severity=${"warning" /* Warning */}`, - `--markdown-message=${warningMessage}`, - `--file-path=${(0, import_path7.resolve)(packageJsonPath)}`, - "--", - `${process.env.CODEQL_EXTRACTOR_CDS_WIP_DATABASE ?? ""}` - ]); - cdsExtractorLog("info", `Added warning diagnostic for dependency fallback: ${packageJsonPath}`); - return true; - } catch (err) { - cdsExtractorLog( - "error", - `Failed to add warning diagnostic for ${packageJsonPath}: ${String(err)}` - ); - return false; +function constructYamlFloat(data) { + var value, sign; + value = data.replace(/_/g, "").toLowerCase(); + sign = value[0] === "-" ? -1 : 1; + if ("+-".indexOf(value[0]) >= 0) { + value = value.slice(1); } -} -function findNearestNpmrc(startDir) { - let current = (0, import_path7.resolve)(startDir); - while (true) { - const candidate = (0, import_path7.join)(current, ".npmrc"); - if ((0, import_fs5.existsSync)(candidate)) { - return candidate; - } - const parent = (0, import_path7.dirname)(current); - if (parent === current) { - return void 0; - } - current = parent; + if (value === ".inf") { + return sign === 1 ? 
Number.POSITIVE_INFINITY : Number.NEGATIVE_INFINITY; + } else if (value === ".nan") { + return NaN; } + return sign * parseFloat(value, 10); } -function copyNpmrcToCache(cacheDir, projectDir) { - const npmrcPath = findNearestNpmrc(projectDir); - if (!npmrcPath) { - return; +var SCIENTIFIC_WITHOUT_DOT = /^[-+]?[0-9]+e/; +function representYamlFloat(object, style) { + var res; + if (isNaN(object)) { + switch (style) { + case "lowercase": + return ".nan"; + case "uppercase": + return ".NAN"; + case "camelcase": + return ".NaN"; + } + } else if (Number.POSITIVE_INFINITY === object) { + switch (style) { + case "lowercase": + return ".inf"; + case "uppercase": + return ".INF"; + case "camelcase": + return ".Inf"; + } + } else if (Number.NEGATIVE_INFINITY === object) { + switch (style) { + case "lowercase": + return "-.inf"; + case "uppercase": + return "-.INF"; + case "camelcase": + return "-.Inf"; + } + } else if (common.isNegativeZero(object)) { + return "-0.0"; } - const dest = (0, import_path7.join)(cacheDir, ".npmrc"); - try { - (0, import_fs5.copyFileSync)(npmrcPath, dest); - cdsExtractorLog("info", `Copied .npmrc from '${npmrcPath}' to cache directory '${cacheDir}'`); - } catch (err) { - cdsExtractorLog( - "warn", - `Failed to copy .npmrc to cache directory: ${err instanceof Error ? err.message : String(err)}` - ); + res = object.toString(10); + return SCIENTIFIC_WITHOUT_DOT.test(res) ? 
res.replace("e", ".e") : res; +} +function isFloat(object) { + return Object.prototype.toString.call(object) === "[object Number]" && (object % 1 !== 0 || common.isNegativeZero(object)); +} +var float = new type("tag:yaml.org,2002:float", { + kind: "scalar", + resolve: resolveYamlFloat, + construct: constructYamlFloat, + predicate: isFloat, + represent: representYamlFloat, + defaultStyle: "lowercase" +}); +var json = failsafe.extend({ + implicit: [ + _null, + bool, + int, + float + ] +}); +var core = json; +var YAML_DATE_REGEXP = new RegExp( + "^([0-9][0-9][0-9][0-9])-([0-9][0-9])-([0-9][0-9])$" +); +var YAML_TIMESTAMP_REGEXP = new RegExp( + "^([0-9][0-9][0-9][0-9])-([0-9][0-9]?)-([0-9][0-9]?)(?:[Tt]|[ \\t]+)([0-9][0-9]?):([0-9][0-9]):([0-9][0-9])(?:\\.([0-9]*))?(?:[ \\t]*(Z|([-+])([0-9][0-9]?)(?::([0-9][0-9]))?))?$" +); +function resolveYamlTimestamp(data) { + if (data === null) return false; + if (YAML_DATE_REGEXP.exec(data) !== null) return true; + if (YAML_TIMESTAMP_REGEXP.exec(data) !== null) return true; + return false; +} +function constructYamlTimestamp(data) { + var match2, year, month, day, hour, minute, second, fraction = 0, delta = null, tz_hour, tz_minute, date; + match2 = YAML_DATE_REGEXP.exec(data); + if (match2 === null) match2 = YAML_TIMESTAMP_REGEXP.exec(data); + if (match2 === null) throw new Error("Date resolve error"); + year = +match2[1]; + month = +match2[2] - 1; + day = +match2[3]; + if (!match2[4]) { + return new Date(Date.UTC(year, month, day)); + } + hour = +match2[4]; + minute = +match2[5]; + second = +match2[6]; + if (match2[7]) { + fraction = match2[7].slice(0, 3); + while (fraction.length < 3) { + fraction += "0"; + } + fraction = +fraction; + } + if (match2[9]) { + tz_hour = +match2[10]; + tz_minute = +(match2[11] || 0); + delta = (tz_hour * 60 + tz_minute) * 6e4; + if (match2[9] === "-") delta = -delta; } + date = new Date(Date.UTC(year, month, day, hour, minute, second, fraction)); + if (delta) date.setTime(date.getTime() - delta); 
+ return date; } -function cacheInstallDependencies(dependencyGraph2, sourceRoot2, codeqlExePath2) { - if (dependencyGraph2.projects.size === 0) { - cdsExtractorLog("info", "No CDS projects found for dependency installation."); - cdsExtractorLog( - "info", - "This is expected if the source contains no CAP/CDS projects and should be handled by the caller." - ); - return /* @__PURE__ */ new Map(); +function representYamlTimestamp(object) { + return object.toISOString(); +} +var timestamp = new type("tag:yaml.org,2002:timestamp", { + kind: "scalar", + resolve: resolveYamlTimestamp, + construct: constructYamlTimestamp, + instanceOf: Date, + represent: representYamlTimestamp +}); +function resolveYamlMerge(data) { + return data === "<<" || data === null; +} +var merge = new type("tag:yaml.org,2002:merge", { + kind: "scalar", + resolve: resolveYamlMerge +}); +var BASE64_MAP = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\n\r"; +function resolveYamlBinary(data) { + if (data === null) return false; + var code, idx, bitlen = 0, max = data.length, map2 = BASE64_MAP; + for (idx = 0; idx < max; idx++) { + code = map2.indexOf(data.charAt(idx)); + if (code > 64) continue; + if (code < 0) return false; + bitlen += 6; } - const dependencyCombinations = extractUniqueDependencyCombinations(dependencyGraph2.projects); - if (dependencyCombinations.length === 0) { - cdsExtractorLog( - "error", - "No CDS dependencies found in any project. This means projects were detected but lack proper @sap/cds dependencies." - ); - cdsExtractorLog( - "info", - "Will attempt to use system-installed CDS tools if available, but compilation may fail." 
- ); - return /* @__PURE__ */ new Map(); + return bitlen % 8 === 0; +} +function constructYamlBinary(data) { + var idx, tailbits, input = data.replace(/[\r\n=]/g, ""), max = input.length, map2 = BASE64_MAP, bits = 0, result = []; + for (idx = 0; idx < max; idx++) { + if (idx % 4 === 0 && idx) { + result.push(bits >> 16 & 255); + result.push(bits >> 8 & 255); + result.push(bits & 255); + } + bits = bits << 6 | map2.indexOf(input.charAt(idx)); } - cdsExtractorLog( - "info", - `Found ${dependencyCombinations.length} unique CDS dependency combination(s).` - ); - for (const combination of dependencyCombinations) { - const { cdsVersion, cdsDkVersion, hash, resolvedCdsVersion, resolvedCdsDkVersion, isFallback } = combination; - const actualCdsVersion = resolvedCdsVersion ?? cdsVersion; - const actualCdsDkVersion = resolvedCdsDkVersion ?? cdsDkVersion; - const fallbackNote = isFallback ? " (using fallback versions)" : ""; - const indexerNote = combination.cdsIndexerVersion ? `, @sap/cds-indexer@${combination.cdsIndexerVersion}` : ""; - cdsExtractorLog( - "info", - `Dependency combination ${hash.substring(0, 8)}: @sap/cds@${actualCdsVersion}, @sap/cds-dk@${actualCdsDkVersion}${indexerNote}${fallbackNote}` - ); + tailbits = max % 4 * 6; + if (tailbits === 0) { + result.push(bits >> 16 & 255); + result.push(bits >> 8 & 255); + result.push(bits & 255); + } else if (tailbits === 18) { + result.push(bits >> 10 & 255); + result.push(bits >> 2 & 255); + } else if (tailbits === 12) { + result.push(bits >> 4 & 255); } - const cacheRootDir = (0, import_path7.join)(sourceRoot2, cacheSubDirName); - cdsExtractorLog( - "info", - `Using cache directory '${cacheSubDirName}' within source root directory '${cacheRootDir}'` - ); - if (!(0, import_fs5.existsSync)(cacheRootDir)) { - try { - (0, import_fs5.mkdirSync)(cacheRootDir, { recursive: true }); - cdsExtractorLog("info", `Created cache directory: ${cacheRootDir}`); - } catch (err) { - cdsExtractorLog( - "warn", - `Failed to create cache 
directory: ${err instanceof Error ? err.message : String(err)}` - ); - cdsExtractorLog("info", "Skipping dependency installation due to cache directory failure."); - return /* @__PURE__ */ new Map(); + return new Uint8Array(result); +} +function representYamlBinary(object) { + var result = "", bits = 0, idx, tail, max = object.length, map2 = BASE64_MAP; + for (idx = 0; idx < max; idx++) { + if (idx % 3 === 0 && idx) { + result += map2[bits >> 18 & 63]; + result += map2[bits >> 12 & 63]; + result += map2[bits >> 6 & 63]; + result += map2[bits & 63]; } - } else { - cdsExtractorLog("info", `Cache directory already exists: ${cacheRootDir}`); + bits = (bits << 8) + object[idx]; } - const projectCacheDirMap2 = /* @__PURE__ */ new Map(); - let successfulInstallations = 0; - for (const combination of dependencyCombinations) { - const { cdsVersion, cdsDkVersion, hash } = combination; - const { resolvedCdsVersion, resolvedCdsDkVersion } = combination; - const cacheDirName = `cds-${hash}`; - const cacheDir = (0, import_path7.join)(cacheRootDir, cacheDirName); - cdsExtractorLog( - "info", - `Processing dependency combination ${hash.substring(0, 8)} in cache directory: ${cacheDirName}` - ); - if (!(0, import_fs5.existsSync)(cacheDir)) { - try { - (0, import_fs5.mkdirSync)(cacheDir, { recursive: true }); - cdsExtractorLog("info", `Created cache subdirectory: ${cacheDirName}`); - } catch (err) { - cdsExtractorLog( - "error", - `Failed to create cache directory for combination ${hash.substring(0, 8)} (${cacheDirName}): ${err instanceof Error ? err.message : String(err)}` - ); - continue; - } - const actualCdsVersion = resolvedCdsVersion ?? cdsVersion; - const actualCdsDkVersion = resolvedCdsDkVersion ?? 
cdsDkVersion; - const cacheDeps = { - "@sap/cds": actualCdsVersion, - "@sap/cds-dk": actualCdsDkVersion - }; - if (combination.cdsIndexerVersion) { - cacheDeps["@sap/cds-indexer"] = combination.cdsIndexerVersion; - cdsExtractorLog( - "info", - `Including @sap/cds-indexer@${combination.cdsIndexerVersion} in cache for combination ${hash.substring(0, 8)}` - ); - } - const packageJson = { - name: `cds-extractor-cache-${hash}`, - version: "1.0.0", - private: true, - dependencies: cacheDeps - }; - try { - (0, import_fs5.writeFileSync)((0, import_path7.join)(cacheDir, "package.json"), JSON.stringify(packageJson, null, 2)); - cdsExtractorLog("info", `Created package.json in cache subdirectory: ${cacheDirName}`); - } catch (err) { - cdsExtractorLog( - "error", - `Failed to create package.json in cache directory ${cacheDirName}: ${err instanceof Error ? err.message : String(err)}` - ); - continue; - } - } - const npmrcProjectDir = Array.from(dependencyGraph2.projects.values()).map((project) => project.projectDir).find((projectDir) => projectDir && (0, import_fs5.existsSync)((0, import_path7.join)(sourceRoot2, projectDir, ".npmrc"))); - if (npmrcProjectDir) { - copyNpmrcToCache(cacheDir, (0, import_path7.join)(sourceRoot2, npmrcProjectDir)); - } - const samplePackageJsonPath = Array.from(dependencyGraph2.projects.values()).find( - (project) => project.packageJson - )?.projectDir; - const packageJsonPath = samplePackageJsonPath ? 
(0, import_path7.join)(sourceRoot2, samplePackageJsonPath, "package.json") : void 0; - const installSuccess = installDependenciesInCache( - cacheDir, - combination, - cacheDirName, - packageJsonPath, - codeqlExePath2 - ); - if (!installSuccess) { - cdsExtractorLog( - "warn", - `Skipping failed dependency combination ${hash.substring(0, 8)} (cache directory: ${cacheDirName})` - ); - continue; - } - successfulInstallations++; - for (const [projectDir, project] of Array.from(dependencyGraph2.projects.entries())) { - if (!project.packageJson) { - continue; - } - const p_cdsVersion = project.packageJson.dependencies?.["@sap/cds"] ?? "latest"; - const p_cdsDkVersion = project.packageJson.devDependencies?.["@sap/cds-dk"] ?? p_cdsVersion; - const p_cdsIndexerVersion = project.packageJson.dependencies?.["@sap/cds-indexer"] ?? project.packageJson.devDependencies?.["@sap/cds-indexer"] ?? void 0; - const projectResolvedVersions = resolveCdsVersions2(p_cdsVersion, p_cdsDkVersion); - const projectActualCdsVersion = projectResolvedVersions.resolvedCdsVersion ?? p_cdsVersion; - const projectActualCdsDkVersion = projectResolvedVersions.resolvedCdsDkVersion ?? p_cdsDkVersion; - const combinationActualCdsVersion = combination.resolvedCdsVersion ?? combination.cdsVersion; - const combinationActualCdsDkVersion = combination.resolvedCdsDkVersion ?? combination.cdsDkVersion; - if (projectActualCdsVersion === combinationActualCdsVersion && projectActualCdsDkVersion === combinationActualCdsDkVersion && p_cdsIndexerVersion === combination.cdsIndexerVersion) { - projectCacheDirMap2.set(projectDir, cacheDir); - } - } - } - if (successfulInstallations === 0) { - cdsExtractorLog("error", "Failed to install any dependency combinations."); - if (dependencyCombinations.length > 0) { - cdsExtractorLog( - "error", - `All ${dependencyCombinations.length} dependency combination(s) failed to install. 
This will likely cause compilation failures.` - ); - } - } else if (successfulInstallations < dependencyCombinations.length) { - cdsExtractorLog( - "warn", - `Successfully installed ${successfulInstallations} out of ${dependencyCombinations.length} dependency combinations.` - ); - } else { - cdsExtractorLog("info", "All dependency combinations installed successfully."); - } - if (projectCacheDirMap2.size > 0) { - cdsExtractorLog("info", `Project to cache directory mappings:`); - for (const [projectDir, cacheDir] of Array.from(projectCacheDirMap2.entries())) { - const cacheDirName = (0, import_path7.join)(cacheDir).split("/").pop() ?? "unknown"; - cdsExtractorLog("info", ` ${projectDir} \u2192 ${cacheDirName}`); - } - } else { - cdsExtractorLog( - "warn", - "No project to cache directory mappings created. Projects may not have compatible dependencies installed." - ); + tail = max % 3; + if (tail === 0) { + result += map2[bits >> 18 & 63]; + result += map2[bits >> 12 & 63]; + result += map2[bits >> 6 & 63]; + result += map2[bits & 63]; + } else if (tail === 2) { + result += map2[bits >> 10 & 63]; + result += map2[bits >> 4 & 63]; + result += map2[bits << 2 & 63]; + result += map2[64]; + } else if (tail === 1) { + result += map2[bits >> 2 & 63]; + result += map2[bits << 4 & 63]; + result += map2[64]; + result += map2[64]; } - return projectCacheDirMap2; + return result; } -function extractUniqueDependencyCombinations(projects) { - const combinations = /* @__PURE__ */ new Map(); - for (const project of Array.from(projects.values())) { - if (!project.packageJson) { - continue; - } - const cdsVersion = project.packageJson.dependencies?.["@sap/cds"] ?? "latest"; - const cdsDkVersion = project.packageJson.devDependencies?.["@sap/cds-dk"] ?? cdsVersion; - const cdsIndexerVersion = project.packageJson.dependencies?.["@sap/cds-indexer"] ?? project.packageJson.devDependencies?.["@sap/cds-indexer"] ?? 
void 0; - cdsExtractorLog( - "info", - `Resolving available dependency versions for project '${project.projectDir}' with dependencies: [@sap/cds@${cdsVersion}, @sap/cds-dk@${cdsDkVersion}]` - ); - const resolvedVersions = resolveCdsVersions2(cdsVersion, cdsDkVersion); - const { resolvedCdsVersion, resolvedCdsDkVersion, ...rest } = resolvedVersions; - if (resolvedCdsVersion && resolvedCdsDkVersion) { - let statusMsg; - if (resolvedVersions.cdsExactMatch && resolvedVersions.cdsDkExactMatch) { - statusMsg = " (exact match)"; - } else if (!resolvedVersions.isFallback) { - statusMsg = " (compatible versions)"; - } else { - statusMsg = " (using fallback versions)"; +function isBinary(obj) { + return Object.prototype.toString.call(obj) === "[object Uint8Array]"; +} +var binary = new type("tag:yaml.org,2002:binary", { + kind: "scalar", + resolve: resolveYamlBinary, + construct: constructYamlBinary, + predicate: isBinary, + represent: representYamlBinary +}); +var _hasOwnProperty$3 = Object.prototype.hasOwnProperty; +var _toString$2 = Object.prototype.toString; +function resolveYamlOmap(data) { + if (data === null) return true; + var objectKeys = [], index, length, pair, pairKey, pairHasKey, object = data; + for (index = 0, length = object.length; index < length; index += 1) { + pair = object[index]; + pairHasKey = false; + if (_toString$2.call(pair) !== "[object Object]") return false; + for (pairKey in pair) { + if (_hasOwnProperty$3.call(pair, pairKey)) { + if (!pairHasKey) pairHasKey = true; + else return false; } - cdsExtractorLog( - "info", - `Resolved to: @sap/cds@${resolvedCdsVersion}, @sap/cds-dk@${resolvedCdsDkVersion}${statusMsg}` - ); - } else { - cdsExtractorLog( - "error", - `Failed to resolve CDS dependencies: @sap/cds@${cdsVersion}, @sap/cds-dk@${cdsDkVersion}` - ); - } - const actualCdsVersion = resolvedCdsVersion ?? cdsVersion; - const actualCdsDkVersion = resolvedCdsDkVersion ?? cdsDkVersion; - const hashInput = cdsIndexerVersion ? 
`${actualCdsVersion}|${actualCdsDkVersion}|${cdsIndexerVersion}` : `${actualCdsVersion}|${actualCdsDkVersion}`; - const hash = (0, import_crypto.createHash)("sha256").update(hashInput).digest("hex"); - if (!combinations.has(hash)) { - combinations.set(hash, { - cdsVersion, - cdsDkVersion, - cdsIndexerVersion, - hash, - resolvedCdsVersion: resolvedCdsVersion ?? void 0, - resolvedCdsDkVersion: resolvedCdsDkVersion ?? void 0, - ...rest - }); } + if (!pairHasKey) return false; + if (objectKeys.indexOf(pairKey) === -1) objectKeys.push(pairKey); + else return false; } - return Array.from(combinations.values()); + return true; } -function installDependenciesInCache(cacheDir, combination, cacheDirName, packageJsonPath, codeqlExePath2) { - const { resolvedCdsVersion, resolvedCdsDkVersion, isFallback, warning } = combination; - const nodeModulesExists = (0, import_fs5.existsSync)((0, import_path7.join)(cacheDir, "node_modules", "@sap", "cds")) && (0, import_fs5.existsSync)((0, import_path7.join)(cacheDir, "node_modules", "@sap", "cds-dk")); - if (nodeModulesExists) { - cdsExtractorLog( - "info", - `Using cached dependencies for @sap/cds@${resolvedCdsVersion} and @sap/cds-dk@${resolvedCdsDkVersion} from ${cacheDirName}` - ); - if (isFallback && warning && packageJsonPath && codeqlExePath2) { - addDependencyVersionWarning(packageJsonPath, warning, codeqlExePath2); - } - return true; - } - if (!resolvedCdsVersion || !resolvedCdsDkVersion) { - cdsExtractorLog("error", "Cannot install dependencies: no compatible versions found"); - return false; +function constructYamlOmap(data) { + return data !== null ? 
data : []; +} +var omap = new type("tag:yaml.org,2002:omap", { + kind: "sequence", + resolve: resolveYamlOmap, + construct: constructYamlOmap +}); +var _toString$1 = Object.prototype.toString; +function resolveYamlPairs(data) { + if (data === null) return true; + var index, length, pair, keys, result, object = data; + result = new Array(object.length); + for (index = 0, length = object.length; index < length; index += 1) { + pair = object[index]; + if (_toString$1.call(pair) !== "[object Object]") return false; + keys = Object.keys(pair); + if (keys.length !== 1) return false; + result[index] = [keys[0], pair[keys[0]]]; } - cdsExtractorLog( - "info", - `Installing @sap/cds@${resolvedCdsVersion} and @sap/cds-dk@${resolvedCdsDkVersion} in cache directory: ${cacheDirName}` - ); - if (isFallback && warning) { - cdsExtractorLog("warn", warning); + return true; +} +function constructYamlPairs(data) { + if (data === null) return []; + var index, length, pair, keys, result, object = data; + result = new Array(object.length); + for (index = 0, length = object.length; index < length; index += 1) { + pair = object[index]; + keys = Object.keys(pair); + result[index] = [keys[0], pair[keys[0]]]; } - try { - (0, import_child_process6.execFileSync)("npm", ["install", "--quiet", "--no-audit", "--no-fund"], { - cwd: cacheDir, - stdio: "inherit" - }); - if (isFallback && warning && packageJsonPath && codeqlExePath2) { - addDependencyVersionWarning(packageJsonPath, warning, codeqlExePath2); + return result; +} +var pairs = new type("tag:yaml.org,2002:pairs", { + kind: "sequence", + resolve: resolveYamlPairs, + construct: constructYamlPairs +}); +var _hasOwnProperty$2 = Object.prototype.hasOwnProperty; +function resolveYamlSet(data) { + if (data === null) return true; + var key, object = data; + for (key in object) { + if (_hasOwnProperty$2.call(object, key)) { + if (object[key] !== null) return false; } - return true; - } catch (err) { - const errorMessage = `Failed to install 
resolved dependencies in cache directory ${cacheDir}: ${err instanceof Error ? err.message : String(err)}`; - cdsExtractorLog("error", errorMessage); - return false; } + return true; } - -// src/packageManager/projectInstaller.ts -var import_child_process7 = require("child_process"); -var import_path8 = require("path"); -function needsFullDependencyInstallation(project) { - if (project.retryStatus?.fullDependenciesInstalled) { - return false; - } - const hasFailedTasks = project.compilationTasks.some( - (task) => task.status === "failed" && !task.retryInfo?.hasBeenRetried - ); - return hasFailedTasks && project.packageJson !== void 0; +function constructYamlSet(data) { + return data !== null ? data : {}; } -function projectInstallDependencies(project, sourceRoot2) { - const startTime = Date.now(); - const projectPath = (0, import_path8.join)(sourceRoot2, project.projectDir); - const result = { - success: false, - projectDir: projectPath, - warnings: [], - durationMs: 0, - timedOut: false +var set = new type("tag:yaml.org,2002:set", { + kind: "mapping", + resolve: resolveYamlSet, + construct: constructYamlSet +}); +var _default = core.extend({ + implicit: [ + timestamp, + merge + ], + explicit: [ + binary, + omap, + pairs, + set + ] +}); +var _hasOwnProperty$1 = Object.prototype.hasOwnProperty; +var CONTEXT_FLOW_IN = 1; +var CONTEXT_FLOW_OUT = 2; +var CONTEXT_BLOCK_IN = 3; +var CONTEXT_BLOCK_OUT = 4; +var CHOMPING_CLIP = 1; +var CHOMPING_STRIP = 2; +var CHOMPING_KEEP = 3; +var PATTERN_NON_PRINTABLE = /[\x00-\x08\x0B\x0C\x0E-\x1F\x7F-\x84\x86-\x9F\uFFFE\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/; +var PATTERN_NON_ASCII_LINE_BREAKS = /[\x85\u2028\u2029]/; +var PATTERN_FLOW_INDICATORS = /[,\[\]\{\}]/; +var PATTERN_TAG_HANDLE = /^(?:!|!!|![a-z\-]+!)$/i; +var PATTERN_TAG_URI = /^(?:!|[^,\[\]\{\}])(?:%[0-9a-f]{2}|[0-9a-z\-#;\/\?:@&=\+\$,_\.!~\*'\(\)\[\]])*$/i; +function _class(obj) { + return Object.prototype.toString.call(obj); +} 
+function is_EOL(c) { + return c === 10 || c === 13; +} +function is_WHITE_SPACE(c) { + return c === 9 || c === 32; +} +function is_WS_OR_EOL(c) { + return c === 9 || c === 32 || c === 10 || c === 13; +} +function is_FLOW_INDICATOR(c) { + return c === 44 || c === 91 || c === 93 || c === 123 || c === 125; +} +function fromHexCode(c) { + var lc; + if (48 <= c && c <= 57) { + return c - 48; + } + lc = c | 32; + if (97 <= lc && lc <= 102) { + return lc - 97 + 10; + } + return -1; +} +function escapedHexLen(c) { + if (c === 120) { + return 2; + } + if (c === 117) { + return 4; + } + if (c === 85) { + return 8; + } + return 0; +} +function fromDecimalCode(c) { + if (48 <= c && c <= 57) { + return c - 48; + } + return -1; +} +function simpleEscapeSequence(c) { + return c === 48 ? "\0" : c === 97 ? "\x07" : c === 98 ? "\b" : c === 116 ? " " : c === 9 ? " " : c === 110 ? "\n" : c === 118 ? "\v" : c === 102 ? "\f" : c === 114 ? "\r" : c === 101 ? "\x1B" : c === 32 ? " " : c === 34 ? '"' : c === 47 ? "/" : c === 92 ? "\\" : c === 78 ? "\x85" : c === 95 ? "\xA0" : c === 76 ? "\u2028" : c === 80 ? "\u2029" : ""; +} +function charFromCodepoint(c) { + if (c <= 65535) { + return String.fromCharCode(c); + } + return String.fromCharCode( + (c - 65536 >> 10) + 55296, + (c - 65536 & 1023) + 56320 + ); +} +function setProperty(object, key, value) { + if (key === "__proto__") { + Object.defineProperty(object, key, { + configurable: true, + enumerable: true, + writable: true, + value + }); + } else { + object[key] = value; + } +} +var simpleEscapeCheck = new Array(256); +var simpleEscapeMap = new Array(256); +for (i = 0; i < 256; i++) { + simpleEscapeCheck[i] = simpleEscapeSequence(i) ? 
1 : 0; + simpleEscapeMap[i] = simpleEscapeSequence(i); +} +var i; +function State$1(input, options) { + this.input = input; + this.filename = options["filename"] || null; + this.schema = options["schema"] || _default; + this.onWarning = options["onWarning"] || null; + this.legacy = options["legacy"] || false; + this.json = options["json"] || false; + this.listener = options["listener"] || null; + this.implicitTypes = this.schema.compiledImplicit; + this.typeMap = this.schema.compiledTypeMap; + this.length = input.length; + this.position = 0; + this.line = 0; + this.lineStart = 0; + this.lineIndent = 0; + this.firstTabInLine = -1; + this.documents = []; +} +function generateError(state, message) { + var mark = { + name: state.filename, + buffer: state.input.slice(0, -1), + // omit trailing \0 + position: state.position, + line: state.line, + column: state.position - state.lineStart }; - try { - if (!project.packageJson) { - result.error = "No package.json found for project"; - return result; + mark.snippet = snippet(mark); + return new exception(message, mark); +} +function throwError(state, message) { + throw generateError(state, message); +} +function throwWarning(state, message) { + if (state.onWarning) { + state.onWarning.call(null, generateError(state, message)); + } +} +var directiveHandlers = { + YAML: function handleYamlDirective(state, name, args) { + var match2, major, minor; + if (state.version !== null) { + throwError(state, "duplication of %YAML directive"); + } + if (args.length !== 1) { + throwError(state, "YAML directive accepts exactly one argument"); + } + match2 = /^([0-9]+)\.([0-9]+)$/.exec(args[0]); + if (match2 === null) { + throwError(state, "ill-formed argument of the YAML directive"); + } + major = parseInt(match2[1], 10); + minor = parseInt(match2[2], 10); + if (major !== 1) { + throwError(state, "unacceptable YAML version of the document"); + } + state.version = args[0]; + state.checkLineBreaks = minor < 2; + if (minor !== 1 && minor !== 
2) { + throwWarning(state, "unsupported YAML version of the document"); + } + }, + TAG: function handleTagDirective(state, name, args) { + var handle, prefix; + if (args.length !== 2) { + throwError(state, "TAG directive accepts exactly two arguments"); + } + handle = args[0]; + prefix = args[1]; + if (!PATTERN_TAG_HANDLE.test(handle)) { + throwError(state, "ill-formed tag handle (first argument) of the TAG directive"); + } + if (_hasOwnProperty$1.call(state.tagMap, handle)) { + throwError(state, 'there is a previously declared suffix for "' + handle + '" tag handle'); + } + if (!PATTERN_TAG_URI.test(prefix)) { + throwError(state, "ill-formed tag prefix (second argument) of the TAG directive"); } - cdsExtractorLog( - "info", - `Installing full dependencies for project ${project.projectDir} in project's node_modules` - ); try { - (0, import_child_process7.execFileSync)("npm", ["install", "--quiet", "--no-audit", "--no-fund"], { - cwd: projectPath, - stdio: "inherit", - timeout: 12e4 - // 2-minute timeout - }); - result.success = true; - cdsExtractorLog( - "info", - `Successfully installed full dependencies for project ${project.projectDir}` - ); - } catch (execError) { - if (execError instanceof Error && "signal" in execError && execError.signal === "SIGTERM") { - result.timedOut = true; - result.error = "Dependency installation timed out"; - } else { - result.error = `npm install failed: ${String(execError)}`; - } - result.warnings.push( - `Dependency installation failed but will still attempt retry compilation: ${result.error}` - ); - cdsExtractorLog("warn", result.warnings[0]); + prefix = decodeURIComponent(prefix); + } catch (err) { + throwError(state, "tag prefix is malformed: " + prefix); } - } catch (error) { - result.error = `Failed to install full dependencies: ${String(error)}`; - cdsExtractorLog("error", result.error); - } finally { - result.durationMs = Date.now() - startTime; + state.tagMap[handle] = prefix; } - return result; -} - -// 
src/cds/compiler/retry.ts -function addCompilationDiagnosticsForFailedTasks(dependencyGraph2, codeqlExePath2, sourceRoot2) { - for (const project of dependencyGraph2.projects.values()) { - for (const task of project.compilationTasks) { - if (task.status === "failed") { - const shouldAddDiagnostic = task.retryInfo?.hasBeenRetried ?? !task.retryInfo; - if (shouldAddDiagnostic) { - for (const sourceFile of task.sourceFiles) { - addCompilationDiagnostic( - sourceFile, - task.errorSummary ?? "Compilation failed", - codeqlExePath2, - sourceRoot2 - ); - } +}; +function captureSegment(state, start, end, checkJson) { + var _position, _length, _character, _result; + if (start < end) { + _result = state.input.slice(start, end); + if (checkJson) { + for (_position = 0, _length = _result.length; _position < _length; _position += 1) { + _character = _result.charCodeAt(_position); + if (!(_character === 9 || 32 <= _character && _character <= 1114111)) { + throwError(state, "expected valid JSON character"); } } + } else if (PATTERN_NON_PRINTABLE.test(_result)) { + throwError(state, "the stream contains non-printable characters"); } + state.result += _result; } } -function orchestrateRetryAttempts(dependencyGraph2, codeqlExePath2) { - const startTime = Date.now(); - let dependencyInstallationStartTime = 0; - let dependencyInstallationEndTime = 0; - let retryCompilationStartTime = 0; - let retryCompilationEndTime = 0; - const result = { - success: true, - projectsWithRetries: [], - totalTasksRequiringRetry: 0, - totalSuccessfulRetries: 0, - totalFailedRetries: 0, - projectsWithSuccessfulDependencyInstallation: [], - projectsWithFailedDependencyInstallation: [], - retryDurationMs: 0, - dependencyInstallationDurationMs: 0, - retryCompilationDurationMs: 0 - }; - try { - cdsExtractorLog("info", "Identifying tasks requiring retry..."); - const tasksRequiringRetry = identifyTasksRequiringRetry(dependencyGraph2); - if (tasksRequiringRetry.size === 0) { - cdsExtractorLog("info", "No tasks 
require retry - all compilations successful"); - return result; +function mergeMappings(state, destination, source, overridableKeys) { + var sourceKeys, key, index, quantity; + if (!common.isObject(source)) { + throwError(state, "cannot merge mappings; the provided source object is unacceptable"); + } + sourceKeys = Object.keys(source); + for (index = 0, quantity = sourceKeys.length; index < quantity; index += 1) { + key = sourceKeys[index]; + if (!_hasOwnProperty$1.call(destination, key)) { + setProperty(destination, key, source[key]); + overridableKeys[key] = true; } - result.totalTasksRequiringRetry = Array.from(tasksRequiringRetry.values()).reduce( - (sum, tasks) => sum + tasks.length, - 0 - ); - dependencyGraph2.retryStatus.totalTasksRequiringRetry = result.totalTasksRequiringRetry; - dependencyInstallationStartTime = Date.now(); - for (const [projectDir, failedTasks] of tasksRequiringRetry) { - const project = dependencyGraph2.projects.get(projectDir); - if (!project) { - continue; + } +} +function storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, startLine, startLineStart, startPos) { + var index, quantity; + if (Array.isArray(keyNode)) { + keyNode = Array.prototype.slice.call(keyNode); + for (index = 0, quantity = keyNode.length; index < quantity; index += 1) { + if (Array.isArray(keyNode[index])) { + throwError(state, "nested arrays are not supported inside keys"); } - if (needsFullDependencyInstallation(project)) { - try { - const installResult = projectInstallDependencies(project, dependencyGraph2.sourceRootDir); - project.retryStatus ??= { - fullDependenciesInstalled: false, - tasksRequiringRetry: failedTasks.length, - tasksRetried: 0, - installationErrors: [] - }; - if (installResult.success) { - project.retryStatus.fullDependenciesInstalled = true; - result.projectsWithSuccessfulDependencyInstallation.push(projectDir); - dependencyGraph2.retryStatus.projectsWithFullDependencies.add(projectDir); - } else { - 
project.retryStatus.installationErrors = [ - ...project.retryStatus.installationErrors ?? [], - installResult.error ?? "Unknown installation error" - ]; - result.projectsWithFailedDependencyInstallation.push(projectDir); - } - if (installResult.warnings.length > 0) { - for (const warning of installResult.warnings) { - dependencyGraph2.errors.warnings.push({ - phase: "retry_dependency_installation", - message: warning, - timestamp: /* @__PURE__ */ new Date(), - context: projectDir - }); - } - } - } catch (error) { - const errorMessage = `Failed to install full dependencies for project ${projectDir}: ${String(error)}`; - cdsExtractorLog("error", errorMessage); - dependencyGraph2.errors.critical.push({ - phase: "retry_dependency_installation", - message: errorMessage, - timestamp: /* @__PURE__ */ new Date() - }); - result.projectsWithFailedDependencyInstallation.push(projectDir); - } + if (typeof keyNode === "object" && _class(keyNode[index]) === "[object Object]") { + keyNode[index] = "[object Object]"; } - dependencyGraph2.retryStatus.projectsRequiringFullDependencies.add(projectDir); } - dependencyInstallationEndTime = Date.now(); - result.dependencyInstallationDurationMs = dependencyInstallationEndTime - dependencyInstallationStartTime; - cdsExtractorLog("info", "Executing retry compilation attempts..."); - retryCompilationStartTime = Date.now(); - for (const [projectDir, failedTasks] of tasksRequiringRetry) { - const project = dependencyGraph2.projects.get(projectDir); - if (!project) { - continue; - } - const retryExecutionResult = retryCompilationTasksForProject( - failedTasks, - project, - dependencyGraph2 - ); - result.projectsWithRetries.push(projectDir); - result.totalSuccessfulRetries += retryExecutionResult.successfulRetries; - result.totalFailedRetries += retryExecutionResult.failedRetries; - if (project.retryStatus) { - project.retryStatus.tasksRetried = retryExecutionResult.retriedTasks.length; + } + if (typeof keyNode === "object" && _class(keyNode) 
=== "[object Object]") { + keyNode = "[object Object]"; + } + keyNode = String(keyNode); + if (_result === null) { + _result = {}; + } + if (keyTag === "tag:yaml.org,2002:merge") { + if (Array.isArray(valueNode)) { + for (index = 0, quantity = valueNode.length; index < quantity; index += 1) { + mergeMappings(state, _result, valueNode[index], overridableKeys); } + } else { + mergeMappings(state, _result, valueNode, overridableKeys); } - retryCompilationEndTime = Date.now(); - result.retryCompilationDurationMs = retryCompilationEndTime - retryCompilationStartTime; - updateCdsDependencyGraphStatus(dependencyGraph2, dependencyGraph2.sourceRootDir); - updateDependencyGraphWithRetryResults(dependencyGraph2, result); - addCompilationDiagnosticsForFailedTasks( - dependencyGraph2, - codeqlExePath2, - dependencyGraph2.sourceRootDir - ); - result.success = result.totalSuccessfulRetries > 0 || result.totalTasksRequiringRetry === 0; - } catch (error) { - const errorMessage = `Retry orchestration failed: ${String(error)}`; - cdsExtractorLog("error", errorMessage); - dependencyGraph2.errors.critical.push({ - phase: "retry_orchestration", - message: errorMessage, - timestamp: /* @__PURE__ */ new Date() - }); - result.success = false; - } finally { - result.retryDurationMs = Date.now() - startTime; + } else { + if (!state.json && !_hasOwnProperty$1.call(overridableKeys, keyNode) && _hasOwnProperty$1.call(_result, keyNode)) { + state.line = startLine || state.line; + state.lineStart = startLineStart || state.lineStart; + state.position = startPos || state.position; + throwError(state, "duplicated mapping key"); + } + setProperty(_result, keyNode, valueNode); + delete overridableKeys[keyNode]; } - return result; + return _result; } -function retryCompilationTask(task, retryCommand, projectDir, dependencyGraph2) { - const startTime = /* @__PURE__ */ new Date(); - const attemptId = `${task.id}_retry_${startTime.getTime()}`; - const cdsCommandString = retryCommand.originalCommand; - 
const attempt = { - id: attemptId, - cdsCommand: cdsCommandString, - cacheDir: projectDir, - timestamp: startTime, - result: { - success: false, - timestamp: startTime +function readLineBreak(state) { + var ch; + ch = state.input.charCodeAt(state.position); + if (ch === 10) { + state.position++; + } else if (ch === 13) { + state.position++; + if (state.input.charCodeAt(state.position) === 10) { + state.position++; } - }; - try { - const primarySourceFile = task.sourceFiles[0]; - const compilationResult = compileCdsToJson( - primarySourceFile, - dependencyGraph2.sourceRootDir, - cdsCommandString, - projectDir, - // Convert CDS projects to BasicCdsProject format expected by compileCdsToJson - new Map( - Array.from(dependencyGraph2.projects.entries()).map(([key, value]) => [ - key, - { - cdsFiles: value.cdsFiles, - compilationTargets: value.compilationTargets, - expectedOutputFile: value.expectedOutputFile, - projectDir: value.projectDir, - dependencies: value.dependencies, - imports: value.imports, - packageJson: value.packageJson - } - ]) - ), - task.projectDir - ); - attempt.result = { - ...compilationResult, - timestamp: startTime - }; - } catch (error) { - attempt.error = { - message: String(error), - stack: error instanceof Error ? error.stack : void 0 - }; + } else { + throwError(state, "a line break is expected"); } - return attempt; + state.line += 1; + state.lineStart = state.position; + state.firstTabInLine = -1; } -function retryCompilationTasksForProject(tasksToRetry, project, dependencyGraph2) { - const startTime = Date.now(); - const result = { - projectDir: project.projectDir, - retriedTasks: [], - successfulRetries: 0, - failedRetries: 0, - fullDependenciesAvailable: Boolean(project.retryStatus?.fullDependenciesInstalled), - executionDurationMs: 0, - retryErrors: [] - }; - cdsExtractorLog( - "info", - `Retrying ${tasksToRetry.length} task(s) for project ${project.projectDir} using ${result.fullDependenciesAvailable ? 
"full" : "minimal"} dependencies` - ); - for (const task of tasksToRetry) { - try { - task.retryInfo = { - hasBeenRetried: true, - retryReason: "Output validation failed", - fullDependenciesInstalled: result.fullDependenciesAvailable, - retryTimestamp: /* @__PURE__ */ new Date() - }; - const retryAttempt = retryCompilationTask( - task, - task.retryCommand, - project.projectDir, - dependencyGraph2 - ); - task.retryInfo.retryAttempt = retryAttempt; - task.attempts.push(retryAttempt); - result.retriedTasks.push(task); - if (retryAttempt.result.success) { - task.status = "success"; - result.successfulRetries++; - cdsExtractorLog("info", `Retry successful for task ${task.id}`); - } else { - task.status = "failed"; - task.errorSummary = retryAttempt.error?.message ?? "Retry compilation failed"; - result.failedRetries++; - result.retryErrors.push(task.errorSummary); - cdsExtractorLog("warn", `Retry failed for task ${task.id}: ${task.errorSummary}`); +function skipSeparationSpace(state, allowComments, checkIndent) { + var lineBreaks = 0, ch = state.input.charCodeAt(state.position); + while (ch !== 0) { + while (is_WHITE_SPACE(ch)) { + if (ch === 9 && state.firstTabInLine === -1) { + state.firstTabInLine = state.position; } - } catch (error) { - const errorMessage = `Failed to retry task ${task.id}: ${String(error)}`; - result.retryErrors.push(errorMessage); - result.failedRetries++; - task.status = "failed"; - task.errorSummary = errorMessage; - cdsExtractorLog("error", errorMessage); + ch = state.input.charCodeAt(++state.position); } - } - result.executionDurationMs = Date.now() - startTime; - cdsExtractorLog( - "info", - `Retry execution completed for project ${project.projectDir}: ${result.successfulRetries} successful, ${result.failedRetries} failed` - ); - return result; -} -function updateDependencyGraphWithRetryResults(dependencyGraph2, retryResults) { - dependencyGraph2.retryStatus.totalRetryAttempts = retryResults.totalSuccessfulRetries + 
retryResults.totalFailedRetries; -} - -// src/cds/compiler/graph.ts -function attemptCompilation(task, cdsCommand, cacheDir, dependencyGraph2) { - const startTime = /* @__PURE__ */ new Date(); - const attemptId = `${task.id}_${startTime.getTime()}`; - const attempt = { - id: attemptId, - cdsCommand, - cacheDir, - timestamp: startTime, - result: { - success: false, - timestamp: startTime + if (allowComments && ch === 35) { + do { + ch = state.input.charCodeAt(++state.position); + } while (ch !== 10 && ch !== 13 && ch !== 0); } - }; - try { - const primarySourceFile = task.sourceFiles[0]; - const compilationResult = compileCdsToJson( - primarySourceFile, - dependencyGraph2.sourceRootDir, - cdsCommand, - cacheDir, - // Convert CDS projects to BasicCdsProject format expected by compileCdsToJson - new Map( - Array.from(dependencyGraph2.projects.entries()).map(([key, value]) => [ - key, - { - cdsFiles: value.cdsFiles, - compilationTargets: value.compilationTargets, - expectedOutputFile: value.expectedOutputFile, - projectDir: value.projectDir, - dependencies: value.dependencies, - imports: value.imports, - packageJson: value.packageJson, - compilationConfig: value.compilationConfig - } - ]) - ), - task.projectDir - ); - const endTime = /* @__PURE__ */ new Date(); - attempt.result = { - ...compilationResult, - timestamp: endTime, - durationMs: endTime.getTime() - startTime.getTime(), - commandUsed: cdsCommand, - cacheDir - }; - if (compilationResult.success && compilationResult.outputPath) { - dependencyGraph2.statusSummary.jsonFilesGenerated++; + if (is_EOL(ch)) { + readLineBreak(state); + ch = state.input.charCodeAt(state.position); + lineBreaks++; + state.lineIndent = 0; + while (ch === 32) { + state.lineIndent++; + ch = state.input.charCodeAt(++state.position); + } + } else { + break; } - } catch (error) { - const endTime = /* @__PURE__ */ new Date(); - attempt.error = { - message: String(error), - stack: error instanceof Error ? 
error.stack : void 0 - }; - attempt.result.timestamp = endTime; - attempt.result.durationMs = endTime.getTime() - startTime.getTime(); } - task.attempts.push(attempt); - return attempt; -} -function createCompilationTask(type2, sourceFiles, expectedOutputFile, projectDir) { - const defaultPrimaryCommand = { - executable: "cds", - args: [], - originalCommand: "cds" - }; - const defaultRetryCommand = { - executable: "npx", - args: ["cds"], - originalCommand: "npx cds" - }; - return { - id: `${type2}_${projectDir}_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`, - type: type2, - status: "pending", - sourceFiles, - expectedOutputFile, - projectDir, - attempts: [], - dependencies: [], - primaryCommand: defaultPrimaryCommand, - retryCommand: defaultRetryCommand - }; -} -function createCompilationConfig(cdsCommand, cacheDir) { - return { - cdsCommand, - cacheDir, - versionCompatibility: { - isCompatible: true - // Will be validated during planning - }, - maxRetryAttempts: 3 - }; + if (checkIndent !== -1 && lineBreaks !== 0 && state.lineIndent < checkIndent) { + throwWarning(state, "deficient indentation"); + } + return lineBreaks; } -function executeCompilationTask(task, project, dependencyGraph2, _codeqlExePath) { - task.status = "in_progress"; - const config = project.enhancedCompilationConfig; - if (!config) { - throw new Error(`No compilation configuration found for project ${project.projectDir}`); +function testDocumentSeparator(state) { + var _position = state.position, ch; + ch = state.input.charCodeAt(_position); + if ((ch === 45 || ch === 46) && ch === state.input.charCodeAt(_position + 1) && ch === state.input.charCodeAt(_position + 2)) { + _position += 3; + ch = state.input.charCodeAt(_position); + if (ch === 0 || is_WS_OR_EOL(ch)) { + return true; + } } - const compilationAttempt = attemptCompilation( - task, - config.cdsCommand, - config.cacheDir, - dependencyGraph2 - ); - if (compilationAttempt.result.success) { - task.status = "success"; - return; 
+ return false; +} +function writeFoldedLines(state, count) { + if (count === 1) { + state.result += " "; + } else if (count > 1) { + state.result += common.repeat("\n", count - 1); } - const lastError = compilationAttempt.error ? new Error(compilationAttempt.error.message) : new Error("Compilation failed"); - task.status = "failed"; - task.errorSummary = lastError?.message || "Compilation failed"; - cdsExtractorLog("error", `Compilation failed for task ${task.id}: ${task.errorSummary}`); } -function executeCompilationTasks(dependencyGraph2, codeqlExePath2) { - cdsExtractorLog("info", "Starting compilation execution for all projects..."); - dependencyGraph2.currentPhase = "compiling"; - const compilationStartTime = /* @__PURE__ */ new Date(); - const allTasks = []; - for (const project of dependencyGraph2.projects.values()) { - for (const task of project.compilationTasks) { - allTasks.push({ task, project }); - } +function readPlainScalar(state, nodeIndent, withinFlowCollection) { + var preceding, following, captureStart, captureEnd, hasPendingContent, _line, _lineStart, _lineIndent, _kind = state.kind, _result = state.result, ch; + ch = state.input.charCodeAt(state.position); + if (is_WS_OR_EOL(ch) || is_FLOW_INDICATOR(ch) || ch === 35 || ch === 38 || ch === 42 || ch === 33 || ch === 124 || ch === 62 || ch === 39 || ch === 34 || ch === 37 || ch === 64 || ch === 96) { + return false; } - cdsExtractorLog("info", `Executing ${allTasks.length} compilation task(s)...`); - for (const { task, project } of allTasks) { - try { - executeCompilationTask(task, project, dependencyGraph2, codeqlExePath2); - } catch (error) { - const errorMessage = `Failed to execute compilation task ${task.id}: ${String(error)}`; - cdsExtractorLog("error", errorMessage); - dependencyGraph2.errors.critical.push({ - phase: "compiling", - message: errorMessage, - timestamp: /* @__PURE__ */ new Date(), - stack: error instanceof Error ? 
error.stack : void 0 - }); - task.status = "failed"; - task.errorSummary = errorMessage; - dependencyGraph2.statusSummary.failedCompilations++; + if (ch === 63 || ch === 45) { + following = state.input.charCodeAt(state.position + 1); + if (is_WS_OR_EOL(following) || withinFlowCollection && is_FLOW_INDICATOR(following)) { + return false; } } - for (const project of dependencyGraph2.projects.values()) { - const allTasksCompleted = project.compilationTasks.every( - (task) => task.status === "success" || task.status === "failed" - ); - if (allTasksCompleted) { - const hasFailedTasks = project.compilationTasks.some((task) => task.status === "failed"); - project.status = hasFailedTasks ? "failed" : "completed"; - project.timestamps.compilationCompleted = /* @__PURE__ */ new Date(); + state.kind = "scalar"; + state.result = ""; + captureStart = captureEnd = state.position; + hasPendingContent = false; + while (ch !== 0) { + if (ch === 58) { + following = state.input.charCodeAt(state.position + 1); + if (is_WS_OR_EOL(following) || withinFlowCollection && is_FLOW_INDICATOR(following)) { + break; + } + } else if (ch === 35) { + preceding = state.input.charCodeAt(state.position - 1); + if (is_WS_OR_EOL(preceding)) { + break; + } + } else if (state.position === state.lineStart && testDocumentSeparator(state) || withinFlowCollection && is_FLOW_INDICATOR(ch)) { + break; + } else if (is_EOL(ch)) { + _line = state.line; + _lineStart = state.lineStart; + _lineIndent = state.lineIndent; + skipSeparationSpace(state, false, -1); + if (state.lineIndent >= nodeIndent) { + hasPendingContent = true; + ch = state.input.charCodeAt(state.position); + continue; + } else { + state.position = captureEnd; + state.line = _line; + state.lineStart = _lineStart; + state.lineIndent = _lineIndent; + break; + } } - } - const compilationEndTime = /* @__PURE__ */ new Date(); - dependencyGraph2.statusSummary.performance.compilationDurationMs = compilationEndTime.getTime() - compilationStartTime.getTime(); 
- cdsExtractorLog( - "info", - `Compilation execution completed. Success: ${dependencyGraph2.statusSummary.successfulCompilations}, Failed: ${dependencyGraph2.statusSummary.failedCompilations}` - ); -} -function orchestrateCompilation(dependencyGraph2, projectCacheDirMap2, codeqlExePath2) { - try { - planCompilationTasks(dependencyGraph2, projectCacheDirMap2); - executeCompilationTasks(dependencyGraph2, codeqlExePath2); - updateCdsDependencyGraphStatus(dependencyGraph2, dependencyGraph2.sourceRootDir); - cdsExtractorLog("info", "Starting retry orchestration phase..."); - const retryResults = orchestrateRetryAttempts(dependencyGraph2, codeqlExePath2); - updateCdsDependencyGraphStatus(dependencyGraph2, dependencyGraph2.sourceRootDir); - if (retryResults.totalTasksRequiringRetry > 0) { - cdsExtractorLog( - "info", - `Retry phase completed: ${retryResults.totalTasksRequiringRetry} tasks retried, ${retryResults.totalSuccessfulRetries} successful, ${retryResults.totalFailedRetries} failed` - ); - } else { - cdsExtractorLog("info", "Retry phase completed: no tasks required retry"); + if (hasPendingContent) { + captureSegment(state, captureStart, captureEnd, false); + writeFoldedLines(state, state.line - _line); + captureStart = captureEnd = state.position; + hasPendingContent = false; } - const hasFailures = dependencyGraph2.statusSummary.failedCompilations > 0 || dependencyGraph2.errors.critical.length > 0; - dependencyGraph2.statusSummary.overallSuccess = !hasFailures; - dependencyGraph2.currentPhase = hasFailures ? 
"failed" : "completed"; - const statusReport = generateStatusReport(dependencyGraph2); - cdsExtractorLog("info", "CDS Extractor Status Report : Post-Compilation...\n" + statusReport); - } catch (error) { - const errorMessage = `Compilation orchestration failed: ${String(error)}`; - cdsExtractorLog("error", errorMessage); - dependencyGraph2.errors.critical.push({ - phase: "compiling", - message: errorMessage, - timestamp: /* @__PURE__ */ new Date(), - stack: error instanceof Error ? error.stack : void 0 - }); - dependencyGraph2.currentPhase = "failed"; - dependencyGraph2.statusSummary.overallSuccess = false; - throw error; + if (!is_WHITE_SPACE(ch)) { + captureEnd = state.position + 1; + } + ch = state.input.charCodeAt(++state.position); + } + captureSegment(state, captureStart, captureEnd, false); + if (state.result) { + return true; } + state.kind = _kind; + state.result = _result; + return false; } -function planCompilationTasks(dependencyGraph2, projectCacheDirMap2) { - cdsExtractorLog("info", "Planning compilation tasks for all projects..."); - dependencyGraph2.currentPhase = "compilation_planning"; - for (const [projectDir, project] of dependencyGraph2.projects.entries()) { - try { - const cacheDir = projectCacheDirMap2.get(projectDir); - const commands = determineVersionAwareCdsCommands( - cacheDir, - dependencyGraph2.sourceRootDir, - projectDir, - dependencyGraph2 - ); - const cdsCommand = determineCdsCommand(cacheDir, dependencyGraph2.sourceRootDir); - const compilationConfig = createCompilationConfig(cdsCommand, cacheDir); - project.enhancedCompilationConfig = compilationConfig; - const task = createCompilationTask( - "project", - project.cdsFiles, - project.expectedOutputFile, - projectDir - ); - task.primaryCommand = commands.primaryCommand; - task.retryCommand = commands.retryCommand; - project.compilationTasks = [task]; - project.status = "compilation_planned"; - project.timestamps.compilationStarted = /* @__PURE__ */ new Date(); - cdsExtractorLog( - 
"info", - `Planned ${project.compilationTasks.length} compilation task(s) for project ${projectDir}` - ); - } catch (error) { - const errorMessage = `Failed to plan compilation for project ${projectDir}: ${String(error)}`; - cdsExtractorLog("error", errorMessage); - dependencyGraph2.errors.critical.push({ - phase: "compilation_planning", - message: errorMessage, - timestamp: /* @__PURE__ */ new Date(), - stack: error instanceof Error ? error.stack : void 0 - }); - project.status = "failed"; +function readSingleQuotedScalar(state, nodeIndent) { + var ch, captureStart, captureEnd; + ch = state.input.charCodeAt(state.position); + if (ch !== 39) { + return false; + } + state.kind = "scalar"; + state.result = ""; + state.position++; + captureStart = captureEnd = state.position; + while ((ch = state.input.charCodeAt(state.position)) !== 0) { + if (ch === 39) { + captureSegment(state, captureStart, state.position, true); + ch = state.input.charCodeAt(++state.position); + if (ch === 39) { + captureStart = state.position; + state.position++; + captureEnd = state.position; + } else { + return true; + } + } else if (is_EOL(ch)) { + captureSegment(state, captureStart, captureEnd, true); + writeFoldedLines(state, skipSeparationSpace(state, false, nodeIndent)); + captureStart = captureEnd = state.position; + } else if (state.position === state.lineStart && testDocumentSeparator(state)) { + throwError(state, "unexpected end of the document within a single quoted scalar"); + } else { + state.position++; + captureEnd = state.position; } } - const totalTasks = Array.from(dependencyGraph2.projects.values()).reduce( - (sum, project) => sum + project.compilationTasks.length, - 0 - ); - dependencyGraph2.statusSummary.totalCompilationTasks = totalTasks; - cdsExtractorLog("info", `Compilation planning completed. 
Total tasks: ${totalTasks}`); + throwError(state, "unexpected end of the stream within a single quoted scalar"); } - -// src/cds/compiler/project.ts -var import_path9 = require("path"); - -// src/cds/indexer.ts -var import_child_process8 = require("child_process"); -var import_path10 = require("path"); -var CDS_INDEXER_TIMEOUT_MS = 6e5; -var CDS_INDEXER_PACKAGE = "@sap/cds-indexer"; -function projectUsesCdsIndexer(project) { - if (!project.packageJson) { +function readDoubleQuotedScalar(state, nodeIndent) { + var captureStart, captureEnd, hexLength, hexResult, tmp, ch; + ch = state.input.charCodeAt(state.position); + if (ch !== 34) { return false; } - const inDeps = project.packageJson.dependencies?.[CDS_INDEXER_PACKAGE] !== void 0; - const inDevDeps = project.packageJson.devDependencies?.[CDS_INDEXER_PACKAGE] !== void 0; - return inDeps || inDevDeps; -} -function runCdsIndexer(project, sourceRoot2, cacheDir) { - const projectAbsPath = (0, import_path10.join)(sourceRoot2, project.projectDir); - const startTime = Date.now(); - const result = { - success: false, - projectDir: project.projectDir, - durationMs: 0, - timedOut: false - }; - try { - const nodePaths = []; - if (cacheDir) { - nodePaths.push((0, import_path10.join)(cacheDir, "node_modules")); - } - nodePaths.push((0, import_path10.join)(projectAbsPath, "node_modules")); - const env = { - ...process.env, - NODE_PATH: nodePaths.join(import_path10.delimiter) - }; - cdsExtractorLog( - "info", - `Running ${CDS_INDEXER_PACKAGE} for project '${project.projectDir}'...` - ); - const spawnResult = (0, import_child_process8.spawnSync)("npx", ["--yes", CDS_INDEXER_PACKAGE], { - cwd: projectAbsPath, - env, - stdio: "pipe", - timeout: CDS_INDEXER_TIMEOUT_MS - }); - result.durationMs = Date.now() - startTime; - if (spawnResult.signal === "SIGTERM" || spawnResult.signal === "SIGKILL") { - result.timedOut = true; - result.error = `${CDS_INDEXER_PACKAGE} timed out after ${CDS_INDEXER_TIMEOUT_MS}ms for project 
'${project.projectDir}'`; - cdsExtractorLog("warn", result.error); - return result; - } - if (spawnResult.error) { - result.error = `${CDS_INDEXER_PACKAGE} failed to start for project '${project.projectDir}': ${String(spawnResult.error)}`; - cdsExtractorLog("warn", result.error); - return result; - } - if (spawnResult.status !== 0) { - const stderr = spawnResult.stderr?.toString().trim() ?? ""; - const stdout = spawnResult.stdout?.toString().trim() ?? ""; - const output = stderr || stdout || "unknown error"; - result.error = `${CDS_INDEXER_PACKAGE} failed for project '${project.projectDir}' (exit code ${spawnResult.status}): ${output}`; - cdsExtractorLog("warn", result.error); - return result; + state.kind = "scalar"; + state.result = ""; + state.position++; + captureStart = captureEnd = state.position; + while ((ch = state.input.charCodeAt(state.position)) !== 0) { + if (ch === 34) { + captureSegment(state, captureStart, state.position, true); + state.position++; + return true; + } else if (ch === 92) { + captureSegment(state, captureStart, state.position, true); + ch = state.input.charCodeAt(++state.position); + if (is_EOL(ch)) { + skipSeparationSpace(state, false, nodeIndent); + } else if (ch < 256 && simpleEscapeCheck[ch]) { + state.result += simpleEscapeMap[ch]; + state.position++; + } else if ((tmp = escapedHexLen(ch)) > 0) { + hexLength = tmp; + hexResult = 0; + for (; hexLength > 0; hexLength--) { + ch = state.input.charCodeAt(++state.position); + if ((tmp = fromHexCode(ch)) >= 0) { + hexResult = (hexResult << 4) + tmp; + } else { + throwError(state, "expected hexadecimal character"); + } + } + state.result += charFromCodepoint(hexResult); + state.position++; + } else { + throwError(state, "unknown escape sequence"); + } + captureStart = captureEnd = state.position; + } else if (is_EOL(ch)) { + captureSegment(state, captureStart, captureEnd, true); + writeFoldedLines(state, skipSeparationSpace(state, false, nodeIndent)); + captureStart = captureEnd = 
state.position; + } else if (state.position === state.lineStart && testDocumentSeparator(state)) { + throwError(state, "unexpected end of the document within a double quoted scalar"); + } else { + state.position++; + captureEnd = state.position; } - result.success = true; - cdsExtractorLog( - "info", - `Successfully ran ${CDS_INDEXER_PACKAGE} for project '${project.projectDir}' (${result.durationMs}ms)` - ); - } catch (error) { - result.durationMs = Date.now() - startTime; - result.error = `${CDS_INDEXER_PACKAGE} threw an unexpected error for project '${project.projectDir}': ${String(error)}`; - cdsExtractorLog("error", result.error); } - return result; + throwError(state, "unexpected end of the stream within a double quoted scalar"); } -function orchestrateCdsIndexer(dependencyGraph2, sourceRoot2, projectCacheDirMap2, codeqlExePath2) { - const summary = { - totalProjects: dependencyGraph2.projects.size, - projectsRequiringIndexer: 0, - successfulRuns: 0, - failedRuns: 0, - results: [] - }; - for (const [projectDir, project] of dependencyGraph2.projects.entries()) { - if (!projectUsesCdsIndexer(project)) { - continue; - } - summary.projectsRequiringIndexer++; - const cacheDir = projectCacheDirMap2.get(projectDir); - const result = runCdsIndexer(project, sourceRoot2, cacheDir); - summary.results.push(result); - if (result.success) { - summary.successfulRuns++; - } else { - summary.failedRuns++; - if (codeqlExePath2) { - addCdsIndexerDiagnostic( - projectDir, - result.error ?? 
`${CDS_INDEXER_PACKAGE} failed for project '${projectDir}'`, - codeqlExePath2, - sourceRoot2 - ); - } - } - } - if (summary.projectsRequiringIndexer > 0) { - cdsExtractorLog( - "info", - `CDS indexer summary: ${summary.projectsRequiringIndexer} project(s) required indexer, ${summary.successfulRuns} succeeded, ${summary.failedRuns} failed` - ); +function readFlowCollection(state, nodeIndent) { + var readNext = true, _line, _lineStart, _pos, _tag = state.tag, _result, _anchor = state.anchor, following, terminator, isPair, isExplicitPair, isMapping, overridableKeys = /* @__PURE__ */ Object.create(null), keyNode, keyTag, valueNode, ch; + ch = state.input.charCodeAt(state.position); + if (ch === 91) { + terminator = 93; + isMapping = false; + _result = []; + } else if (ch === 123) { + terminator = 125; + isMapping = true; + _result = {}; } else { - cdsExtractorLog("info", "No projects require @sap/cds-indexer."); - } - return summary; -} - -// src/cds/parser/graph.ts -var import_path13 = require("path"); - -// src/cds/parser/functions.ts -var import_fs7 = require("fs"); -var import_path12 = require("path"); - -// src/paths-ignore.ts -var import_fs6 = require("fs"); -var import_path11 = require("path"); - -// node_modules/js-yaml/dist/js-yaml.mjs -function isNothing(subject) { - return typeof subject === "undefined" || subject === null; -} -function isObject(subject) { - return typeof subject === "object" && subject !== null; -} -function toArray(sequence) { - if (Array.isArray(sequence)) return sequence; - else if (isNothing(sequence)) return []; - return [sequence]; -} -function extend(target, source) { - var index, length, key, sourceKeys; - if (source) { - sourceKeys = Object.keys(source); - for (index = 0, length = sourceKeys.length; index < length; index += 1) { - key = sourceKeys[index]; - target[key] = source[key]; - } - } - return target; -} -function repeat(string, count) { - var result = "", cycle; - for (cycle = 0; cycle < count; cycle += 1) { - result += 
string; + return false; } - return result; -} -function isNegativeZero(number) { - return number === 0 && Number.NEGATIVE_INFINITY === 1 / number; -} -var isNothing_1 = isNothing; -var isObject_1 = isObject; -var toArray_1 = toArray; -var repeat_1 = repeat; -var isNegativeZero_1 = isNegativeZero; -var extend_1 = extend; -var common = { - isNothing: isNothing_1, - isObject: isObject_1, - toArray: toArray_1, - repeat: repeat_1, - isNegativeZero: isNegativeZero_1, - extend: extend_1 -}; -function formatError(exception2, compact) { - var where = "", message = exception2.reason || "(unknown reason)"; - if (!exception2.mark) return message; - if (exception2.mark.name) { - where += 'in "' + exception2.mark.name + '" '; + if (state.anchor !== null) { + state.anchorMap[state.anchor] = _result; } - where += "(" + (exception2.mark.line + 1) + ":" + (exception2.mark.column + 1) + ")"; - if (!compact && exception2.mark.snippet) { - where += "\n\n" + exception2.mark.snippet; + ch = state.input.charCodeAt(++state.position); + while (ch !== 0) { + skipSeparationSpace(state, true, nodeIndent); + ch = state.input.charCodeAt(state.position); + if (ch === terminator) { + state.position++; + state.tag = _tag; + state.anchor = _anchor; + state.kind = isMapping ? 
"mapping" : "sequence"; + state.result = _result; + return true; + } else if (!readNext) { + throwError(state, "missed comma between flow collection entries"); + } else if (ch === 44) { + throwError(state, "expected the node content, but found ','"); + } + keyTag = keyNode = valueNode = null; + isPair = isExplicitPair = false; + if (ch === 63) { + following = state.input.charCodeAt(state.position + 1); + if (is_WS_OR_EOL(following)) { + isPair = isExplicitPair = true; + state.position++; + skipSeparationSpace(state, true, nodeIndent); + } + } + _line = state.line; + _lineStart = state.lineStart; + _pos = state.position; + composeNode(state, nodeIndent, CONTEXT_FLOW_IN, false, true); + keyTag = state.tag; + keyNode = state.result; + skipSeparationSpace(state, true, nodeIndent); + ch = state.input.charCodeAt(state.position); + if ((isExplicitPair || state.line === _line) && ch === 58) { + isPair = true; + ch = state.input.charCodeAt(++state.position); + skipSeparationSpace(state, true, nodeIndent); + composeNode(state, nodeIndent, CONTEXT_FLOW_IN, false, true); + valueNode = state.result; + } + if (isMapping) { + storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, _line, _lineStart, _pos); + } else if (isPair) { + _result.push(storeMappingPair(state, null, overridableKeys, keyTag, keyNode, valueNode, _line, _lineStart, _pos)); + } else { + _result.push(keyNode); + } + skipSeparationSpace(state, true, nodeIndent); + ch = state.input.charCodeAt(state.position); + if (ch === 44) { + readNext = true; + ch = state.input.charCodeAt(++state.position); + } else { + readNext = false; + } } - return message + " " + where; + throwError(state, "unexpected end of the stream within a flow collection"); } -function YAMLException$1(reason, mark) { - Error.call(this); - this.name = "YAMLException"; - this.reason = reason; - this.mark = mark; - this.message = formatError(this, false); - if (Error.captureStackTrace) { - Error.captureStackTrace(this, 
this.constructor); +function readBlockScalar(state, nodeIndent) { + var captureStart, folding, chomping = CHOMPING_CLIP, didReadContent = false, detectedIndent = false, textIndent = nodeIndent, emptyLines = 0, atMoreIndented = false, tmp, ch; + ch = state.input.charCodeAt(state.position); + if (ch === 124) { + folding = false; + } else if (ch === 62) { + folding = true; } else { - this.stack = new Error().stack || ""; - } -} -YAMLException$1.prototype = Object.create(Error.prototype); -YAMLException$1.prototype.constructor = YAMLException$1; -YAMLException$1.prototype.toString = function toString(compact) { - return this.name + ": " + formatError(this, compact); -}; -var exception = YAMLException$1; -function getLine(buffer, lineStart, lineEnd, position, maxLineLength) { - var head = ""; - var tail = ""; - var maxHalfLength = Math.floor(maxLineLength / 2) - 1; - if (position - lineStart > maxHalfLength) { - head = " ... "; - lineStart = position - maxHalfLength + head.length; + return false; } - if (lineEnd - position > maxHalfLength) { - tail = " ..."; - lineEnd = position + maxHalfLength - tail.length; + state.kind = "scalar"; + state.result = ""; + while (ch !== 0) { + ch = state.input.charCodeAt(++state.position); + if (ch === 43 || ch === 45) { + if (CHOMPING_CLIP === chomping) { + chomping = ch === 43 ? 
CHOMPING_KEEP : CHOMPING_STRIP; + } else { + throwError(state, "repeat of a chomping mode identifier"); + } + } else if ((tmp = fromDecimalCode(ch)) >= 0) { + if (tmp === 0) { + throwError(state, "bad explicit indentation width of a block scalar; it cannot be less than one"); + } else if (!detectedIndent) { + textIndent = nodeIndent + tmp - 1; + detectedIndent = true; + } else { + throwError(state, "repeat of an indentation width identifier"); + } + } else { + break; + } } - return { - str: head + buffer.slice(lineStart, lineEnd).replace(/\t/g, "\u2192") + tail, - pos: position - lineStart + head.length - // relative position - }; -} -function padStart(string, max) { - return common.repeat(" ", max - string.length) + string; -} -function makeSnippet(mark, options) { - options = Object.create(options || null); - if (!mark.buffer) return null; - if (!options.maxLength) options.maxLength = 79; - if (typeof options.indent !== "number") options.indent = 1; - if (typeof options.linesBefore !== "number") options.linesBefore = 3; - if (typeof options.linesAfter !== "number") options.linesAfter = 2; - var re2 = /\r?\n|\r|\0/g; - var lineStarts = [0]; - var lineEnds = []; - var match2; - var foundLineNo = -1; - while (match2 = re2.exec(mark.buffer)) { - lineEnds.push(match2.index); - lineStarts.push(match2.index + match2[0].length); - if (mark.position <= match2.index && foundLineNo < 0) { - foundLineNo = lineStarts.length - 2; + if (is_WHITE_SPACE(ch)) { + do { + ch = state.input.charCodeAt(++state.position); + } while (is_WHITE_SPACE(ch)); + if (ch === 35) { + do { + ch = state.input.charCodeAt(++state.position); + } while (!is_EOL(ch) && ch !== 0); } } - if (foundLineNo < 0) foundLineNo = lineStarts.length - 1; - var result = "", i, line; - var lineNoLength = Math.min(mark.line + options.linesAfter, lineEnds.length).toString().length; - var maxLineLength = options.maxLength - (options.indent + lineNoLength + 3); - for (i = 1; i <= options.linesBefore; i++) { - if 
(foundLineNo - i < 0) break; - line = getLine( - mark.buffer, - lineStarts[foundLineNo - i], - lineEnds[foundLineNo - i], - mark.position - (lineStarts[foundLineNo] - lineStarts[foundLineNo - i]), - maxLineLength - ); - result = common.repeat(" ", options.indent) + padStart((mark.line - i + 1).toString(), lineNoLength) + " | " + line.str + "\n" + result; - } - line = getLine(mark.buffer, lineStarts[foundLineNo], lineEnds[foundLineNo], mark.position, maxLineLength); - result += common.repeat(" ", options.indent) + padStart((mark.line + 1).toString(), lineNoLength) + " | " + line.str + "\n"; - result += common.repeat("-", options.indent + lineNoLength + 3 + line.pos) + "^\n"; - for (i = 1; i <= options.linesAfter; i++) { - if (foundLineNo + i >= lineEnds.length) break; - line = getLine( - mark.buffer, - lineStarts[foundLineNo + i], - lineEnds[foundLineNo + i], - mark.position - (lineStarts[foundLineNo] - lineStarts[foundLineNo + i]), - maxLineLength - ); - result += common.repeat(" ", options.indent) + padStart((mark.line + i + 1).toString(), lineNoLength) + " | " + line.str + "\n"; - } - return result.replace(/\n$/, ""); -} -var snippet = makeSnippet; -var TYPE_CONSTRUCTOR_OPTIONS = [ - "kind", - "multi", - "resolve", - "construct", - "instanceOf", - "predicate", - "represent", - "representName", - "defaultStyle", - "styleAliases" -]; -var YAML_NODE_KINDS = [ - "scalar", - "sequence", - "mapping" -]; -function compileStyleAliases(map2) { - var result = {}; - if (map2 !== null) { - Object.keys(map2).forEach(function(style) { - map2[style].forEach(function(alias) { - result[String(alias)] = style; - }); - }); - } - return result; -} -function Type$1(tag, options) { - options = options || {}; - Object.keys(options).forEach(function(name) { - if (TYPE_CONSTRUCTOR_OPTIONS.indexOf(name) === -1) { - throw new exception('Unknown option "' + name + '" is met in definition of "' + tag + '" YAML type.'); + while (ch !== 0) { + readLineBreak(state); + state.lineIndent = 0; + ch 
= state.input.charCodeAt(state.position); + while ((!detectedIndent || state.lineIndent < textIndent) && ch === 32) { + state.lineIndent++; + ch = state.input.charCodeAt(++state.position); } - }); - this.options = options; - this.tag = tag; - this.kind = options["kind"] || null; - this.resolve = options["resolve"] || function() { - return true; - }; - this.construct = options["construct"] || function(data) { - return data; - }; - this.instanceOf = options["instanceOf"] || null; - this.predicate = options["predicate"] || null; - this.represent = options["represent"] || null; - this.representName = options["representName"] || null; - this.defaultStyle = options["defaultStyle"] || null; - this.multi = options["multi"] || false; - this.styleAliases = compileStyleAliases(options["styleAliases"] || null); - if (YAML_NODE_KINDS.indexOf(this.kind) === -1) { - throw new exception('Unknown kind "' + this.kind + '" is specified for "' + tag + '" YAML type.'); - } -} -var type = Type$1; -function compileList(schema2, name) { - var result = []; - schema2[name].forEach(function(currentType) { - var newIndex = result.length; - result.forEach(function(previousType, previousIndex) { - if (previousType.tag === currentType.tag && previousType.kind === currentType.kind && previousType.multi === currentType.multi) { - newIndex = previousIndex; + if (!detectedIndent && state.lineIndent > textIndent) { + textIndent = state.lineIndent; + } + if (is_EOL(ch)) { + emptyLines++; + continue; + } + if (state.lineIndent < textIndent) { + if (chomping === CHOMPING_KEEP) { + state.result += common.repeat("\n", didReadContent ? 
1 + emptyLines : emptyLines); + } else if (chomping === CHOMPING_CLIP) { + if (didReadContent) { + state.result += "\n"; + } } - }); - result[newIndex] = currentType; - }); - return result; -} -function compileMap() { - var result = { - scalar: {}, - sequence: {}, - mapping: {}, - fallback: {}, - multi: { - scalar: [], - sequence: [], - mapping: [], - fallback: [] + break; } - }, index, length; - function collectType(type2) { - if (type2.multi) { - result.multi[type2.kind].push(type2); - result.multi["fallback"].push(type2); + if (folding) { + if (is_WHITE_SPACE(ch)) { + atMoreIndented = true; + state.result += common.repeat("\n", didReadContent ? 1 + emptyLines : emptyLines); + } else if (atMoreIndented) { + atMoreIndented = false; + state.result += common.repeat("\n", emptyLines + 1); + } else if (emptyLines === 0) { + if (didReadContent) { + state.result += " "; + } + } else { + state.result += common.repeat("\n", emptyLines); + } } else { - result[type2.kind][type2.tag] = result["fallback"][type2.tag] = type2; + state.result += common.repeat("\n", didReadContent ? 
1 + emptyLines : emptyLines); } + didReadContent = true; + detectedIndent = true; + emptyLines = 0; + captureStart = state.position; + while (!is_EOL(ch) && ch !== 0) { + ch = state.input.charCodeAt(++state.position); + } + captureSegment(state, captureStart, state.position, false); } - for (index = 0, length = arguments.length; index < length; index += 1) { - arguments[index].forEach(collectType); - } - return result; -} -function Schema$1(definition) { - return this.extend(definition); + return true; } -Schema$1.prototype.extend = function extend2(definition) { - var implicit = []; - var explicit = []; - if (definition instanceof type) { - explicit.push(definition); - } else if (Array.isArray(definition)) { - explicit = explicit.concat(definition); - } else if (definition && (Array.isArray(definition.implicit) || Array.isArray(definition.explicit))) { - if (definition.implicit) implicit = implicit.concat(definition.implicit); - if (definition.explicit) explicit = explicit.concat(definition.explicit); - } else { - throw new exception("Schema.extend argument should be a Type, [ Type ], or a schema definition ({ implicit: [...], explicit: [...] })"); +function readBlockSequence(state, nodeIndent) { + var _line, _tag = state.tag, _anchor = state.anchor, _result = [], following, detected = false, ch; + if (state.firstTabInLine !== -1) return false; + if (state.anchor !== null) { + state.anchorMap[state.anchor] = _result; } - implicit.forEach(function(type$1) { - if (!(type$1 instanceof type)) { - throw new exception("Specified list of YAML types (or a single Type object) contains a non-Type object."); + ch = state.input.charCodeAt(state.position); + while (ch !== 0) { + if (state.firstTabInLine !== -1) { + state.position = state.firstTabInLine; + throwError(state, "tab characters must not be used in indentation"); } - if (type$1.loadKind && type$1.loadKind !== "scalar") { - throw new exception("There is a non-scalar type in the implicit list of a schema. 
Implicit resolving of such types is not supported."); + if (ch !== 45) { + break; } - if (type$1.multi) { - throw new exception("There is a multi type in the implicit list of a schema. Multi tags can only be listed as explicit."); + following = state.input.charCodeAt(state.position + 1); + if (!is_WS_OR_EOL(following)) { + break; } - }); - explicit.forEach(function(type$1) { - if (!(type$1 instanceof type)) { - throw new exception("Specified list of YAML types (or a single Type object) contains a non-Type object."); + detected = true; + state.position++; + if (skipSeparationSpace(state, true, -1)) { + if (state.lineIndent <= nodeIndent) { + _result.push(null); + ch = state.input.charCodeAt(state.position); + continue; + } + } + _line = state.line; + composeNode(state, nodeIndent, CONTEXT_BLOCK_IN, false, true); + _result.push(state.result); + skipSeparationSpace(state, true, -1); + ch = state.input.charCodeAt(state.position); + if ((state.line === _line || state.lineIndent > nodeIndent) && ch !== 0) { + throwError(state, "bad indentation of a sequence entry"); + } else if (state.lineIndent < nodeIndent) { + break; } - }); - var result = Object.create(Schema$1.prototype); - result.implicit = (this.implicit || []).concat(implicit); - result.explicit = (this.explicit || []).concat(explicit); - result.compiledImplicit = compileList(result, "implicit"); - result.compiledExplicit = compileList(result, "explicit"); - result.compiledTypeMap = compileMap(result.compiledImplicit, result.compiledExplicit); - return result; -}; -var schema = Schema$1; -var str = new type("tag:yaml.org,2002:str", { - kind: "scalar", - construct: function(data) { - return data !== null ? data : ""; - } -}); -var seq = new type("tag:yaml.org,2002:seq", { - kind: "sequence", - construct: function(data) { - return data !== null ? data : []; } -}); -var map = new type("tag:yaml.org,2002:map", { - kind: "mapping", - construct: function(data) { - return data !== null ? 
data : {}; + if (detected) { + state.tag = _tag; + state.anchor = _anchor; + state.kind = "sequence"; + state.result = _result; + return true; } -}); -var failsafe = new schema({ - explicit: [ - str, - seq, - map - ] -}); -function resolveYamlNull(data) { - if (data === null) return true; - var max = data.length; - return max === 1 && data === "~" || max === 4 && (data === "null" || data === "Null" || data === "NULL"); -} -function constructYamlNull() { - return null; -} -function isNull(object) { - return object === null; -} -var _null = new type("tag:yaml.org,2002:null", { - kind: "scalar", - resolve: resolveYamlNull, - construct: constructYamlNull, - predicate: isNull, - represent: { - canonical: function() { - return "~"; - }, - lowercase: function() { - return "null"; - }, - uppercase: function() { - return "NULL"; - }, - camelcase: function() { - return "Null"; - }, - empty: function() { - return ""; - } - }, - defaultStyle: "lowercase" -}); -function resolveYamlBoolean(data) { - if (data === null) return false; - var max = data.length; - return max === 4 && (data === "true" || data === "True" || data === "TRUE") || max === 5 && (data === "false" || data === "False" || data === "FALSE"); -} -function constructYamlBoolean(data) { - return data === "true" || data === "True" || data === "TRUE"; -} -function isBoolean(object) { - return Object.prototype.toString.call(object) === "[object Boolean]"; -} -var bool = new type("tag:yaml.org,2002:bool", { - kind: "scalar", - resolve: resolveYamlBoolean, - construct: constructYamlBoolean, - predicate: isBoolean, - represent: { - lowercase: function(object) { - return object ? "true" : "false"; - }, - uppercase: function(object) { - return object ? "TRUE" : "FALSE"; - }, - camelcase: function(object) { - return object ? 
"True" : "False"; - } - }, - defaultStyle: "lowercase" -}); -function isHexCode(c) { - return 48 <= c && c <= 57 || 65 <= c && c <= 70 || 97 <= c && c <= 102; -} -function isOctCode(c) { - return 48 <= c && c <= 55; -} -function isDecCode(c) { - return 48 <= c && c <= 57; + return false; } -function resolveYamlInteger(data) { - if (data === null) return false; - var max = data.length, index = 0, hasDigits = false, ch; - if (!max) return false; - ch = data[index]; - if (ch === "-" || ch === "+") { - ch = data[++index]; +function readBlockMapping(state, nodeIndent, flowIndent) { + var following, allowCompact, _line, _keyLine, _keyLineStart, _keyPos, _tag = state.tag, _anchor = state.anchor, _result = {}, overridableKeys = /* @__PURE__ */ Object.create(null), keyTag = null, keyNode = null, valueNode = null, atExplicitKey = false, detected = false, ch; + if (state.firstTabInLine !== -1) return false; + if (state.anchor !== null) { + state.anchorMap[state.anchor] = _result; } - if (ch === "0") { - if (index + 1 === max) return true; - ch = data[++index]; - if (ch === "b") { - index++; - for (; index < max; index++) { - ch = data[index]; - if (ch === "_") continue; - if (ch !== "0" && ch !== "1") return false; - hasDigits = true; - } - return hasDigits && ch !== "_"; + ch = state.input.charCodeAt(state.position); + while (ch !== 0) { + if (!atExplicitKey && state.firstTabInLine !== -1) { + state.position = state.firstTabInLine; + throwError(state, "tab characters must not be used in indentation"); } - if (ch === "x") { - index++; - for (; index < max; index++) { - ch = data[index]; - if (ch === "_") continue; - if (!isHexCode(data.charCodeAt(index))) return false; - hasDigits = true; + following = state.input.charCodeAt(state.position + 1); + _line = state.line; + if ((ch === 63 || ch === 58) && is_WS_OR_EOL(following)) { + if (ch === 63) { + if (atExplicitKey) { + storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos); 
+ keyTag = keyNode = valueNode = null; + } + detected = true; + atExplicitKey = true; + allowCompact = true; + } else if (atExplicitKey) { + atExplicitKey = false; + allowCompact = true; + } else { + throwError(state, "incomplete explicit mapping pair; a key node is missed; or followed by a non-tabulated empty line"); + } + state.position += 1; + ch = following; + } else { + _keyLine = state.line; + _keyLineStart = state.lineStart; + _keyPos = state.position; + if (!composeNode(state, flowIndent, CONTEXT_FLOW_OUT, false, true)) { + break; + } + if (state.line === _line) { + ch = state.input.charCodeAt(state.position); + while (is_WHITE_SPACE(ch)) { + ch = state.input.charCodeAt(++state.position); + } + if (ch === 58) { + ch = state.input.charCodeAt(++state.position); + if (!is_WS_OR_EOL(ch)) { + throwError(state, "a whitespace character is expected after the key-value separator within a block mapping"); + } + if (atExplicitKey) { + storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos); + keyTag = keyNode = valueNode = null; + } + detected = true; + atExplicitKey = false; + allowCompact = false; + keyTag = state.tag; + keyNode = state.result; + } else if (detected) { + throwError(state, "can not read an implicit mapping pair; a colon is missed"); + } else { + state.tag = _tag; + state.anchor = _anchor; + return true; + } + } else if (detected) { + throwError(state, "can not read a block mapping entry; a multiline key may not be an implicit key"); + } else { + state.tag = _tag; + state.anchor = _anchor; + return true; } - return hasDigits && ch !== "_"; } - if (ch === "o") { - index++; - for (; index < max; index++) { - ch = data[index]; - if (ch === "_") continue; - if (!isOctCode(data.charCodeAt(index))) return false; - hasDigits = true; + if (state.line === _line || state.lineIndent > nodeIndent) { + if (atExplicitKey) { + _keyLine = state.line; + _keyLineStart = state.lineStart; + _keyPos = state.position; } - 
return hasDigits && ch !== "_"; + if (composeNode(state, nodeIndent, CONTEXT_BLOCK_OUT, true, allowCompact)) { + if (atExplicitKey) { + keyNode = state.result; + } else { + valueNode = state.result; + } + } + if (!atExplicitKey) { + storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, _keyLine, _keyLineStart, _keyPos); + keyTag = keyNode = valueNode = null; + } + skipSeparationSpace(state, true, -1); + ch = state.input.charCodeAt(state.position); } - } - if (ch === "_") return false; - for (; index < max; index++) { - ch = data[index]; - if (ch === "_") continue; - if (!isDecCode(data.charCodeAt(index))) { - return false; + if ((state.line === _line || state.lineIndent > nodeIndent) && ch !== 0) { + throwError(state, "bad indentation of a mapping entry"); + } else if (state.lineIndent < nodeIndent) { + break; } - hasDigits = true; - } - if (!hasDigits || ch === "_") return false; - return true; -} -function constructYamlInteger(data) { - var value = data, sign = 1, ch; - if (value.indexOf("_") !== -1) { - value = value.replace(/_/g, ""); } - ch = value[0]; - if (ch === "-" || ch === "+") { - if (ch === "-") sign = -1; - value = value.slice(1); - ch = value[0]; + if (atExplicitKey) { + storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos); } - if (value === "0") return 0; - if (ch === "0") { - if (value[1] === "b") return sign * parseInt(value.slice(2), 2); - if (value[1] === "x") return sign * parseInt(value.slice(2), 16); - if (value[1] === "o") return sign * parseInt(value.slice(2), 8); + if (detected) { + state.tag = _tag; + state.anchor = _anchor; + state.kind = "mapping"; + state.result = _result; } - return sign * parseInt(value, 10); -} -function isInteger(object) { - return Object.prototype.toString.call(object) === "[object Number]" && (object % 1 === 0 && !common.isNegativeZero(object)); + return detected; } -var int = new type("tag:yaml.org,2002:int", { - kind: "scalar", - 
resolve: resolveYamlInteger, - construct: constructYamlInteger, - predicate: isInteger, - represent: { - binary: function(obj) { - return obj >= 0 ? "0b" + obj.toString(2) : "-0b" + obj.toString(2).slice(1); - }, - octal: function(obj) { - return obj >= 0 ? "0o" + obj.toString(8) : "-0o" + obj.toString(8).slice(1); - }, - decimal: function(obj) { - return obj.toString(10); - }, - /* eslint-disable max-len */ - hexadecimal: function(obj) { - return obj >= 0 ? "0x" + obj.toString(16).toUpperCase() : "-0x" + obj.toString(16).toUpperCase().slice(1); - } - }, - defaultStyle: "decimal", - styleAliases: { - binary: [2, "bin"], - octal: [8, "oct"], - decimal: [10, "dec"], - hexadecimal: [16, "hex"] - } -}); -var YAML_FLOAT_PATTERN = new RegExp( - // 2.5e4, 2.5 and integers - "^(?:[-+]?(?:[0-9][0-9_]*)(?:\\.[0-9_]*)?(?:[eE][-+]?[0-9]+)?|\\.[0-9_]+(?:[eE][-+]?[0-9]+)?|[-+]?\\.(?:inf|Inf|INF)|\\.(?:nan|NaN|NAN))$" -); -function resolveYamlFloat(data) { - if (data === null) return false; - if (!YAML_FLOAT_PATTERN.test(data) || // Quick hack to not allow integers end with `_` - // Probably should update regexp & check speed - data[data.length - 1] === "_") { - return false; - } - return true; -} -function constructYamlFloat(data) { - var value, sign; - value = data.replace(/_/g, "").toLowerCase(); - sign = value[0] === "-" ? -1 : 1; - if ("+-".indexOf(value[0]) >= 0) { - value = value.slice(1); +function readTagProperty(state) { + var _position, isVerbatim = false, isNamed = false, tagHandle, tagName, ch; + ch = state.input.charCodeAt(state.position); + if (ch !== 33) return false; + if (state.tag !== null) { + throwError(state, "duplication of a tag property"); } - if (value === ".inf") { - return sign === 1 ? 
Number.POSITIVE_INFINITY : Number.NEGATIVE_INFINITY; - } else if (value === ".nan") { - return NaN; + ch = state.input.charCodeAt(++state.position); + if (ch === 60) { + isVerbatim = true; + ch = state.input.charCodeAt(++state.position); + } else if (ch === 33) { + isNamed = true; + tagHandle = "!!"; + ch = state.input.charCodeAt(++state.position); + } else { + tagHandle = "!"; } - return sign * parseFloat(value, 10); -} -var SCIENTIFIC_WITHOUT_DOT = /^[-+]?[0-9]+e/; -function representYamlFloat(object, style) { - var res; - if (isNaN(object)) { - switch (style) { - case "lowercase": - return ".nan"; - case "uppercase": - return ".NAN"; - case "camelcase": - return ".NaN"; + _position = state.position; + if (isVerbatim) { + do { + ch = state.input.charCodeAt(++state.position); + } while (ch !== 0 && ch !== 62); + if (state.position < state.length) { + tagName = state.input.slice(_position, state.position); + ch = state.input.charCodeAt(++state.position); + } else { + throwError(state, "unexpected end of the stream within a verbatim tag"); } - } else if (Number.POSITIVE_INFINITY === object) { - switch (style) { - case "lowercase": - return ".inf"; - case "uppercase": - return ".INF"; - case "camelcase": - return ".Inf"; + } else { + while (ch !== 0 && !is_WS_OR_EOL(ch)) { + if (ch === 33) { + if (!isNamed) { + tagHandle = state.input.slice(_position - 1, state.position + 1); + if (!PATTERN_TAG_HANDLE.test(tagHandle)) { + throwError(state, "named tag handle cannot contain such characters"); + } + isNamed = true; + _position = state.position + 1; + } else { + throwError(state, "tag suffix cannot contain exclamation marks"); + } + } + ch = state.input.charCodeAt(++state.position); } - } else if (Number.NEGATIVE_INFINITY === object) { - switch (style) { - case "lowercase": - return "-.inf"; - case "uppercase": - return "-.INF"; - case "camelcase": - return "-.Inf"; + tagName = state.input.slice(_position, state.position); + if (PATTERN_FLOW_INDICATORS.test(tagName)) { + 
throwError(state, "tag suffix cannot contain flow indicator characters"); } - } else if (common.isNegativeZero(object)) { - return "-0.0"; } - res = object.toString(10); - return SCIENTIFIC_WITHOUT_DOT.test(res) ? res.replace("e", ".e") : res; -} -function isFloat(object) { - return Object.prototype.toString.call(object) === "[object Number]" && (object % 1 !== 0 || common.isNegativeZero(object)); -} -var float = new type("tag:yaml.org,2002:float", { - kind: "scalar", - resolve: resolveYamlFloat, - construct: constructYamlFloat, - predicate: isFloat, - represent: representYamlFloat, - defaultStyle: "lowercase" -}); -var json = failsafe.extend({ - implicit: [ - _null, - bool, - int, - float - ] -}); -var core = json; -var YAML_DATE_REGEXP = new RegExp( - "^([0-9][0-9][0-9][0-9])-([0-9][0-9])-([0-9][0-9])$" -); -var YAML_TIMESTAMP_REGEXP = new RegExp( - "^([0-9][0-9][0-9][0-9])-([0-9][0-9]?)-([0-9][0-9]?)(?:[Tt]|[ \\t]+)([0-9][0-9]?):([0-9][0-9]):([0-9][0-9])(?:\\.([0-9]*))?(?:[ \\t]*(Z|([-+])([0-9][0-9]?)(?::([0-9][0-9]))?))?$" -); -function resolveYamlTimestamp(data) { - if (data === null) return false; - if (YAML_DATE_REGEXP.exec(data) !== null) return true; - if (YAML_TIMESTAMP_REGEXP.exec(data) !== null) return true; - return false; -} -function constructYamlTimestamp(data) { - var match2, year, month, day, hour, minute, second, fraction = 0, delta = null, tz_hour, tz_minute, date; - match2 = YAML_DATE_REGEXP.exec(data); - if (match2 === null) match2 = YAML_TIMESTAMP_REGEXP.exec(data); - if (match2 === null) throw new Error("Date resolve error"); - year = +match2[1]; - month = +match2[2] - 1; - day = +match2[3]; - if (!match2[4]) { - return new Date(Date.UTC(year, month, day)); + if (tagName && !PATTERN_TAG_URI.test(tagName)) { + throwError(state, "tag name cannot contain such characters: " + tagName); } - hour = +match2[4]; - minute = +match2[5]; - second = +match2[6]; - if (match2[7]) { - fraction = match2[7].slice(0, 3); - while (fraction.length < 3) { - 
fraction += "0"; - } - fraction = +fraction; + try { + tagName = decodeURIComponent(tagName); + } catch (err) { + throwError(state, "tag name is malformed: " + tagName); } - if (match2[9]) { - tz_hour = +match2[10]; - tz_minute = +(match2[11] || 0); - delta = (tz_hour * 60 + tz_minute) * 6e4; - if (match2[9] === "-") delta = -delta; + if (isVerbatim) { + state.tag = tagName; + } else if (_hasOwnProperty$1.call(state.tagMap, tagHandle)) { + state.tag = state.tagMap[tagHandle] + tagName; + } else if (tagHandle === "!") { + state.tag = "!" + tagName; + } else if (tagHandle === "!!") { + state.tag = "tag:yaml.org,2002:" + tagName; + } else { + throwError(state, 'undeclared tag handle "' + tagHandle + '"'); } - date = new Date(Date.UTC(year, month, day, hour, minute, second, fraction)); - if (delta) date.setTime(date.getTime() - delta); - return date; -} -function representYamlTimestamp(object) { - return object.toISOString(); -} -var timestamp = new type("tag:yaml.org,2002:timestamp", { - kind: "scalar", - resolve: resolveYamlTimestamp, - construct: constructYamlTimestamp, - instanceOf: Date, - represent: representYamlTimestamp -}); -function resolveYamlMerge(data) { - return data === "<<" || data === null; + return true; } -var merge = new type("tag:yaml.org,2002:merge", { - kind: "scalar", - resolve: resolveYamlMerge -}); -var BASE64_MAP = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\n\r"; -function resolveYamlBinary(data) { - if (data === null) return false; - var code, idx, bitlen = 0, max = data.length, map2 = BASE64_MAP; - for (idx = 0; idx < max; idx++) { - code = map2.indexOf(data.charAt(idx)); - if (code > 64) continue; - if (code < 0) return false; - bitlen += 6; +function readAnchorProperty(state) { + var _position, ch; + ch = state.input.charCodeAt(state.position); + if (ch !== 38) return false; + if (state.anchor !== null) { + throwError(state, "duplication of an anchor property"); } - return bitlen % 8 === 0; -} -function 
constructYamlBinary(data) { - var idx, tailbits, input = data.replace(/[\r\n=]/g, ""), max = input.length, map2 = BASE64_MAP, bits = 0, result = []; - for (idx = 0; idx < max; idx++) { - if (idx % 4 === 0 && idx) { - result.push(bits >> 16 & 255); - result.push(bits >> 8 & 255); - result.push(bits & 255); - } - bits = bits << 6 | map2.indexOf(input.charAt(idx)); + ch = state.input.charCodeAt(++state.position); + _position = state.position; + while (ch !== 0 && !is_WS_OR_EOL(ch) && !is_FLOW_INDICATOR(ch)) { + ch = state.input.charCodeAt(++state.position); } - tailbits = max % 4 * 6; - if (tailbits === 0) { - result.push(bits >> 16 & 255); - result.push(bits >> 8 & 255); - result.push(bits & 255); - } else if (tailbits === 18) { - result.push(bits >> 10 & 255); - result.push(bits >> 2 & 255); - } else if (tailbits === 12) { - result.push(bits >> 4 & 255); + if (state.position === _position) { + throwError(state, "name of an anchor node must contain at least one character"); } - return new Uint8Array(result); + state.anchor = state.input.slice(_position, state.position); + return true; } -function representYamlBinary(object) { - var result = "", bits = 0, idx, tail, max = object.length, map2 = BASE64_MAP; - for (idx = 0; idx < max; idx++) { - if (idx % 3 === 0 && idx) { - result += map2[bits >> 18 & 63]; - result += map2[bits >> 12 & 63]; - result += map2[bits >> 6 & 63]; - result += map2[bits & 63]; - } - bits = (bits << 8) + object[idx]; - } - tail = max % 3; - if (tail === 0) { - result += map2[bits >> 18 & 63]; - result += map2[bits >> 12 & 63]; - result += map2[bits >> 6 & 63]; - result += map2[bits & 63]; - } else if (tail === 2) { - result += map2[bits >> 10 & 63]; - result += map2[bits >> 4 & 63]; - result += map2[bits << 2 & 63]; - result += map2[64]; - } else if (tail === 1) { - result += map2[bits >> 2 & 63]; - result += map2[bits << 4 & 63]; - result += map2[64]; - result += map2[64]; +function readAlias(state) { + var _position, alias, ch; + ch = 
state.input.charCodeAt(state.position); + if (ch !== 42) return false; + ch = state.input.charCodeAt(++state.position); + _position = state.position; + while (ch !== 0 && !is_WS_OR_EOL(ch) && !is_FLOW_INDICATOR(ch)) { + ch = state.input.charCodeAt(++state.position); } - return result; -} -function isBinary(obj) { - return Object.prototype.toString.call(obj) === "[object Uint8Array]"; -} -var binary = new type("tag:yaml.org,2002:binary", { - kind: "scalar", - resolve: resolveYamlBinary, - construct: constructYamlBinary, - predicate: isBinary, - represent: representYamlBinary -}); -var _hasOwnProperty$3 = Object.prototype.hasOwnProperty; -var _toString$2 = Object.prototype.toString; -function resolveYamlOmap(data) { - if (data === null) return true; - var objectKeys = [], index, length, pair, pairKey, pairHasKey, object = data; - for (index = 0, length = object.length; index < length; index += 1) { - pair = object[index]; - pairHasKey = false; - if (_toString$2.call(pair) !== "[object Object]") return false; - for (pairKey in pair) { - if (_hasOwnProperty$3.call(pair, pairKey)) { - if (!pairHasKey) pairHasKey = true; - else return false; - } - } - if (!pairHasKey) return false; - if (objectKeys.indexOf(pairKey) === -1) objectKeys.push(pairKey); - else return false; + if (state.position === _position) { + throwError(state, "name of an alias node must contain at least one character"); } - return true; -} -function constructYamlOmap(data) { - return data !== null ? 
data : []; -} -var omap = new type("tag:yaml.org,2002:omap", { - kind: "sequence", - resolve: resolveYamlOmap, - construct: constructYamlOmap -}); -var _toString$1 = Object.prototype.toString; -function resolveYamlPairs(data) { - if (data === null) return true; - var index, length, pair, keys, result, object = data; - result = new Array(object.length); - for (index = 0, length = object.length; index < length; index += 1) { - pair = object[index]; - if (_toString$1.call(pair) !== "[object Object]") return false; - keys = Object.keys(pair); - if (keys.length !== 1) return false; - result[index] = [keys[0], pair[keys[0]]]; + alias = state.input.slice(_position, state.position); + if (!_hasOwnProperty$1.call(state.anchorMap, alias)) { + throwError(state, 'unidentified alias "' + alias + '"'); } + state.result = state.anchorMap[alias]; + skipSeparationSpace(state, true, -1); return true; } -function constructYamlPairs(data) { - if (data === null) return []; - var index, length, pair, keys, result, object = data; - result = new Array(object.length); - for (index = 0, length = object.length; index < length; index += 1) { - pair = object[index]; - keys = Object.keys(pair); - result[index] = [keys[0], pair[keys[0]]]; +function composeNode(state, parentIndent, nodeContext, allowToSeek, allowCompact) { + var allowBlockStyles, allowBlockScalars, allowBlockCollections, indentStatus = 1, atNewLine = false, hasContent = false, typeIndex, typeQuantity, typeList, type2, flowIndent, blockIndent; + if (state.listener !== null) { + state.listener("open", state); } - return result; -} -var pairs = new type("tag:yaml.org,2002:pairs", { - kind: "sequence", - resolve: resolveYamlPairs, - construct: constructYamlPairs -}); -var _hasOwnProperty$2 = Object.prototype.hasOwnProperty; -function resolveYamlSet(data) { - if (data === null) return true; - var key, object = data; - for (key in object) { - if (_hasOwnProperty$2.call(object, key)) { - if (object[key] !== null) return false; + 
state.tag = null; + state.anchor = null; + state.kind = null; + state.result = null; + allowBlockStyles = allowBlockScalars = allowBlockCollections = CONTEXT_BLOCK_OUT === nodeContext || CONTEXT_BLOCK_IN === nodeContext; + if (allowToSeek) { + if (skipSeparationSpace(state, true, -1)) { + atNewLine = true; + if (state.lineIndent > parentIndent) { + indentStatus = 1; + } else if (state.lineIndent === parentIndent) { + indentStatus = 0; + } else if (state.lineIndent < parentIndent) { + indentStatus = -1; + } } } - return true; -} -function constructYamlSet(data) { - return data !== null ? data : {}; -} -var set = new type("tag:yaml.org,2002:set", { - kind: "mapping", - resolve: resolveYamlSet, - construct: constructYamlSet -}); -var _default = core.extend({ - implicit: [ - timestamp, - merge - ], - explicit: [ - binary, - omap, - pairs, - set - ] -}); -var _hasOwnProperty$1 = Object.prototype.hasOwnProperty; -var CONTEXT_FLOW_IN = 1; -var CONTEXT_FLOW_OUT = 2; -var CONTEXT_BLOCK_IN = 3; -var CONTEXT_BLOCK_OUT = 4; -var CHOMPING_CLIP = 1; -var CHOMPING_STRIP = 2; -var CHOMPING_KEEP = 3; -var PATTERN_NON_PRINTABLE = /[\x00-\x08\x0B\x0C\x0E-\x1F\x7F-\x84\x86-\x9F\uFFFE\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/; -var PATTERN_NON_ASCII_LINE_BREAKS = /[\x85\u2028\u2029]/; -var PATTERN_FLOW_INDICATORS = /[,\[\]\{\}]/; -var PATTERN_TAG_HANDLE = /^(?:!|!!|![a-z\-]+!)$/i; -var PATTERN_TAG_URI = /^(?:!|[^,\[\]\{\}])(?:%[0-9a-f]{2}|[0-9a-z\-#;\/\?:@&=\+\$,_\.!~\*'\(\)\[\]])*$/i; -function _class(obj) { - return Object.prototype.toString.call(obj); -} -function is_EOL(c) { - return c === 10 || c === 13; -} -function is_WHITE_SPACE(c) { - return c === 9 || c === 32; -} -function is_WS_OR_EOL(c) { - return c === 9 || c === 32 || c === 10 || c === 13; -} -function is_FLOW_INDICATOR(c) { - return c === 44 || c === 91 || c === 93 || c === 123 || c === 125; -} -function fromHexCode(c) { - var lc; - if (48 <= c && c <= 57) { - return c - 48; - } - 
lc = c | 32; - if (97 <= lc && lc <= 102) { - return lc - 97 + 10; - } - return -1; -} -function escapedHexLen(c) { - if (c === 120) { - return 2; - } - if (c === 117) { - return 4; + if (indentStatus === 1) { + while (readTagProperty(state) || readAnchorProperty(state)) { + if (skipSeparationSpace(state, true, -1)) { + atNewLine = true; + allowBlockCollections = allowBlockStyles; + if (state.lineIndent > parentIndent) { + indentStatus = 1; + } else if (state.lineIndent === parentIndent) { + indentStatus = 0; + } else if (state.lineIndent < parentIndent) { + indentStatus = -1; + } + } else { + allowBlockCollections = false; + } + } } - if (c === 85) { - return 8; + if (allowBlockCollections) { + allowBlockCollections = atNewLine || allowCompact; } - return 0; -} -function fromDecimalCode(c) { - if (48 <= c && c <= 57) { - return c - 48; + if (indentStatus === 1 || CONTEXT_BLOCK_OUT === nodeContext) { + if (CONTEXT_FLOW_IN === nodeContext || CONTEXT_FLOW_OUT === nodeContext) { + flowIndent = parentIndent; + } else { + flowIndent = parentIndent + 1; + } + blockIndent = state.position - state.lineStart; + if (indentStatus === 1) { + if (allowBlockCollections && (readBlockSequence(state, blockIndent) || readBlockMapping(state, blockIndent, flowIndent)) || readFlowCollection(state, flowIndent)) { + hasContent = true; + } else { + if (allowBlockScalars && readBlockScalar(state, flowIndent) || readSingleQuotedScalar(state, flowIndent) || readDoubleQuotedScalar(state, flowIndent)) { + hasContent = true; + } else if (readAlias(state)) { + hasContent = true; + if (state.tag !== null || state.anchor !== null) { + throwError(state, "alias node should not have any properties"); + } + } else if (readPlainScalar(state, flowIndent, CONTEXT_FLOW_IN === nodeContext)) { + hasContent = true; + if (state.tag === null) { + state.tag = "?"; + } + } + if (state.anchor !== null) { + state.anchorMap[state.anchor] = state.result; + } + } + } else if (indentStatus === 0) { + hasContent = 
allowBlockCollections && readBlockSequence(state, blockIndent); + } } - return -1; -} -function simpleEscapeSequence(c) { - return c === 48 ? "\0" : c === 97 ? "\x07" : c === 98 ? "\b" : c === 116 ? " " : c === 9 ? " " : c === 110 ? "\n" : c === 118 ? "\v" : c === 102 ? "\f" : c === 114 ? "\r" : c === 101 ? "\x1B" : c === 32 ? " " : c === 34 ? '"' : c === 47 ? "/" : c === 92 ? "\\" : c === 78 ? "\x85" : c === 95 ? "\xA0" : c === 76 ? "\u2028" : c === 80 ? "\u2029" : ""; -} -function charFromCodepoint(c) { - if (c <= 65535) { - return String.fromCharCode(c); - } - return String.fromCharCode( - (c - 65536 >> 10) + 55296, - (c - 65536 & 1023) + 56320 - ); -} -function setProperty(object, key, value) { - if (key === "__proto__") { - Object.defineProperty(object, key, { - configurable: true, - enumerable: true, - writable: true, - value - }); - } else { - object[key] = value; - } -} -var simpleEscapeCheck = new Array(256); -var simpleEscapeMap = new Array(256); -for (i = 0; i < 256; i++) { - simpleEscapeCheck[i] = simpleEscapeSequence(i) ? 
1 : 0; - simpleEscapeMap[i] = simpleEscapeSequence(i); -} -var i; -function State$1(input, options) { - this.input = input; - this.filename = options["filename"] || null; - this.schema = options["schema"] || _default; - this.onWarning = options["onWarning"] || null; - this.legacy = options["legacy"] || false; - this.json = options["json"] || false; - this.listener = options["listener"] || null; - this.implicitTypes = this.schema.compiledImplicit; - this.typeMap = this.schema.compiledTypeMap; - this.length = input.length; - this.position = 0; - this.line = 0; - this.lineStart = 0; - this.lineIndent = 0; - this.firstTabInLine = -1; - this.documents = []; -} -function generateError(state, message) { - var mark = { - name: state.filename, - buffer: state.input.slice(0, -1), - // omit trailing \0 - position: state.position, - line: state.line, - column: state.position - state.lineStart - }; - mark.snippet = snippet(mark); - return new exception(message, mark); -} -function throwError(state, message) { - throw generateError(state, message); -} -function throwWarning(state, message) { - if (state.onWarning) { - state.onWarning.call(null, generateError(state, message)); - } -} -var directiveHandlers = { - YAML: function handleYamlDirective(state, name, args) { - var match2, major, minor; - if (state.version !== null) { - throwError(state, "duplication of %YAML directive"); + if (state.tag === null) { + if (state.anchor !== null) { + state.anchorMap[state.anchor] = state.result; } - if (args.length !== 1) { - throwError(state, "YAML directive accepts exactly one argument"); + } else if (state.tag === "?") { + if (state.result !== null && state.kind !== "scalar") { + throwError(state, 'unacceptable node kind for ! 
tag; it should be "scalar", not "' + state.kind + '"'); } - match2 = /^([0-9]+)\.([0-9]+)$/.exec(args[0]); - if (match2 === null) { - throwError(state, "ill-formed argument of the YAML directive"); + for (typeIndex = 0, typeQuantity = state.implicitTypes.length; typeIndex < typeQuantity; typeIndex += 1) { + type2 = state.implicitTypes[typeIndex]; + if (type2.resolve(state.result)) { + state.result = type2.construct(state.result); + state.tag = type2.tag; + if (state.anchor !== null) { + state.anchorMap[state.anchor] = state.result; + } + break; + } } - major = parseInt(match2[1], 10); - minor = parseInt(match2[2], 10); - if (major !== 1) { - throwError(state, "unacceptable YAML version of the document"); + } else if (state.tag !== "!") { + if (_hasOwnProperty$1.call(state.typeMap[state.kind || "fallback"], state.tag)) { + type2 = state.typeMap[state.kind || "fallback"][state.tag]; + } else { + type2 = null; + typeList = state.typeMap.multi[state.kind || "fallback"]; + for (typeIndex = 0, typeQuantity = typeList.length; typeIndex < typeQuantity; typeIndex += 1) { + if (state.tag.slice(0, typeList[typeIndex].tag.length) === typeList[typeIndex].tag) { + type2 = typeList[typeIndex]; + break; + } + } } - state.version = args[0]; - state.checkLineBreaks = minor < 2; - if (minor !== 1 && minor !== 2) { - throwWarning(state, "unsupported YAML version of the document"); + if (!type2) { + throwError(state, "unknown tag !<" + state.tag + ">"); } - }, - TAG: function handleTagDirective(state, name, args) { - var handle, prefix; - if (args.length !== 2) { - throwError(state, "TAG directive accepts exactly two arguments"); + if (state.result !== null && type2.kind !== state.kind) { + throwError(state, "unacceptable node kind for !<" + state.tag + '> tag; it should be "' + type2.kind + '", not "' + state.kind + '"'); } - handle = args[0]; - prefix = args[1]; - if (!PATTERN_TAG_HANDLE.test(handle)) { - throwError(state, "ill-formed tag handle (first argument) of the TAG 
directive"); + if (!type2.resolve(state.result, state.tag)) { + throwError(state, "cannot resolve a node with !<" + state.tag + "> explicit tag"); + } else { + state.result = type2.construct(state.result, state.tag); + if (state.anchor !== null) { + state.anchorMap[state.anchor] = state.result; + } } - if (_hasOwnProperty$1.call(state.tagMap, handle)) { - throwError(state, 'there is a previously declared suffix for "' + handle + '" tag handle'); + } + if (state.listener !== null) { + state.listener("close", state); + } + return state.tag !== null || state.anchor !== null || hasContent; +} +function readDocument(state) { + var documentStart = state.position, _position, directiveName, directiveArgs, hasDirectives = false, ch; + state.version = null; + state.checkLineBreaks = state.legacy; + state.tagMap = /* @__PURE__ */ Object.create(null); + state.anchorMap = /* @__PURE__ */ Object.create(null); + while ((ch = state.input.charCodeAt(state.position)) !== 0) { + skipSeparationSpace(state, true, -1); + ch = state.input.charCodeAt(state.position); + if (state.lineIndent > 0 || ch !== 37) { + break; } - if (!PATTERN_TAG_URI.test(prefix)) { - throwError(state, "ill-formed tag prefix (second argument) of the TAG directive"); + hasDirectives = true; + ch = state.input.charCodeAt(++state.position); + _position = state.position; + while (ch !== 0 && !is_WS_OR_EOL(ch)) { + ch = state.input.charCodeAt(++state.position); } - try { - prefix = decodeURIComponent(prefix); - } catch (err) { - throwError(state, "tag prefix is malformed: " + prefix); + directiveName = state.input.slice(_position, state.position); + directiveArgs = []; + if (directiveName.length < 1) { + throwError(state, "directive name must not be less than one character in length"); } - state.tagMap[handle] = prefix; - } -}; -function captureSegment(state, start, end, checkJson) { - var _position, _length, _character, _result; - if (start < end) { - _result = state.input.slice(start, end); - if (checkJson) { - for 
(_position = 0, _length = _result.length; _position < _length; _position += 1) { - _character = _result.charCodeAt(_position); - if (!(_character === 9 || 32 <= _character && _character <= 1114111)) { - throwError(state, "expected valid JSON character"); - } + while (ch !== 0) { + while (is_WHITE_SPACE(ch)) { + ch = state.input.charCodeAt(++state.position); } - } else if (PATTERN_NON_PRINTABLE.test(_result)) { - throwError(state, "the stream contains non-printable characters"); + if (ch === 35) { + do { + ch = state.input.charCodeAt(++state.position); + } while (ch !== 0 && !is_EOL(ch)); + break; + } + if (is_EOL(ch)) break; + _position = state.position; + while (ch !== 0 && !is_WS_OR_EOL(ch)) { + ch = state.input.charCodeAt(++state.position); + } + directiveArgs.push(state.input.slice(_position, state.position)); + } + if (ch !== 0) readLineBreak(state); + if (_hasOwnProperty$1.call(directiveHandlers, directiveName)) { + directiveHandlers[directiveName](state, directiveName, directiveArgs); + } else { + throwWarning(state, 'unknown document directive "' + directiveName + '"'); } - state.result += _result; } -} -function mergeMappings(state, destination, source, overridableKeys) { - var sourceKeys, key, index, quantity; - if (!common.isObject(source)) { - throwError(state, "cannot merge mappings; the provided source object is unacceptable"); + skipSeparationSpace(state, true, -1); + if (state.lineIndent === 0 && state.input.charCodeAt(state.position) === 45 && state.input.charCodeAt(state.position + 1) === 45 && state.input.charCodeAt(state.position + 2) === 45) { + state.position += 3; + skipSeparationSpace(state, true, -1); + } else if (hasDirectives) { + throwError(state, "directives end mark is expected"); } - sourceKeys = Object.keys(source); - for (index = 0, quantity = sourceKeys.length; index < quantity; index += 1) { - key = sourceKeys[index]; - if (!_hasOwnProperty$1.call(destination, key)) { - setProperty(destination, key, source[key]); - 
overridableKeys[key] = true; - } + composeNode(state, state.lineIndent - 1, CONTEXT_BLOCK_OUT, false, true); + skipSeparationSpace(state, true, -1); + if (state.checkLineBreaks && PATTERN_NON_ASCII_LINE_BREAKS.test(state.input.slice(documentStart, state.position))) { + throwWarning(state, "non-ASCII line breaks are interpreted as content"); } -} -function storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, startLine, startLineStart, startPos) { - var index, quantity; - if (Array.isArray(keyNode)) { - keyNode = Array.prototype.slice.call(keyNode); - for (index = 0, quantity = keyNode.length; index < quantity; index += 1) { - if (Array.isArray(keyNode[index])) { - throwError(state, "nested arrays are not supported inside keys"); - } - if (typeof keyNode === "object" && _class(keyNode[index]) === "[object Object]") { - keyNode[index] = "[object Object]"; - } + state.documents.push(state.result); + if (state.position === state.lineStart && testDocumentSeparator(state)) { + if (state.input.charCodeAt(state.position) === 46) { + state.position += 3; + skipSeparationSpace(state, true, -1); } + return; } - if (typeof keyNode === "object" && _class(keyNode) === "[object Object]") { - keyNode = "[object Object]"; - } - keyNode = String(keyNode); - if (_result === null) { - _result = {}; + if (state.position < state.length - 1) { + throwError(state, "end of the stream or a document separator is expected"); + } else { + return; } - if (keyTag === "tag:yaml.org,2002:merge") { - if (Array.isArray(valueNode)) { - for (index = 0, quantity = valueNode.length; index < quantity; index += 1) { - mergeMappings(state, _result, valueNode[index], overridableKeys); - } - } else { - mergeMappings(state, _result, valueNode, overridableKeys); +} +function loadDocuments(input, options) { + input = String(input); + options = options || {}; + if (input.length !== 0) { + if (input.charCodeAt(input.length - 1) !== 10 && input.charCodeAt(input.length - 1) !== 13) { + input 
+= "\n"; } - } else { - if (!state.json && !_hasOwnProperty$1.call(overridableKeys, keyNode) && _hasOwnProperty$1.call(_result, keyNode)) { - state.line = startLine || state.line; - state.lineStart = startLineStart || state.lineStart; - state.position = startPos || state.position; - throwError(state, "duplicated mapping key"); + if (input.charCodeAt(0) === 65279) { + input = input.slice(1); } - setProperty(_result, keyNode, valueNode); - delete overridableKeys[keyNode]; } - return _result; + var state = new State$1(input, options); + var nullpos = input.indexOf("\0"); + if (nullpos !== -1) { + state.position = nullpos; + throwError(state, "null byte is not allowed in input"); + } + state.input += "\0"; + while (state.input.charCodeAt(state.position) === 32) { + state.lineIndent += 1; + state.position += 1; + } + while (state.position < state.length - 1) { + readDocument(state); + } + return state.documents; } -function readLineBreak(state) { - var ch; - ch = state.input.charCodeAt(state.position); - if (ch === 10) { - state.position++; - } else if (ch === 13) { - state.position++; - if (state.input.charCodeAt(state.position) === 10) { - state.position++; - } - } else { - throwError(state, "a line break is expected"); +function loadAll$1(input, iterator, options) { + if (iterator !== null && typeof iterator === "object" && typeof options === "undefined") { + options = iterator; + iterator = null; + } + var documents = loadDocuments(input, options); + if (typeof iterator !== "function") { + return documents; + } + for (var index = 0, length = documents.length; index < length; index += 1) { + iterator(documents[index]); } - state.line += 1; - state.lineStart = state.position; - state.firstTabInLine = -1; } -function skipSeparationSpace(state, allowComments, checkIndent) { - var lineBreaks = 0, ch = state.input.charCodeAt(state.position); - while (ch !== 0) { - while (is_WHITE_SPACE(ch)) { - if (ch === 9 && state.firstTabInLine === -1) { - state.firstTabInLine = 
state.position; - } - ch = state.input.charCodeAt(++state.position); +function load$1(input, options) { + var documents = loadDocuments(input, options); + if (documents.length === 0) { + return void 0; + } else if (documents.length === 1) { + return documents[0]; + } + throw new exception("expected a single document in the stream, but found more"); +} +var loadAll_1 = loadAll$1; +var load_1 = load$1; +var loader = { + loadAll: loadAll_1, + load: load_1 +}; +var _toString = Object.prototype.toString; +var _hasOwnProperty = Object.prototype.hasOwnProperty; +var CHAR_BOM = 65279; +var CHAR_TAB = 9; +var CHAR_LINE_FEED = 10; +var CHAR_CARRIAGE_RETURN = 13; +var CHAR_SPACE = 32; +var CHAR_EXCLAMATION = 33; +var CHAR_DOUBLE_QUOTE = 34; +var CHAR_SHARP = 35; +var CHAR_PERCENT = 37; +var CHAR_AMPERSAND = 38; +var CHAR_SINGLE_QUOTE = 39; +var CHAR_ASTERISK = 42; +var CHAR_COMMA = 44; +var CHAR_MINUS = 45; +var CHAR_COLON = 58; +var CHAR_EQUALS = 61; +var CHAR_GREATER_THAN = 62; +var CHAR_QUESTION = 63; +var CHAR_COMMERCIAL_AT = 64; +var CHAR_LEFT_SQUARE_BRACKET = 91; +var CHAR_RIGHT_SQUARE_BRACKET = 93; +var CHAR_GRAVE_ACCENT = 96; +var CHAR_LEFT_CURLY_BRACKET = 123; +var CHAR_VERTICAL_LINE = 124; +var CHAR_RIGHT_CURLY_BRACKET = 125; +var ESCAPE_SEQUENCES = {}; +ESCAPE_SEQUENCES[0] = "\\0"; +ESCAPE_SEQUENCES[7] = "\\a"; +ESCAPE_SEQUENCES[8] = "\\b"; +ESCAPE_SEQUENCES[9] = "\\t"; +ESCAPE_SEQUENCES[10] = "\\n"; +ESCAPE_SEQUENCES[11] = "\\v"; +ESCAPE_SEQUENCES[12] = "\\f"; +ESCAPE_SEQUENCES[13] = "\\r"; +ESCAPE_SEQUENCES[27] = "\\e"; +ESCAPE_SEQUENCES[34] = '\\"'; +ESCAPE_SEQUENCES[92] = "\\\\"; +ESCAPE_SEQUENCES[133] = "\\N"; +ESCAPE_SEQUENCES[160] = "\\_"; +ESCAPE_SEQUENCES[8232] = "\\L"; +ESCAPE_SEQUENCES[8233] = "\\P"; +var DEPRECATED_BOOLEANS_SYNTAX = [ + "y", + "Y", + "yes", + "Yes", + "YES", + "on", + "On", + "ON", + "n", + "N", + "no", + "No", + "NO", + "off", + "Off", + "OFF" +]; +var DEPRECATED_BASE60_SYNTAX = /^[-+]?[0-9_]+(?::[0-9_]+)+(?:\.[0-9_]*)?$/; +function 
compileStyleMap(schema2, map2) { + var result, keys, index, length, tag, style, type2; + if (map2 === null) return {}; + result = {}; + keys = Object.keys(map2); + for (index = 0, length = keys.length; index < length; index += 1) { + tag = keys[index]; + style = String(map2[tag]); + if (tag.slice(0, 2) === "!!") { + tag = "tag:yaml.org,2002:" + tag.slice(2); } - if (allowComments && ch === 35) { - do { - ch = state.input.charCodeAt(++state.position); - } while (ch !== 10 && ch !== 13 && ch !== 0); + type2 = schema2.compiledTypeMap["fallback"][tag]; + if (type2 && _hasOwnProperty.call(type2.styleAliases, style)) { + style = type2.styleAliases[style]; } - if (is_EOL(ch)) { - readLineBreak(state); - ch = state.input.charCodeAt(state.position); - lineBreaks++; - state.lineIndent = 0; - while (ch === 32) { - state.lineIndent++; - ch = state.input.charCodeAt(++state.position); - } + result[tag] = style; + } + return result; +} +function encodeHex(character) { + var string, handle, length; + string = character.toString(16).toUpperCase(); + if (character <= 255) { + handle = "x"; + length = 2; + } else if (character <= 65535) { + handle = "u"; + length = 4; + } else if (character <= 4294967295) { + handle = "U"; + length = 8; + } else { + throw new exception("code point within a string may not be greater than 0xFFFFFFFF"); + } + return "\\" + handle + common.repeat("0", length - string.length) + string; +} +var QUOTING_TYPE_SINGLE = 1; +var QUOTING_TYPE_DOUBLE = 2; +function State(options) { + this.schema = options["schema"] || _default; + this.indent = Math.max(1, options["indent"] || 2); + this.noArrayIndent = options["noArrayIndent"] || false; + this.skipInvalid = options["skipInvalid"] || false; + this.flowLevel = common.isNothing(options["flowLevel"]) ? 
-1 : options["flowLevel"]; + this.styleMap = compileStyleMap(this.schema, options["styles"] || null); + this.sortKeys = options["sortKeys"] || false; + this.lineWidth = options["lineWidth"] || 80; + this.noRefs = options["noRefs"] || false; + this.noCompatMode = options["noCompatMode"] || false; + this.condenseFlow = options["condenseFlow"] || false; + this.quotingType = options["quotingType"] === '"' ? QUOTING_TYPE_DOUBLE : QUOTING_TYPE_SINGLE; + this.forceQuotes = options["forceQuotes"] || false; + this.replacer = typeof options["replacer"] === "function" ? options["replacer"] : null; + this.implicitTypes = this.schema.compiledImplicit; + this.explicitTypes = this.schema.compiledExplicit; + this.tag = null; + this.result = ""; + this.duplicates = []; + this.usedDuplicates = null; +} +function indentString(string, spaces) { + var ind = common.repeat(" ", spaces), position = 0, next = -1, result = "", line, length = string.length; + while (position < length) { + next = string.indexOf("\n", position); + if (next === -1) { + line = string.slice(position); + position = length; } else { - break; + line = string.slice(position, next + 1); + position = next + 1; } + if (line.length && line !== "\n") result += ind; + result += line; } - if (checkIndent !== -1 && lineBreaks !== 0 && state.lineIndent < checkIndent) { - throwWarning(state, "deficient indentation"); - } - return lineBreaks; + return result; } -function testDocumentSeparator(state) { - var _position = state.position, ch; - ch = state.input.charCodeAt(_position); - if ((ch === 45 || ch === 46) && ch === state.input.charCodeAt(_position + 1) && ch === state.input.charCodeAt(_position + 2)) { - _position += 3; - ch = state.input.charCodeAt(_position); - if (ch === 0 || is_WS_OR_EOL(ch)) { +function generateNextLine(state, level) { + return "\n" + common.repeat(" ", state.indent * level); +} +function testImplicitResolving(state, str2) { + var index, length, type2; + for (index = 0, length = 
state.implicitTypes.length; index < length; index += 1) { + type2 = state.implicitTypes[index]; + if (type2.resolve(str2)) { return true; } } return false; } -function writeFoldedLines(state, count) { - if (count === 1) { - state.result += " "; - } else if (count > 1) { - state.result += common.repeat("\n", count - 1); - } +function isWhitespace(c) { + return c === CHAR_SPACE || c === CHAR_TAB; } -function readPlainScalar(state, nodeIndent, withinFlowCollection) { - var preceding, following, captureStart, captureEnd, hasPendingContent, _line, _lineStart, _lineIndent, _kind = state.kind, _result = state.result, ch; - ch = state.input.charCodeAt(state.position); - if (is_WS_OR_EOL(ch) || is_FLOW_INDICATOR(ch) || ch === 35 || ch === 38 || ch === 42 || ch === 33 || ch === 124 || ch === 62 || ch === 39 || ch === 34 || ch === 37 || ch === 64 || ch === 96) { - return false; - } - if (ch === 63 || ch === 45) { - following = state.input.charCodeAt(state.position + 1); - if (is_WS_OR_EOL(following) || withinFlowCollection && is_FLOW_INDICATOR(following)) { - return false; +function isPrintable(c) { + return 32 <= c && c <= 126 || 161 <= c && c <= 55295 && c !== 8232 && c !== 8233 || 57344 <= c && c <= 65533 && c !== CHAR_BOM || 65536 <= c && c <= 1114111; +} +function isNsCharOrWhitespace(c) { + return isPrintable(c) && c !== CHAR_BOM && c !== CHAR_CARRIAGE_RETURN && c !== CHAR_LINE_FEED; +} +function isPlainSafe(c, prev, inblock) { + var cIsNsCharOrWhitespace = isNsCharOrWhitespace(c); + var cIsNsChar = cIsNsCharOrWhitespace && !isWhitespace(c); + return ( + // ns-plain-safe + (inblock ? 
( + // c = flow-in + cIsNsCharOrWhitespace + ) : cIsNsCharOrWhitespace && c !== CHAR_COMMA && c !== CHAR_LEFT_SQUARE_BRACKET && c !== CHAR_RIGHT_SQUARE_BRACKET && c !== CHAR_LEFT_CURLY_BRACKET && c !== CHAR_RIGHT_CURLY_BRACKET) && c !== CHAR_SHARP && !(prev === CHAR_COLON && !cIsNsChar) || isNsCharOrWhitespace(prev) && !isWhitespace(prev) && c === CHAR_SHARP || prev === CHAR_COLON && cIsNsChar + ); +} +function isPlainSafeFirst(c) { + return isPrintable(c) && c !== CHAR_BOM && !isWhitespace(c) && c !== CHAR_MINUS && c !== CHAR_QUESTION && c !== CHAR_COLON && c !== CHAR_COMMA && c !== CHAR_LEFT_SQUARE_BRACKET && c !== CHAR_RIGHT_SQUARE_BRACKET && c !== CHAR_LEFT_CURLY_BRACKET && c !== CHAR_RIGHT_CURLY_BRACKET && c !== CHAR_SHARP && c !== CHAR_AMPERSAND && c !== CHAR_ASTERISK && c !== CHAR_EXCLAMATION && c !== CHAR_VERTICAL_LINE && c !== CHAR_EQUALS && c !== CHAR_GREATER_THAN && c !== CHAR_SINGLE_QUOTE && c !== CHAR_DOUBLE_QUOTE && c !== CHAR_PERCENT && c !== CHAR_COMMERCIAL_AT && c !== CHAR_GRAVE_ACCENT; +} +function isPlainSafeLast(c) { + return !isWhitespace(c) && c !== CHAR_COLON; +} +function codePointAt(string, pos) { + var first = string.charCodeAt(pos), second; + if (first >= 55296 && first <= 56319 && pos + 1 < string.length) { + second = string.charCodeAt(pos + 1); + if (second >= 56320 && second <= 57343) { + return (first - 55296) * 1024 + second - 56320 + 65536; } } - state.kind = "scalar"; - state.result = ""; - captureStart = captureEnd = state.position; - hasPendingContent = false; - while (ch !== 0) { - if (ch === 58) { - following = state.input.charCodeAt(state.position + 1); - if (is_WS_OR_EOL(following) || withinFlowCollection && is_FLOW_INDICATOR(following)) { - break; - } - } else if (ch === 35) { - preceding = state.input.charCodeAt(state.position - 1); - if (is_WS_OR_EOL(preceding)) { - break; - } - } else if (state.position === state.lineStart && testDocumentSeparator(state) || withinFlowCollection && is_FLOW_INDICATOR(ch)) { - break; - } 
else if (is_EOL(ch)) { - _line = state.line; - _lineStart = state.lineStart; - _lineIndent = state.lineIndent; - skipSeparationSpace(state, false, -1); - if (state.lineIndent >= nodeIndent) { - hasPendingContent = true; - ch = state.input.charCodeAt(state.position); - continue; - } else { - state.position = captureEnd; - state.line = _line; - state.lineStart = _lineStart; - state.lineIndent = _lineIndent; - break; + return first; +} +function needIndentIndicator(string) { + var leadingSpaceRe = /^\n* /; + return leadingSpaceRe.test(string); +} +var STYLE_PLAIN = 1; +var STYLE_SINGLE = 2; +var STYLE_LITERAL = 3; +var STYLE_FOLDED = 4; +var STYLE_DOUBLE = 5; +function chooseScalarStyle(string, singleLineOnly, indentPerLevel, lineWidth, testAmbiguousType, quotingType, forceQuotes, inblock) { + var i; + var char = 0; + var prevChar = null; + var hasLineBreak = false; + var hasFoldableLine = false; + var shouldTrackWidth = lineWidth !== -1; + var previousLineBreak = -1; + var plain = isPlainSafeFirst(codePointAt(string, 0)) && isPlainSafeLast(codePointAt(string, string.length - 1)); + if (singleLineOnly || forceQuotes) { + for (i = 0; i < string.length; char >= 65536 ? i += 2 : i++) { + char = codePointAt(string, i); + if (!isPrintable(char)) { + return STYLE_DOUBLE; } + plain = plain && isPlainSafe(char, prevChar, inblock); + prevChar = char; } - if (hasPendingContent) { - captureSegment(state, captureStart, captureEnd, false); - writeFoldedLines(state, state.line - _line); - captureStart = captureEnd = state.position; - hasPendingContent = false; + } else { + for (i = 0; i < string.length; char >= 65536 ? i += 2 : i++) { + char = codePointAt(string, i); + if (char === CHAR_LINE_FEED) { + hasLineBreak = true; + if (shouldTrackWidth) { + hasFoldableLine = hasFoldableLine || // Foldable line = too long, and not more-indented. 
+ i - previousLineBreak - 1 > lineWidth && string[previousLineBreak + 1] !== " "; + previousLineBreak = i; + } + } else if (!isPrintable(char)) { + return STYLE_DOUBLE; + } + plain = plain && isPlainSafe(char, prevChar, inblock); + prevChar = char; } - if (!is_WHITE_SPACE(ch)) { - captureEnd = state.position + 1; + hasFoldableLine = hasFoldableLine || shouldTrackWidth && (i - previousLineBreak - 1 > lineWidth && string[previousLineBreak + 1] !== " "); + } + if (!hasLineBreak && !hasFoldableLine) { + if (plain && !forceQuotes && !testAmbiguousType(string)) { + return STYLE_PLAIN; } - ch = state.input.charCodeAt(++state.position); + return quotingType === QUOTING_TYPE_DOUBLE ? STYLE_DOUBLE : STYLE_SINGLE; } - captureSegment(state, captureStart, captureEnd, false); - if (state.result) { - return true; + if (indentPerLevel > 9 && needIndentIndicator(string)) { + return STYLE_DOUBLE; } - state.kind = _kind; - state.result = _result; - return false; -} -function readSingleQuotedScalar(state, nodeIndent) { - var ch, captureStart, captureEnd; - ch = state.input.charCodeAt(state.position); - if (ch !== 39) { - return false; + if (!forceQuotes) { + return hasFoldableLine ? STYLE_FOLDED : STYLE_LITERAL; } - state.kind = "scalar"; - state.result = ""; - state.position++; - captureStart = captureEnd = state.position; - while ((ch = state.input.charCodeAt(state.position)) !== 0) { - if (ch === 39) { - captureSegment(state, captureStart, state.position, true); - ch = state.input.charCodeAt(++state.position); - if (ch === 39) { - captureStart = state.position; - state.position++; - captureEnd = state.position; - } else { - return true; + return quotingType === QUOTING_TYPE_DOUBLE ? STYLE_DOUBLE : STYLE_SINGLE; +} +function writeScalar(state, string, level, iskey, inblock) { + state.dump = (function() { + if (string.length === 0) { + return state.quotingType === QUOTING_TYPE_DOUBLE ? 
'""' : "''"; + } + if (!state.noCompatMode) { + if (DEPRECATED_BOOLEANS_SYNTAX.indexOf(string) !== -1 || DEPRECATED_BASE60_SYNTAX.test(string)) { + return state.quotingType === QUOTING_TYPE_DOUBLE ? '"' + string + '"' : "'" + string + "'"; } - } else if (is_EOL(ch)) { - captureSegment(state, captureStart, captureEnd, true); - writeFoldedLines(state, skipSeparationSpace(state, false, nodeIndent)); - captureStart = captureEnd = state.position; - } else if (state.position === state.lineStart && testDocumentSeparator(state)) { - throwError(state, "unexpected end of the document within a single quoted scalar"); - } else { - state.position++; - captureEnd = state.position; } - } - throwError(state, "unexpected end of the stream within a single quoted scalar"); + var indent = state.indent * Math.max(1, level); + var lineWidth = state.lineWidth === -1 ? -1 : Math.max(Math.min(state.lineWidth, 40), state.lineWidth - indent); + var singleLineOnly = iskey || state.flowLevel > -1 && level >= state.flowLevel; + function testAmbiguity(string2) { + return testImplicitResolving(state, string2); + } + switch (chooseScalarStyle( + string, + singleLineOnly, + state.indent, + lineWidth, + testAmbiguity, + state.quotingType, + state.forceQuotes && !iskey, + inblock + )) { + case STYLE_PLAIN: + return string; + case STYLE_SINGLE: + return "'" + string.replace(/'/g, "''") + "'"; + case STYLE_LITERAL: + return "|" + blockHeader(string, state.indent) + dropEndingNewline(indentString(string, indent)); + case STYLE_FOLDED: + return ">" + blockHeader(string, state.indent) + dropEndingNewline(indentString(foldString(string, lineWidth), indent)); + case STYLE_DOUBLE: + return '"' + escapeString(string) + '"'; + default: + throw new exception("impossible error: invalid scalar style"); + } + })(); } -function readDoubleQuotedScalar(state, nodeIndent) { - var captureStart, captureEnd, hexLength, hexResult, tmp, ch; - ch = state.input.charCodeAt(state.position); - if (ch !== 34) { - return false; 
+function blockHeader(string, indentPerLevel) { + var indentIndicator = needIndentIndicator(string) ? String(indentPerLevel) : ""; + var clip = string[string.length - 1] === "\n"; + var keep = clip && (string[string.length - 2] === "\n" || string === "\n"); + var chomp = keep ? "+" : clip ? "" : "-"; + return indentIndicator + chomp + "\n"; +} +function dropEndingNewline(string) { + return string[string.length - 1] === "\n" ? string.slice(0, -1) : string; +} +function foldString(string, width) { + var lineRe = /(\n+)([^\n]*)/g; + var result = (function() { + var nextLF = string.indexOf("\n"); + nextLF = nextLF !== -1 ? nextLF : string.length; + lineRe.lastIndex = nextLF; + return foldLine(string.slice(0, nextLF), width); + })(); + var prevMoreIndented = string[0] === "\n" || string[0] === " "; + var moreIndented; + var match2; + while (match2 = lineRe.exec(string)) { + var prefix = match2[1], line = match2[2]; + moreIndented = line[0] === " "; + result += prefix + (!prevMoreIndented && !moreIndented && line !== "" ? 
"\n" : "") + foldLine(line, width); + prevMoreIndented = moreIndented; } - state.kind = "scalar"; - state.result = ""; - state.position++; - captureStart = captureEnd = state.position; - while ((ch = state.input.charCodeAt(state.position)) !== 0) { - if (ch === 34) { - captureSegment(state, captureStart, state.position, true); - state.position++; - return true; - } else if (ch === 92) { - captureSegment(state, captureStart, state.position, true); - ch = state.input.charCodeAt(++state.position); - if (is_EOL(ch)) { - skipSeparationSpace(state, false, nodeIndent); - } else if (ch < 256 && simpleEscapeCheck[ch]) { - state.result += simpleEscapeMap[ch]; - state.position++; - } else if ((tmp = escapedHexLen(ch)) > 0) { - hexLength = tmp; - hexResult = 0; - for (; hexLength > 0; hexLength--) { - ch = state.input.charCodeAt(++state.position); - if ((tmp = fromHexCode(ch)) >= 0) { - hexResult = (hexResult << 4) + tmp; - } else { - throwError(state, "expected hexadecimal character"); - } - } - state.result += charFromCodepoint(hexResult); - state.position++; - } else { - throwError(state, "unknown escape sequence"); - } - captureStart = captureEnd = state.position; - } else if (is_EOL(ch)) { - captureSegment(state, captureStart, captureEnd, true); - writeFoldedLines(state, skipSeparationSpace(state, false, nodeIndent)); - captureStart = captureEnd = state.position; - } else if (state.position === state.lineStart && testDocumentSeparator(state)) { - throwError(state, "unexpected end of the document within a double quoted scalar"); - } else { - state.position++; - captureEnd = state.position; + return result; +} +function foldLine(line, width) { + if (line === "" || line[0] === " ") return line; + var breakRe = / [^ ]/g; + var match2; + var start = 0, end, curr = 0, next = 0; + var result = ""; + while (match2 = breakRe.exec(line)) { + next = match2.index; + if (next - start > width) { + end = curr > start ? 
curr : next; + result += "\n" + line.slice(start, end); + start = end + 1; } + curr = next; } - throwError(state, "unexpected end of the stream within a double quoted scalar"); -} -function readFlowCollection(state, nodeIndent) { - var readNext = true, _line, _lineStart, _pos, _tag = state.tag, _result, _anchor = state.anchor, following, terminator, isPair, isExplicitPair, isMapping, overridableKeys = /* @__PURE__ */ Object.create(null), keyNode, keyTag, valueNode, ch; - ch = state.input.charCodeAt(state.position); - if (ch === 91) { - terminator = 93; - isMapping = false; - _result = []; - } else if (ch === 123) { - terminator = 125; - isMapping = true; - _result = {}; + result += "\n"; + if (line.length - start > width && curr > start) { + result += line.slice(start, curr) + "\n" + line.slice(curr + 1); } else { - return false; - } - if (state.anchor !== null) { - state.anchorMap[state.anchor] = _result; + result += line.slice(start); } - ch = state.input.charCodeAt(++state.position); - while (ch !== 0) { - skipSeparationSpace(state, true, nodeIndent); - ch = state.input.charCodeAt(state.position); - if (ch === terminator) { - state.position++; - state.tag = _tag; - state.anchor = _anchor; - state.kind = isMapping ? 
"mapping" : "sequence"; - state.result = _result; - return true; - } else if (!readNext) { - throwError(state, "missed comma between flow collection entries"); - } else if (ch === 44) { - throwError(state, "expected the node content, but found ','"); - } - keyTag = keyNode = valueNode = null; - isPair = isExplicitPair = false; - if (ch === 63) { - following = state.input.charCodeAt(state.position + 1); - if (is_WS_OR_EOL(following)) { - isPair = isExplicitPair = true; - state.position++; - skipSeparationSpace(state, true, nodeIndent); - } - } - _line = state.line; - _lineStart = state.lineStart; - _pos = state.position; - composeNode(state, nodeIndent, CONTEXT_FLOW_IN, false, true); - keyTag = state.tag; - keyNode = state.result; - skipSeparationSpace(state, true, nodeIndent); - ch = state.input.charCodeAt(state.position); - if ((isExplicitPair || state.line === _line) && ch === 58) { - isPair = true; - ch = state.input.charCodeAt(++state.position); - skipSeparationSpace(state, true, nodeIndent); - composeNode(state, nodeIndent, CONTEXT_FLOW_IN, false, true); - valueNode = state.result; - } - if (isMapping) { - storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, _line, _lineStart, _pos); - } else if (isPair) { - _result.push(storeMappingPair(state, null, overridableKeys, keyTag, keyNode, valueNode, _line, _lineStart, _pos)); - } else { - _result.push(keyNode); - } - skipSeparationSpace(state, true, nodeIndent); - ch = state.input.charCodeAt(state.position); - if (ch === 44) { - readNext = true; - ch = state.input.charCodeAt(++state.position); + return result.slice(1); +} +function escapeString(string) { + var result = ""; + var char = 0; + var escapeSeq; + for (var i = 0; i < string.length; char >= 65536 ? 
i += 2 : i++) { + char = codePointAt(string, i); + escapeSeq = ESCAPE_SEQUENCES[char]; + if (!escapeSeq && isPrintable(char)) { + result += string[i]; + if (char >= 65536) result += string[i + 1]; } else { - readNext = false; + result += escapeSeq || encodeHex(char); } } - throwError(state, "unexpected end of the stream within a flow collection"); + return result; } -function readBlockScalar(state, nodeIndent) { - var captureStart, folding, chomping = CHOMPING_CLIP, didReadContent = false, detectedIndent = false, textIndent = nodeIndent, emptyLines = 0, atMoreIndented = false, tmp, ch; - ch = state.input.charCodeAt(state.position); - if (ch === 124) { - folding = false; - } else if (ch === 62) { - folding = true; - } else { - return false; +function writeFlowSequence(state, level, object) { + var _result = "", _tag = state.tag, index, length, value; + for (index = 0, length = object.length; index < length; index += 1) { + value = object[index]; + if (state.replacer) { + value = state.replacer.call(object, String(index), value); + } + if (writeNode(state, level, value, false, false) || typeof value === "undefined" && writeNode(state, level, null, false, false)) { + if (_result !== "") _result += "," + (!state.condenseFlow ? " " : ""); + _result += state.dump; + } } - state.kind = "scalar"; - state.result = ""; - while (ch !== 0) { - ch = state.input.charCodeAt(++state.position); - if (ch === 43 || ch === 45) { - if (CHOMPING_CLIP === chomping) { - chomping = ch === 43 ? 
CHOMPING_KEEP : CHOMPING_STRIP; - } else { - throwError(state, "repeat of a chomping mode identifier"); + state.tag = _tag; + state.dump = "[" + _result + "]"; +} +function writeBlockSequence(state, level, object, compact) { + var _result = "", _tag = state.tag, index, length, value; + for (index = 0, length = object.length; index < length; index += 1) { + value = object[index]; + if (state.replacer) { + value = state.replacer.call(object, String(index), value); + } + if (writeNode(state, level + 1, value, true, true, false, true) || typeof value === "undefined" && writeNode(state, level + 1, null, true, true, false, true)) { + if (!compact || _result !== "") { + _result += generateNextLine(state, level); } - } else if ((tmp = fromDecimalCode(ch)) >= 0) { - if (tmp === 0) { - throwError(state, "bad explicit indentation width of a block scalar; it cannot be less than one"); - } else if (!detectedIndent) { - textIndent = nodeIndent + tmp - 1; - detectedIndent = true; + if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) { + _result += "-"; } else { - throwError(state, "repeat of an indentation width identifier"); + _result += "- "; } - } else { - break; + _result += state.dump; } } - if (is_WHITE_SPACE(ch)) { - do { - ch = state.input.charCodeAt(++state.position); - } while (is_WHITE_SPACE(ch)); - if (ch === 35) { - do { - ch = state.input.charCodeAt(++state.position); - } while (!is_EOL(ch) && ch !== 0); + state.tag = _tag; + state.dump = _result || "[]"; +} +function writeFlowMapping(state, level, object) { + var _result = "", _tag = state.tag, objectKeyList = Object.keys(object), index, length, objectKey, objectValue, pairBuffer; + for (index = 0, length = objectKeyList.length; index < length; index += 1) { + pairBuffer = ""; + if (_result !== "") pairBuffer += ", "; + if (state.condenseFlow) pairBuffer += '"'; + objectKey = objectKeyList[index]; + objectValue = object[objectKey]; + if (state.replacer) { + objectValue = state.replacer.call(object, 
objectKey, objectValue); + } + if (!writeNode(state, level, objectKey, false, false)) { + continue; } + if (state.dump.length > 1024) pairBuffer += "? "; + pairBuffer += state.dump + (state.condenseFlow ? '"' : "") + ":" + (state.condenseFlow ? "" : " "); + if (!writeNode(state, level, objectValue, false, false)) { + continue; + } + pairBuffer += state.dump; + _result += pairBuffer; } - while (ch !== 0) { - readLineBreak(state); - state.lineIndent = 0; - ch = state.input.charCodeAt(state.position); - while ((!detectedIndent || state.lineIndent < textIndent) && ch === 32) { - state.lineIndent++; - ch = state.input.charCodeAt(++state.position); + state.tag = _tag; + state.dump = "{" + _result + "}"; +} +function writeBlockMapping(state, level, object, compact) { + var _result = "", _tag = state.tag, objectKeyList = Object.keys(object), index, length, objectKey, objectValue, explicitPair, pairBuffer; + if (state.sortKeys === true) { + objectKeyList.sort(); + } else if (typeof state.sortKeys === "function") { + objectKeyList.sort(state.sortKeys); + } else if (state.sortKeys) { + throw new exception("sortKeys must be a boolean or a function"); + } + for (index = 0, length = objectKeyList.length; index < length; index += 1) { + pairBuffer = ""; + if (!compact || _result !== "") { + pairBuffer += generateNextLine(state, level); } - if (!detectedIndent && state.lineIndent > textIndent) { - textIndent = state.lineIndent; + objectKey = objectKeyList[index]; + objectValue = object[objectKey]; + if (state.replacer) { + objectValue = state.replacer.call(object, objectKey, objectValue); } - if (is_EOL(ch)) { - emptyLines++; + if (!writeNode(state, level + 1, objectKey, true, true, true)) { continue; } - if (state.lineIndent < textIndent) { - if (chomping === CHOMPING_KEEP) { - state.result += common.repeat("\n", didReadContent ? 
1 + emptyLines : emptyLines); - } else if (chomping === CHOMPING_CLIP) { - if (didReadContent) { - state.result += "\n"; - } - } - break; - } - if (folding) { - if (is_WHITE_SPACE(ch)) { - atMoreIndented = true; - state.result += common.repeat("\n", didReadContent ? 1 + emptyLines : emptyLines); - } else if (atMoreIndented) { - atMoreIndented = false; - state.result += common.repeat("\n", emptyLines + 1); - } else if (emptyLines === 0) { - if (didReadContent) { - state.result += " "; - } + explicitPair = state.tag !== null && state.tag !== "?" || state.dump && state.dump.length > 1024; + if (explicitPair) { + if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) { + pairBuffer += "?"; } else { - state.result += common.repeat("\n", emptyLines); + pairBuffer += "? "; } - } else { - state.result += common.repeat("\n", didReadContent ? 1 + emptyLines : emptyLines); - } - didReadContent = true; - detectedIndent = true; - emptyLines = 0; - captureStart = state.position; - while (!is_EOL(ch) && ch !== 0) { - ch = state.input.charCodeAt(++state.position); } - captureSegment(state, captureStart, state.position, false); - } - return true; -} -function readBlockSequence(state, nodeIndent) { - var _line, _tag = state.tag, _anchor = state.anchor, _result = [], following, detected = false, ch; - if (state.firstTabInLine !== -1) return false; - if (state.anchor !== null) { - state.anchorMap[state.anchor] = _result; - } - ch = state.input.charCodeAt(state.position); - while (ch !== 0) { - if (state.firstTabInLine !== -1) { - state.position = state.firstTabInLine; - throwError(state, "tab characters must not be used in indentation"); + pairBuffer += state.dump; + if (explicitPair) { + pairBuffer += generateNextLine(state, level); } - if (ch !== 45) { - break; + if (!writeNode(state, level + 1, objectValue, true, explicitPair)) { + continue; } - following = state.input.charCodeAt(state.position + 1); - if (!is_WS_OR_EOL(following)) { - break; + if (state.dump && 
CHAR_LINE_FEED === state.dump.charCodeAt(0)) { + pairBuffer += ":"; + } else { + pairBuffer += ": "; } - detected = true; - state.position++; - if (skipSeparationSpace(state, true, -1)) { - if (state.lineIndent <= nodeIndent) { - _result.push(null); - ch = state.input.charCodeAt(state.position); - continue; + pairBuffer += state.dump; + _result += pairBuffer; + } + state.tag = _tag; + state.dump = _result || "{}"; +} +function detectType(state, object, explicit) { + var _result, typeList, index, length, type2, style; + typeList = explicit ? state.explicitTypes : state.implicitTypes; + for (index = 0, length = typeList.length; index < length; index += 1) { + type2 = typeList[index]; + if ((type2.instanceOf || type2.predicate) && (!type2.instanceOf || typeof object === "object" && object instanceof type2.instanceOf) && (!type2.predicate || type2.predicate(object))) { + if (explicit) { + if (type2.multi && type2.representName) { + state.tag = type2.representName(object); + } else { + state.tag = type2.tag; + } + } else { + state.tag = "?"; } + if (type2.represent) { + style = state.styleMap[type2.tag] || type2.defaultStyle; + if (_toString.call(type2.represent) === "[object Function]") { + _result = type2.represent(object, style); + } else if (_hasOwnProperty.call(type2.represent, style)) { + _result = type2.represent[style](object, style); + } else { + throw new exception("!<" + type2.tag + '> tag resolver accepts not "' + style + '" style'); + } + state.dump = _result; + } + return true; } - _line = state.line; - composeNode(state, nodeIndent, CONTEXT_BLOCK_IN, false, true); - _result.push(state.result); - skipSeparationSpace(state, true, -1); - ch = state.input.charCodeAt(state.position); - if ((state.line === _line || state.lineIndent > nodeIndent) && ch !== 0) { - throwError(state, "bad indentation of a sequence entry"); - } else if (state.lineIndent < nodeIndent) { - break; - } - } - if (detected) { - state.tag = _tag; - state.anchor = _anchor; - state.kind = 
"sequence"; - state.result = _result; - return true; } return false; } -function readBlockMapping(state, nodeIndent, flowIndent) { - var following, allowCompact, _line, _keyLine, _keyLineStart, _keyPos, _tag = state.tag, _anchor = state.anchor, _result = {}, overridableKeys = /* @__PURE__ */ Object.create(null), keyTag = null, keyNode = null, valueNode = null, atExplicitKey = false, detected = false, ch; - if (state.firstTabInLine !== -1) return false; - if (state.anchor !== null) { - state.anchorMap[state.anchor] = _result; +function writeNode(state, level, object, block, compact, iskey, isblockseq) { + state.tag = null; + state.dump = object; + if (!detectType(state, object, false)) { + detectType(state, object, true); } - ch = state.input.charCodeAt(state.position); - while (ch !== 0) { - if (!atExplicitKey && state.firstTabInLine !== -1) { - state.position = state.firstTabInLine; - throwError(state, "tab characters must not be used in indentation"); + var type2 = _toString.call(state.dump); + var inblock = block; + var tagStr; + if (block) { + block = state.flowLevel < 0 || state.flowLevel > level; + } + var objectOrArray = type2 === "[object Object]" || type2 === "[object Array]", duplicateIndex, duplicate; + if (objectOrArray) { + duplicateIndex = state.duplicates.indexOf(object); + duplicate = duplicateIndex !== -1; + } + if (state.tag !== null && state.tag !== "?" 
|| duplicate || state.indent !== 2 && level > 0) { + compact = false; + } + if (duplicate && state.usedDuplicates[duplicateIndex]) { + state.dump = "*ref_" + duplicateIndex; + } else { + if (objectOrArray && duplicate && !state.usedDuplicates[duplicateIndex]) { + state.usedDuplicates[duplicateIndex] = true; } - following = state.input.charCodeAt(state.position + 1); - _line = state.line; - if ((ch === 63 || ch === 58) && is_WS_OR_EOL(following)) { - if (ch === 63) { - if (atExplicitKey) { - storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos); - keyTag = keyNode = valueNode = null; + if (type2 === "[object Object]") { + if (block && Object.keys(state.dump).length !== 0) { + writeBlockMapping(state, level, state.dump, compact); + if (duplicate) { + state.dump = "&ref_" + duplicateIndex + state.dump; } - detected = true; - atExplicitKey = true; - allowCompact = true; - } else if (atExplicitKey) { - atExplicitKey = false; - allowCompact = true; } else { - throwError(state, "incomplete explicit mapping pair; a key node is missed; or followed by a non-tabulated empty line"); - } - state.position += 1; - ch = following; - } else { - _keyLine = state.line; - _keyLineStart = state.lineStart; - _keyPos = state.position; - if (!composeNode(state, flowIndent, CONTEXT_FLOW_OUT, false, true)) { - break; - } - if (state.line === _line) { - ch = state.input.charCodeAt(state.position); - while (is_WHITE_SPACE(ch)) { - ch = state.input.charCodeAt(++state.position); + writeFlowMapping(state, level, state.dump); + if (duplicate) { + state.dump = "&ref_" + duplicateIndex + " " + state.dump; } - if (ch === 58) { - ch = state.input.charCodeAt(++state.position); - if (!is_WS_OR_EOL(ch)) { - throwError(state, "a whitespace character is expected after the key-value separator within a block mapping"); - } - if (atExplicitKey) { - storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos); - 
keyTag = keyNode = valueNode = null; - } - detected = true; - atExplicitKey = false; - allowCompact = false; - keyTag = state.tag; - keyNode = state.result; - } else if (detected) { - throwError(state, "can not read an implicit mapping pair; a colon is missed"); + } + } else if (type2 === "[object Array]") { + if (block && state.dump.length !== 0) { + if (state.noArrayIndent && !isblockseq && level > 0) { + writeBlockSequence(state, level - 1, state.dump, compact); } else { - state.tag = _tag; - state.anchor = _anchor; - return true; + writeBlockSequence(state, level, state.dump, compact); + } + if (duplicate) { + state.dump = "&ref_" + duplicateIndex + state.dump; } - } else if (detected) { - throwError(state, "can not read a block mapping entry; a multiline key may not be an implicit key"); } else { - state.tag = _tag; - state.anchor = _anchor; - return true; - } - } - if (state.line === _line || state.lineIndent > nodeIndent) { - if (atExplicitKey) { - _keyLine = state.line; - _keyLineStart = state.lineStart; - _keyPos = state.position; - } - if (composeNode(state, nodeIndent, CONTEXT_BLOCK_OUT, true, allowCompact)) { - if (atExplicitKey) { - keyNode = state.result; - } else { - valueNode = state.result; + writeFlowSequence(state, level, state.dump); + if (duplicate) { + state.dump = "&ref_" + duplicateIndex + " " + state.dump; } } - if (!atExplicitKey) { - storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, _keyLine, _keyLineStart, _keyPos); - keyTag = keyNode = valueNode = null; + } else if (type2 === "[object String]") { + if (state.tag !== "?") { + writeScalar(state, state.dump, level, iskey, inblock); } - skipSeparationSpace(state, true, -1); - ch = state.input.charCodeAt(state.position); + } else if (type2 === "[object Undefined]") { + return false; + } else { + if (state.skipInvalid) return false; + throw new exception("unacceptable kind of an object to dump " + type2); } - if ((state.line === _line || state.lineIndent > 
nodeIndent) && ch !== 0) { - throwError(state, "bad indentation of a mapping entry"); - } else if (state.lineIndent < nodeIndent) { - break; + if (state.tag !== null && state.tag !== "?") { + tagStr = encodeURI( + state.tag[0] === "!" ? state.tag.slice(1) : state.tag + ).replace(/!/g, "%21"); + if (state.tag[0] === "!") { + tagStr = "!" + tagStr; + } else if (tagStr.slice(0, 18) === "tag:yaml.org,2002:") { + tagStr = "!!" + tagStr.slice(18); + } else { + tagStr = "!<" + tagStr + ">"; + } + state.dump = tagStr + " " + state.dump; } } - if (atExplicitKey) { - storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos); - } - if (detected) { - state.tag = _tag; - state.anchor = _anchor; - state.kind = "mapping"; - state.result = _result; + return true; +} +function getDuplicateReferences(object, state) { + var objects = [], duplicatesIndexes = [], index, length; + inspectNode(object, objects, duplicatesIndexes); + for (index = 0, length = duplicatesIndexes.length; index < length; index += 1) { + state.duplicates.push(objects[duplicatesIndexes[index]]); } - return detected; + state.usedDuplicates = new Array(length); } -function readTagProperty(state) { - var _position, isVerbatim = false, isNamed = false, tagHandle, tagName, ch; - ch = state.input.charCodeAt(state.position); - if (ch !== 33) return false; - if (state.tag !== null) { - throwError(state, "duplication of a tag property"); +function inspectNode(object, objects, duplicatesIndexes) { + var objectKeyList, index, length; + if (object !== null && typeof object === "object") { + index = objects.indexOf(object); + if (index !== -1) { + if (duplicatesIndexes.indexOf(index) === -1) { + duplicatesIndexes.push(index); + } + } else { + objects.push(object); + if (Array.isArray(object)) { + for (index = 0, length = object.length; index < length; index += 1) { + inspectNode(object[index], objects, duplicatesIndexes); + } + } else { + objectKeyList = Object.keys(object); + 
for (index = 0, length = objectKeyList.length; index < length; index += 1) { + inspectNode(object[objectKeyList[index]], objects, duplicatesIndexes); + } + } + } } - ch = state.input.charCodeAt(++state.position); - if (ch === 60) { - isVerbatim = true; - ch = state.input.charCodeAt(++state.position); - } else if (ch === 33) { - isNamed = true; - tagHandle = "!!"; - ch = state.input.charCodeAt(++state.position); - } else { - tagHandle = "!"; +} +function dump$1(input, options) { + options = options || {}; + var state = new State(options); + if (!state.noRefs) getDuplicateReferences(input, state); + var value = input; + if (state.replacer) { + value = state.replacer.call({ "": value }, "", value); } - _position = state.position; - if (isVerbatim) { - do { - ch = state.input.charCodeAt(++state.position); - } while (ch !== 0 && ch !== 62); - if (state.position < state.length) { - tagName = state.input.slice(_position, state.position); - ch = state.input.charCodeAt(++state.position); - } else { - throwError(state, "unexpected end of the stream within a verbatim tag"); + if (writeNode(state, 0, value, true, true)) return state.dump + "\n"; + return ""; +} +var dump_1 = dump$1; +var dumper = { + dump: dump_1 +}; +function renamed(from, to) { + return function() { + throw new Error("Function yaml." + from + " is removed in js-yaml 4. Use yaml." + to + " instead, which is now safe by default."); + }; +} +var load = loader.load; +var loadAll = loader.loadAll; +var dump = dumper.dump; +var safeLoad = renamed("safeLoad", "load"); +var safeLoadAll = renamed("safeLoadAll", "loadAll"); +var safeDump = renamed("safeDump", "dump"); + +// node_modules/brace-expansion/node_modules/balanced-match/dist/esm/index.js +var balanced = (a, b, str2) => { + const ma = a instanceof RegExp ? maybeMatch(a, str2) : a; + const mb = b instanceof RegExp ? 
maybeMatch(b, str2) : b; + const r = ma !== null && mb != null && range(ma, mb, str2); + return r && { + start: r[0], + end: r[1], + pre: str2.slice(0, r[0]), + body: str2.slice(r[0] + ma.length, r[1]), + post: str2.slice(r[1] + mb.length) + }; +}; +var maybeMatch = (reg, str2) => { + const m = str2.match(reg); + return m ? m[0] : null; +}; +var range = (a, b, str2) => { + let begs, beg, left, right = void 0, result; + let ai2 = str2.indexOf(a); + let bi2 = str2.indexOf(b, ai2 + 1); + let i = ai2; + if (ai2 >= 0 && bi2 > 0) { + if (a === b) { + return [ai2, bi2]; } - } else { - while (ch !== 0 && !is_WS_OR_EOL(ch)) { - if (ch === 33) { - if (!isNamed) { - tagHandle = state.input.slice(_position - 1, state.position + 1); - if (!PATTERN_TAG_HANDLE.test(tagHandle)) { - throwError(state, "named tag handle cannot contain such characters"); - } - isNamed = true; - _position = state.position + 1; - } else { - throwError(state, "tag suffix cannot contain exclamation marks"); + begs = []; + left = str2.length; + while (i >= 0 && !result) { + if (i === ai2) { + begs.push(i); + ai2 = str2.indexOf(a, i + 1); + } else if (begs.length === 1) { + const r = begs.pop(); + if (r !== void 0) + result = [r, bi2]; + } else { + beg = begs.pop(); + if (beg !== void 0 && beg < left) { + left = beg; + right = bi2; } + bi2 = str2.indexOf(b, i + 1); } - ch = state.input.charCodeAt(++state.position); + i = ai2 < bi2 && ai2 >= 0 ? 
ai2 : bi2; } - tagName = state.input.slice(_position, state.position); - if (PATTERN_FLOW_INDICATORS.test(tagName)) { - throwError(state, "tag suffix cannot contain flow indicator characters"); + if (begs.length && right !== void 0) { + result = [left, right]; } } - if (tagName && !PATTERN_TAG_URI.test(tagName)) { - throwError(state, "tag name cannot contain such characters: " + tagName); + return result; +}; + +// node_modules/brace-expansion/dist/esm/index.js +var escSlash = "\0SLASH" + Math.random() + "\0"; +var escOpen = "\0OPEN" + Math.random() + "\0"; +var escClose = "\0CLOSE" + Math.random() + "\0"; +var escComma = "\0COMMA" + Math.random() + "\0"; +var escPeriod = "\0PERIOD" + Math.random() + "\0"; +var escSlashPattern = new RegExp(escSlash, "g"); +var escOpenPattern = new RegExp(escOpen, "g"); +var escClosePattern = new RegExp(escClose, "g"); +var escCommaPattern = new RegExp(escComma, "g"); +var escPeriodPattern = new RegExp(escPeriod, "g"); +var slashPattern = /\\\\/g; +var openPattern = /\\{/g; +var closePattern = /\\}/g; +var commaPattern = /\\,/g; +var periodPattern = /\\\./g; +var EXPANSION_MAX = 1e5; +function numeric(str2) { + return !isNaN(str2) ? 
parseInt(str2, 10) : str2.charCodeAt(0); +} +function escapeBraces(str2) { + return str2.replace(slashPattern, escSlash).replace(openPattern, escOpen).replace(closePattern, escClose).replace(commaPattern, escComma).replace(periodPattern, escPeriod); +} +function unescapeBraces(str2) { + return str2.replace(escSlashPattern, "\\").replace(escOpenPattern, "{").replace(escClosePattern, "}").replace(escCommaPattern, ",").replace(escPeriodPattern, "."); +} +function parseCommaParts(str2) { + if (!str2) { + return [""]; } - try { - tagName = decodeURIComponent(tagName); - } catch (err) { - throwError(state, "tag name is malformed: " + tagName); + const parts = []; + const m = balanced("{", "}", str2); + if (!m) { + return str2.split(","); } - if (isVerbatim) { - state.tag = tagName; - } else if (_hasOwnProperty$1.call(state.tagMap, tagHandle)) { - state.tag = state.tagMap[tagHandle] + tagName; - } else if (tagHandle === "!") { - state.tag = "!" + tagName; - } else if (tagHandle === "!!") { - state.tag = "tag:yaml.org,2002:" + tagName; - } else { - throwError(state, 'undeclared tag handle "' + tagHandle + '"'); + const { pre, body, post } = m; + const p = pre.split(","); + p[p.length - 1] += "{" + body + "}"; + const postParts = parseCommaParts(post); + if (post.length) { + ; + p[p.length - 1] += postParts.shift(); + p.push.apply(p, postParts); } - return true; + parts.push.apply(parts, p); + return parts; } -function readAnchorProperty(state) { - var _position, ch; - ch = state.input.charCodeAt(state.position); - if (ch !== 38) return false; - if (state.anchor !== null) { - throwError(state, "duplication of an anchor property"); - } - ch = state.input.charCodeAt(++state.position); - _position = state.position; - while (ch !== 0 && !is_WS_OR_EOL(ch) && !is_FLOW_INDICATOR(ch)) { - ch = state.input.charCodeAt(++state.position); +function expand(str2, options = {}) { + if (!str2) { + return []; } - if (state.position === _position) { - throwError(state, "name of an anchor 
node must contain at least one character"); + const { max = EXPANSION_MAX } = options; + if (str2.slice(0, 2) === "{}") { + str2 = "\\{\\}" + str2.slice(2); } - state.anchor = state.input.slice(_position, state.position); - return true; + return expand_(escapeBraces(str2), max, true).map(unescapeBraces); } -function readAlias(state) { - var _position, alias, ch; - ch = state.input.charCodeAt(state.position); - if (ch !== 42) return false; - ch = state.input.charCodeAt(++state.position); - _position = state.position; - while (ch !== 0 && !is_WS_OR_EOL(ch) && !is_FLOW_INDICATOR(ch)) { - ch = state.input.charCodeAt(++state.position); - } - if (state.position === _position) { - throwError(state, "name of an alias node must contain at least one character"); - } - alias = state.input.slice(_position, state.position); - if (!_hasOwnProperty$1.call(state.anchorMap, alias)) { - throwError(state, 'unidentified alias "' + alias + '"'); - } - state.result = state.anchorMap[alias]; - skipSeparationSpace(state, true, -1); - return true; +function embrace(str2) { + return "{" + str2 + "}"; } -function composeNode(state, parentIndent, nodeContext, allowToSeek, allowCompact) { - var allowBlockStyles, allowBlockScalars, allowBlockCollections, indentStatus = 1, atNewLine = false, hasContent = false, typeIndex, typeQuantity, typeList, type2, flowIndent, blockIndent; - if (state.listener !== null) { - state.listener("open", state); - } - state.tag = null; - state.anchor = null; - state.kind = null; - state.result = null; - allowBlockStyles = allowBlockScalars = allowBlockCollections = CONTEXT_BLOCK_OUT === nodeContext || CONTEXT_BLOCK_IN === nodeContext; - if (allowToSeek) { - if (skipSeparationSpace(state, true, -1)) { - atNewLine = true; - if (state.lineIndent > parentIndent) { - indentStatus = 1; - } else if (state.lineIndent === parentIndent) { - indentStatus = 0; - } else if (state.lineIndent < parentIndent) { - indentStatus = -1; - } +function isPadded(el) { + return 
/^-?0\d/.test(el); +} +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} +function expand_(str2, max, isTop) { + const expansions = []; + const m = balanced("{", "}", str2); + if (!m) + return [str2]; + const pre = m.pre; + const post = m.post.length ? expand_(m.post, max, false) : [""]; + if (/\$$/.test(m.pre)) { + for (let k2 = 0; k2 < post.length && k2 < max; k2++) { + const expansion = pre + "{" + m.body + "}" + post[k2]; + expansions.push(expansion); } - } - if (indentStatus === 1) { - while (readTagProperty(state) || readAnchorProperty(state)) { - if (skipSeparationSpace(state, true, -1)) { - atNewLine = true; - allowBlockCollections = allowBlockStyles; - if (state.lineIndent > parentIndent) { - indentStatus = 1; - } else if (state.lineIndent === parentIndent) { - indentStatus = 0; - } else if (state.lineIndent < parentIndent) { - indentStatus = -1; - } - } else { - allowBlockCollections = false; + } else { + const isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + const isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + const isSequence = isNumericSequence || isAlphaSequence; + const isOptions = m.body.indexOf(",") >= 0; + if (!isSequence && !isOptions) { + if (m.post.match(/,(?!,).*\}/)) { + str2 = m.pre + "{" + m.body + escClose + m.post; + return expand_(str2, max, true); } + return [str2]; } - } - if (allowBlockCollections) { - allowBlockCollections = atNewLine || allowCompact; - } - if (indentStatus === 1 || CONTEXT_BLOCK_OUT === nodeContext) { - if (CONTEXT_FLOW_IN === nodeContext || CONTEXT_FLOW_OUT === nodeContext) { - flowIndent = parentIndent; + let n7; + if (isSequence) { + n7 = m.body.split(/\.\./); } else { - flowIndent = parentIndent + 1; + n7 = parseCommaParts(m.body); + if (n7.length === 1 && n7[0] !== void 0) { + n7 = expand_(n7[0], max, false).map(embrace); + if (n7.length === 1) { + return post.map((p) => m.pre + n7[0] + p); + } + } } - blockIndent = state.position - 
state.lineStart; - if (indentStatus === 1) { - if (allowBlockCollections && (readBlockSequence(state, blockIndent) || readBlockMapping(state, blockIndent, flowIndent)) || readFlowCollection(state, flowIndent)) { - hasContent = true; - } else { - if (allowBlockScalars && readBlockScalar(state, flowIndent) || readSingleQuotedScalar(state, flowIndent) || readDoubleQuotedScalar(state, flowIndent)) { - hasContent = true; - } else if (readAlias(state)) { - hasContent = true; - if (state.tag !== null || state.anchor !== null) { - throwError(state, "alias node should not have any properties"); + let N2; + if (isSequence && n7[0] !== void 0 && n7[1] !== void 0) { + const x2 = numeric(n7[0]); + const y = numeric(n7[1]); + const width = Math.max(n7[0].length, n7[1].length); + let incr = n7.length === 3 && n7[2] !== void 0 ? Math.max(Math.abs(numeric(n7[2])), 1) : 1; + let test = lte; + const reverse = y < x2; + if (reverse) { + incr *= -1; + test = gte; + } + const pad = n7.some(isPadded); + N2 = []; + for (let i = x2; test(i, y); i += incr) { + let c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === "\\") { + c = ""; } - } else if (readPlainScalar(state, flowIndent, CONTEXT_FLOW_IN === nodeContext)) { - hasContent = true; - if (state.tag === null) { - state.tag = "?"; + } else { + c = String(i); + if (pad) { + const need = width - c.length; + if (need > 0) { + const z = new Array(need + 1).join("0"); + if (i < 0) { + c = "-" + z + c.slice(1); + } else { + c = z + c; + } + } } } - if (state.anchor !== null) { - state.anchorMap[state.anchor] = state.result; + N2.push(c); + } + } else { + N2 = []; + for (let j2 = 0; j2 < n7.length; j2++) { + N2.push.apply(N2, expand_(n7[j2], max, false)); + } + } + for (let j2 = 0; j2 < N2.length; j2++) { + for (let k2 = 0; k2 < post.length && expansions.length < max; k2++) { + const expansion = pre + N2[j2] + post[k2]; + if (!isTop || isSequence || expansion) { + expansions.push(expansion); } } - } else if (indentStatus === 
0) { - hasContent = allowBlockCollections && readBlockSequence(state, blockIndent); } } - if (state.tag === null) { - if (state.anchor !== null) { - state.anchorMap[state.anchor] = state.result; + return expansions; +} + +// node_modules/minimatch/dist/esm/assert-valid-pattern.js +var MAX_PATTERN_LENGTH = 1024 * 64; +var assertValidPattern = (pattern) => { + if (typeof pattern !== "string") { + throw new TypeError("invalid pattern"); + } + if (pattern.length > MAX_PATTERN_LENGTH) { + throw new TypeError("pattern is too long"); + } +}; + +// node_modules/minimatch/dist/esm/brace-expressions.js +var posixClasses = { + "[:alnum:]": ["\\p{L}\\p{Nl}\\p{Nd}", true], + "[:alpha:]": ["\\p{L}\\p{Nl}", true], + "[:ascii:]": ["\\x00-\\x7f", false], + "[:blank:]": ["\\p{Zs}\\t", true], + "[:cntrl:]": ["\\p{Cc}", true], + "[:digit:]": ["\\p{Nd}", true], + "[:graph:]": ["\\p{Z}\\p{C}", true, true], + "[:lower:]": ["\\p{Ll}", true], + "[:print:]": ["\\p{C}", true], + "[:punct:]": ["\\p{P}", true], + "[:space:]": ["\\p{Z}\\t\\r\\n\\v\\f", true], + "[:upper:]": ["\\p{Lu}", true], + "[:word:]": ["\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}", true], + "[:xdigit:]": ["A-Fa-f0-9", false] +}; +var braceEscape = (s) => s.replace(/[[\]\\-]/g, "\\$&"); +var regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&"); +var rangesToString = (ranges) => ranges.join(""); +var parseClass = (glob, position) => { + const pos = position; + if (glob.charAt(pos) !== "[") { + throw new Error("not in a brace expression"); + } + const ranges = []; + const negs = []; + let i = pos + 1; + let sawStart = false; + let uflag = false; + let escaping = false; + let negate = false; + let endPos = pos; + let rangeStart = ""; + WHILE: while (i < glob.length) { + const c = glob.charAt(i); + if ((c === "!" || c === "^") && i === pos + 1) { + negate = true; + i++; + continue; } - } else if (state.tag === "?") { - if (state.result !== null && state.kind !== "scalar") { - throwError(state, 'unacceptable node kind for ! 
tag; it should be "scalar", not "' + state.kind + '"'); + if (c === "]" && sawStart && !escaping) { + endPos = i + 1; + break; } - for (typeIndex = 0, typeQuantity = state.implicitTypes.length; typeIndex < typeQuantity; typeIndex += 1) { - type2 = state.implicitTypes[typeIndex]; - if (type2.resolve(state.result)) { - state.result = type2.construct(state.result); - state.tag = type2.tag; - if (state.anchor !== null) { - state.anchorMap[state.anchor] = state.result; + sawStart = true; + if (c === "\\") { + if (!escaping) { + escaping = true; + i++; + continue; + } + } + if (c === "[" && !escaping) { + for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) { + if (glob.startsWith(cls, i)) { + if (rangeStart) { + return ["$.", false, glob.length - pos, true]; + } + i += cls.length; + if (neg) + negs.push(unip); + else + ranges.push(unip); + uflag = uflag || u; + continue WHILE; } - break; } } - } else if (state.tag !== "!") { - if (_hasOwnProperty$1.call(state.typeMap[state.kind || "fallback"], state.tag)) { - type2 = state.typeMap[state.kind || "fallback"][state.tag]; - } else { - type2 = null; - typeList = state.typeMap.multi[state.kind || "fallback"]; - for (typeIndex = 0, typeQuantity = typeList.length; typeIndex < typeQuantity; typeIndex += 1) { - if (state.tag.slice(0, typeList[typeIndex].tag.length) === typeList[typeIndex].tag) { - type2 = typeList[typeIndex]; - break; - } + escaping = false; + if (rangeStart) { + if (c > rangeStart) { + ranges.push(braceEscape(rangeStart) + "-" + braceEscape(c)); + } else if (c === rangeStart) { + ranges.push(braceEscape(c)); } + rangeStart = ""; + i++; + continue; } - if (!type2) { - throwError(state, "unknown tag !<" + state.tag + ">"); - } - if (state.result !== null && type2.kind !== state.kind) { - throwError(state, "unacceptable node kind for !<" + state.tag + '> tag; it should be "' + type2.kind + '", not "' + state.kind + '"'); + if (glob.startsWith("-]", i + 1)) { + ranges.push(braceEscape(c + "-")); + i += 
2; + continue; } - if (!type2.resolve(state.result, state.tag)) { - throwError(state, "cannot resolve a node with !<" + state.tag + "> explicit tag"); - } else { - state.result = type2.construct(state.result, state.tag); - if (state.anchor !== null) { - state.anchorMap[state.anchor] = state.result; - } + if (glob.startsWith("-", i + 1)) { + rangeStart = c; + i += 2; + continue; } + ranges.push(braceEscape(c)); + i++; } - if (state.listener !== null) { - state.listener("close", state); + if (endPos < i) { + return ["", false, 0, false]; } - return state.tag !== null || state.anchor !== null || hasContent; -} -function readDocument(state) { - var documentStart = state.position, _position, directiveName, directiveArgs, hasDirectives = false, ch; - state.version = null; - state.checkLineBreaks = state.legacy; - state.tagMap = /* @__PURE__ */ Object.create(null); - state.anchorMap = /* @__PURE__ */ Object.create(null); - while ((ch = state.input.charCodeAt(state.position)) !== 0) { - skipSeparationSpace(state, true, -1); - ch = state.input.charCodeAt(state.position); - if (state.lineIndent > 0 || ch !== 37) { - break; - } - hasDirectives = true; - ch = state.input.charCodeAt(++state.position); - _position = state.position; - while (ch !== 0 && !is_WS_OR_EOL(ch)) { - ch = state.input.charCodeAt(++state.position); - } - directiveName = state.input.slice(_position, state.position); - directiveArgs = []; - if (directiveName.length < 1) { - throwError(state, "directive name must not be less than one character in length"); - } - while (ch !== 0) { - while (is_WHITE_SPACE(ch)) { - ch = state.input.charCodeAt(++state.position); - } - if (ch === 35) { - do { - ch = state.input.charCodeAt(++state.position); - } while (ch !== 0 && !is_EOL(ch)); - break; - } - if (is_EOL(ch)) break; - _position = state.position; - while (ch !== 0 && !is_WS_OR_EOL(ch)) { - ch = state.input.charCodeAt(++state.position); - } - directiveArgs.push(state.input.slice(_position, state.position)); - } - if 
(ch !== 0) readLineBreak(state); - if (_hasOwnProperty$1.call(directiveHandlers, directiveName)) { - directiveHandlers[directiveName](state, directiveName, directiveArgs); - } else { - throwWarning(state, 'unknown document directive "' + directiveName + '"'); - } + if (!ranges.length && !negs.length) { + return ["$.", false, glob.length - pos, true]; } - skipSeparationSpace(state, true, -1); - if (state.lineIndent === 0 && state.input.charCodeAt(state.position) === 45 && state.input.charCodeAt(state.position + 1) === 45 && state.input.charCodeAt(state.position + 2) === 45) { - state.position += 3; - skipSeparationSpace(state, true, -1); - } else if (hasDirectives) { - throwError(state, "directives end mark is expected"); + if (negs.length === 0 && ranges.length === 1 && /^\\?.$/.test(ranges[0]) && !negate) { + const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0]; + return [regexpEscape(r), false, endPos - pos, false]; } - composeNode(state, state.lineIndent - 1, CONTEXT_BLOCK_OUT, false, true); - skipSeparationSpace(state, true, -1); - if (state.checkLineBreaks && PATTERN_NON_ASCII_LINE_BREAKS.test(state.input.slice(documentStart, state.position))) { - throwWarning(state, "non-ASCII line breaks are interpreted as content"); + const sranges = "[" + (negate ? "^" : "") + rangesToString(ranges) + "]"; + const snegs = "[" + (negate ? "" : "^") + rangesToString(negs) + "]"; + const comb = ranges.length && negs.length ? "(" + sranges + "|" + snegs + ")" : ranges.length ? sranges : snegs; + return [comb, uflag, endPos - pos, true]; +}; + +// node_modules/minimatch/dist/esm/unescape.js +var unescape = (s, { windowsPathsNoEscape = false, magicalBraces = true } = {}) => { + if (magicalBraces) { + return windowsPathsNoEscape ? 
s.replace(/\[([^\/\\])\]/g, "$1") : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, "$1$2").replace(/\\([^\/])/g, "$1"); } - state.documents.push(state.result); - if (state.position === state.lineStart && testDocumentSeparator(state)) { - if (state.input.charCodeAt(state.position) === 46) { - state.position += 3; - skipSeparationSpace(state, true, -1); - } - return; + return windowsPathsNoEscape ? s.replace(/\[([^\/\\{}])\]/g, "$1") : s.replace(/((?!\\).|^)\[([^\/\\{}])\]/g, "$1$2").replace(/\\([^\/{}])/g, "$1"); +}; + +// node_modules/minimatch/dist/esm/ast.js +var _a; +var types = /* @__PURE__ */ new Set(["!", "?", "+", "*", "@"]); +var isExtglobType = (c) => types.has(c); +var isExtglobAST = (c) => isExtglobType(c.type); +var adoptionMap = /* @__PURE__ */ new Map([ + ["!", ["@"]], + ["?", ["?", "@"]], + ["@", ["@"]], + ["*", ["*", "+", "?", "@"]], + ["+", ["+", "@"]] +]); +var adoptionWithSpaceMap = /* @__PURE__ */ new Map([ + ["!", ["?"]], + ["@", ["?"]], + ["+", ["?", "*"]] +]); +var adoptionAnyMap = /* @__PURE__ */ new Map([ + ["!", ["?", "@"]], + ["?", ["?", "@"]], + ["@", ["?", "@"]], + ["*", ["*", "+", "?", "@"]], + ["+", ["+", "@", "?", "*"]] +]); +var usurpMap = /* @__PURE__ */ new Map([ + ["!", /* @__PURE__ */ new Map([["!", "@"]])], + [ + "?", + /* @__PURE__ */ new Map([ + ["*", "*"], + ["+", "*"] + ]) + ], + [ + "@", + /* @__PURE__ */ new Map([ + ["!", "!"], + ["?", "?"], + ["@", "@"], + ["*", "*"], + ["+", "+"] + ]) + ], + [ + "+", + /* @__PURE__ */ new Map([ + ["?", "*"], + ["*", "*"] + ]) + ] +]); +var startNoTraversal = "(?!(?:^|/)\\.\\.?(?:$|/))"; +var startNoDot = "(?!\\.)"; +var addPatternStart = /* @__PURE__ */ new Set(["[", "."]); +var justDots = /* @__PURE__ */ new Set(["..", "."]); +var reSpecials = new Set("().*{}+?[]^$\\!"); +var regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&"); +var qmark = "[^/]"; +var star = qmark + "*?"; +var starNoEmpty = qmark + "+?"; +var ID = 0; +var AST = class { + type; + #root; + #hasMagic; + #uflag = 
false; + #parts = []; + #parent; + #parentIndex; + #negs; + #filledNegs = false; + #options; + #toString; + // set to true if it's an extglob with no children + // (which really means one child of '') + #emptyExt = false; + id = ++ID; + get depth() { + return (this.#parent?.depth ?? -1) + 1; } - if (state.position < state.length - 1) { - throwError(state, "end of the stream or a document separator is expected"); - } else { - return; + [/* @__PURE__ */ Symbol.for("nodejs.util.inspect.custom")]() { + return { + "@@type": "AST", + id: this.id, + type: this.type, + root: this.#root.id, + parent: this.#parent?.id, + depth: this.depth, + partsLength: this.#parts.length, + parts: this.#parts + }; } -} -function loadDocuments(input, options) { - input = String(input); - options = options || {}; - if (input.length !== 0) { - if (input.charCodeAt(input.length - 1) !== 10 && input.charCodeAt(input.length - 1) !== 13) { - input += "\n"; - } - if (input.charCodeAt(0) === 65279) { - input = input.slice(1); - } + constructor(type2, parent, options = {}) { + this.type = type2; + if (type2) + this.#hasMagic = true; + this.#parent = parent; + this.#root = this.#parent ? this.#parent.#root : this; + this.#options = this.#root === this ? options : this.#root.#options; + this.#negs = this.#root === this ? [] : this.#root.#negs; + if (type2 === "!" && !this.#root.#filledNegs) + this.#negs.push(this); + this.#parentIndex = this.#parent ? 
this.#parent.#parts.length : 0; } - var state = new State$1(input, options); - var nullpos = input.indexOf("\0"); - if (nullpos !== -1) { - state.position = nullpos; - throwError(state, "null byte is not allowed in input"); + get hasMagic() { + if (this.#hasMagic !== void 0) + return this.#hasMagic; + for (const p of this.#parts) { + if (typeof p === "string") + continue; + if (p.type || p.hasMagic) + return this.#hasMagic = true; + } + return this.#hasMagic; } - state.input += "\0"; - while (state.input.charCodeAt(state.position) === 32) { - state.lineIndent += 1; - state.position += 1; - } - while (state.position < state.length - 1) { - readDocument(state); - } - return state.documents; -} -function loadAll$1(input, iterator, options) { - if (iterator !== null && typeof iterator === "object" && typeof options === "undefined") { - options = iterator; - iterator = null; - } - var documents = loadDocuments(input, options); - if (typeof iterator !== "function") { - return documents; - } - for (var index = 0, length = documents.length; index < length; index += 1) { - iterator(documents[index]); - } -} -function load$1(input, options) { - var documents = loadDocuments(input, options); - if (documents.length === 0) { - return void 0; - } else if (documents.length === 1) { - return documents[0]; - } - throw new exception("expected a single document in the stream, but found more"); -} -var loadAll_1 = loadAll$1; -var load_1 = load$1; -var loader = { - loadAll: loadAll_1, - load: load_1 -}; -var _toString = Object.prototype.toString; -var _hasOwnProperty = Object.prototype.hasOwnProperty; -var CHAR_BOM = 65279; -var CHAR_TAB = 9; -var CHAR_LINE_FEED = 10; -var CHAR_CARRIAGE_RETURN = 13; -var CHAR_SPACE = 32; -var CHAR_EXCLAMATION = 33; -var CHAR_DOUBLE_QUOTE = 34; -var CHAR_SHARP = 35; -var CHAR_PERCENT = 37; -var CHAR_AMPERSAND = 38; -var CHAR_SINGLE_QUOTE = 39; -var CHAR_ASTERISK = 42; -var CHAR_COMMA = 44; -var CHAR_MINUS = 45; -var CHAR_COLON = 58; -var CHAR_EQUALS = 
61; -var CHAR_GREATER_THAN = 62; -var CHAR_QUESTION = 63; -var CHAR_COMMERCIAL_AT = 64; -var CHAR_LEFT_SQUARE_BRACKET = 91; -var CHAR_RIGHT_SQUARE_BRACKET = 93; -var CHAR_GRAVE_ACCENT = 96; -var CHAR_LEFT_CURLY_BRACKET = 123; -var CHAR_VERTICAL_LINE = 124; -var CHAR_RIGHT_CURLY_BRACKET = 125; -var ESCAPE_SEQUENCES = {}; -ESCAPE_SEQUENCES[0] = "\\0"; -ESCAPE_SEQUENCES[7] = "\\a"; -ESCAPE_SEQUENCES[8] = "\\b"; -ESCAPE_SEQUENCES[9] = "\\t"; -ESCAPE_SEQUENCES[10] = "\\n"; -ESCAPE_SEQUENCES[11] = "\\v"; -ESCAPE_SEQUENCES[12] = "\\f"; -ESCAPE_SEQUENCES[13] = "\\r"; -ESCAPE_SEQUENCES[27] = "\\e"; -ESCAPE_SEQUENCES[34] = '\\"'; -ESCAPE_SEQUENCES[92] = "\\\\"; -ESCAPE_SEQUENCES[133] = "\\N"; -ESCAPE_SEQUENCES[160] = "\\_"; -ESCAPE_SEQUENCES[8232] = "\\L"; -ESCAPE_SEQUENCES[8233] = "\\P"; -var DEPRECATED_BOOLEANS_SYNTAX = [ - "y", - "Y", - "yes", - "Yes", - "YES", - "on", - "On", - "ON", - "n", - "N", - "no", - "No", - "NO", - "off", - "Off", - "OFF" -]; -var DEPRECATED_BASE60_SYNTAX = /^[-+]?[0-9_]+(?::[0-9_]+)+(?:\.[0-9_]*)?$/; -function compileStyleMap(schema2, map2) { - var result, keys, index, length, tag, style, type2; - if (map2 === null) return {}; - result = {}; - keys = Object.keys(map2); - for (index = 0, length = keys.length; index < length; index += 1) { - tag = keys[index]; - style = String(map2[tag]); - if (tag.slice(0, 2) === "!!") { - tag = "tag:yaml.org,2002:" + tag.slice(2); + // reconstructs the pattern + toString() { + if (this.#toString !== void 0) + return this.#toString; + if (!this.type) { + return this.#toString = this.#parts.map((p) => String(p)).join(""); + } else { + return this.#toString = this.type + "(" + this.#parts.map((p) => String(p)).join("|") + ")"; } - type2 = schema2.compiledTypeMap["fallback"][tag]; - if (type2 && _hasOwnProperty.call(type2.styleAliases, style)) { - style = type2.styleAliases[style]; + } + #fillNegs() { + if (this !== this.#root) + throw new Error("should only call on root"); + if (this.#filledNegs) + return this; + 
this.toString(); + this.#filledNegs = true; + let n7; + while (n7 = this.#negs.pop()) { + if (n7.type !== "!") + continue; + let p = n7; + let pp = p.#parent; + while (pp) { + for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) { + for (const part of n7.#parts) { + if (typeof part === "string") { + throw new Error("string part in extglob AST??"); + } + part.copyIn(pp.#parts[i]); + } + } + p = pp; + pp = p.#parent; + } } - result[tag] = style; + return this; } - return result; -} -function encodeHex(character) { - var string, handle, length; - string = character.toString(16).toUpperCase(); - if (character <= 255) { - handle = "x"; - length = 2; - } else if (character <= 65535) { - handle = "u"; - length = 4; - } else if (character <= 4294967295) { - handle = "U"; - length = 8; - } else { - throw new exception("code point within a string may not be greater than 0xFFFFFFFF"); + push(...parts) { + for (const p of parts) { + if (p === "") + continue; + if (typeof p !== "string" && !(p instanceof _a && p.#parent === this)) { + throw new Error("invalid part: " + p); + } + this.#parts.push(p); + } } - return "\\" + handle + common.repeat("0", length - string.length) + string; -} -var QUOTING_TYPE_SINGLE = 1; -var QUOTING_TYPE_DOUBLE = 2; -function State(options) { - this.schema = options["schema"] || _default; - this.indent = Math.max(1, options["indent"] || 2); - this.noArrayIndent = options["noArrayIndent"] || false; - this.skipInvalid = options["skipInvalid"] || false; - this.flowLevel = common.isNothing(options["flowLevel"]) ? -1 : options["flowLevel"]; - this.styleMap = compileStyleMap(this.schema, options["styles"] || null); - this.sortKeys = options["sortKeys"] || false; - this.lineWidth = options["lineWidth"] || 80; - this.noRefs = options["noRefs"] || false; - this.noCompatMode = options["noCompatMode"] || false; - this.condenseFlow = options["condenseFlow"] || false; - this.quotingType = options["quotingType"] === '"' ? 
QUOTING_TYPE_DOUBLE : QUOTING_TYPE_SINGLE; - this.forceQuotes = options["forceQuotes"] || false; - this.replacer = typeof options["replacer"] === "function" ? options["replacer"] : null; - this.implicitTypes = this.schema.compiledImplicit; - this.explicitTypes = this.schema.compiledExplicit; - this.tag = null; - this.result = ""; - this.duplicates = []; - this.usedDuplicates = null; -} -function indentString(string, spaces) { - var ind = common.repeat(" ", spaces), position = 0, next = -1, result = "", line, length = string.length; - while (position < length) { - next = string.indexOf("\n", position); - if (next === -1) { - line = string.slice(position); - position = length; - } else { - line = string.slice(position, next + 1); - position = next + 1; + toJSON() { + const ret = this.type === null ? this.#parts.slice().map((p) => typeof p === "string" ? p : p.toJSON()) : [this.type, ...this.#parts.map((p) => p.toJSON())]; + if (this.isStart() && !this.type) + ret.unshift([]); + if (this.isEnd() && (this === this.#root || this.#root.#filledNegs && this.#parent?.type === "!")) { + ret.push({}); } - if (line.length && line !== "\n") result += ind; - result += line; + return ret; } - return result; -} -function generateNextLine(state, level) { - return "\n" + common.repeat(" ", state.indent * level); -} -function testImplicitResolving(state, str2) { - var index, length, type2; - for (index = 0, length = state.implicitTypes.length; index < length; index += 1) { - type2 = state.implicitTypes[index]; - if (type2.resolve(str2)) { + isStart() { + if (this.#root === this) + return true; + if (!this.#parent?.isStart()) + return false; + if (this.#parentIndex === 0) return true; + const p = this.#parent; + for (let i = 0; i < this.#parentIndex; i++) { + const pp = p.#parts[i]; + if (!(pp instanceof _a && pp.type === "!")) { + return false; + } } + return true; } - return false; -} -function isWhitespace(c) { - return c === CHAR_SPACE || c === CHAR_TAB; -} -function 
isPrintable(c) { - return 32 <= c && c <= 126 || 161 <= c && c <= 55295 && c !== 8232 && c !== 8233 || 57344 <= c && c <= 65533 && c !== CHAR_BOM || 65536 <= c && c <= 1114111; -} -function isNsCharOrWhitespace(c) { - return isPrintable(c) && c !== CHAR_BOM && c !== CHAR_CARRIAGE_RETURN && c !== CHAR_LINE_FEED; -} -function isPlainSafe(c, prev, inblock) { - var cIsNsCharOrWhitespace = isNsCharOrWhitespace(c); - var cIsNsChar = cIsNsCharOrWhitespace && !isWhitespace(c); - return ( - // ns-plain-safe - (inblock ? ( - // c = flow-in - cIsNsCharOrWhitespace - ) : cIsNsCharOrWhitespace && c !== CHAR_COMMA && c !== CHAR_LEFT_SQUARE_BRACKET && c !== CHAR_RIGHT_SQUARE_BRACKET && c !== CHAR_LEFT_CURLY_BRACKET && c !== CHAR_RIGHT_CURLY_BRACKET) && c !== CHAR_SHARP && !(prev === CHAR_COLON && !cIsNsChar) || isNsCharOrWhitespace(prev) && !isWhitespace(prev) && c === CHAR_SHARP || prev === CHAR_COLON && cIsNsChar - ); -} -function isPlainSafeFirst(c) { - return isPrintable(c) && c !== CHAR_BOM && !isWhitespace(c) && c !== CHAR_MINUS && c !== CHAR_QUESTION && c !== CHAR_COLON && c !== CHAR_COMMA && c !== CHAR_LEFT_SQUARE_BRACKET && c !== CHAR_RIGHT_SQUARE_BRACKET && c !== CHAR_LEFT_CURLY_BRACKET && c !== CHAR_RIGHT_CURLY_BRACKET && c !== CHAR_SHARP && c !== CHAR_AMPERSAND && c !== CHAR_ASTERISK && c !== CHAR_EXCLAMATION && c !== CHAR_VERTICAL_LINE && c !== CHAR_EQUALS && c !== CHAR_GREATER_THAN && c !== CHAR_SINGLE_QUOTE && c !== CHAR_DOUBLE_QUOTE && c !== CHAR_PERCENT && c !== CHAR_COMMERCIAL_AT && c !== CHAR_GRAVE_ACCENT; -} -function isPlainSafeLast(c) { - return !isWhitespace(c) && c !== CHAR_COLON; -} -function codePointAt(string, pos) { - var first = string.charCodeAt(pos), second; - if (first >= 55296 && first <= 56319 && pos + 1 < string.length) { - second = string.charCodeAt(pos + 1); - if (second >= 56320 && second <= 57343) { - return (first - 55296) * 1024 + second - 56320 + 65536; + isEnd() { + if (this.#root === this) + return true; + if (this.#parent?.type === 
"!") + return true; + if (!this.#parent?.isEnd()) + return false; + if (!this.type) + return this.#parent?.isEnd(); + const pl = this.#parent ? this.#parent.#parts.length : 0; + return this.#parentIndex === pl - 1; + } + copyIn(part) { + if (typeof part === "string") + this.push(part); + else + this.push(part.clone(this)); + } + clone(parent) { + const c = new _a(this.type, parent); + for (const p of this.#parts) { + c.copyIn(p); } + return c; } - return first; -} -function needIndentIndicator(string) { - var leadingSpaceRe = /^\n* /; - return leadingSpaceRe.test(string); -} -var STYLE_PLAIN = 1; -var STYLE_SINGLE = 2; -var STYLE_LITERAL = 3; -var STYLE_FOLDED = 4; -var STYLE_DOUBLE = 5; -function chooseScalarStyle(string, singleLineOnly, indentPerLevel, lineWidth, testAmbiguousType, quotingType, forceQuotes, inblock) { - var i; - var char = 0; - var prevChar = null; - var hasLineBreak = false; - var hasFoldableLine = false; - var shouldTrackWidth = lineWidth !== -1; - var previousLineBreak = -1; - var plain = isPlainSafeFirst(codePointAt(string, 0)) && isPlainSafeLast(codePointAt(string, string.length - 1)); - if (singleLineOnly || forceQuotes) { - for (i = 0; i < string.length; char >= 65536 ? i += 2 : i++) { - char = codePointAt(string, i); - if (!isPrintable(char)) { - return STYLE_DOUBLE; + static #parseAST(str2, ast, pos, opt, extDepth) { + const maxDepth = opt.maxExtglobRecursion ?? 
2; + let escaping = false; + let inBrace = false; + let braceStart = -1; + let braceNeg = false; + if (ast.type === null) { + let i2 = pos; + let acc2 = ""; + while (i2 < str2.length) { + const c = str2.charAt(i2++); + if (escaping || c === "\\") { + escaping = !escaping; + acc2 += c; + continue; + } + if (inBrace) { + if (i2 === braceStart + 1) { + if (c === "^" || c === "!") { + braceNeg = true; + } + } else if (c === "]" && !(i2 === braceStart + 2 && braceNeg)) { + inBrace = false; + } + acc2 += c; + continue; + } else if (c === "[") { + inBrace = true; + braceStart = i2; + braceNeg = false; + acc2 += c; + continue; + } + const doRecurse = !opt.noext && isExtglobType(c) && str2.charAt(i2) === "(" && extDepth <= maxDepth; + if (doRecurse) { + ast.push(acc2); + acc2 = ""; + const ext2 = new _a(c, ast); + i2 = _a.#parseAST(str2, ext2, i2, opt, extDepth + 1); + ast.push(ext2); + continue; + } + acc2 += c; } - plain = plain && isPlainSafe(char, prevChar, inblock); - prevChar = char; + ast.push(acc2); + return i2; } - } else { - for (i = 0; i < string.length; char >= 65536 ? i += 2 : i++) { - char = codePointAt(string, i); - if (char === CHAR_LINE_FEED) { - hasLineBreak = true; - if (shouldTrackWidth) { - hasFoldableLine = hasFoldableLine || // Foldable line = too long, and not more-indented. 
- i - previousLineBreak - 1 > lineWidth && string[previousLineBreak + 1] !== " "; - previousLineBreak = i; + let i = pos + 1; + let part = new _a(null, ast); + const parts = []; + let acc = ""; + while (i < str2.length) { + const c = str2.charAt(i++); + if (escaping || c === "\\") { + escaping = !escaping; + acc += c; + continue; + } + if (inBrace) { + if (i === braceStart + 1) { + if (c === "^" || c === "!") { + braceNeg = true; + } + } else if (c === "]" && !(i === braceStart + 2 && braceNeg)) { + inBrace = false; } - } else if (!isPrintable(char)) { - return STYLE_DOUBLE; + acc += c; + continue; + } else if (c === "[") { + inBrace = true; + braceStart = i; + braceNeg = false; + acc += c; + continue; } - plain = plain && isPlainSafe(char, prevChar, inblock); - prevChar = char; + const doRecurse = !opt.noext && isExtglobType(c) && str2.charAt(i) === "(" && /* c8 ignore start - the maxDepth is sufficient here */ + (extDepth <= maxDepth || ast && ast.#canAdoptType(c)); + if (doRecurse) { + const depthAdd = ast && ast.#canAdoptType(c) ? 
0 : 1; + part.push(acc); + acc = ""; + const ext2 = new _a(c, part); + part.push(ext2); + i = _a.#parseAST(str2, ext2, i, opt, extDepth + depthAdd); + continue; + } + if (c === "|") { + part.push(acc); + acc = ""; + parts.push(part); + part = new _a(null, ast); + continue; + } + if (c === ")") { + if (acc === "" && ast.#parts.length === 0) { + ast.#emptyExt = true; + } + part.push(acc); + acc = ""; + ast.push(...parts, part); + return i; + } + acc += c; } - hasFoldableLine = hasFoldableLine || shouldTrackWidth && (i - previousLineBreak - 1 > lineWidth && string[previousLineBreak + 1] !== " "); + ast.type = null; + ast.#hasMagic = void 0; + ast.#parts = [str2.substring(pos - 1)]; + return i; } - if (!hasLineBreak && !hasFoldableLine) { - if (plain && !forceQuotes && !testAmbiguousType(string)) { - return STYLE_PLAIN; + #canAdoptWithSpace(child) { + return this.#canAdopt(child, adoptionWithSpaceMap); + } + #canAdopt(child, map2 = adoptionMap) { + if (!child || typeof child !== "object" || child.type !== null || child.#parts.length !== 1 || this.type === null) { + return false; } - return quotingType === QUOTING_TYPE_DOUBLE ? STYLE_DOUBLE : STYLE_SINGLE; + const gc = child.#parts[0]; + if (!gc || typeof gc !== "object" || gc.type === null) { + return false; + } + return this.#canAdoptType(gc.type, map2); } - if (indentPerLevel > 9 && needIndentIndicator(string)) { - return STYLE_DOUBLE; + #canAdoptType(c, map2 = adoptionAnyMap) { + return !!map2.get(this.type)?.includes(c); } - if (!forceQuotes) { - return hasFoldableLine ? STYLE_FOLDED : STYLE_LITERAL; + #adoptWithSpace(child, index) { + const gc = child.#parts[0]; + const blank = new _a(null, gc, this.options); + blank.#parts.push(""); + gc.push(blank); + this.#adopt(child, index); } - return quotingType === QUOTING_TYPE_DOUBLE ? 
STYLE_DOUBLE : STYLE_SINGLE; -} -function writeScalar(state, string, level, iskey, inblock) { - state.dump = (function() { - if (string.length === 0) { - return state.quotingType === QUOTING_TYPE_DOUBLE ? '""' : "''"; + #adopt(child, index) { + const gc = child.#parts[0]; + this.#parts.splice(index, 1, ...gc.#parts); + for (const p of gc.#parts) { + if (typeof p === "object") + p.#parent = this; } - if (!state.noCompatMode) { - if (DEPRECATED_BOOLEANS_SYNTAX.indexOf(string) !== -1 || DEPRECATED_BASE60_SYNTAX.test(string)) { - return state.quotingType === QUOTING_TYPE_DOUBLE ? '"' + string + '"' : "'" + string + "'"; - } + this.#toString = void 0; + } + #canUsurpType(c) { + const m = usurpMap.get(this.type); + return !!m?.has(c); + } + #canUsurp(child) { + if (!child || typeof child !== "object" || child.type !== null || child.#parts.length !== 1 || this.type === null || this.#parts.length !== 1) { + return false; } - var indent = state.indent * Math.max(1, level); - var lineWidth = state.lineWidth === -1 ? 
-1 : Math.max(Math.min(state.lineWidth, 40), state.lineWidth - indent); - var singleLineOnly = iskey || state.flowLevel > -1 && level >= state.flowLevel; - function testAmbiguity(string2) { - return testImplicitResolving(state, string2); + const gc = child.#parts[0]; + if (!gc || typeof gc !== "object" || gc.type === null) { + return false; } - switch (chooseScalarStyle( - string, - singleLineOnly, - state.indent, - lineWidth, - testAmbiguity, - state.quotingType, - state.forceQuotes && !iskey, - inblock - )) { - case STYLE_PLAIN: - return string; - case STYLE_SINGLE: - return "'" + string.replace(/'/g, "''") + "'"; - case STYLE_LITERAL: - return "|" + blockHeader(string, state.indent) + dropEndingNewline(indentString(string, indent)); - case STYLE_FOLDED: - return ">" + blockHeader(string, state.indent) + dropEndingNewline(indentString(foldString(string, lineWidth), indent)); - case STYLE_DOUBLE: - return '"' + escapeString(string) + '"'; - default: - throw new exception("impossible error: invalid scalar style"); - } - })(); -} -function blockHeader(string, indentPerLevel) { - var indentIndicator = needIndentIndicator(string) ? String(indentPerLevel) : ""; - var clip = string[string.length - 1] === "\n"; - var keep = clip && (string[string.length - 2] === "\n" || string === "\n"); - var chomp = keep ? "+" : clip ? "" : "-"; - return indentIndicator + chomp + "\n"; -} -function dropEndingNewline(string) { - return string[string.length - 1] === "\n" ? string.slice(0, -1) : string; -} -function foldString(string, width) { - var lineRe = /(\n+)([^\n]*)/g; - var result = (function() { - var nextLF = string.indexOf("\n"); - nextLF = nextLF !== -1 ? 
nextLF : string.length; - lineRe.lastIndex = nextLF; - return foldLine(string.slice(0, nextLF), width); - })(); - var prevMoreIndented = string[0] === "\n" || string[0] === " "; - var moreIndented; - var match2; - while (match2 = lineRe.exec(string)) { - var prefix = match2[1], line = match2[2]; - moreIndented = line[0] === " "; - result += prefix + (!prevMoreIndented && !moreIndented && line !== "" ? "\n" : "") + foldLine(line, width); - prevMoreIndented = moreIndented; + return this.#canUsurpType(gc.type); } - return result; -} -function foldLine(line, width) { - if (line === "" || line[0] === " ") return line; - var breakRe = / [^ ]/g; - var match2; - var start = 0, end, curr = 0, next = 0; - var result = ""; - while (match2 = breakRe.exec(line)) { - next = match2.index; - if (next - start > width) { - end = curr > start ? curr : next; - result += "\n" + line.slice(start, end); - start = end + 1; + #usurp(child) { + const m = usurpMap.get(this.type); + const gc = child.#parts[0]; + const nt2 = m?.get(gc.type); + if (!nt2) + return false; + this.#parts = gc.#parts; + for (const p of this.#parts) { + if (typeof p === "object") { + p.#parent = this; + } } - curr = next; + this.type = nt2; + this.#toString = void 0; + this.#emptyExt = false; } - result += "\n"; - if (line.length - start > width && curr > start) { - result += line.slice(start, curr) + "\n" + line.slice(curr + 1); - } else { - result += line.slice(start); + static fromGlob(pattern, options = {}) { + const ast = new _a(null, void 0, options); + _a.#parseAST(pattern, ast, 0, options, 0); + return ast; } - return result.slice(1); -} -function escapeString(string) { - var result = ""; - var char = 0; - var escapeSeq; - for (var i = 0; i < string.length; char >= 65536 ? 
i += 2 : i++) { - char = codePointAt(string, i); - escapeSeq = ESCAPE_SEQUENCES[char]; - if (!escapeSeq && isPrintable(char)) { - result += string[i]; - if (char >= 65536) result += string[i + 1]; - } else { - result += escapeSeq || encodeHex(char); + // returns the regular expression if there's magic, or the unescaped + // string if not. + toMMPattern() { + if (this !== this.#root) + return this.#root.toMMPattern(); + const glob = this.toString(); + const [re2, body, hasMagic, uflag] = this.toRegExpSource(); + const anyMagic = hasMagic || this.#hasMagic || this.#options.nocase && !this.#options.nocaseMagicOnly && glob.toUpperCase() !== glob.toLowerCase(); + if (!anyMagic) { + return body; } + const flags = (this.#options.nocase ? "i" : "") + (uflag ? "u" : ""); + return Object.assign(new RegExp(`^${re2}$`, flags), { + _src: re2, + _glob: glob + }); } - return result; -} -function writeFlowSequence(state, level, object) { - var _result = "", _tag = state.tag, index, length, value; - for (index = 0, length = object.length; index < length; index += 1) { - value = object[index]; - if (state.replacer) { - value = state.replacer.call(object, String(index), value); - } - if (writeNode(state, level, value, false, false) || typeof value === "undefined" && writeNode(state, level, null, false, false)) { - if (_result !== "") _result += "," + (!state.condenseFlow ? 
" " : ""); - _result += state.dump; - } + get options() { + return this.#options; } - state.tag = _tag; - state.dump = "[" + _result + "]"; -} -function writeBlockSequence(state, level, object, compact) { - var _result = "", _tag = state.tag, index, length, value; - for (index = 0, length = object.length; index < length; index += 1) { - value = object[index]; - if (state.replacer) { - value = state.replacer.call(object, String(index), value); + // returns the string match, the regexp source, whether there's magic + // in the regexp (so a regular expression is required) and whether or + // not the uflag is needed for the regular expression (for posix classes) + // TODO: instead of injecting the start/end at this point, just return + // the BODY of the regexp, along with the start/end portions suitable + // for binding the start/end in either a joined full-path makeRe context + // (where we bind to (^|/), or a standalone matchPart context (where + // we bind to ^, and not /). Otherwise slashes get duped! + // + // In part-matching mode, the start is: + // - if not isStart: nothing + // - if traversal possible, but not allowed: ^(?!\.\.?$) + // - if dots allowed or not possible: ^ + // - if dots possible and not allowed: ^(?!\.) + // end is: + // - if not isEnd(): nothing + // - else: $ + // + // In full-path matching mode, we put the slash at the START of the + // pattern, so start is: + // - if first pattern: same as part-matching mode + // - if not isStart(): nothing + // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/)) + // - if dots allowed or not possible: / + // - if dots possible and not allowed: /(?!\.) + // end is: + // - if last pattern, same as part-matching mode + // - else nothing + // + // Always put the (?:$|/) on negated tails, though, because that has to be + // there to bind the end of the negated pattern portion, and it's easier to + // just stick it in now rather than try to inject it later in the middle of + // the pattern. 
+ // + // We can just always return the same end, and leave it up to the caller + // to know whether it's going to be used joined or in parts. + // And, if the start is adjusted slightly, can do the same there: + // - if not isStart: nothing + // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$) + // - if dots allowed or not possible: (?:/|^) + // - if dots possible and not allowed: (?:/|^)(?!\.) + // + // But it's better to have a simpler binding without a conditional, for + // performance, so probably better to return both start options. + // + // Then the caller just ignores the end if it's not the first pattern, + // and the start always gets applied. + // + // But that's always going to be $ if it's the ending pattern, or nothing, + // so the caller can just attach $ at the end of the pattern when building. + // + // So the todo is: + // - better detect what kind of start is needed + // - return both flavors of starting pattern + // - attach $ at the end of the pattern when creating the actual RegExp + // + // Ah, but wait, no, that all only applies to the root when the first pattern + // is not an extglob. If the first pattern IS an extglob, then we need all + // that dot prevention biz to live in the extglob portions, because eg + // +(*|.x*) can match .xy but not .yx. + // + // So, return the two flavors if it's #root and the first child is not an + // AST, otherwise leave it to the child AST to handle it, and there, + // use the (?:^|/) style of start binding. + // + // Even simplified further: + // - Since the start for a join is eg /(?!\.) and the start for a part + // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root + // or start or whatever) and prepend ^ or / at the Regexp construction. + toRegExpSource(allowDot) { + const dot = allowDot ?? 
!!this.#options.dot; + if (this.#root === this) { + this.#flatten(); + this.#fillNegs(); } - if (writeNode(state, level + 1, value, true, true, false, true) || typeof value === "undefined" && writeNode(state, level + 1, null, true, true, false, true)) { - if (!compact || _result !== "") { - _result += generateNextLine(state, level); + if (!isExtglobAST(this)) { + const noEmpty = this.isStart() && this.isEnd() && !this.#parts.some((s) => typeof s !== "string"); + const src = this.#parts.map((p) => { + const [re2, _2, hasMagic, uflag] = typeof p === "string" ? _a.#parseGlob(p, this.#hasMagic, noEmpty) : p.toRegExpSource(allowDot); + this.#hasMagic = this.#hasMagic || hasMagic; + this.#uflag = this.#uflag || uflag; + return re2; + }).join(""); + let start2 = ""; + if (this.isStart()) { + if (typeof this.#parts[0] === "string") { + const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]); + if (!dotTravAllowed) { + const aps = addPatternStart; + const needNoTrav = ( + // dots are allowed, and the pattern starts with [ or . + dot && aps.has(src.charAt(0)) || // the pattern starts with \., and then [ or . + src.startsWith("\\.") && aps.has(src.charAt(2)) || // the pattern starts with \.\., and then [ or . + src.startsWith("\\.\\.") && aps.has(src.charAt(4)) + ); + const needNoDot = !dot && !allowDot && aps.has(src.charAt(0)); + start2 = needNoTrav ? startNoTraversal : needNoDot ? 
startNoDot : ""; + } + } } - if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) { - _result += "-"; - } else { - _result += "- "; + let end = ""; + if (this.isEnd() && this.#root.#filledNegs && this.#parent?.type === "!") { + end = "(?:$|\\/)"; } - _result += state.dump; + const final2 = start2 + src + end; + return [ + final2, + unescape(src), + this.#hasMagic = !!this.#hasMagic, + this.#uflag + ]; } - } - state.tag = _tag; - state.dump = _result || "[]"; -} -function writeFlowMapping(state, level, object) { - var _result = "", _tag = state.tag, objectKeyList = Object.keys(object), index, length, objectKey, objectValue, pairBuffer; - for (index = 0, length = objectKeyList.length; index < length; index += 1) { - pairBuffer = ""; - if (_result !== "") pairBuffer += ", "; - if (state.condenseFlow) pairBuffer += '"'; - objectKey = objectKeyList[index]; - objectValue = object[objectKey]; - if (state.replacer) { - objectValue = state.replacer.call(object, objectKey, objectValue); - } - if (!writeNode(state, level, objectKey, false, false)) { - continue; - } - if (state.dump.length > 1024) pairBuffer += "? "; - pairBuffer += state.dump + (state.condenseFlow ? '"' : "") + ":" + (state.condenseFlow ? 
"" : " "); - if (!writeNode(state, level, objectValue, false, false)) { - continue; - } - pairBuffer += state.dump; - _result += pairBuffer; - } - state.tag = _tag; - state.dump = "{" + _result + "}"; -} -function writeBlockMapping(state, level, object, compact) { - var _result = "", _tag = state.tag, objectKeyList = Object.keys(object), index, length, objectKey, objectValue, explicitPair, pairBuffer; - if (state.sortKeys === true) { - objectKeyList.sort(); - } else if (typeof state.sortKeys === "function") { - objectKeyList.sort(state.sortKeys); - } else if (state.sortKeys) { - throw new exception("sortKeys must be a boolean or a function"); - } - for (index = 0, length = objectKeyList.length; index < length; index += 1) { - pairBuffer = ""; - if (!compact || _result !== "") { - pairBuffer += generateNextLine(state, level); - } - objectKey = objectKeyList[index]; - objectValue = object[objectKey]; - if (state.replacer) { - objectValue = state.replacer.call(object, objectKey, objectValue); - } - if (!writeNode(state, level + 1, objectKey, true, true, true)) { - continue; - } - explicitPair = state.tag !== null && state.tag !== "?" || state.dump && state.dump.length > 1024; - if (explicitPair) { - if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) { - pairBuffer += "?"; - } else { - pairBuffer += "? "; - } + const repeated = this.type === "*" || this.type === "+"; + const start = this.type === "!" ? "(?:(?!(?:" : "(?:"; + let body = this.#partsToRegExp(dot); + if (this.isStart() && this.isEnd() && !body && this.type !== "!") { + const s = this.toString(); + const me2 = this; + me2.#parts = [s]; + me2.type = null; + me2.#hasMagic = void 0; + return [s, unescape(this.toString()), false, false]; } - pairBuffer += state.dump; - if (explicitPair) { - pairBuffer += generateNextLine(state, level); + let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot ? 
"" : this.#partsToRegExp(true); + if (bodyDotAllowed === body) { + bodyDotAllowed = ""; } - if (!writeNode(state, level + 1, objectValue, true, explicitPair)) { - continue; + if (bodyDotAllowed) { + body = `(?:${body})(?:${bodyDotAllowed})*?`; } - if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) { - pairBuffer += ":"; + let final = ""; + if (this.type === "!" && this.#emptyExt) { + final = (this.isStart() && !dot ? startNoDot : "") + starNoEmpty; } else { - pairBuffer += ": "; + const close = this.type === "!" ? ( + // !() must match something,but !(x) can match '' + "))" + (this.isStart() && !dot && !allowDot ? startNoDot : "") + star + ")" + ) : this.type === "@" ? ")" : this.type === "?" ? ")?" : this.type === "+" && bodyDotAllowed ? ")" : this.type === "*" && bodyDotAllowed ? `)?` : `)${this.type}`; + final = start + body + close; } - pairBuffer += state.dump; - _result += pairBuffer; + return [ + final, + unescape(body), + this.#hasMagic = !!this.#hasMagic, + this.#uflag + ]; } - state.tag = _tag; - state.dump = _result || "{}"; -} -function detectType(state, object, explicit) { - var _result, typeList, index, length, type2, style; - typeList = explicit ? 
state.explicitTypes : state.implicitTypes; - for (index = 0, length = typeList.length; index < length; index += 1) { - type2 = typeList[index]; - if ((type2.instanceOf || type2.predicate) && (!type2.instanceOf || typeof object === "object" && object instanceof type2.instanceOf) && (!type2.predicate || type2.predicate(object))) { - if (explicit) { - if (type2.multi && type2.representName) { - state.tag = type2.representName(object); - } else { - state.tag = type2.tag; + #flatten() { + if (!isExtglobAST(this)) { + for (const p of this.#parts) { + if (typeof p === "object") { + p.#flatten(); } - } else { - state.tag = "?"; } - if (type2.represent) { - style = state.styleMap[type2.tag] || type2.defaultStyle; - if (_toString.call(type2.represent) === "[object Function]") { - _result = type2.represent(object, style); - } else if (_hasOwnProperty.call(type2.represent, style)) { - _result = type2.represent[style](object, style); - } else { - throw new exception("!<" + type2.tag + '> tag resolver accepts not "' + style + '" style'); + } else { + let iterations = 0; + let done = false; + do { + done = true; + for (let i = 0; i < this.#parts.length; i++) { + const c = this.#parts[i]; + if (typeof c === "object") { + c.#flatten(); + if (this.#canAdopt(c)) { + done = false; + this.#adopt(c, i); + } else if (this.#canAdoptWithSpace(c)) { + done = false; + this.#adoptWithSpace(c, i); + } else if (this.#canUsurp(c)) { + done = false; + this.#usurp(c); + } + } } - state.dump = _result; - } - return true; + } while (!done && ++iterations < 10); } + this.#toString = void 0; } - return false; -} -function writeNode(state, level, object, block, compact, iskey, isblockseq) { - state.tag = null; - state.dump = object; - if (!detectType(state, object, false)) { - detectType(state, object, true); - } - var type2 = _toString.call(state.dump); - var inblock = block; - var tagStr; - if (block) { - block = state.flowLevel < 0 || state.flowLevel > level; - } - var objectOrArray = type2 === 
"[object Object]" || type2 === "[object Array]", duplicateIndex, duplicate; - if (objectOrArray) { - duplicateIndex = state.duplicates.indexOf(object); - duplicate = duplicateIndex !== -1; - } - if (state.tag !== null && state.tag !== "?" || duplicate || state.indent !== 2 && level > 0) { - compact = false; + #partsToRegExp(dot) { + return this.#parts.map((p) => { + if (typeof p === "string") { + throw new Error("string type in extglob ast??"); + } + const [re2, _2, _hasMagic, uflag] = p.toRegExpSource(dot); + this.#uflag = this.#uflag || uflag; + return re2; + }).filter((p) => !(this.isStart() && this.isEnd()) || !!p).join("|"); } - if (duplicate && state.usedDuplicates[duplicateIndex]) { - state.dump = "*ref_" + duplicateIndex; - } else { - if (objectOrArray && duplicate && !state.usedDuplicates[duplicateIndex]) { - state.usedDuplicates[duplicateIndex] = true; - } - if (type2 === "[object Object]") { - if (block && Object.keys(state.dump).length !== 0) { - writeBlockMapping(state, level, state.dump, compact); - if (duplicate) { - state.dump = "&ref_" + duplicateIndex + state.dump; - } + static #parseGlob(glob, hasMagic, noEmpty = false) { + let escaping = false; + let re2 = ""; + let uflag = false; + let inStar = false; + for (let i = 0; i < glob.length; i++) { + const c = glob.charAt(i); + if (escaping) { + escaping = false; + re2 += (reSpecials.has(c) ? "\\" : "") + c; + continue; + } + if (c === "*") { + if (inStar) + continue; + inStar = true; + re2 += noEmpty && /^[*]+$/.test(glob) ? 
starNoEmpty : star; + hasMagic = true; + continue; } else { - writeFlowMapping(state, level, state.dump); - if (duplicate) { - state.dump = "&ref_" + duplicateIndex + " " + state.dump; - } + inStar = false; } - } else if (type2 === "[object Array]") { - if (block && state.dump.length !== 0) { - if (state.noArrayIndent && !isblockseq && level > 0) { - writeBlockSequence(state, level - 1, state.dump, compact); + if (c === "\\") { + if (i === glob.length - 1) { + re2 += "\\\\"; } else { - writeBlockSequence(state, level, state.dump, compact); - } - if (duplicate) { - state.dump = "&ref_" + duplicateIndex + state.dump; + escaping = true; } - } else { - writeFlowSequence(state, level, state.dump); - if (duplicate) { - state.dump = "&ref_" + duplicateIndex + " " + state.dump; + continue; + } + if (c === "[") { + const [src, needUflag, consumed, magic] = parseClass(glob, i); + if (consumed) { + re2 += src; + uflag = uflag || needUflag; + i += consumed - 1; + hasMagic = hasMagic || magic; + continue; } } - } else if (type2 === "[object String]") { - if (state.tag !== "?") { - writeScalar(state, state.dump, level, iskey, inblock); - } - } else if (type2 === "[object Undefined]") { - return false; - } else { - if (state.skipInvalid) return false; - throw new exception("unacceptable kind of an object to dump " + type2); - } - if (state.tag !== null && state.tag !== "?") { - tagStr = encodeURI( - state.tag[0] === "!" ? state.tag.slice(1) : state.tag - ).replace(/!/g, "%21"); - if (state.tag[0] === "!") { - tagStr = "!" + tagStr; - } else if (tagStr.slice(0, 18) === "tag:yaml.org,2002:") { - tagStr = "!!" 
+ tagStr.slice(18); - } else { - tagStr = "!<" + tagStr + ">"; - } - state.dump = tagStr + " " + state.dump; - } - } - return true; -} -function getDuplicateReferences(object, state) { - var objects = [], duplicatesIndexes = [], index, length; - inspectNode(object, objects, duplicatesIndexes); - for (index = 0, length = duplicatesIndexes.length; index < length; index += 1) { - state.duplicates.push(objects[duplicatesIndexes[index]]); - } - state.usedDuplicates = new Array(length); -} -function inspectNode(object, objects, duplicatesIndexes) { - var objectKeyList, index, length; - if (object !== null && typeof object === "object") { - index = objects.indexOf(object); - if (index !== -1) { - if (duplicatesIndexes.indexOf(index) === -1) { - duplicatesIndexes.push(index); - } - } else { - objects.push(object); - if (Array.isArray(object)) { - for (index = 0, length = object.length; index < length; index += 1) { - inspectNode(object[index], objects, duplicatesIndexes); - } - } else { - objectKeyList = Object.keys(object); - for (index = 0, length = objectKeyList.length; index < length; index += 1) { - inspectNode(object[objectKeyList[index]], objects, duplicatesIndexes); - } + if (c === "?") { + re2 += qmark; + hasMagic = true; + continue; } + re2 += regExpEscape(c); } + return [re2, unescape(glob), !!hasMagic, uflag]; } -} -function dump$1(input, options) { - options = options || {}; - var state = new State(options); - if (!state.noRefs) getDuplicateReferences(input, state); - var value = input; - if (state.replacer) { - value = state.replacer.call({ "": value }, "", value); +}; +_a = AST; + +// node_modules/minimatch/dist/esm/escape.js +var escape = (s, { windowsPathsNoEscape = false, magicalBraces = false } = {}) => { + if (magicalBraces) { + return windowsPathsNoEscape ? 
s.replace(/[?*()[\]{}]/g, "[$&]") : s.replace(/[?*()[\]\\{}]/g, "\\$&"); } - if (writeNode(state, 0, value, true, true)) return state.dump + "\n"; - return ""; -} -var dump_1 = dump$1; -var dumper = { - dump: dump_1 + return windowsPathsNoEscape ? s.replace(/[?*()[\]]/g, "[$&]") : s.replace(/[?*()[\]\\]/g, "\\$&"); }; -function renamed(from, to) { - return function() { - throw new Error("Function yaml." + from + " is removed in js-yaml 4. Use yaml." + to + " instead, which is now safe by default."); - }; -} -var load = loader.load; -var loadAll = loader.loadAll; -var dump = dumper.dump; -var safeLoad = renamed("safeLoad", "load"); -var safeLoadAll = renamed("safeLoadAll", "loadAll"); -var safeDump = renamed("safeDump", "dump"); -// node_modules/brace-expansion/node_modules/balanced-match/dist/esm/index.js -var balanced = (a, b, str2) => { - const ma = a instanceof RegExp ? maybeMatch(a, str2) : a; - const mb = b instanceof RegExp ? maybeMatch(b, str2) : b; - const r = ma !== null && mb != null && range(ma, mb, str2); - return r && { - start: r[0], - end: r[1], - pre: str2.slice(0, r[0]), - body: str2.slice(r[0] + ma.length, r[1]), - post: str2.slice(r[1] + mb.length) - }; +// node_modules/minimatch/dist/esm/index.js +var minimatch = (p, pattern, options = {}) => { + assertValidPattern(pattern); + if (!options.nocomment && pattern.charAt(0) === "#") { + return false; + } + return new Minimatch(pattern, options).match(p); }; -var maybeMatch = (reg, str2) => { - const m = str2.match(reg); - return m ? 
m[0] : null; +var starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/; +var starDotExtTest = (ext2) => (f) => !f.startsWith(".") && f.endsWith(ext2); +var starDotExtTestDot = (ext2) => (f) => f.endsWith(ext2); +var starDotExtTestNocase = (ext2) => { + ext2 = ext2.toLowerCase(); + return (f) => !f.startsWith(".") && f.toLowerCase().endsWith(ext2); }; -var range = (a, b, str2) => { - let begs, beg, left, right = void 0, result; - let ai2 = str2.indexOf(a); - let bi2 = str2.indexOf(b, ai2 + 1); - let i = ai2; - if (ai2 >= 0 && bi2 > 0) { - if (a === b) { - return [ai2, bi2]; - } - begs = []; - left = str2.length; - while (i >= 0 && !result) { - if (i === ai2) { - begs.push(i); - ai2 = str2.indexOf(a, i + 1); - } else if (begs.length === 1) { - const r = begs.pop(); - if (r !== void 0) - result = [r, bi2]; - } else { - beg = begs.pop(); - if (beg !== void 0 && beg < left) { - left = beg; - right = bi2; - } - bi2 = str2.indexOf(b, i + 1); +var starDotExtTestNocaseDot = (ext2) => { + ext2 = ext2.toLowerCase(); + return (f) => f.toLowerCase().endsWith(ext2); +}; +var starDotStarRE = /^\*+\.\*+$/; +var starDotStarTest = (f) => !f.startsWith(".") && f.includes("."); +var starDotStarTestDot = (f) => f !== "." && f !== ".." && f.includes("."); +var dotStarRE = /^\.\*+$/; +var dotStarTest = (f) => f !== "." && f !== ".." && f.startsWith("."); +var starRE = /^\*+$/; +var starTest = (f) => f.length !== 0 && !f.startsWith("."); +var starTestDot = (f) => f.length !== 0 && f !== "." 
&& f !== ".."; +var qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/; +var qmarksTestNocase = ([$0, ext2 = ""]) => { + const noext = qmarksTestNoExt([$0]); + if (!ext2) + return noext; + ext2 = ext2.toLowerCase(); + return (f) => noext(f) && f.toLowerCase().endsWith(ext2); +}; +var qmarksTestNocaseDot = ([$0, ext2 = ""]) => { + const noext = qmarksTestNoExtDot([$0]); + if (!ext2) + return noext; + ext2 = ext2.toLowerCase(); + return (f) => noext(f) && f.toLowerCase().endsWith(ext2); +}; +var qmarksTestDot = ([$0, ext2 = ""]) => { + const noext = qmarksTestNoExtDot([$0]); + return !ext2 ? noext : (f) => noext(f) && f.endsWith(ext2); +}; +var qmarksTest = ([$0, ext2 = ""]) => { + const noext = qmarksTestNoExt([$0]); + return !ext2 ? noext : (f) => noext(f) && f.endsWith(ext2); +}; +var qmarksTestNoExt = ([$0]) => { + const len = $0.length; + return (f) => f.length === len && !f.startsWith("."); +}; +var qmarksTestNoExtDot = ([$0]) => { + const len = $0.length; + return (f) => f.length === len && f !== "." && f !== ".."; +}; +var defaultPlatform = typeof process === "object" && process ? typeof process.env === "object" && process.env && process.env.__MINIMATCH_TESTING_PLATFORM__ || process.platform : "posix"; +var path = { + win32: { sep: "\\" }, + posix: { sep: "/" } +}; +var sep = defaultPlatform === "win32" ? 
path.win32.sep : path.posix.sep; +minimatch.sep = sep; +var GLOBSTAR = /* @__PURE__ */ Symbol("globstar **"); +minimatch.GLOBSTAR = GLOBSTAR; +var qmark2 = "[^/]"; +var star2 = qmark2 + "*?"; +var twoStarDot = "(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?"; +var twoStarNoDot = "(?:(?!(?:\\/|^)\\.).)*?"; +var filter = (pattern, options = {}) => (p) => minimatch(p, pattern, options); +minimatch.filter = filter; +var ext = (a, b = {}) => Object.assign({}, a, b); +var defaults = (def) => { + if (!def || typeof def !== "object" || !Object.keys(def).length) { + return minimatch; + } + const orig = minimatch; + const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options)); + return Object.assign(m, { + Minimatch: class Minimatch extends orig.Minimatch { + constructor(pattern, options = {}) { + super(pattern, ext(def, options)); } - i = ai2 < bi2 && ai2 >= 0 ? ai2 : bi2; - } - if (begs.length && right !== void 0) { - result = [left, right]; - } + static defaults(options) { + return orig.defaults(ext(def, options)).Minimatch; + } + }, + AST: class AST extends orig.AST { + /* c8 ignore start */ + constructor(type2, parent, options = {}) { + super(type2, parent, ext(def, options)); + } + /* c8 ignore stop */ + static fromGlob(pattern, options = {}) { + return orig.AST.fromGlob(pattern, ext(def, options)); + } + }, + unescape: (s, options = {}) => orig.unescape(s, ext(def, options)), + escape: (s, options = {}) => orig.escape(s, ext(def, options)), + filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)), + defaults: (options) => orig.defaults(ext(def, options)), + makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)), + braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)), + match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)), + sep: orig.sep, + GLOBSTAR + }); +}; +minimatch.defaults = defaults; +var braceExpand = (pattern, options = {}) => { + 
assertValidPattern(pattern); + if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { + return [pattern]; } - return result; + return expand(pattern, { max: options.braceExpandMax }); }; - -// node_modules/brace-expansion/dist/esm/index.js -var escSlash = "\0SLASH" + Math.random() + "\0"; -var escOpen = "\0OPEN" + Math.random() + "\0"; -var escClose = "\0CLOSE" + Math.random() + "\0"; -var escComma = "\0COMMA" + Math.random() + "\0"; -var escPeriod = "\0PERIOD" + Math.random() + "\0"; -var escSlashPattern = new RegExp(escSlash, "g"); -var escOpenPattern = new RegExp(escOpen, "g"); -var escClosePattern = new RegExp(escClose, "g"); -var escCommaPattern = new RegExp(escComma, "g"); -var escPeriodPattern = new RegExp(escPeriod, "g"); -var slashPattern = /\\\\/g; -var openPattern = /\\{/g; -var closePattern = /\\}/g; -var commaPattern = /\\,/g; -var periodPattern = /\\\./g; -var EXPANSION_MAX = 1e5; -function numeric(str2) { - return !isNaN(str2) ? parseInt(str2, 10) : str2.charCodeAt(0); -} -function escapeBraces(str2) { - return str2.replace(slashPattern, escSlash).replace(openPattern, escOpen).replace(closePattern, escClose).replace(commaPattern, escComma).replace(periodPattern, escPeriod); -} -function unescapeBraces(str2) { - return str2.replace(escSlashPattern, "\\").replace(escOpenPattern, "{").replace(escClosePattern, "}").replace(escCommaPattern, ",").replace(escPeriodPattern, "."); -} -function parseCommaParts(str2) { - if (!str2) { - return [""]; - } - const parts = []; - const m = balanced("{", "}", str2); - if (!m) { - return str2.split(","); +minimatch.braceExpand = braceExpand; +var makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe(); +minimatch.makeRe = makeRe; +var match = (list, pattern, options = {}) => { + const mm = new Minimatch(pattern, options); + list = list.filter((f) => mm.match(f)); + if (mm.options.nonull && !list.length) { + list.push(pattern); } - const { pre, body, post } = m; - const p = pre.split(","); - 
p[p.length - 1] += "{" + body + "}"; - const postParts = parseCommaParts(post); - if (post.length) { - ; - p[p.length - 1] += postParts.shift(); - p.push.apply(p, postParts); + return list; +}; +minimatch.match = match; +var globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/; +var regExpEscape2 = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&"); +var Minimatch = class { + options; + set; + pattern; + windowsPathsNoEscape; + nonegate; + negate; + comment; + empty; + preserveMultipleSlashes; + partial; + globSet; + globParts; + nocase; + isWindows; + platform; + windowsNoMagicRoot; + maxGlobstarRecursion; + regexp; + constructor(pattern, options = {}) { + assertValidPattern(pattern); + options = options || {}; + this.options = options; + this.maxGlobstarRecursion = options.maxGlobstarRecursion ?? 200; + this.pattern = pattern; + this.platform = options.platform || defaultPlatform; + this.isWindows = this.platform === "win32"; + const awe = "allowWindowsEscape"; + this.windowsPathsNoEscape = !!options.windowsPathsNoEscape || options[awe] === false; + if (this.windowsPathsNoEscape) { + this.pattern = this.pattern.replace(/\\/g, "/"); + } + this.preserveMultipleSlashes = !!options.preserveMultipleSlashes; + this.regexp = null; + this.negate = false; + this.nonegate = !!options.nonegate; + this.comment = false; + this.empty = false; + this.partial = !!options.partial; + this.nocase = !!this.options.nocase; + this.windowsNoMagicRoot = options.windowsNoMagicRoot !== void 0 ? 
options.windowsNoMagicRoot : !!(this.isWindows && this.nocase); + this.globSet = []; + this.globParts = []; + this.set = []; + this.make(); } - parts.push.apply(parts, p); - return parts; -} -function expand(str2, options = {}) { - if (!str2) { - return []; + hasMagic() { + if (this.options.magicalBraces && this.set.length > 1) { + return true; + } + for (const pattern of this.set) { + for (const part of pattern) { + if (typeof part !== "string") + return true; + } + } + return false; } - const { max = EXPANSION_MAX } = options; - if (str2.slice(0, 2) === "{}") { - str2 = "\\{\\}" + str2.slice(2); + debug(..._2) { } - return expand_(escapeBraces(str2), max, true).map(unescapeBraces); -} -function embrace(str2) { - return "{" + str2 + "}"; -} -function isPadded(el) { - return /^-?0\d/.test(el); -} -function lte(i, y) { - return i <= y; -} -function gte(i, y) { - return i >= y; -} -function expand_(str2, max, isTop) { - const expansions = []; - const m = balanced("{", "}", str2); - if (!m) - return [str2]; - const pre = m.pre; - const post = m.post.length ? 
expand_(m.post, max, false) : [""]; - if (/\$$/.test(m.pre)) { - for (let k2 = 0; k2 < post.length && k2 < max; k2++) { - const expansion = pre + "{" + m.body + "}" + post[k2]; - expansions.push(expansion); + make() { + const pattern = this.pattern; + const options = this.options; + if (!options.nocomment && pattern.charAt(0) === "#") { + this.comment = true; + return; } - } else { - const isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); - const isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); - const isSequence = isNumericSequence || isAlphaSequence; - const isOptions = m.body.indexOf(",") >= 0; - if (!isSequence && !isOptions) { - if (m.post.match(/,(?!,).*\}/)) { - str2 = m.pre + "{" + m.body + escClose + m.post; - return expand_(str2, max, true); - } - return [str2]; + if (!pattern) { + this.empty = true; + return; } - let n7; - if (isSequence) { - n7 = m.body.split(/\.\./); - } else { - n7 = parseCommaParts(m.body); - if (n7.length === 1 && n7[0] !== void 0) { - n7 = expand_(n7[0], max, false).map(embrace); - if (n7.length === 1) { - return post.map((p) => m.pre + n7[0] + p); - } - } + this.parseNegate(); + this.globSet = [...new Set(this.braceExpand())]; + if (options.debug) { + this.debug = (...args) => console.error(...args); } - let N2; - if (isSequence && n7[0] !== void 0 && n7[1] !== void 0) { - const x2 = numeric(n7[0]); - const y = numeric(n7[1]); - const width = Math.max(n7[0].length, n7[1].length); - let incr = n7.length === 3 && n7[2] !== void 0 ? 
Math.max(Math.abs(numeric(n7[2])), 1) : 1; - let test = lte; - const reverse = y < x2; - if (reverse) { - incr *= -1; - test = gte; - } - const pad = n7.some(isPadded); - N2 = []; - for (let i = x2; test(i, y); i += incr) { - let c; - if (isAlphaSequence) { - c = String.fromCharCode(i); - if (c === "\\") { - c = ""; - } - } else { - c = String(i); - if (pad) { - const need = width - c.length; - if (need > 0) { - const z = new Array(need + 1).join("0"); - if (i < 0) { - c = "-" + z + c.slice(1); - } else { - c = z + c; - } - } - } + this.debug(this.pattern, this.globSet); + const rawGlobParts = this.globSet.map((s) => this.slashSplit(s)); + this.globParts = this.preprocess(rawGlobParts); + this.debug(this.pattern, this.globParts); + let set2 = this.globParts.map((s, _2, __) => { + if (this.isWindows && this.windowsNoMagicRoot) { + const isUNC = s[0] === "" && s[1] === "" && (s[2] === "?" || !globMagic.test(s[2])) && !globMagic.test(s[3]); + const isDrive = /^[a-z]:/i.test(s[0]); + if (isUNC) { + return [ + ...s.slice(0, 4), + ...s.slice(4).map((ss2) => this.parse(ss2)) + ]; + } else if (isDrive) { + return [s[0], ...s.slice(1).map((ss2) => this.parse(ss2))]; } - N2.push(c); } - } else { - N2 = []; - for (let j2 = 0; j2 < n7.length; j2++) { - N2.push.apply(N2, expand_(n7[j2], max, false)); + return s.map((ss2) => this.parse(ss2)); + }); + this.debug(this.pattern, set2); + this.set = set2.filter((s) => s.indexOf(false) === -1); + if (this.isWindows) { + for (let i = 0; i < this.set.length; i++) { + const p = this.set[i]; + if (p[0] === "" && p[1] === "" && this.globParts[i][2] === "?" 
&& typeof p[3] === "string" && /^[a-z]:$/i.test(p[3])) { + p[2] = "?"; + } } } - for (let j2 = 0; j2 < N2.length; j2++) { - for (let k2 = 0; k2 < post.length && expansions.length < max; k2++) { - const expansion = pre + N2[j2] + post[k2]; - if (!isTop || isSequence || expansion) { - expansions.push(expansion); + this.debug(this.pattern, this.set); + } + // various transforms to equivalent pattern sets that are + // faster to process in a filesystem walk. The goal is to + // eliminate what we can, and push all ** patterns as far + // to the right as possible, even if it increases the number + // of patterns that we have to process. + preprocess(globParts) { + if (this.options.noglobstar) { + for (let i = 0; i < globParts.length; i++) { + for (let j2 = 0; j2 < globParts[i].length; j2++) { + if (globParts[i][j2] === "**") { + globParts[i][j2] = "*"; + } } } } + const { optimizationLevel = 1 } = this.options; + if (optimizationLevel >= 2) { + globParts = this.firstPhasePreProcess(globParts); + globParts = this.secondPhasePreProcess(globParts); + } else if (optimizationLevel >= 1) { + globParts = this.levelOneOptimize(globParts); + } else { + globParts = this.adjascentGlobstarOptimize(globParts); + } + return globParts; } - return expansions; -} - -// node_modules/minimatch/dist/esm/assert-valid-pattern.js -var MAX_PATTERN_LENGTH = 1024 * 64; -var assertValidPattern = (pattern) => { - if (typeof pattern !== "string") { - throw new TypeError("invalid pattern"); - } - if (pattern.length > MAX_PATTERN_LENGTH) { - throw new TypeError("pattern is too long"); + // just get rid of adjascent ** portions + adjascentGlobstarOptimize(globParts) { + return globParts.map((parts) => { + let gs2 = -1; + while (-1 !== (gs2 = parts.indexOf("**", gs2 + 1))) { + let i = gs2; + while (parts[i + 1] === "**") { + i++; + } + if (i !== gs2) { + parts.splice(gs2, i - gs2); + } + } + return parts; + }); } -}; - -// node_modules/minimatch/dist/esm/brace-expressions.js -var posixClasses = { - 
"[:alnum:]": ["\\p{L}\\p{Nl}\\p{Nd}", true], - "[:alpha:]": ["\\p{L}\\p{Nl}", true], - "[:ascii:]": ["\\x00-\\x7f", false], - "[:blank:]": ["\\p{Zs}\\t", true], - "[:cntrl:]": ["\\p{Cc}", true], - "[:digit:]": ["\\p{Nd}", true], - "[:graph:]": ["\\p{Z}\\p{C}", true, true], - "[:lower:]": ["\\p{Ll}", true], - "[:print:]": ["\\p{C}", true], - "[:punct:]": ["\\p{P}", true], - "[:space:]": ["\\p{Z}\\t\\r\\n\\v\\f", true], - "[:upper:]": ["\\p{Lu}", true], - "[:word:]": ["\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}", true], - "[:xdigit:]": ["A-Fa-f0-9", false] -}; -var braceEscape = (s) => s.replace(/[[\]\\-]/g, "\\$&"); -var regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&"); -var rangesToString = (ranges) => ranges.join(""); -var parseClass = (glob, position) => { - const pos = position; - if (glob.charAt(pos) !== "[") { - throw new Error("not in a brace expression"); + // get rid of adjascent ** and resolve .. portions + levelOneOptimize(globParts) { + return globParts.map((parts) => { + parts = parts.reduce((set2, part) => { + const prev = set2[set2.length - 1]; + if (part === "**" && prev === "**") { + return set2; + } + if (part === "..") { + if (prev && prev !== ".." && prev !== "." && prev !== "**") { + set2.pop(); + return set2; + } + } + set2.push(part); + return set2; + }, []); + return parts.length === 0 ? [""] : parts; + }); } - const ranges = []; - const negs = []; - let i = pos + 1; - let sawStart = false; - let uflag = false; - let escaping = false; - let negate = false; - let endPos = pos; - let rangeStart = ""; - WHILE: while (i < glob.length) { - const c = glob.charAt(i); - if ((c === "!" 
|| c === "^") && i === pos + 1) { - negate = true; - i++; - continue; - } - if (c === "]" && sawStart && !escaping) { - endPos = i + 1; - break; + levelTwoFileOptimize(parts) { + if (!Array.isArray(parts)) { + parts = this.slashSplit(parts); } - sawStart = true; - if (c === "\\") { - if (!escaping) { - escaping = true; - i++; - continue; + let didSomething = false; + do { + didSomething = false; + if (!this.preserveMultipleSlashes) { + for (let i = 1; i < parts.length - 1; i++) { + const p = parts[i]; + if (i === 1 && p === "" && parts[0] === "") + continue; + if (p === "." || p === "") { + didSomething = true; + parts.splice(i, 1); + i--; + } + } + if (parts[0] === "." && parts.length === 2 && (parts[1] === "." || parts[1] === "")) { + didSomething = true; + parts.pop(); + } } - } - if (c === "[" && !escaping) { - for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) { - if (glob.startsWith(cls, i)) { - if (rangeStart) { - return ["$.", false, glob.length - pos, true]; + let dd = 0; + while (-1 !== (dd = parts.indexOf("..", dd + 1))) { + const p = parts[dd - 1]; + if (p && p !== "." && p !== ".." && p !== "**") { + didSomething = true; + parts.splice(dd - 1, 2); + dd -= 2; + } + } + } while (didSomething); + return parts.length === 0 ? [""] : parts; + } + // First phase: single-pattern processing + //
 is 1 or more portions
+  //  is 1 or more portions
+  // 

is any portion other than ., .., '', or ** + // is . or '' + // + // **/.. is *brutal* for filesystem walking performance, because + // it effectively resets the recursive walk each time it occurs, + // and ** cannot be reduced out by a .. pattern part like a regexp + // or most strings (other than .., ., and '') can be. + // + //

/**/../

/

/ -> {

/../

/

/,

/**/

/

/} + //

// -> 
/
+  // 
/

/../ ->

/
+  // **/**/ -> **/
+  //
+  // **/*/ -> */**/ <== not valid because ** doesn't follow
+  // this WOULD be allowed if ** did follow symlinks, or * didn't
+  firstPhasePreProcess(globParts) {
+    let didSomething = false;
+    do {
+      didSomething = false;
+      for (let parts of globParts) {
+        let gs2 = -1;
+        while (-1 !== (gs2 = parts.indexOf("**", gs2 + 1))) {
+          let gss = gs2;
+          while (parts[gss + 1] === "**") {
+            gss++;
+          }
+          if (gss > gs2) {
+            parts.splice(gs2 + 1, gss - gs2);
+          }
+          let next = parts[gs2 + 1];
+          const p = parts[gs2 + 2];
+          const p2 = parts[gs2 + 3];
+          if (next !== "..")
+            continue;
+          if (!p || p === "." || p === ".." || !p2 || p2 === "." || p2 === "..") {
+            continue;
+          }
+          didSomething = true;
+          parts.splice(gs2, 1);
+          const other = parts.slice(0);
+          other[gs2] = "**";
+          globParts.push(other);
+          gs2--;
+        }
+        if (!this.preserveMultipleSlashes) {
+          for (let i = 1; i < parts.length - 1; i++) {
+            const p = parts[i];
+            if (i === 1 && p === "" && parts[0] === "")
+              continue;
+            if (p === "." || p === "") {
+              didSomething = true;
+              parts.splice(i, 1);
+              i--;
+            }
+          }
+          if (parts[0] === "." && parts.length === 2 && (parts[1] === "." || parts[1] === "")) {
+            didSomething = true;
+            parts.pop();
+          }
+        }
+        let dd = 0;
+        while (-1 !== (dd = parts.indexOf("..", dd + 1))) {
+          const p = parts[dd - 1];
+          if (p && p !== "." && p !== ".." && p !== "**") {
+            didSomething = true;
+            const needDot = dd === 1 && parts[dd + 1] === "**";
+            const splin = needDot ? ["."] : [];
+            parts.splice(dd - 1, 2, ...splin);
+            if (parts.length === 0)
+              parts.push("");
+            dd -= 2;
           }
-          i += cls.length;
-          if (neg)
-            negs.push(unip);
-          else
-            ranges.push(unip);
-          uflag = uflag || u;
-          continue WHILE;
         }
       }
-    }
-    escaping = false;
-    if (rangeStart) {
-      if (c > rangeStart) {
-        ranges.push(braceEscape(rangeStart) + "-" + braceEscape(c));
-      } else if (c === rangeStart) {
-        ranges.push(braceEscape(c));
+    } while (didSomething);
+    return globParts;
+  }
+  // second phase: multi-pattern dedupes
+  // {
/*/,
/

/} ->

/*/
+  // {
/,
/} -> 
/
+  // {
/**/,
/} -> 
/**/
+  //
+  // {
/**/,
/**/

/} ->

/**/
+  // ^-- not valid because ** doens't follow symlinks
+  secondPhasePreProcess(globParts) {
+    for (let i = 0; i < globParts.length - 1; i++) {
+      for (let j2 = i + 1; j2 < globParts.length; j2++) {
+        const matched = this.partsMatch(globParts[i], globParts[j2], !this.preserveMultipleSlashes);
+        if (matched) {
+          globParts[i] = [];
+          globParts[j2] = matched;
+          break;
+        }
       }
-      rangeStart = "";
-      i++;
-      continue;
-    }
-    if (glob.startsWith("-]", i + 1)) {
-      ranges.push(braceEscape(c + "-"));
-      i += 2;
-      continue;
-    }
-    if (glob.startsWith("-", i + 1)) {
-      rangeStart = c;
-      i += 2;
-      continue;
     }
-    ranges.push(braceEscape(c));
-    i++;
-  }
-  if (endPos < i) {
-    return ["", false, 0, false];
+    return globParts.filter((gs2) => gs2.length);
   }
-  if (!ranges.length && !negs.length) {
-    return ["$.", false, glob.length - pos, true];
-  }
-  if (negs.length === 0 && ranges.length === 1 && /^\\?.$/.test(ranges[0]) && !negate) {
-    const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
-    return [regexpEscape(r), false, endPos - pos, false];
-  }
-  const sranges = "[" + (negate ? "^" : "") + rangesToString(ranges) + "]";
-  const snegs = "[" + (negate ? "" : "^") + rangesToString(negs) + "]";
-  const comb = ranges.length && negs.length ? "(" + sranges + "|" + snegs + ")" : ranges.length ? sranges : snegs;
-  return [comb, uflag, endPos - pos, true];
-};
-
-// node_modules/minimatch/dist/esm/unescape.js
-var unescape = (s, { windowsPathsNoEscape = false, magicalBraces = true } = {}) => {
-  if (magicalBraces) {
-    return windowsPathsNoEscape ? s.replace(/\[([^\/\\])\]/g, "$1") : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, "$1$2").replace(/\\([^\/])/g, "$1");
-  }
-  return windowsPathsNoEscape ? s.replace(/\[([^\/\\{}])\]/g, "$1") : s.replace(/((?!\\).|^)\[([^\/\\{}])\]/g, "$1$2").replace(/\\([^\/{}])/g, "$1");
-};
-
-// node_modules/minimatch/dist/esm/ast.js
-var _a;
-var types = /* @__PURE__ */ new Set(["!", "?", "+", "*", "@"]);
-var isExtglobType = (c) => types.has(c);
-var isExtglobAST = (c) => isExtglobType(c.type);
-var adoptionMap = /* @__PURE__ */ new Map([
-  ["!", ["@"]],
-  ["?", ["?", "@"]],
-  ["@", ["@"]],
-  ["*", ["*", "+", "?", "@"]],
-  ["+", ["+", "@"]]
-]);
-var adoptionWithSpaceMap = /* @__PURE__ */ new Map([
-  ["!", ["?"]],
-  ["@", ["?"]],
-  ["+", ["?", "*"]]
-]);
-var adoptionAnyMap = /* @__PURE__ */ new Map([
-  ["!", ["?", "@"]],
-  ["?", ["?", "@"]],
-  ["@", ["?", "@"]],
-  ["*", ["*", "+", "?", "@"]],
-  ["+", ["+", "@", "?", "*"]]
-]);
-var usurpMap = /* @__PURE__ */ new Map([
-  ["!", /* @__PURE__ */ new Map([["!", "@"]])],
-  [
-    "?",
-    /* @__PURE__ */ new Map([
-      ["*", "*"],
-      ["+", "*"]
-    ])
-  ],
-  [
-    "@",
-    /* @__PURE__ */ new Map([
-      ["!", "!"],
-      ["?", "?"],
-      ["@", "@"],
-      ["*", "*"],
-      ["+", "+"]
-    ])
-  ],
-  [
-    "+",
-    /* @__PURE__ */ new Map([
-      ["?", "*"],
-      ["*", "*"]
-    ])
-  ]
-]);
-var startNoTraversal = "(?!(?:^|/)\\.\\.?(?:$|/))";
-var startNoDot = "(?!\\.)";
-var addPatternStart = /* @__PURE__ */ new Set(["[", "."]);
-var justDots = /* @__PURE__ */ new Set(["..", "."]);
-var reSpecials = new Set("().*{}+?[]^$\\!");
-var regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&");
-var qmark = "[^/]";
-var star = qmark + "*?";
-var starNoEmpty = qmark + "+?";
-var ID = 0;
-var AST = class {
-  type;
-  #root;
-  #hasMagic;
-  #uflag = false;
-  #parts = [];
-  #parent;
-  #parentIndex;
-  #negs;
-  #filledNegs = false;
-  #options;
-  #toString;
-  // set to true if it's an extglob with no children
-  // (which really means one child of '')
-  #emptyExt = false;
-  id = ++ID;
-  get depth() {
-    return (this.#parent?.depth ?? -1) + 1;
-  }
-  [/* @__PURE__ */ Symbol.for("nodejs.util.inspect.custom")]() {
-    return {
-      "@@type": "AST",
-      id: this.id,
-      type: this.type,
-      root: this.#root.id,
-      parent: this.#parent?.id,
-      depth: this.depth,
-      partsLength: this.#parts.length,
-      parts: this.#parts
-    };
-  }
-  constructor(type2, parent, options = {}) {
-    this.type = type2;
-    if (type2)
-      this.#hasMagic = true;
-    this.#parent = parent;
-    this.#root = this.#parent ? this.#parent.#root : this;
-    this.#options = this.#root === this ? options : this.#root.#options;
-    this.#negs = this.#root === this ? [] : this.#root.#negs;
-    if (type2 === "!" && !this.#root.#filledNegs)
-      this.#negs.push(this);
-    this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
-  }
-  get hasMagic() {
-    if (this.#hasMagic !== void 0)
-      return this.#hasMagic;
-    for (const p of this.#parts) {
-      if (typeof p === "string")
-        continue;
-      if (p.type || p.hasMagic)
-        return this.#hasMagic = true;
+  partsMatch(a, b, emptyGSMatch = false) {
+    let ai2 = 0;
+    let bi2 = 0;
+    let result = [];
+    let which = "";
+    while (ai2 < a.length && bi2 < b.length) {
+      if (a[ai2] === b[bi2]) {
+        result.push(which === "b" ? b[bi2] : a[ai2]);
+        ai2++;
+        bi2++;
+      } else if (emptyGSMatch && a[ai2] === "**" && b[bi2] === a[ai2 + 1]) {
+        result.push(a[ai2]);
+        ai2++;
+      } else if (emptyGSMatch && b[bi2] === "**" && a[ai2] === b[bi2 + 1]) {
+        result.push(b[bi2]);
+        bi2++;
+      } else if (a[ai2] === "*" && b[bi2] && (this.options.dot || !b[bi2].startsWith(".")) && b[bi2] !== "**") {
+        if (which === "b")
+          return false;
+        which = "a";
+        result.push(a[ai2]);
+        ai2++;
+        bi2++;
+      } else if (b[bi2] === "*" && a[ai2] && (this.options.dot || !a[ai2].startsWith(".")) && a[ai2] !== "**") {
+        if (which === "a")
+          return false;
+        which = "b";
+        result.push(b[bi2]);
+        ai2++;
+        bi2++;
+      } else {
+        return false;
+      }
     }
-    return this.#hasMagic;
+    return a.length === b.length && result;
   }
-  // reconstructs the pattern
-  toString() {
-    if (this.#toString !== void 0)
-      return this.#toString;
-    if (!this.type) {
-      return this.#toString = this.#parts.map((p) => String(p)).join("");
-    } else {
-      return this.#toString = this.type + "(" + this.#parts.map((p) => String(p)).join("|") + ")";
+  parseNegate() {
+    if (this.nonegate)
+      return;
+    const pattern = this.pattern;
+    let negate = false;
+    let negateOffset = 0;
+    for (let i = 0; i < pattern.length && pattern.charAt(i) === "!"; i++) {
+      negate = !negate;
+      negateOffset++;
     }
+    if (negateOffset)
+      this.pattern = pattern.slice(negateOffset);
+    this.negate = negate;
   }
-  #fillNegs() {
-    if (this !== this.#root)
-      throw new Error("should only call on root");
-    if (this.#filledNegs)
-      return this;
-    this.toString();
-    this.#filledNegs = true;
-    let n7;
-    while (n7 = this.#negs.pop()) {
-      if (n7.type !== "!")
-        continue;
-      let p = n7;
-      let pp = p.#parent;
-      while (pp) {
-        for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
-          for (const part of n7.#parts) {
-            if (typeof part === "string") {
-              throw new Error("string part in extglob AST??");
-            }
-            part.copyIn(pp.#parts[i]);
-          }
+  // set partial to true to test if, for example,
+  // "/a/b" matches the start of "/*/b/*/d"
+  // Partial means, if you run out of file before you run
+  // out of pattern, then that's fine, as long as all
+  // the parts match.
+  matchOne(file, pattern, partial = false) {
+    let fileStartIndex = 0;
+    let patternStartIndex = 0;
+    if (this.isWindows) {
+      const fileDrive = typeof file[0] === "string" && /^[a-z]:$/i.test(file[0]);
+      const fileUNC = !fileDrive && file[0] === "" && file[1] === "" && file[2] === "?" && /^[a-z]:$/i.test(file[3]);
+      const patternDrive = typeof pattern[0] === "string" && /^[a-z]:$/i.test(pattern[0]);
+      const patternUNC = !patternDrive && pattern[0] === "" && pattern[1] === "" && pattern[2] === "?" && typeof pattern[3] === "string" && /^[a-z]:$/i.test(pattern[3]);
+      const fdi = fileUNC ? 3 : fileDrive ? 0 : void 0;
+      const pdi = patternUNC ? 3 : patternDrive ? 0 : void 0;
+      if (typeof fdi === "number" && typeof pdi === "number") {
+        const [fd, pd] = [
+          file[fdi],
+          pattern[pdi]
+        ];
+        if (fd.toLowerCase() === pd.toLowerCase()) {
+          pattern[pdi] = fd;
+          patternStartIndex = pdi;
+          fileStartIndex = fdi;
         }
-        p = pp;
-        pp = p.#parent;
       }
     }
-    return this;
-  }
-  push(...parts) {
-    for (const p of parts) {
-      if (p === "")
-        continue;
-      if (typeof p !== "string" && !(p instanceof _a && p.#parent === this)) {
-        throw new Error("invalid part: " + p);
-      }
-      this.#parts.push(p);
+    const { optimizationLevel = 1 } = this.options;
+    if (optimizationLevel >= 2) {
+      file = this.levelTwoFileOptimize(file);
     }
-  }
-  toJSON() {
-    const ret = this.type === null ? this.#parts.slice().map((p) => typeof p === "string" ? p : p.toJSON()) : [this.type, ...this.#parts.map((p) => p.toJSON())];
-    if (this.isStart() && !this.type)
-      ret.unshift([]);
-    if (this.isEnd() && (this === this.#root || this.#root.#filledNegs && this.#parent?.type === "!")) {
-      ret.push({});
+    if (pattern.includes(GLOBSTAR)) {
+      return this.#matchGlobstar(file, pattern, partial, fileStartIndex, patternStartIndex);
     }
-    return ret;
+    return this.#matchOne(file, pattern, partial, fileStartIndex, patternStartIndex);
   }
-  isStart() {
-    if (this.#root === this)
-      return true;
-    if (!this.#parent?.isStart())
-      return false;
-    if (this.#parentIndex === 0)
-      return true;
-    const p = this.#parent;
-    for (let i = 0; i < this.#parentIndex; i++) {
-      const pp = p.#parts[i];
-      if (!(pp instanceof _a && pp.type === "!")) {
+  #matchGlobstar(file, pattern, partial, fileIndex, patternIndex) {
+    const firstgs = pattern.indexOf(GLOBSTAR, patternIndex);
+    const lastgs = pattern.lastIndexOf(GLOBSTAR);
+    const [head, body, tail] = partial ? [
+      pattern.slice(patternIndex, firstgs),
+      pattern.slice(firstgs + 1),
+      []
+    ] : [
+      pattern.slice(patternIndex, firstgs),
+      pattern.slice(firstgs + 1, lastgs),
+      pattern.slice(lastgs + 1)
+    ];
+    if (head.length) {
+      const fileHead = file.slice(fileIndex, fileIndex + head.length);
+      if (!this.#matchOne(fileHead, head, partial, 0, 0)) {
         return false;
       }
+      fileIndex += head.length;
+      patternIndex += head.length;
     }
-    return true;
-  }
-  isEnd() {
-    if (this.#root === this)
-      return true;
-    if (this.#parent?.type === "!")
-      return true;
-    if (!this.#parent?.isEnd())
-      return false;
-    if (!this.type)
-      return this.#parent?.isEnd();
-    const pl = this.#parent ? this.#parent.#parts.length : 0;
-    return this.#parentIndex === pl - 1;
-  }
-  copyIn(part) {
-    if (typeof part === "string")
-      this.push(part);
-    else
-      this.push(part.clone(this));
-  }
-  clone(parent) {
-    const c = new _a(this.type, parent);
-    for (const p of this.#parts) {
-      c.copyIn(p);
-    }
-    return c;
-  }
-  static #parseAST(str2, ast, pos, opt, extDepth) {
-    const maxDepth = opt.maxExtglobRecursion ?? 2;
-    let escaping = false;
-    let inBrace = false;
-    let braceStart = -1;
-    let braceNeg = false;
-    if (ast.type === null) {
-      let i2 = pos;
-      let acc2 = "";
-      while (i2 < str2.length) {
-        const c = str2.charAt(i2++);
-        if (escaping || c === "\\") {
-          escaping = !escaping;
-          acc2 += c;
-          continue;
-        }
-        if (inBrace) {
-          if (i2 === braceStart + 1) {
-            if (c === "^" || c === "!") {
-              braceNeg = true;
-            }
-          } else if (c === "]" && !(i2 === braceStart + 2 && braceNeg)) {
-            inBrace = false;
-          }
-          acc2 += c;
-          continue;
-        } else if (c === "[") {
-          inBrace = true;
-          braceStart = i2;
-          braceNeg = false;
-          acc2 += c;
-          continue;
+    let fileTailMatch = 0;
+    if (tail.length) {
+      if (tail.length + fileIndex > file.length)
+        return false;
+      let tailStart = file.length - tail.length;
+      if (this.#matchOne(file, tail, partial, tailStart, 0)) {
+        fileTailMatch = tail.length;
+      } else {
+        if (file[file.length - 1] !== "" || fileIndex + tail.length === file.length) {
+          return false;
         }
-        const doRecurse = !opt.noext && isExtglobType(c) && str2.charAt(i2) === "(" && extDepth <= maxDepth;
-        if (doRecurse) {
-          ast.push(acc2);
-          acc2 = "";
-          const ext2 = new _a(c, ast);
-          i2 = _a.#parseAST(str2, ext2, i2, opt, extDepth + 1);
-          ast.push(ext2);
-          continue;
+        tailStart--;
+        if (!this.#matchOne(file, tail, partial, tailStart, 0)) {
+          return false;
         }
-        acc2 += c;
+        fileTailMatch = tail.length + 1;
       }
-      ast.push(acc2);
-      return i2;
     }
-    let i = pos + 1;
-    let part = new _a(null, ast);
-    const parts = [];
-    let acc = "";
-    while (i < str2.length) {
-      const c = str2.charAt(i++);
-      if (escaping || c === "\\") {
-        escaping = !escaping;
-        acc += c;
-        continue;
-      }
-      if (inBrace) {
-        if (i === braceStart + 1) {
-          if (c === "^" || c === "!") {
-            braceNeg = true;
-          }
-        } else if (c === "]" && !(i === braceStart + 2 && braceNeg)) {
-          inBrace = false;
+    if (!body.length) {
+      let sawSome = !!fileTailMatch;
+      for (let i2 = fileIndex; i2 < file.length - fileTailMatch; i2++) {
+        const f = String(file[i2]);
+        sawSome = true;
+        if (f === "." || f === ".." || !this.options.dot && f.startsWith(".")) {
+          return false;
         }
-        acc += c;
-        continue;
-      } else if (c === "[") {
-        inBrace = true;
-        braceStart = i;
-        braceNeg = false;
-        acc += c;
-        continue;
       }
-      const doRecurse = !opt.noext && isExtglobType(c) && str2.charAt(i) === "(" && /* c8 ignore start - the maxDepth is sufficient here */
-      (extDepth <= maxDepth || ast && ast.#canAdoptType(c));
-      if (doRecurse) {
-        const depthAdd = ast && ast.#canAdoptType(c) ? 0 : 1;
-        part.push(acc);
-        acc = "";
-        const ext2 = new _a(c, part);
-        part.push(ext2);
-        i = _a.#parseAST(str2, ext2, i, opt, extDepth + depthAdd);
-        continue;
+      return partial || sawSome;
+    }
+    const bodySegments = [[[], 0]];
+    let currentBody = bodySegments[0];
+    let nonGsParts = 0;
+    const nonGsPartsSums = [0];
+    for (const b of body) {
+      if (b === GLOBSTAR) {
+        nonGsPartsSums.push(nonGsParts);
+        currentBody = [[], 0];
+        bodySegments.push(currentBody);
+      } else {
+        currentBody[0].push(b);
+        nonGsParts++;
       }
-      if (c === "|") {
-        part.push(acc);
-        acc = "";
-        parts.push(part);
-        part = new _a(null, ast);
-        continue;
+    }
+    let i = bodySegments.length - 1;
+    const fileLength = file.length - fileTailMatch;
+    for (const b of bodySegments) {
+      b[1] = fileLength - (nonGsPartsSums[i--] + b[0].length);
+    }
+    return !!this.#matchGlobStarBodySections(file, bodySegments, fileIndex, 0, partial, 0, !!fileTailMatch);
+  }
+  // return false for "nope, not matching"
+  // return null for "not matching, cannot keep trying"
+  #matchGlobStarBodySections(file, bodySegments, fileIndex, bodyIndex, partial, globStarDepth, sawTail) {
+    const bs2 = bodySegments[bodyIndex];
+    if (!bs2) {
+      for (let i = fileIndex; i < file.length; i++) {
+        sawTail = true;
+        const f = file[i];
+        if (f === "." || f === ".." || !this.options.dot && f.startsWith(".")) {
+          return false;
+        }
       }
-      if (c === ")") {
-        if (acc === "" && ast.#parts.length === 0) {
-          ast.#emptyExt = true;
+      return sawTail;
+    }
+    const [body, after] = bs2;
+    while (fileIndex <= after) {
+      const m = this.#matchOne(file.slice(0, fileIndex + body.length), body, partial, fileIndex, 0);
+      if (m && globStarDepth < this.maxGlobstarRecursion) {
+        const sub = this.#matchGlobStarBodySections(file, bodySegments, fileIndex + body.length, bodyIndex + 1, partial, globStarDepth + 1, sawTail);
+        if (sub !== false) {
+          return sub;
         }
-        part.push(acc);
-        acc = "";
-        ast.push(...parts, part);
-        return i;
       }
-      acc += c;
+      const f = file[fileIndex];
+      if (f === "." || f === ".." || !this.options.dot && f.startsWith(".")) {
+        return false;
+      }
+      fileIndex++;
     }
-    ast.type = null;
-    ast.#hasMagic = void 0;
-    ast.#parts = [str2.substring(pos - 1)];
-    return i;
-  }
-  #canAdoptWithSpace(child) {
-    return this.#canAdopt(child, adoptionWithSpaceMap);
+    return partial || null;
   }
-  #canAdopt(child, map2 = adoptionMap) {
-    if (!child || typeof child !== "object" || child.type !== null || child.#parts.length !== 1 || this.type === null) {
-      return false;
-    }
-    const gc = child.#parts[0];
-    if (!gc || typeof gc !== "object" || gc.type === null) {
-      return false;
+  #matchOne(file, pattern, partial, fileIndex, patternIndex) {
+    let fi2;
+    let pi2;
+    let pl;
+    let fl;
+    for (fi2 = fileIndex, pi2 = patternIndex, fl = file.length, pl = pattern.length; fi2 < fl && pi2 < pl; fi2++, pi2++) {
+      this.debug("matchOne loop");
+      let p = pattern[pi2];
+      let f = file[fi2];
+      this.debug(pattern, p, f);
+      if (p === false || p === GLOBSTAR) {
+        return false;
+      }
+      let hit;
+      if (typeof p === "string") {
+        hit = f === p;
+        this.debug("string match", p, f, hit);
+      } else {
+        hit = p.test(f);
+        this.debug("pattern match", p, f, hit);
+      }
+      if (!hit)
+        return false;
+    }
+    if (fi2 === fl && pi2 === pl) {
+      return true;
+    } else if (fi2 === fl) {
+      return partial;
+    } else if (pi2 === pl) {
+      return fi2 === fl - 1 && file[fi2] === "";
+    } else {
+      throw new Error("wtf?");
     }
-    return this.#canAdoptType(gc.type, map2);
   }
-  #canAdoptType(c, map2 = adoptionAnyMap) {
-    return !!map2.get(this.type)?.includes(c);
+  braceExpand() {
+    return braceExpand(this.pattern, this.options);
   }
-  #adoptWithSpace(child, index) {
-    const gc = child.#parts[0];
-    const blank = new _a(null, gc, this.options);
-    blank.#parts.push("");
-    gc.push(blank);
-    this.#adopt(child, index);
+  parse(pattern) {
+    assertValidPattern(pattern);
+    const options = this.options;
+    if (pattern === "**")
+      return GLOBSTAR;
+    if (pattern === "")
+      return "";
+    let m;
+    let fastTest = null;
+    if (m = pattern.match(starRE)) {
+      fastTest = options.dot ? starTestDot : starTest;
+    } else if (m = pattern.match(starDotExtRE)) {
+      fastTest = (options.nocase ? options.dot ? starDotExtTestNocaseDot : starDotExtTestNocase : options.dot ? starDotExtTestDot : starDotExtTest)(m[1]);
+    } else if (m = pattern.match(qmarksRE)) {
+      fastTest = (options.nocase ? options.dot ? qmarksTestNocaseDot : qmarksTestNocase : options.dot ? qmarksTestDot : qmarksTest)(m);
+    } else if (m = pattern.match(starDotStarRE)) {
+      fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
+    } else if (m = pattern.match(dotStarRE)) {
+      fastTest = dotStarTest;
+    }
+    const re2 = AST.fromGlob(pattern, this.options).toMMPattern();
+    if (fastTest && typeof re2 === "object") {
+      Reflect.defineProperty(re2, "test", { value: fastTest });
+    }
+    return re2;
   }
-  #adopt(child, index) {
-    const gc = child.#parts[0];
-    this.#parts.splice(index, 1, ...gc.#parts);
-    for (const p of gc.#parts) {
-      if (typeof p === "object")
-        p.#parent = this;
+  makeRe() {
+    if (this.regexp || this.regexp === false)
+      return this.regexp;
+    const set2 = this.set;
+    if (!set2.length) {
+      this.regexp = false;
+      return this.regexp;
     }
-    this.#toString = void 0;
+    const options = this.options;
+    const twoStar = options.noglobstar ? star2 : options.dot ? twoStarDot : twoStarNoDot;
+    const flags = new Set(options.nocase ? ["i"] : []);
+    let re2 = set2.map((pattern) => {
+      const pp = pattern.map((p) => {
+        if (p instanceof RegExp) {
+          for (const f of p.flags.split(""))
+            flags.add(f);
+        }
+        return typeof p === "string" ? regExpEscape2(p) : p === GLOBSTAR ? GLOBSTAR : p._src;
+      });
+      pp.forEach((p, i) => {
+        const next = pp[i + 1];
+        const prev = pp[i - 1];
+        if (p !== GLOBSTAR || prev === GLOBSTAR) {
+          return;
+        }
+        if (prev === void 0) {
+          if (next !== void 0 && next !== GLOBSTAR) {
+            pp[i + 1] = "(?:\\/|" + twoStar + "\\/)?" + next;
+          } else {
+            pp[i] = twoStar;
+          }
+        } else if (next === void 0) {
+          pp[i - 1] = prev + "(?:\\/|\\/" + twoStar + ")?";
+        } else if (next !== GLOBSTAR) {
+          pp[i - 1] = prev + "(?:\\/|\\/" + twoStar + "\\/)" + next;
+          pp[i + 1] = GLOBSTAR;
+        }
+      });
+      const filtered = pp.filter((p) => p !== GLOBSTAR);
+      if (this.partial && filtered.length >= 1) {
+        const prefixes = [];
+        for (let i = 1; i <= filtered.length; i++) {
+          prefixes.push(filtered.slice(0, i).join("/"));
+        }
+        return "(?:" + prefixes.join("|") + ")";
+      }
+      return filtered.join("/");
+    }).join("|");
+    const [open, close] = set2.length > 1 ? ["(?:", ")"] : ["", ""];
+    re2 = "^" + open + re2 + close + "$";
+    if (this.partial) {
+      re2 = "^(?:\\/|" + open + re2.slice(1, -1) + close + ")$";
+    }
+    if (this.negate)
+      re2 = "^(?!" + re2 + ").+$";
+    try {
+      this.regexp = new RegExp(re2, [...flags].join(""));
+    } catch (ex) {
+      this.regexp = false;
+    }
+    return this.regexp;
   }
-  #canUsurpType(c) {
-    const m = usurpMap.get(this.type);
-    return !!m?.has(c);
+  slashSplit(p) {
+    if (this.preserveMultipleSlashes) {
+      return p.split("/");
+    } else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
+      return ["", ...p.split(/\/+/)];
+    } else {
+      return p.split(/\/+/);
+    }
   }
-  #canUsurp(child) {
-    if (!child || typeof child !== "object" || child.type !== null || child.#parts.length !== 1 || this.type === null || this.#parts.length !== 1) {
+  match(f, partial = this.partial) {
+    this.debug("match", f, this.pattern);
+    if (this.comment) {
       return false;
     }
-    const gc = child.#parts[0];
-    if (!gc || typeof gc !== "object" || gc.type === null) {
-      return false;
+    if (this.empty) {
+      return f === "";
     }
-    return this.#canUsurpType(gc.type);
-  }
-  #usurp(child) {
-    const m = usurpMap.get(this.type);
-    const gc = child.#parts[0];
-    const nt2 = m?.get(gc.type);
-    if (!nt2)
-      return false;
-    this.#parts = gc.#parts;
-    for (const p of this.#parts) {
-      if (typeof p === "object") {
-        p.#parent = this;
+    if (f === "/" && partial) {
+      return true;
+    }
+    const options = this.options;
+    if (this.isWindows) {
+      f = f.split("\\").join("/");
+    }
+    const ff = this.slashSplit(f);
+    this.debug(this.pattern, "split", ff);
+    const set2 = this.set;
+    this.debug(this.pattern, "set", set2);
+    let filename = ff[ff.length - 1];
+    if (!filename) {
+      for (let i = ff.length - 2; !filename && i >= 0; i--) {
+        filename = ff[i];
       }
     }
-    this.type = nt2;
-    this.#toString = void 0;
-    this.#emptyExt = false;
+    for (let i = 0; i < set2.length; i++) {
+      const pattern = set2[i];
+      let file = ff;
+      if (options.matchBase && pattern.length === 1) {
+        file = [filename];
+      }
+      const hit = this.matchOne(file, pattern, partial);
+      if (hit) {
+        if (options.flipNegate) {
+          return true;
+        }
+        return !this.negate;
+      }
+    }
+    if (options.flipNegate) {
+      return false;
+    }
+    return this.negate;
   }
-  static fromGlob(pattern, options = {}) {
-    const ast = new _a(null, void 0, options);
-    _a.#parseAST(pattern, ast, 0, options, 0);
-    return ast;
+  static defaults(def) {
+    return minimatch.defaults(def).Minimatch;
   }
-  // returns the regular expression if there's magic, or the unescaped
-  // string if not.
-  toMMPattern() {
-    if (this !== this.#root)
-      return this.#root.toMMPattern();
-    const glob = this.toString();
-    const [re2, body, hasMagic, uflag] = this.toRegExpSource();
-    const anyMagic = hasMagic || this.#hasMagic || this.#options.nocase && !this.#options.nocaseMagicOnly && glob.toUpperCase() !== glob.toLowerCase();
-    if (!anyMagic) {
-      return body;
+};
+minimatch.AST = AST;
+minimatch.Minimatch = Minimatch;
+minimatch.escape = escape;
+minimatch.unescape = unescape;
+
+// src/paths-ignore.ts
+var DEFAULT_CONFIG_RELATIVE_PATHS = [
+  ".github/codeql/codeql-config.yml",
+  ".github/codeql/codeql-config.yaml"
+];
+var patternsCache = /* @__PURE__ */ new Map();
+function findCodeqlConfigFile(sourceRoot2) {
+  const envConfigPath = process.env.CODEQL_CONFIG_PATH;
+  if (envConfigPath) {
+    const resolvedRoot = (0, import_path4.resolve)(sourceRoot2);
+    const fullPath = (0, import_path4.resolve)(resolvedRoot, envConfigPath);
+    const rel = (0, import_path4.relative)(resolvedRoot, fullPath);
+    if (rel.startsWith("..") || (0, import_path4.resolve)(resolvedRoot, rel) !== fullPath) {
+      cdsExtractorLog(
+        "warn",
+        `CODEQL_CONFIG_PATH '${envConfigPath}' resolves outside the source root. Ignoring.`
+      );
+      return void 0;
     }
-    const flags = (this.#options.nocase ? "i" : "") + (uflag ? "u" : "");
-    return Object.assign(new RegExp(`^${re2}$`, flags), {
-      _src: re2,
-      _glob: glob
-    });
+    if ((0, import_fs4.existsSync)(fullPath)) {
+      cdsExtractorLog("info", `Using CodeQL config file from CODEQL_CONFIG_PATH: ${fullPath}`);
+      return fullPath;
+    }
+    cdsExtractorLog(
+      "warn",
+      `CODEQL_CONFIG_PATH is set to '${envConfigPath}', but no file exists at '${fullPath}'.`
+    );
+    return void 0;
   }
-  get options() {
-    return this.#options;
+  for (const configPath of DEFAULT_CONFIG_RELATIVE_PATHS) {
+    const fullPath = (0, import_path4.join)(sourceRoot2, configPath);
+    if ((0, import_fs4.existsSync)(fullPath)) {
+      return fullPath;
+    }
   }
-  // returns the string match, the regexp source, whether there's magic
-  // in the regexp (so a regular expression is required) and whether or
-  // not the uflag is needed for the regular expression (for posix classes)
-  // TODO: instead of injecting the start/end at this point, just return
-  // the BODY of the regexp, along with the start/end portions suitable
-  // for binding the start/end in either a joined full-path makeRe context
-  // (where we bind to (^|/), or a standalone matchPart context (where
-  // we bind to ^, and not /).  Otherwise slashes get duped!
-  //
-  // In part-matching mode, the start is:
-  // - if not isStart: nothing
-  // - if traversal possible, but not allowed: ^(?!\.\.?$)
-  // - if dots allowed or not possible: ^
-  // - if dots possible and not allowed: ^(?!\.)
-  // end is:
-  // - if not isEnd(): nothing
-  // - else: $
-  //
-  // In full-path matching mode, we put the slash at the START of the
-  // pattern, so start is:
-  // - if first pattern: same as part-matching mode
-  // - if not isStart(): nothing
-  // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
-  // - if dots allowed or not possible: /
-  // - if dots possible and not allowed: /(?!\.)
-  // end is:
-  // - if last pattern, same as part-matching mode
-  // - else nothing
-  //
-  // Always put the (?:$|/) on negated tails, though, because that has to be
-  // there to bind the end of the negated pattern portion, and it's easier to
-  // just stick it in now rather than try to inject it later in the middle of
-  // the pattern.
-  //
-  // We can just always return the same end, and leave it up to the caller
-  // to know whether it's going to be used joined or in parts.
-  // And, if the start is adjusted slightly, can do the same there:
-  // - if not isStart: nothing
-  // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
-  // - if dots allowed or not possible: (?:/|^)
-  // - if dots possible and not allowed: (?:/|^)(?!\.)
-  //
-  // But it's better to have a simpler binding without a conditional, for
-  // performance, so probably better to return both start options.
-  //
-  // Then the caller just ignores the end if it's not the first pattern,
-  // and the start always gets applied.
-  //
-  // But that's always going to be $ if it's the ending pattern, or nothing,
-  // so the caller can just attach $ at the end of the pattern when building.
-  //
-  // So the todo is:
-  // - better detect what kind of start is needed
-  // - return both flavors of starting pattern
-  // - attach $ at the end of the pattern when creating the actual RegExp
-  //
-  // Ah, but wait, no, that all only applies to the root when the first pattern
-  // is not an extglob. If the first pattern IS an extglob, then we need all
-  // that dot prevention biz to live in the extglob portions, because eg
-  // +(*|.x*) can match .xy but not .yx.
-  //
-  // So, return the two flavors if it's #root and the first child is not an
-  // AST, otherwise leave it to the child AST to handle it, and there,
-  // use the (?:^|/) style of start binding.
-  //
-  // Even simplified further:
-  // - Since the start for a join is eg /(?!\.) and the start for a part
-  // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
-  // or start or whatever) and prepend ^ or / at the Regexp construction.
-  toRegExpSource(allowDot) {
-    const dot = allowDot ?? !!this.#options.dot;
-    if (this.#root === this) {
-      this.#flatten();
-      this.#fillNegs();
-    }
-    if (!isExtglobAST(this)) {
-      const noEmpty = this.isStart() && this.isEnd() && !this.#parts.some((s) => typeof s !== "string");
-      const src = this.#parts.map((p) => {
-        const [re2, _2, hasMagic, uflag] = typeof p === "string" ? _a.#parseGlob(p, this.#hasMagic, noEmpty) : p.toRegExpSource(allowDot);
-        this.#hasMagic = this.#hasMagic || hasMagic;
-        this.#uflag = this.#uflag || uflag;
-        return re2;
-      }).join("");
-      let start2 = "";
-      if (this.isStart()) {
-        if (typeof this.#parts[0] === "string") {
-          const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
-          if (!dotTravAllowed) {
-            const aps = addPatternStart;
-            const needNoTrav = (
-              // dots are allowed, and the pattern starts with [ or .
-              dot && aps.has(src.charAt(0)) || // the pattern starts with \., and then [ or .
-              src.startsWith("\\.") && aps.has(src.charAt(2)) || // the pattern starts with \.\., and then [ or .
-              src.startsWith("\\.\\.") && aps.has(src.charAt(4))
-            );
-            const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
-            start2 = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : "";
-          }
-        }
-      }
-      let end = "";
-      if (this.isEnd() && this.#root.#filledNegs && this.#parent?.type === "!") {
-        end = "(?:$|\\/)";
-      }
-      const final2 = start2 + src + end;
-      return [
-        final2,
-        unescape(src),
-        this.#hasMagic = !!this.#hasMagic,
-        this.#uflag
-      ];
+  return void 0;
+}
+function getPathsIgnorePatterns(sourceRoot2) {
+  const cached = patternsCache.get(sourceRoot2);
+  if (cached !== void 0) {
+    return cached;
+  }
+  const configPath = findCodeqlConfigFile(sourceRoot2);
+  if (!configPath) {
+    patternsCache.set(sourceRoot2, []);
+    return [];
+  }
+  try {
+    const content = (0, import_fs4.readFileSync)(configPath, "utf8");
+    const config = load(content);
+    if (!config || !Array.isArray(config["paths-ignore"])) {
+      patternsCache.set(sourceRoot2, []);
+      return [];
     }
-    const repeated = this.type === "*" || this.type === "+";
-    const start = this.type === "!" ? "(?:(?!(?:" : "(?:";
-    let body = this.#partsToRegExp(dot);
-    if (this.isStart() && this.isEnd() && !body && this.type !== "!") {
-      const s = this.toString();
-      const me2 = this;
-      me2.#parts = [s];
-      me2.type = null;
-      me2.#hasMagic = void 0;
-      return [s, unescape(this.toString()), false, false];
+    const patterns = config["paths-ignore"].filter(
+      (p) => typeof p === "string" && p.length > 0
+    );
+    if (patterns.length > 0) {
+      cdsExtractorLog(
+        "info",
+        `Found ${patterns.length} paths-ignore pattern(s) in ${configPath}: ${patterns.join(", ")}`
+      );
     }
-    let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot ? "" : this.#partsToRegExp(true);
-    if (bodyDotAllowed === body) {
-      bodyDotAllowed = "";
+    patternsCache.set(sourceRoot2, patterns);
+    return patterns;
+  } catch (error) {
+    cdsExtractorLog("warn", `Failed to read CodeQL config file at ${configPath}: ${String(error)}`);
+    patternsCache.set(sourceRoot2, []);
+    return [];
+  }
+}
+function shouldIgnorePath(relativePath, patterns) {
+  const matchOptions = { dot: true, windowsPathsNoEscape: true };
+  for (const raw of patterns) {
+    const pattern = raw.replace(/\/+$/, "");
+    if (minimatch(relativePath, pattern, matchOptions)) {
+      return true;
     }
-    if (bodyDotAllowed) {
-      body = `(?:${body})(?:${bodyDotAllowed})*?`;
+    if (minimatch(relativePath, `${pattern}/**`, matchOptions)) {
+      return true;
     }
-    let final = "";
-    if (this.type === "!" && this.#emptyExt) {
-      final = (this.isStart() && !dot ? startNoDot : "") + starNoEmpty;
+  }
+  return false;
+}
+function filterIgnoredPaths(relativePaths, patterns) {
+  if (patterns.length === 0) {
+    return relativePaths;
+  }
+  return relativePaths.filter((p) => !shouldIgnorePath(p, patterns));
+}
+
+// src/environment.ts
+function getPlatformInfo() {
+  const osPlatform = (0, import_os.platform)();
+  const osPlatformArch = (0, import_os.arch)();
+  const isWindows = osPlatform === "win32";
+  const exeExtension = isWindows ? ".exe" : "";
+  return {
+    platform: osPlatform,
+    arch: osPlatformArch,
+    isWindows,
+    exeExtension
+  };
+}
+function npmExecutable() {
+  return getPlatformInfo().isWindows ? "npm.cmd" : "npm";
+}
+function npxExecutable() {
+  return getPlatformInfo().isWindows ? "npx.cmd" : "npx";
+}
+function getCodeQLExePath() {
+  const platformInfo2 = getPlatformInfo();
+  const codeqlExeName = platformInfo2.isWindows ? "codeql.exe" : "codeql";
+  const codeqlDist = process.env.CODEQL_DIST;
+  if (codeqlDist) {
+    const codeqlPathFromDist = (0, import_path5.resolve)((0, import_path5.join)(codeqlDist, codeqlExeName));
+    if ((0, import_fs5.existsSync)(codeqlPathFromDist)) {
+      cdsExtractorLog("info", `Using CodeQL executable from CODEQL_DIST: ${codeqlPathFromDist}`);
+      return codeqlPathFromDist;
     } else {
-      const close = this.type === "!" ? (
-        // !() must match something,but !(x) can match ''
-        "))" + (this.isStart() && !dot && !allowDot ? startNoDot : "") + star + ")"
-      ) : this.type === "@" ? ")" : this.type === "?" ? ")?" : this.type === "+" && bodyDotAllowed ? ")" : this.type === "*" && bodyDotAllowed ? `)?` : `)${this.type}`;
-      final = start + body + close;
+      cdsExtractorLog(
+        "error",
+        `CODEQL_DIST is set to '${codeqlDist}', but CodeQL executable was not found at '${codeqlPathFromDist}'. Please ensure this path is correct. Falling back to PATH-based discovery.`
+      );
     }
-    return [
-      final,
-      unescape(body),
-      this.#hasMagic = !!this.#hasMagic,
-      this.#uflag
-    ];
   }
-  #flatten() {
-    if (!isExtglobAST(this)) {
-      for (const p of this.#parts) {
-        if (typeof p === "object") {
-          p.#flatten();
+  cdsExtractorLog(
+    "info",
+    'CODEQL_DIST environment variable not set or invalid. Attempting to find CodeQL executable via system PATH using "codeql version --format=json".'
+  );
+  try {
+    const versionOutput = (0, import_child_process3.execFileSync)(codeqlExeName, ["version", "--format=json"], {
+      encoding: "utf8",
+      timeout: 5e3,
+      // 5 seconds timeout
+      stdio: "pipe"
+      // Suppress output to console
+    });
+    try {
+      const versionInfo = JSON.parse(versionOutput);
+      if (versionInfo && typeof versionInfo.unpackedLocation === "string" && versionInfo.unpackedLocation) {
+        const resolvedPathFromVersion = (0, import_path5.resolve)((0, import_path5.join)(versionInfo.unpackedLocation, codeqlExeName));
+        if ((0, import_fs5.existsSync)(resolvedPathFromVersion)) {
+          cdsExtractorLog(
+            "info",
+            `CodeQL executable found via 'codeql version --format=json' at: ${resolvedPathFromVersion}`
+          );
+          return resolvedPathFromVersion;
         }
+        cdsExtractorLog(
+          "warn",
+          `'codeql version --format=json' provided unpackedLocation '${versionInfo.unpackedLocation}', but executable not found at '${resolvedPathFromVersion}'.`
+        );
+      } else {
+        cdsExtractorLog(
+          "warn",
+          "Could not determine CodeQL executable path from 'codeql version --format=json' output. 'unpackedLocation' field missing, empty, or invalid."
+        );
       }
-    } else {
-      let iterations = 0;
-      let done = false;
-      do {
-        done = true;
-        for (let i = 0; i < this.#parts.length; i++) {
-          const c = this.#parts[i];
-          if (typeof c === "object") {
-            c.#flatten();
-            if (this.#canAdopt(c)) {
-              done = false;
-              this.#adopt(c, i);
-            } else if (this.#canAdoptWithSpace(c)) {
-              done = false;
-              this.#adoptWithSpace(c, i);
-            } else if (this.#canUsurp(c)) {
-              done = false;
-              this.#usurp(c);
-            }
-          }
-        }
-      } while (!done && ++iterations < 10);
+    } catch (parseError) {
+      cdsExtractorLog(
+        "warn",
+        `Failed to parse 'codeql version --format=json' output: ${String(parseError)}. Output was: ${versionOutput}`
+      );
     }
-    this.#toString = void 0;
+  } catch (error) {
+    let errorMessage = `INFO: Failed to find CodeQL executable via 'codeql version --format=json'. Error: ${String(error)}`;
+    if (error && typeof error === "object" && "code" in error && error.code === "ENOENT") {
+      errorMessage += `
+INFO: The command '${codeqlExeName}' was not found in your system PATH.`;
+    }
+    cdsExtractorLog("info", errorMessage);
   }
-  #partsToRegExp(dot) {
-    return this.#parts.map((p) => {
-      if (typeof p === "string") {
-        throw new Error("string type in extglob ast??");
-      }
-      const [re2, _2, _hasMagic, uflag] = p.toRegExpSource(dot);
-      this.#uflag = this.#uflag || uflag;
-      return re2;
-    }).filter((p) => !(this.isStart() && this.isEnd()) || !!p).join("|");
+  cdsExtractorLog(
+    "error",
+    'Failed to determine CodeQL executable path. Please ensure the CODEQL_DIST environment variable is set and points to a valid CodeQL distribution, or that the CodeQL CLI (codeql) is available in your system PATH and "codeql version --format=json" can provide its location.'
+  );
+  return "";
+}
+function getJavaScriptExtractorRoot(codeqlExePath2) {
+  let jsExtractorRoot = process.env.CODEQL_EXTRACTOR_JAVASCRIPT_ROOT ?? "";
+  if (jsExtractorRoot) {
+    cdsExtractorLog(
+      "info",
+      `Using JavaScript extractor root from environment variable CODEQL_EXTRACTOR_JAVASCRIPT_ROOT: ${jsExtractorRoot}`
+    );
+    return jsExtractorRoot;
   }
-  static #parseGlob(glob, hasMagic, noEmpty = false) {
-    let escaping = false;
-    let re2 = "";
-    let uflag = false;
-    let inStar = false;
-    for (let i = 0; i < glob.length; i++) {
-      const c = glob.charAt(i);
-      if (escaping) {
-        escaping = false;
-        re2 += (reSpecials.has(c) ? "\\" : "") + c;
-        continue;
-      }
-      if (c === "*") {
-        if (inStar)
-          continue;
-        inStar = true;
-        re2 += noEmpty && /^[*]+$/.test(glob) ? starNoEmpty : star;
-        hasMagic = true;
-        continue;
-      } else {
-        inStar = false;
-      }
-      if (c === "\\") {
-        if (i === glob.length - 1) {
-          re2 += "\\\\";
-        } else {
-          escaping = true;
-        }
-        continue;
-      }
-      if (c === "[") {
-        const [src, needUflag, consumed, magic] = parseClass(glob, i);
-        if (consumed) {
-          re2 += src;
-          uflag = uflag || needUflag;
-          i += consumed - 1;
-          hasMagic = hasMagic || magic;
-          continue;
-        }
-      }
-      if (c === "?") {
-        re2 += qmark;
-        hasMagic = true;
-        continue;
-      }
-      re2 += regExpEscape(c);
+  if (!codeqlExePath2) {
+    cdsExtractorLog(
+      "warn",
+      "Cannot resolve JavaScript extractor root because the CodeQL executable path was not provided or found."
+    );
+    return "";
+  }
+  try {
+    jsExtractorRoot = (0, import_child_process3.execFileSync)(
+      codeqlExePath2,
+      ["resolve", "extractor", "--language=javascript"],
+      { stdio: "pipe" }
+      // Suppress output from the command itself
+    ).toString().trim();
+    if (jsExtractorRoot) {
+      cdsExtractorLog("info", `JavaScript extractor root resolved to: ${jsExtractorRoot}`);
+    } else {
+      cdsExtractorLog(
+        "warn",
+        `'codeql resolve extractor --language=javascript' using '${codeqlExePath2}' returned an empty path.`
+      );
     }
-    return [re2, unescape(glob), !!hasMagic, uflag];
+  } catch (error) {
+    cdsExtractorLog(
+      "error",
+      `Error resolving JavaScript extractor root using '${codeqlExePath2}': ${String(error)}`
+    );
+    jsExtractorRoot = "";
   }
-};
-_a = AST;
-
-// node_modules/minimatch/dist/esm/escape.js
-var escape = (s, { windowsPathsNoEscape = false, magicalBraces = false } = {}) => {
-  if (magicalBraces) {
-    return windowsPathsNoEscape ? s.replace(/[?*()[\]{}]/g, "[$&]") : s.replace(/[?*()[\]\\{}]/g, "\\$&");
+  return jsExtractorRoot;
+}
+function setupJavaScriptExtractorEnv() {
+  process.env.CODEQL_EXTRACTOR_JAVASCRIPT_WIP_DATABASE = process.env.CODEQL_EXTRACTOR_CDS_WIP_DATABASE;
+  process.env.CODEQL_EXTRACTOR_JAVASCRIPT_DIAGNOSTIC_DIR = process.env.CODEQL_EXTRACTOR_CDS_DIAGNOSTIC_DIR;
+  process.env.CODEQL_EXTRACTOR_JAVASCRIPT_LOG_DIR = process.env.CODEQL_EXTRACTOR_CDS_LOG_DIR;
+  process.env.CODEQL_EXTRACTOR_JAVASCRIPT_SCRATCH_DIR = process.env.CODEQL_EXTRACTOR_CDS_SCRATCH_DIR;
+  process.env.CODEQL_EXTRACTOR_JAVASCRIPT_TRAP_DIR = process.env.CODEQL_EXTRACTOR_CDS_TRAP_DIR;
+  process.env.CODEQL_EXTRACTOR_JAVASCRIPT_SOURCE_ARCHIVE_DIR = process.env.CODEQL_EXTRACTOR_CDS_SOURCE_ARCHIVE_DIR;
+}
+function getAutobuildScriptPath(jsExtractorRoot) {
+  if (!jsExtractorRoot) return "";
+  const platformInfo2 = getPlatformInfo();
+  const autobuildScriptName = platformInfo2.isWindows ? "autobuild.cmd" : "autobuild.sh";
+  return (0, import_path5.resolve)((0, import_path5.join)(jsExtractorRoot, "tools", autobuildScriptName));
+}
+function configureLgtmIndexFilters() {
+  let excludeFilters = "";
+  if (process.env.LGTM_INDEX_FILTERS) {
+    cdsExtractorLog(
+      "info",
+      `Found $LGTM_INDEX_FILTERS already set to:
+${process.env.LGTM_INDEX_FILTERS}`
+    );
+    const allowedExcludePatterns = ["exclude:**/*", "exclude:**/*.*"];
+    excludeFilters = "\n" + process.env.LGTM_INDEX_FILTERS.split("\n").filter(
+      (line) => line.startsWith("exclude") && !allowedExcludePatterns.some((pattern) => line.includes(pattern))
+    ).join("\n");
   }
-  return windowsPathsNoEscape ? s.replace(/[?*()[\]]/g, "[$&]") : s.replace(/[?*()[\]\\]/g, "\\$&");
-};
-
-// node_modules/minimatch/dist/esm/index.js
-var minimatch = (p, pattern, options = {}) => {
-  assertValidPattern(pattern);
-  if (!options.nocomment && pattern.charAt(0) === "#") {
-    return false;
+  const lgtmIndexFiltersPatterns = [
+    "exclude:**/*.*",
+    "include:**/*.cds.json",
+    "include:**/*.cds",
+    `include:**/${cdsExtractorMarkerFileName}`,
+    "exclude:**/node_modules/**/*.*"
+  ].join("\n");
+  process.env.LGTM_INDEX_FILTERS = lgtmIndexFiltersPatterns + excludeFilters;
+  process.env.LGTM_INDEX_TYPESCRIPT = "NONE";
+  process.env.LGTM_INDEX_FILETYPES = ".cds:JSON";
+}
+function applyPathsIgnoreToLgtmFilters(sourceRoot2) {
+  const patterns = getPathsIgnorePatterns(sourceRoot2);
+  if (patterns.length === 0) {
+    return;
   }
-  return new Minimatch(pattern, options).match(p);
-};
-var starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
-var starDotExtTest = (ext2) => (f) => !f.startsWith(".") && f.endsWith(ext2);
-var starDotExtTestDot = (ext2) => (f) => f.endsWith(ext2);
-var starDotExtTestNocase = (ext2) => {
-  ext2 = ext2.toLowerCase();
-  return (f) => !f.startsWith(".") && f.toLowerCase().endsWith(ext2);
-};
-var starDotExtTestNocaseDot = (ext2) => {
-  ext2 = ext2.toLowerCase();
-  return (f) => f.toLowerCase().endsWith(ext2);
-};
-var starDotStarRE = /^\*+\.\*+$/;
-var starDotStarTest = (f) => !f.startsWith(".") && f.includes(".");
-var starDotStarTestDot = (f) => f !== "." && f !== ".." && f.includes(".");
-var dotStarRE = /^\.\*+$/;
-var dotStarTest = (f) => f !== "." && f !== ".." && f.startsWith(".");
-var starRE = /^\*+$/;
-var starTest = (f) => f.length !== 0 && !f.startsWith(".");
-var starTestDot = (f) => f.length !== 0 && f !== "." && f !== "..";
-var qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
-var qmarksTestNocase = ([$0, ext2 = ""]) => {
-  const noext = qmarksTestNoExt([$0]);
-  if (!ext2)
-    return noext;
-  ext2 = ext2.toLowerCase();
-  return (f) => noext(f) && f.toLowerCase().endsWith(ext2);
-};
-var qmarksTestNocaseDot = ([$0, ext2 = ""]) => {
-  const noext = qmarksTestNoExtDot([$0]);
-  if (!ext2)
-    return noext;
-  ext2 = ext2.toLowerCase();
-  return (f) => noext(f) && f.toLowerCase().endsWith(ext2);
-};
-var qmarksTestDot = ([$0, ext2 = ""]) => {
-  const noext = qmarksTestNoExtDot([$0]);
-  return !ext2 ? noext : (f) => noext(f) && f.endsWith(ext2);
-};
-var qmarksTest = ([$0, ext2 = ""]) => {
-  const noext = qmarksTestNoExt([$0]);
-  return !ext2 ? noext : (f) => noext(f) && f.endsWith(ext2);
-};
-var qmarksTestNoExt = ([$0]) => {
-  const len = $0.length;
-  return (f) => f.length === len && !f.startsWith(".");
-};
-var qmarksTestNoExtDot = ([$0]) => {
-  const len = $0.length;
-  return (f) => f.length === len && f !== "." && f !== "..";
-};
-var defaultPlatform = typeof process === "object" && process ? typeof process.env === "object" && process.env && process.env.__MINIMATCH_TESTING_PLATFORM__ || process.platform : "posix";
-var path = {
-  win32: { sep: "\\" },
-  posix: { sep: "/" }
-};
-var sep2 = defaultPlatform === "win32" ? path.win32.sep : path.posix.sep;
-minimatch.sep = sep2;
-var GLOBSTAR = /* @__PURE__ */ Symbol("globstar **");
-minimatch.GLOBSTAR = GLOBSTAR;
-var qmark2 = "[^/]";
-var star2 = qmark2 + "*?";
-var twoStarDot = "(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?";
-var twoStarNoDot = "(?:(?!(?:\\/|^)\\.).)*?";
-var filter = (pattern, options = {}) => (p) => minimatch(p, pattern, options);
-minimatch.filter = filter;
-var ext = (a, b = {}) => Object.assign({}, a, b);
-var defaults = (def) => {
-  if (!def || typeof def !== "object" || !Object.keys(def).length) {
-    return minimatch;
+  const excludeLines = patterns.map((p) => `exclude:${p}`).join("\n");
+  const current = process.env.LGTM_INDEX_FILTERS ?? "";
+  process.env.LGTM_INDEX_FILTERS = current + "\n" + excludeLines;
+  cdsExtractorLog(
+    "info",
+    `Applied ${patterns.length} paths-ignore pattern(s) to LGTM_INDEX_FILTERS`
+  );
+}
+function setupAndValidateEnvironment(sourceRoot2) {
+  const errorMessages2 = [];
+  const platformInfo2 = getPlatformInfo();
+  const codeqlExePath2 = getCodeQLExePath();
+  if (!codeqlExePath2) {
+    errorMessages2.push(
+      "Failed to find CodeQL executable. Ensure CODEQL_DIST is set and valid, or CodeQL CLI is in PATH."
+    );
   }
-  const orig = minimatch;
-  const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
-  return Object.assign(m, {
-    Minimatch: class Minimatch extends orig.Minimatch {
-      constructor(pattern, options = {}) {
-        super(pattern, ext(def, options));
-      }
-      static defaults(options) {
-        return orig.defaults(ext(def, options)).Minimatch;
-      }
-    },
-    AST: class AST extends orig.AST {
-      /* c8 ignore start */
-      constructor(type2, parent, options = {}) {
-        super(type2, parent, ext(def, options));
-      }
-      /* c8 ignore stop */
-      static fromGlob(pattern, options = {}) {
-        return orig.AST.fromGlob(pattern, ext(def, options));
-      }
-    },
-    unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
-    escape: (s, options = {}) => orig.escape(s, ext(def, options)),
-    filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
-    defaults: (options) => orig.defaults(ext(def, options)),
-    makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
-    braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
-    match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
-    sep: orig.sep,
-    GLOBSTAR
-  });
-};
-minimatch.defaults = defaults;
-var braceExpand = (pattern, options = {}) => {
-  assertValidPattern(pattern);
-  if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
-    return [pattern];
-  }
-  return expand(pattern, { max: options.braceExpandMax });
-};
-minimatch.braceExpand = braceExpand;
-var makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
-minimatch.makeRe = makeRe;
-var match = (list, pattern, options = {}) => {
-  const mm = new Minimatch(pattern, options);
-  list = list.filter((f) => mm.match(f));
-  if (mm.options.nonull && !list.length) {
-    list.push(pattern);
+  if (!dirExists(sourceRoot2)) {
+    errorMessages2.push(`Project root directory '${sourceRoot2}' does not exist.`);
   }
-  return list;
-};
-minimatch.match = match;
-var globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
-var regExpEscape2 = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&");
-var Minimatch = class {
-  options;
-  set;
-  pattern;
-  windowsPathsNoEscape;
-  nonegate;
-  negate;
-  comment;
-  empty;
-  preserveMultipleSlashes;
-  partial;
-  globSet;
-  globParts;
-  nocase;
-  isWindows;
-  platform;
-  windowsNoMagicRoot;
-  maxGlobstarRecursion;
-  regexp;
-  constructor(pattern, options = {}) {
-    assertValidPattern(pattern);
-    options = options || {};
-    this.options = options;
-    this.maxGlobstarRecursion = options.maxGlobstarRecursion ?? 200;
-    this.pattern = pattern;
-    this.platform = options.platform || defaultPlatform;
-    this.isWindows = this.platform === "win32";
-    const awe = "allowWindowsEscape";
-    this.windowsPathsNoEscape = !!options.windowsPathsNoEscape || options[awe] === false;
-    if (this.windowsPathsNoEscape) {
-      this.pattern = this.pattern.replace(/\\/g, "/");
+  const jsExtractorRoot = getJavaScriptExtractorRoot(codeqlExePath2);
+  if (!jsExtractorRoot) {
+    if (codeqlExePath2) {
+      errorMessages2.push(
+        "Failed to determine JavaScript extractor root using the found CodeQL executable."
+      );
+    } else {
+      errorMessages2.push(
+        "Cannot determine JavaScript extractor root because CodeQL executable was not found."
+      );
     }
-    this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
-    this.regexp = null;
-    this.negate = false;
-    this.nonegate = !!options.nonegate;
-    this.comment = false;
-    this.empty = false;
-    this.partial = !!options.partial;
-    this.nocase = !!this.options.nocase;
-    this.windowsNoMagicRoot = options.windowsNoMagicRoot !== void 0 ? options.windowsNoMagicRoot : !!(this.isWindows && this.nocase);
-    this.globSet = [];
-    this.globParts = [];
-    this.set = [];
-    this.make();
   }
-  hasMagic() {
-    if (this.options.magicalBraces && this.set.length > 1) {
-      return true;
-    }
-    for (const pattern of this.set) {
-      for (const part of pattern) {
-        if (typeof part !== "string")
-          return true;
-      }
-    }
-    return false;
+  if (jsExtractorRoot) {
+    process.env.CODEQL_EXTRACTOR_JAVASCRIPT_ROOT = jsExtractorRoot;
+    setupJavaScriptExtractorEnv();
   }
-  debug(..._2) {
+  const autobuildScriptPath2 = jsExtractorRoot ? getAutobuildScriptPath(jsExtractorRoot) : "";
+  return {
+    success: errorMessages2.length === 0,
+    errorMessages: errorMessages2,
+    codeqlExePath: codeqlExePath2,
+    // Will be '' if not found
+    jsExtractorRoot,
+    // Will be '' if not found
+    autobuildScriptPath: autobuildScriptPath2,
+    platformInfo: platformInfo2
+  };
+}
+
+// src/cds/compiler/compile.ts
+function parseCommandForSpawn(commandString) {
+  const parts = commandString.trim().split(/\s+/);
+  const executable = parts[0];
+  const baseArgs = parts.slice(1);
+  return { executable, baseArgs };
+}
+function determineCompilationTargets(project, sourceRoot2) {
+  const projectAbsolutePath = (0, import_path6.join)(sourceRoot2, project.projectDir);
+  const rootCdsFiles = project.cdsFiles.filter((file) => (0, import_path6.dirname)((0, import_path6.join)(sourceRoot2, file)) === projectAbsolutePath).map((file) => (0, import_path6.basename)(file));
+  if (rootCdsFiles.includes("index.cds")) {
+    return ["index.cds"];
   }
-  make() {
-    const pattern = this.pattern;
-    const options = this.options;
-    if (!options.nocomment && pattern.charAt(0) === "#") {
-      this.comment = true;
-      return;
-    }
-    if (!pattern) {
-      this.empty = true;
-      return;
-    }
-    this.parseNegate();
-    this.globSet = [...new Set(this.braceExpand())];
-    if (options.debug) {
-      this.debug = (...args) => console.error(...args);
-    }
-    this.debug(this.pattern, this.globSet);
-    const rawGlobParts = this.globSet.map((s) => this.slashSplit(s));
-    this.globParts = this.preprocess(rawGlobParts);
-    this.debug(this.pattern, this.globParts);
-    let set2 = this.globParts.map((s, _2, __) => {
-      if (this.isWindows && this.windowsNoMagicRoot) {
-        const isUNC = s[0] === "" && s[1] === "" && (s[2] === "?" || !globMagic.test(s[2])) && !globMagic.test(s[3]);
-        const isDrive = /^[a-z]:/i.test(s[0]);
-        if (isUNC) {
-          return [
-            ...s.slice(0, 4),
-            ...s.slice(4).map((ss2) => this.parse(ss2))
-          ];
-        } else if (isDrive) {
-          return [s[0], ...s.slice(1).map((ss2) => this.parse(ss2))];
-        }
-      }
-      return s.map((ss2) => this.parse(ss2));
-    });
-    this.debug(this.pattern, set2);
-    this.set = set2.filter((s) => s.indexOf(false) === -1);
-    if (this.isWindows) {
-      for (let i = 0; i < this.set.length; i++) {
-        const p = this.set[i];
-        if (p[0] === "" && p[1] === "" && this.globParts[i][2] === "?" && typeof p[3] === "string" && /^[a-z]:$/i.test(p[3])) {
-          p[2] = "?";
-        }
-      }
-    }
-    this.debug(this.pattern, this.set);
+  const capDirectories = ["db", "srv", "app"];
+  const existingCapDirs = capDirectories.filter((dir) => dirExists((0, import_path6.join)(projectAbsolutePath, dir)));
+  if (existingCapDirs.length > 0) {
+    return existingCapDirs;
   }
-  // various transforms to equivalent pattern sets that are
-  // faster to process in a filesystem walk.  The goal is to
-  // eliminate what we can, and push all ** patterns as far
-  // to the right as possible, even if it increases the number
-  // of patterns that we have to process.
-  preprocess(globParts) {
-    if (this.options.noglobstar) {
-      for (let i = 0; i < globParts.length; i++) {
-        for (let j2 = 0; j2 < globParts[i].length; j2++) {
-          if (globParts[i][j2] === "**") {
-            globParts[i][j2] = "*";
-          }
-        }
-      }
+  if (rootCdsFiles.length > 0) {
+    return rootCdsFiles;
+  }
+  return project.cdsFiles.map((file) => (0, import_path6.relative)(projectAbsolutePath, (0, import_path6.join)(sourceRoot2, file)));
+}
+function compileCdsToJson(cdsFilePath, sourceRoot2, cdsCommand, cacheDir, projectMap, projectDir) {
+  try {
+    const resolvedCdsFilePath = (0, import_path6.resolve)(cdsFilePath);
+    if (!fileExists(resolvedCdsFilePath)) {
+      throw new Error(`Expected CDS file '${resolvedCdsFilePath}' does not exist.`);
     }
-    const { optimizationLevel = 1 } = this.options;
-    if (optimizationLevel >= 2) {
-      globParts = this.firstPhasePreProcess(globParts);
-      globParts = this.secondPhasePreProcess(globParts);
-    } else if (optimizationLevel >= 1) {
-      globParts = this.levelOneOptimize(globParts);
-    } else {
-      globParts = this.adjascentGlobstarOptimize(globParts);
+    const cdsVersion = getCdsVersion(cdsCommand, cacheDir);
+    const versionInfo = cdsVersion ? `with CDS v${cdsVersion}` : "";
+    const projectBaseDir = (0, import_path6.join)(sourceRoot2, projectDir);
+    const spawnOptions = createSpawnOptions(projectBaseDir, cdsCommand, cacheDir);
+    if (!projectMap || !projectDir || !projectMap.has(projectDir)) {
+      throw new Error(
+        `Project directory '${projectDir}' not found in projectMap. Ensure the project is properly initialized.`
+      );
     }
-    return globParts;
+    const project = projectMap.get(projectDir);
+    return compileProject(sourceRoot2, projectDir, cdsCommand, spawnOptions, versionInfo, project);
+  } catch (error) {
+    return { success: false, message: String(error) };
   }
-  // just get rid of adjascent ** portions
-  adjascentGlobstarOptimize(globParts) {
-    return globParts.map((parts) => {
-      let gs2 = -1;
-      while (-1 !== (gs2 = parts.indexOf("**", gs2 + 1))) {
-        let i = gs2;
-        while (parts[i + 1] === "**") {
-          i++;
-        }
-        if (i !== gs2) {
-          parts.splice(gs2, i - gs2);
-        }
-      }
-      return parts;
-    });
+}
+function compileProject(sourceRoot2, projectDir, cdsCommand, spawnOptions, versionInfo, project) {
+  cdsExtractorLog("info", `Compiling CDS project '${projectDir}' using ${versionInfo}...`);
+  const compilationTargets = determineCompilationTargets(project, sourceRoot2);
+  if (compilationTargets.length === 0) {
+    throw new Error(
+      `Project directory '${projectDir}' does not contain any CDS files and cannot be compiled`
+    );
   }
-  // get rid of adjascent ** and resolve .. portions
-  levelOneOptimize(globParts) {
-    return globParts.map((parts) => {
-      parts = parts.reduce((set2, part) => {
-        const prev = set2[set2.length - 1];
-        if (part === "**" && prev === "**") {
-          return set2;
-        }
-        if (part === "..") {
-          if (prev && prev !== ".." && prev !== "." && prev !== "**") {
-            set2.pop();
-            return set2;
-          }
-        }
-        set2.push(part);
-        return set2;
-      }, []);
-      return parts.length === 0 ? [""] : parts;
-    });
+  const projectJsonOutPath = (0, import_path6.join)(sourceRoot2, projectDir, modelCdsJsonFile);
+  const compileArgs = [
+    "compile",
+    ...compilationTargets,
+    "--to",
+    "json",
+    "--dest",
+    modelCdsJsonFile,
+    "--locations",
+    "--log-level",
+    "warn"
+  ];
+  cdsExtractorLog("info", `Compiling CDS project targets: ${compilationTargets.join(", ")}`);
+  cdsExtractorLog(
+    "info",
+    `Running compilation task for CDS project '${projectDir}': command='${cdsCommand}' args='${JSON.stringify(compileArgs)}'`
+  );
+  const { executable, baseArgs } = parseCommandForSpawn(cdsCommand);
+  const allArgs = [...baseArgs, ...compileArgs];
+  const result = (0, import_child_process4.spawnSync)(executable, allArgs, spawnOptions);
+  if (result.error) {
+    cdsExtractorLog("error", `SpawnSync error: ${result.error.message}`);
+    throw new Error(`Error executing CDS compiler: ${result.error.message}`);
   }
-  levelTwoFileOptimize(parts) {
-    if (!Array.isArray(parts)) {
-      parts = this.slashSplit(parts);
-    }
-    let didSomething = false;
-    do {
-      didSomething = false;
-      if (!this.preserveMultipleSlashes) {
-        for (let i = 1; i < parts.length - 1; i++) {
-          const p = parts[i];
-          if (i === 1 && p === "" && parts[0] === "")
-            continue;
-          if (p === "." || p === "") {
-            didSomething = true;
-            parts.splice(i, 1);
-            i--;
-          }
-        }
-        if (parts[0] === "." && parts.length === 2 && (parts[1] === "." || parts[1] === "")) {
-          didSomething = true;
-          parts.pop();
-        }
+  if (result.stderr && result.stderr.length > 0) {
+    cdsExtractorLog("warn", `CDS stderr output: ${result.stderr.toString()}`);
+  }
+  if (result.status !== 0) {
+    cdsExtractorLog("error", `CDS command failed with status ${result.status}`);
+    cdsExtractorLog(
+      "error",
+      `Command: ${cdsCommand} ${compileArgs.map((arg) => arg.includes(" ") ? `"${arg}"` : arg).join(" ")}`
+    );
+    cdsExtractorLog("error", `Stdout: ${result.stdout?.toString() || "No stdout"}`);
+    cdsExtractorLog("error", `Stderr: ${result.stderr?.toString() || "No stderr"}`);
+    throw new Error(
+      `Could not compile the CAP project ${projectDir}.
+Reported error(s):
+\`\`\`
+${result.stderr?.toString() || "Unknown error"}
+\`\`\``
+    );
+  }
+  if (!fileExists(projectJsonOutPath) && !dirExists(projectJsonOutPath)) {
+    throw new Error(
+      `CAP project '${projectDir}' was not compiled to JSON. This is likely because the project structure is invalid.`
+    );
+  }
+  if (dirExists(projectJsonOutPath)) {
+    cdsExtractorLog(
+      "info",
+      `CDS compiler generated JSON to output directory: ${projectJsonOutPath}`
+    );
+    recursivelyRenameJsonFiles(projectJsonOutPath);
+  } else {
+    cdsExtractorLog("info", `CDS compiler generated JSON to file: ${projectJsonOutPath}`);
+  }
+  normalizeLocationPathsInFile(projectJsonOutPath);
+  return {
+    success: true,
+    outputPath: projectJsonOutPath,
+    compiledAsProject: true,
+    message: "Project was compiled using project-aware compilation"
+  };
+}
+function createSpawnOptions(projectBaseDir, cdsCommand, cacheDir) {
+  const binPathNative = `node_modules${import_path6.sep}.bin${import_path6.sep}`;
+  const binPathPosix = "node_modules/.bin/";
+  const isDirectBinary = cdsCommand.includes(binPathNative) || cdsCommand.includes(binPathPosix);
+  const useShell = getPlatformInfo().isWindows && !isDirectBinary;
+  const spawnOptions = {
+    cwd: projectBaseDir,
+    // CRITICAL: Always use project base directory as cwd to ensure correct path generation
+    shell: useShell,
+    stdio: "pipe",
+    env: { ...process.env }
+  };
+  if (cacheDir && !isDirectBinary) {
+    const nodePath = (0, import_path6.join)(cacheDir, "node_modules");
+    spawnOptions.env = {
+      ...process.env,
+      NODE_PATH: `${nodePath}${import_path6.delimiter}${process.env.NODE_PATH ?? ""}`,
+      PATH: `${(0, import_path6.join)(nodePath, ".bin")}${import_path6.delimiter}${process.env.PATH}`,
+      // Add NPM configuration to ensure dependencies are resolved from the cache directory
+      npm_config_prefix: cacheDir,
+      // Ensure we don't pick up global CDS installations that might conflict
+      npm_config_global: "false",
+      // Clear any existing CDS environment variables that might interfere
+      CDS_HOME: cacheDir
+    };
+  } else if (isDirectBinary) {
+    const cleanEnv = { ...process.env };
+    delete cleanEnv.NODE_PATH;
+    delete cleanEnv.npm_config_prefix;
+    delete cleanEnv.npm_config_global;
+    delete cleanEnv.CDS_HOME;
+    spawnOptions.env = cleanEnv;
+  }
+  return spawnOptions;
+}
+
+// src/cds/compiler/validator.ts
+var import_fs6 = require("fs");
+var import_path7 = require("path");
+function identifyTasksRequiringRetry(dependencyGraph2) {
+  const tasksRequiringRetry = /* @__PURE__ */ new Map();
+  for (const [projectDir, project] of dependencyGraph2.projects.entries()) {
+    const failedTasks = [];
+    for (const task of project.compilationTasks) {
+      if (task.retryInfo?.hasBeenRetried) {
+        continue;
       }
-      let dd = 0;
-      while (-1 !== (dd = parts.indexOf("..", dd + 1))) {
-        const p = parts[dd - 1];
-        if (p && p !== "." && p !== ".." && p !== "**") {
-          didSomething = true;
-          parts.splice(dd - 1, 2);
-          dd -= 2;
+      const validationResult2 = validateTaskOutputs(task, dependencyGraph2.sourceRootDir);
+      if (!validationResult2.isValid) {
+        failedTasks.push(task);
+        cdsExtractorLog(
+          "info",
+          `Task ${task.id} requires retry: ${validationResult2.validFileCount}/${validationResult2.expectedFileCount} output files valid (status: ${task.status})`
+        );
+        if (task.status === "success") {
+          cdsExtractorLog(
+            "warn",
+            `Task ${task.id} was marked as successful but output files are missing or invalid - updating status to failed`
+          );
+          task.status = "failed";
         }
       }
-    } while (didSomething);
-    return parts.length === 0 ? [""] : parts;
+    }
+    if (failedTasks.length > 0) {
+      tasksRequiringRetry.set(projectDir, failedTasks);
+    }
   }
-  // First phase: single-pattern processing
-  // <pre> is 1 or more portions
-  // <rest> is 1 or more portions
-  // <p> is any portion other than ., .., '', or **
-  // <dot> is . or ''
-  //
-  // **/.. is *brutal* for filesystem walking performance, because
-  // it effectively resets the recursive walk each time it occurs,
-  // and ** cannot be reduced out by a .. pattern part like a regexp
-  // or most strings (other than .., ., and '') can be.
-  //
-  // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
-  // <pre>/<dot>/<rest> -> <pre>/<rest>
-  // <pre>/<p>/../<rest> -> <pre>/<rest>
-  // **/**/<rest> -> **/<rest>
-  //
-  // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
-  // this WOULD be allowed if ** did follow symlinks, or * didn't
-  firstPhasePreProcess(globParts) {
-    let didSomething = false;
-    do {
-      didSomething = false;
-      for (let parts of globParts) {
-        let gs2 = -1;
-        while (-1 !== (gs2 = parts.indexOf("**", gs2 + 1))) {
-          let gss = gs2;
-          while (parts[gss + 1] === "**") {
-            gss++;
-          }
-          if (gss > gs2) {
-            parts.splice(gs2 + 1, gss - gs2);
-          }
-          let next = parts[gs2 + 1];
-          const p = parts[gs2 + 2];
-          const p2 = parts[gs2 + 3];
-          if (next !== "..")
-            continue;
-          if (!p || p === "." || p === ".." || !p2 || p2 === "." || p2 === "..") {
-            continue;
-          }
-          didSomething = true;
-          parts.splice(gs2, 1);
-          const other = parts.slice(0);
-          other[gs2] = "**";
-          globParts.push(other);
-          gs2--;
-        }
-        if (!this.preserveMultipleSlashes) {
-          for (let i = 1; i < parts.length - 1; i++) {
-            const p = parts[i];
-            if (i === 1 && p === "" && parts[0] === "")
-              continue;
-            if (p === "." || p === "") {
-              didSomething = true;
-              parts.splice(i, 1);
-              i--;
-            }
-          }
-          if (parts[0] === "." && parts.length === 2 && (parts[1] === "." || parts[1] === "")) {
-            didSomething = true;
-            parts.pop();
-          }
-        }
-        let dd = 0;
-        while (-1 !== (dd = parts.indexOf("..", dd + 1))) {
-          const p = parts[dd - 1];
-          if (p && p !== "." && p !== ".." && p !== "**") {
-            didSomething = true;
-            const needDot = dd === 1 && parts[dd + 1] === "**";
-            const splin = needDot ? ["."] : [];
-            parts.splice(dd - 1, 2, ...splin);
-            if (parts.length === 0)
-              parts.push("");
-            dd -= 2;
-          }
+  if (tasksRequiringRetry.size > 0) {
+    const totalFailedTasks = Array.from(tasksRequiringRetry.values()).reduce(
+      (sum, tasks) => sum + tasks.length,
+      0
+    );
+    cdsExtractorLog(
+      "info",
+      `Identified ${totalFailedTasks} task(s) requiring retry across ${tasksRequiringRetry.size} project(s)`
+    );
+  }
+  return tasksRequiringRetry;
+}
+function updateCdsDependencyGraphStatus(dependencyGraph2, sourceRootDir) {
+  let successfulTasks = 0;
+  let failedTasks = 0;
+  let tasksSuccessfullyRetried = 0;
+  for (const project of dependencyGraph2.projects.values()) {
+    for (const task of project.compilationTasks) {
+      const validationResult2 = validateTaskOutputs(task, sourceRootDir);
+      const isValid = validationResult2.isValid;
+      if (isValid) {
+        task.status = "success";
+        successfulTasks++;
+        if (task.retryInfo?.hasBeenRetried) {
+          tasksSuccessfullyRetried++;
         }
+      } else {
+        task.status = "failed";
+        failedTasks++;
       }
-    } while (didSomething);
-    return globParts;
+    }
+  }
+  dependencyGraph2.statusSummary.successfulCompilations = successfulTasks;
+  dependencyGraph2.statusSummary.failedCompilations = failedTasks;
+  dependencyGraph2.retryStatus.totalTasksSuccessfullyRetried = tasksSuccessfullyRetried;
+  dependencyGraph2.retryStatus.totalTasksRequiringRetry = failedTasks;
+  return {
+    tasksValidated: successfulTasks + failedTasks,
+    successfulTasks,
+    failedTasks,
+    tasksSuccessfullyRetried
+  };
+}
+function validateOutputFile(filePath) {
+  const result = {
+    isValid: false,
+    filePath,
+    exists: false
+  };
+  if (!fileExists(filePath)) {
+    result.error = "File does not exist";
+    return result;
+  }
+  result.exists = true;
+  if (filePath.endsWith(".cds.json") || filePath.endsWith(".json")) {
+    try {
+      const content = (0, import_fs6.readFileSync)(filePath, "utf8");
+      if (!content.trim()) {
+        result.error = "File is empty";
+        return result;
+      }
+      const parsed = JSON.parse(content);
+      if (typeof parsed !== "object" || parsed === null) {
+        result.error = "File does not contain a valid JSON object";
+        return result;
+      }
+      result.hasValidJson = true;
+      result.isValid = true;
+    } catch (error) {
+      result.error = `Invalid JSON content: ${String(error)}`;
+      return result;
+    }
+  } else {
+    result.isValid = true;
+  }
+  return result;
+}
+function validateTaskOutputs(task, sourceRoot2) {
+  const fileResults = [];
+  const expectedOutput = task.expectedOutputFile;
+  const absolutePath = (0, import_path7.isAbsolute)(expectedOutput) ? expectedOutput : (0, import_path7.join)(sourceRoot2, expectedOutput);
+  const fileResult = validateOutputFile(absolutePath);
+  fileResults.push(fileResult);
+  const validFileCount = fileResults.filter((r) => r.isValid).length;
+  const expectedFileCount = 1;
+  const isValid = validFileCount === expectedFileCount && expectedFileCount > 0;
+  return {
+    isValid,
+    task,
+    fileResults,
+    validFileCount,
+    expectedFileCount
+  };
+}
+
+// src/diagnostics.ts
+var import_child_process5 = require("child_process");
+var import_path8 = require("path");
+function convertToRelativePath(filePath, sourceRoot2) {
+  if (!filePath || typeof filePath !== "string" || !sourceRoot2 || typeof sourceRoot2 !== "string") {
+    return ".";
+  }
+  try {
+    const resolvedSourceRoot = (0, import_path8.resolve)(sourceRoot2);
+    const resolvedFilePath = (0, import_path8.isAbsolute)(filePath) ? (0, import_path8.resolve)(filePath) : (0, import_path8.resolve)(resolvedSourceRoot, filePath);
+    if (resolvedFilePath === resolvedSourceRoot) {
+      return ".";
+    }
+    const relativePath = (0, import_path8.relative)(resolvedSourceRoot, resolvedFilePath);
+    if (relativePath.startsWith("..")) {
+      return ".";
+    }
+    return relativePath;
+  } catch {
+    return ".";
+  }
+}
+function addDiagnostic(filePath, message, codeqlExePath2, sourceId, sourceName, severity, logPrefix, sourceRoot2) {
+  const finalFilePath = sourceRoot2 ? convertToRelativePath(filePath, sourceRoot2) : (0, import_path8.resolve)(filePath);
+  let finalMessage = message;
+  if (sourceRoot2 && finalFilePath === "." && filePath !== sourceRoot2) {
+    const resolvedSourceRoot = (0, import_path8.resolve)(sourceRoot2);
+    const resolvedFilePath = (0, import_path8.isAbsolute)(filePath) ? (0, import_path8.resolve)(filePath) : (0, import_path8.resolve)(resolvedSourceRoot, filePath);
+    if (resolvedFilePath !== resolvedSourceRoot) {
+      finalMessage = `${message}
+
+**Note**: The file \`${filePath}\` is located outside the scanned source directory and cannot be linked directly in this diagnostic. This diagnostic is associated with the repository root instead.`;
+    }
+  }
+  try {
+    (0, import_child_process5.execFileSync)(codeqlExePath2, [
+      "database",
+      "add-diagnostic",
+      "--extractor-name=cds",
+      "--ready-for-status-page",
+      `--source-id=${sourceId}`,
+      `--source-name=${sourceName}`,
+      `--severity=${severity}`,
+      `--markdown-message=${finalMessage}`,
+      `--file-path=${finalFilePath}`,
+      "--",
+      `${process.env.CODEQL_EXTRACTOR_CDS_WIP_DATABASE ?? ""}`
+    ]);
+    cdsExtractorLog("info", `Added ${severity} diagnostic for ${logPrefix}: ${filePath}`);
+    return true;
+  } catch (err) {
+    cdsExtractorLog(
+      "error",
+      `Failed to add ${severity} diagnostic for ${logPrefix}=${filePath} : ${String(err)}`
+    );
+    return false;
+  }
+}
+function addCdsIndexerDiagnostic(projectDir, errorMessage, codeqlExePath2, sourceRoot2) {
+  return addDiagnostic(
+    projectDir,
+    errorMessage,
+    codeqlExePath2,
+    "cds/indexer-failure",
+    "Failure running @sap/cds-indexer for a SAP CAP CDS project",
+    "warning" /* Warning */,
+    "project directory",
+    sourceRoot2
+  );
+}
+function addCompilationDiagnostic(cdsFilePath, errorMessage, codeqlExePath2, sourceRoot2) {
+  return addDiagnostic(
+    cdsFilePath,
+    errorMessage,
+    codeqlExePath2,
+    "cds/compilation-failure",
+    "Failure to compile one or more SAP CAP CDS files",
+    "error" /* Error */,
+    "source file",
+    sourceRoot2
+  );
+}
+function addDependencyGraphDiagnostic(sourceRoot2, errorMessage, codeqlExePath2) {
+  return addDiagnostic(
+    sourceRoot2,
+    errorMessage,
+    codeqlExePath2,
+    "cds/dependency-graph-failure",
+    "CDS project dependency graph build failure",
+    "error" /* Error */,
+    "source root",
+    sourceRoot2
+  );
+}
+function addDependencyInstallationDiagnostic(sourceRoot2, errorMessage, codeqlExePath2) {
+  return addDiagnostic(
+    sourceRoot2,
+    errorMessage,
+    codeqlExePath2,
+    "cds/dependency-installation-failure",
+    "CDS dependency installation failure",
+    "error" /* Error */,
+    "source root",
+    sourceRoot2
+  );
+}
+function addEnvironmentSetupDiagnostic(sourceRoot2, errorMessage, codeqlExePath2) {
+  const contextFile = sourceRoot2;
+  return addDiagnostic(
+    contextFile,
+    errorMessage,
+    codeqlExePath2,
+    "cds/environment-setup-failure",
+    "CDS extractor environment setup failure",
+    "error" /* Error */,
+    "source root",
+    sourceRoot2
+  );
+}
+function addJavaScriptExtractorDiagnostic(filePath, errorMessage, codeqlExePath2, sourceRoot2) {
+  return addDiagnostic(
+    filePath,
+    errorMessage,
+    codeqlExePath2,
+    "cds/js-extractor-failure",
+    "Failure in JavaScript extractor for SAP CAP CDS files",
+    "error" /* Error */,
+    "extraction file",
+    sourceRoot2
+  );
+}
+function addNoCdsProjectsDiagnostic(sourceRoot2, message, codeqlExePath2) {
+  return addDiagnostic(
+    sourceRoot2,
+    message,
+    codeqlExePath2,
+    "cds/no-cds-projects",
+    "No CDS projects detected in source",
+    "warning" /* Warning */,
+    "source root",
+    sourceRoot2
+  );
+}
+
+// src/packageManager/cacheInstaller.ts
+var import_child_process7 = require("child_process");
+var import_crypto = require("crypto");
+var import_fs7 = require("fs");
+var import_path9 = require("path");
+
+// src/packageManager/versionResolver.ts
+var import_child_process6 = require("child_process");
+var availableVersionsCache = /* @__PURE__ */ new Map();
+var cacheStats = {
+  hits: 0,
+  misses: 0,
+  get hitRate() {
+    const total = this.hits + this.misses;
+    return total > 0 ? (this.hits / total * 100).toFixed(1) : "0.0";
+  }
+};
+function checkVersionCompatibility(cdsVersion, cdsDkVersion) {
+  if (cdsVersion === "latest" || cdsDkVersion === "latest") {
+    return { isCompatible: true };
+  }
+  const parsedCds = parseSemanticVersion(cdsVersion);
+  const parsedCdsDk = parseSemanticVersion(cdsDkVersion);
+  if (!parsedCds || !parsedCdsDk) {
+    return {
+      isCompatible: false,
+      warning: "Unable to parse version numbers for compatibility check"
+    };
+  }
+  const majorVersionsMatch = parsedCds.major === parsedCdsDk.major;
+  const minorVersionsMatch = parsedCds.minor === parsedCdsDk.minor;
+  if (!majorVersionsMatch) {
+    return {
+      isCompatible: false,
+      warning: `Major version mismatch: @sap/cds ${cdsVersion} and @sap/cds-dk ${cdsDkVersion} may not be compatible`
+    };
+  }
+  if (!minorVersionsMatch) {
+    return {
+      isCompatible: true,
+      warning: `Minor version difference: @sap/cds ${cdsVersion} and @sap/cds-dk ${cdsDkVersion} - consider aligning versions for best compatibility`
+    };
+  }
+  return { isCompatible: true };
+}
+function compareVersions(a, b) {
+  if (a.major !== b.major) return a.major - b.major;
+  if (a.minor !== b.minor) return a.minor - b.minor;
+  if (a.patch !== b.patch) return a.patch - b.patch;
+  if (a.prerelease && !b.prerelease) return -1;
+  if (!a.prerelease && b.prerelease) return 1;
+  if (a.prerelease && b.prerelease) {
+    return a.prerelease.localeCompare(b.prerelease);
+  }
+  return 0;
+}
+function findBestAvailableVersion(availableVersions, requiredVersion) {
+  const parsedVersions = availableVersions.map((v2) => parseSemanticVersion(v2)).filter((v2) => v2 !== null);
+  if (parsedVersions.length === 0) {
+    return null;
+  }
+  const satisfyingVersions = parsedVersions.filter((v2) => satisfiesRange(v2, requiredVersion));
+  if (satisfyingVersions.length > 0) {
+    satisfyingVersions.sort((a, b) => compareVersions(b, a));
+    return satisfyingVersions[0].original;
+  }
+  parsedVersions.sort((a, b) => compareVersions(b, a));
+  return parsedVersions[0].original;
+}
+function getAvailableVersions(packageName) {
+  if (availableVersionsCache.has(packageName)) {
+    cacheStats.hits++;
+    return availableVersionsCache.get(packageName);
+  }
+  cacheStats.misses++;
+  try {
+    const output = (0, import_child_process6.execSync)(`npm view ${packageName} versions --json`, {
+      encoding: "utf8",
+      timeout: 3e4
+      // 30 second timeout
+    });
+    const versions = JSON.parse(output);
+    let versionArray = [];
+    if (Array.isArray(versions)) {
+      versionArray = versions.filter((v2) => typeof v2 === "string");
+    } else if (typeof versions === "string") {
+      versionArray = [versions];
+    }
+    availableVersionsCache.set(packageName, versionArray);
+    return versionArray;
+  } catch (error) {
+    cdsExtractorLog("warn", `Failed to fetch versions for ${packageName}: ${String(error)}`);
+    availableVersionsCache.set(packageName, []);
+    return [];
+  }
+}
+function parseSemanticVersion(version) {
+  if (version === "latest") {
+    return {
+      major: 999,
+      minor: 999,
+      patch: 999,
+      original: version
+    };
+  }
+  const cleanVersion = version.replace(/^[\^~>=<]+/, "");
+  const semverRegex = /^(\d+)\.(\d+)\.(\d+)(?:-([a-zA-Z0-9.-]+))?(?:\+([a-zA-Z0-9.-]+))?$/;
+  const match2 = cleanVersion.match(semverRegex);
+  if (!match2) {
+    return null;
+  }
+  return {
+    major: parseInt(match2[1], 10),
+    minor: parseInt(match2[2], 10),
+    patch: parseInt(match2[3], 10),
+    prerelease: match2[4],
+    build: match2[5],
+    original: version
+  };
+}
+function isSatisfyingVersion(resolvedVersion, requestedVersion) {
+  if (resolvedVersion === requestedVersion || requestedVersion === "latest") {
+    return true;
+  }
+  const parsedResolved = parseSemanticVersion(resolvedVersion);
+  if (!parsedResolved) {
+    return false;
+  }
+  return satisfiesRange(parsedResolved, requestedVersion);
+}
+function resolveCdsVersions2(cdsVersion, cdsDkVersion) {
+  const cdsVersions = getAvailableVersions("@sap/cds");
+  const cdsDkVersions = getAvailableVersions("@sap/cds-dk");
+  const resolvedCdsVersion = findBestAvailableVersion(cdsVersions, cdsVersion);
+  const resolvedCdsDkVersion = findBestAvailableVersion(cdsDkVersions, cdsDkVersion);
+  const cdsExactMatch = resolvedCdsVersion === cdsVersion || cdsVersion === "latest" && resolvedCdsVersion !== null;
+  const cdsDkExactMatch = resolvedCdsDkVersion === cdsDkVersion || cdsDkVersion === "latest" && resolvedCdsDkVersion !== null;
+  const cdsSatisfiesRange = resolvedCdsVersion ? isSatisfyingVersion(resolvedCdsVersion, cdsVersion) : false;
+  const cdsDkSatisfiesRange = resolvedCdsDkVersion ? isSatisfyingVersion(resolvedCdsDkVersion, cdsDkVersion) : false;
+  const isFallback = !cdsSatisfiesRange || !cdsDkSatisfiesRange;
+  let warning;
+  if (resolvedCdsVersion && resolvedCdsDkVersion) {
+    const compatibility = checkVersionCompatibility(resolvedCdsVersion, resolvedCdsDkVersion);
+    const shouldShowWarning = isFallback || !cdsExactMatch || !cdsDkExactMatch || compatibility.warning && !compatibility.isCompatible;
+    if (compatibility.warning && shouldShowWarning) {
+      warning = compatibility.warning;
+    }
+  }
+  return {
+    resolvedCdsVersion,
+    resolvedCdsDkVersion,
+    cdsExactMatch,
+    cdsDkExactMatch,
+    warning,
+    isFallback
+  };
+}
+function satisfiesRange(version, range2) {
+  if (range2 === "latest") {
+    return true;
+  }
+  const rangeVersion = parseSemanticVersion(range2);
+  if (!rangeVersion) {
+    return false;
+  }
+  if (range2.startsWith("^")) {
+    return version.major === rangeVersion.major && compareVersions(version, rangeVersion) >= 0;
+  } else if (range2.startsWith("~")) {
+    return version.major === rangeVersion.major && version.minor === rangeVersion.minor && compareVersions(version, rangeVersion) >= 0;
+  } else if (range2.startsWith(">=")) {
+    return compareVersions(version, rangeVersion) >= 0;
+  } else if (range2.startsWith(">")) {
+    return compareVersions(version, rangeVersion) > 0;
+  } else if (range2.startsWith("<=")) {
+    return compareVersions(version, rangeVersion) <= 0;
+  } else if (range2.startsWith("<")) {
+    return compareVersions(version, rangeVersion) < 0;
+  } else {
+    return compareVersions(version, rangeVersion) === 0;
+  }
+}
+
+// src/packageManager/cacheInstaller.ts
+var cacheSubDirName = ".cds-extractor-cache";
+function addDependencyVersionWarning(packageJsonPath, warningMessage, codeqlExePath2) {
+  try {
+    (0, import_child_process7.execFileSync)(codeqlExePath2, [
+      "database",
+      "add-diagnostic",
+      "--extractor-name=cds",
+      "--ready-for-status-page",
+      "--source-id=cds/dependency-version-fallback",
+      "--source-name=Using fallback versions for SAP CAP CDS dependencies",
+      `--severity=${"warning" /* Warning */}`,
+      `--markdown-message=${warningMessage}`,
+      `--file-path=${(0, import_path9.resolve)(packageJsonPath)}`,
+      "--",
+      `${process.env.CODEQL_EXTRACTOR_CDS_WIP_DATABASE ?? ""}`
+    ]);
+    cdsExtractorLog("info", `Added warning diagnostic for dependency fallback: ${packageJsonPath}`);
+    return true;
+  } catch (err) {
+    cdsExtractorLog(
+      "error",
+      `Failed to add warning diagnostic for ${packageJsonPath}: ${String(err)}`
+    );
+    return false;
   }
-  // second phase: multi-pattern dedupes
-  // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
-  // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
-  // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
-  //
-  // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
-  // ^-- not valid because ** doens't follow symlinks
-  secondPhasePreProcess(globParts) {
-    for (let i = 0; i < globParts.length - 1; i++) {
-      for (let j2 = i + 1; j2 < globParts.length; j2++) {
-        const matched = this.partsMatch(globParts[i], globParts[j2], !this.preserveMultipleSlashes);
-        if (matched) {
-          globParts[i] = [];
-          globParts[j2] = matched;
-          break;
-        }
-      }
+}
+function findNearestNpmrc(startDir) {
+  let current = (0, import_path9.resolve)(startDir);
+  while (true) {
+    const candidate = (0, import_path9.join)(current, ".npmrc");
+    if ((0, import_fs7.existsSync)(candidate)) {
+      return candidate;
     }
-    return globParts.filter((gs2) => gs2.length);
-  }
-  partsMatch(a, b, emptyGSMatch = false) {
-    let ai2 = 0;
-    let bi2 = 0;
-    let result = [];
-    let which = "";
-    while (ai2 < a.length && bi2 < b.length) {
-      if (a[ai2] === b[bi2]) {
-        result.push(which === "b" ? b[bi2] : a[ai2]);
-        ai2++;
-        bi2++;
-      } else if (emptyGSMatch && a[ai2] === "**" && b[bi2] === a[ai2 + 1]) {
-        result.push(a[ai2]);
-        ai2++;
-      } else if (emptyGSMatch && b[bi2] === "**" && a[ai2] === b[bi2 + 1]) {
-        result.push(b[bi2]);
-        bi2++;
-      } else if (a[ai2] === "*" && b[bi2] && (this.options.dot || !b[bi2].startsWith(".")) && b[bi2] !== "**") {
-        if (which === "b")
-          return false;
-        which = "a";
-        result.push(a[ai2]);
-        ai2++;
-        bi2++;
-      } else if (b[bi2] === "*" && a[ai2] && (this.options.dot || !a[ai2].startsWith(".")) && a[ai2] !== "**") {
-        if (which === "a")
-          return false;
-        which = "b";
-        result.push(b[bi2]);
-        ai2++;
-        bi2++;
-      } else {
-        return false;
-      }
+    const parent = (0, import_path9.dirname)(current);
+    if (parent === current) {
+      return void 0;
     }
-    return a.length === b.length && result;
+    current = parent;
   }
-  parseNegate() {
-    if (this.nonegate)
-      return;
-    const pattern = this.pattern;
-    let negate = false;
-    let negateOffset = 0;
-    for (let i = 0; i < pattern.length && pattern.charAt(i) === "!"; i++) {
-      negate = !negate;
-      negateOffset++;
+}
+function copyNpmrcToCache(cacheDir, projectDir) {
+  const npmrcPath = findNearestNpmrc(projectDir);
+  if (!npmrcPath) {
+    return;
+  }
+  const dest = (0, import_path9.join)(cacheDir, ".npmrc");
+  try {
+    (0, import_fs7.copyFileSync)(npmrcPath, dest);
+    cdsExtractorLog("info", `Copied .npmrc from '${npmrcPath}' to cache directory '${cacheDir}'`);
+  } catch (err) {
+    cdsExtractorLog(
+      "warn",
+      `Failed to copy .npmrc to cache directory: ${err instanceof Error ? err.message : String(err)}`
+    );
+  }
+}
+function cacheInstallDependencies(dependencyGraph2, sourceRoot2, codeqlExePath2) {
+  if (dependencyGraph2.projects.size === 0) {
+    cdsExtractorLog("info", "No CDS projects found for dependency installation.");
+    cdsExtractorLog(
+      "info",
+      "This is expected if the source contains no CAP/CDS projects and should be handled by the caller."
+    );
+    return /* @__PURE__ */ new Map();
+  }
+  const dependencyCombinations = extractUniqueDependencyCombinations(dependencyGraph2.projects);
+  if (dependencyCombinations.length === 0) {
+    cdsExtractorLog(
+      "error",
+      "No CDS dependencies found in any project. This means projects were detected but lack proper @sap/cds dependencies."
+    );
+    cdsExtractorLog(
+      "info",
+      "Will attempt to use system-installed CDS tools if available, but compilation may fail."
+    );
+    return /* @__PURE__ */ new Map();
+  }
+  cdsExtractorLog(
+    "info",
+    `Found ${dependencyCombinations.length} unique CDS dependency combination(s).`
+  );
+  for (const combination of dependencyCombinations) {
+    const { cdsVersion, cdsDkVersion, hash, resolvedCdsVersion, resolvedCdsDkVersion, isFallback } = combination;
+    const actualCdsVersion = resolvedCdsVersion ?? cdsVersion;
+    const actualCdsDkVersion = resolvedCdsDkVersion ?? cdsDkVersion;
+    const fallbackNote = isFallback ? " (using fallback versions)" : "";
+    const indexerNote = combination.cdsIndexerVersion ? `, @sap/cds-indexer@${combination.cdsIndexerVersion}` : "";
+    cdsExtractorLog(
+      "info",
+      `Dependency combination ${hash.substring(0, 8)}: @sap/cds@${actualCdsVersion}, @sap/cds-dk@${actualCdsDkVersion}${indexerNote}${fallbackNote}`
+    );
+  }
+  const cacheRootDir = (0, import_path9.join)(sourceRoot2, cacheSubDirName);
+  cdsExtractorLog(
+    "info",
+    `Using cache directory '${cacheSubDirName}' within source root directory '${cacheRootDir}'`
+  );
+  if (!(0, import_fs7.existsSync)(cacheRootDir)) {
+    try {
+      (0, import_fs7.mkdirSync)(cacheRootDir, { recursive: true });
+      cdsExtractorLog("info", `Created cache directory: ${cacheRootDir}`);
+    } catch (err) {
+      cdsExtractorLog(
+        "warn",
+        `Failed to create cache directory: ${err instanceof Error ? err.message : String(err)}`
+      );
+      cdsExtractorLog("info", "Skipping dependency installation due to cache directory failure.");
+      return /* @__PURE__ */ new Map();
     }
-    if (negateOffset)
-      this.pattern = pattern.slice(negateOffset);
-    this.negate = negate;
+  } else {
+    cdsExtractorLog("info", `Cache directory already exists: ${cacheRootDir}`);
   }
-  // set partial to true to test if, for example,
-  // "/a/b" matches the start of "/*/b/*/d"
-  // Partial means, if you run out of file before you run
-  // out of pattern, then that's fine, as long as all
-  // the parts match.
-  matchOne(file, pattern, partial = false) {
-    let fileStartIndex = 0;
-    let patternStartIndex = 0;
-    if (this.isWindows) {
-      const fileDrive = typeof file[0] === "string" && /^[a-z]:$/i.test(file[0]);
-      const fileUNC = !fileDrive && file[0] === "" && file[1] === "" && file[2] === "?" && /^[a-z]:$/i.test(file[3]);
-      const patternDrive = typeof pattern[0] === "string" && /^[a-z]:$/i.test(pattern[0]);
-      const patternUNC = !patternDrive && pattern[0] === "" && pattern[1] === "" && pattern[2] === "?" && typeof pattern[3] === "string" && /^[a-z]:$/i.test(pattern[3]);
-      const fdi = fileUNC ? 3 : fileDrive ? 0 : void 0;
-      const pdi = patternUNC ? 3 : patternDrive ? 0 : void 0;
-      if (typeof fdi === "number" && typeof pdi === "number") {
-        const [fd, pd] = [
-          file[fdi],
-          pattern[pdi]
-        ];
-        if (fd.toLowerCase() === pd.toLowerCase()) {
-          pattern[pdi] = fd;
-          patternStartIndex = pdi;
-          fileStartIndex = fdi;
-        }
+  const projectCacheDirMap2 = /* @__PURE__ */ new Map();
+  let successfulInstallations = 0;
+  for (const combination of dependencyCombinations) {
+    const { cdsVersion, cdsDkVersion, hash } = combination;
+    const { resolvedCdsVersion, resolvedCdsDkVersion } = combination;
+    const cacheDirName = `cds-${hash}`;
+    const cacheDir = (0, import_path9.join)(cacheRootDir, cacheDirName);
+    cdsExtractorLog(
+      "info",
+      `Processing dependency combination ${hash.substring(0, 8)} in cache directory: ${cacheDirName}`
+    );
+    if (!(0, import_fs7.existsSync)(cacheDir)) {
+      try {
+        (0, import_fs7.mkdirSync)(cacheDir, { recursive: true });
+        cdsExtractorLog("info", `Created cache subdirectory: ${cacheDirName}`);
+      } catch (err) {
+        cdsExtractorLog(
+          "error",
+          `Failed to create cache directory for combination ${hash.substring(0, 8)} (${cacheDirName}): ${err instanceof Error ? err.message : String(err)}`
+        );
+        continue;
+      }
+      const actualCdsVersion = resolvedCdsVersion ?? cdsVersion;
+      const actualCdsDkVersion = resolvedCdsDkVersion ?? cdsDkVersion;
+      const cacheDeps = {
+        "@sap/cds": actualCdsVersion,
+        "@sap/cds-dk": actualCdsDkVersion
+      };
+      if (combination.cdsIndexerVersion) {
+        cacheDeps["@sap/cds-indexer"] = combination.cdsIndexerVersion;
+        cdsExtractorLog(
+          "info",
+          `Including @sap/cds-indexer@${combination.cdsIndexerVersion} in cache for combination ${hash.substring(0, 8)}`
+        );
+      }
+      const packageJson = {
+        name: `cds-extractor-cache-${hash}`,
+        version: "1.0.0",
+        private: true,
+        dependencies: cacheDeps
+      };
+      try {
+        (0, import_fs7.writeFileSync)((0, import_path9.join)(cacheDir, "package.json"), JSON.stringify(packageJson, null, 2));
+        cdsExtractorLog("info", `Created package.json in cache subdirectory: ${cacheDirName}`);
+      } catch (err) {
+        cdsExtractorLog(
+          "error",
+          `Failed to create package.json in cache directory ${cacheDirName}: ${err instanceof Error ? err.message : String(err)}`
+        );
+        continue;
       }
     }
-    const { optimizationLevel = 1 } = this.options;
-    if (optimizationLevel >= 2) {
-      file = this.levelTwoFileOptimize(file);
+    const npmrcProjectDir = Array.from(dependencyGraph2.projects.values()).map((project) => project.projectDir).find((projectDir) => projectDir && (0, import_fs7.existsSync)((0, import_path9.join)(sourceRoot2, projectDir, ".npmrc")));
+    if (npmrcProjectDir) {
+      copyNpmrcToCache(cacheDir, (0, import_path9.join)(sourceRoot2, npmrcProjectDir));
+    }
+    const samplePackageJsonPath = Array.from(dependencyGraph2.projects.values()).find(
+      (project) => project.packageJson
+    )?.projectDir;
+    const packageJsonPath = samplePackageJsonPath ? (0, import_path9.join)(sourceRoot2, samplePackageJsonPath, "package.json") : void 0;
+    const installSuccess = installDependenciesInCache(
+      cacheDir,
+      combination,
+      cacheDirName,
+      packageJsonPath,
+      codeqlExePath2
+    );
+    if (!installSuccess) {
+      cdsExtractorLog(
+        "warn",
+        `Skipping failed dependency combination ${hash.substring(0, 8)} (cache directory: ${cacheDirName})`
+      );
+      continue;
+    }
+    successfulInstallations++;
+    for (const [projectDir, project] of Array.from(dependencyGraph2.projects.entries())) {
+      if (!project.packageJson) {
+        continue;
+      }
+      const p_cdsVersion = project.packageJson.dependencies?.["@sap/cds"] ?? "latest";
+      const p_cdsDkVersion = project.packageJson.devDependencies?.["@sap/cds-dk"] ?? p_cdsVersion;
+      const p_cdsIndexerVersion = project.packageJson.dependencies?.["@sap/cds-indexer"] ?? project.packageJson.devDependencies?.["@sap/cds-indexer"] ?? void 0;
+      const projectResolvedVersions = resolveCdsVersions2(p_cdsVersion, p_cdsDkVersion);
+      const projectActualCdsVersion = projectResolvedVersions.resolvedCdsVersion ?? p_cdsVersion;
+      const projectActualCdsDkVersion = projectResolvedVersions.resolvedCdsDkVersion ?? p_cdsDkVersion;
+      const combinationActualCdsVersion = combination.resolvedCdsVersion ?? combination.cdsVersion;
+      const combinationActualCdsDkVersion = combination.resolvedCdsDkVersion ?? combination.cdsDkVersion;
+      if (projectActualCdsVersion === combinationActualCdsVersion && projectActualCdsDkVersion === combinationActualCdsDkVersion && p_cdsIndexerVersion === combination.cdsIndexerVersion) {
+        projectCacheDirMap2.set(projectDir, cacheDir);
+      }
     }
-    if (pattern.includes(GLOBSTAR)) {
-      return this.#matchGlobstar(file, pattern, partial, fileStartIndex, patternStartIndex);
+  }
+  if (successfulInstallations === 0) {
+    cdsExtractorLog("error", "Failed to install any dependency combinations.");
+    if (dependencyCombinations.length > 0) {
+      cdsExtractorLog(
+        "error",
+        `All ${dependencyCombinations.length} dependency combination(s) failed to install. This will likely cause compilation failures.`
+      );
     }
-    return this.#matchOne(file, pattern, partial, fileStartIndex, patternStartIndex);
+  } else if (successfulInstallations < dependencyCombinations.length) {
+    cdsExtractorLog(
+      "warn",
+      `Successfully installed ${successfulInstallations} out of ${dependencyCombinations.length} dependency combinations.`
+    );
+  } else {
+    cdsExtractorLog("info", "All dependency combinations installed successfully.");
   }
-  #matchGlobstar(file, pattern, partial, fileIndex, patternIndex) {
-    const firstgs = pattern.indexOf(GLOBSTAR, patternIndex);
-    const lastgs = pattern.lastIndexOf(GLOBSTAR);
-    const [head, body, tail] = partial ? [
-      pattern.slice(patternIndex, firstgs),
-      pattern.slice(firstgs + 1),
-      []
-    ] : [
-      pattern.slice(patternIndex, firstgs),
-      pattern.slice(firstgs + 1, lastgs),
-      pattern.slice(lastgs + 1)
-    ];
-    if (head.length) {
-      const fileHead = file.slice(fileIndex, fileIndex + head.length);
-      if (!this.#matchOne(fileHead, head, partial, 0, 0)) {
-        return false;
-      }
-      fileIndex += head.length;
-      patternIndex += head.length;
+  if (projectCacheDirMap2.size > 0) {
+    cdsExtractorLog("info", `Project to cache directory mappings:`);
+    for (const [projectDir, cacheDir] of Array.from(projectCacheDirMap2.entries())) {
+      const cacheDirName = (0, import_path9.join)(cacheDir).split("/").pop() ?? "unknown";
+      cdsExtractorLog("info", `  ${projectDir} \u2192 ${cacheDirName}`);
     }
-    let fileTailMatch = 0;
-    if (tail.length) {
-      if (tail.length + fileIndex > file.length)
-        return false;
-      let tailStart = file.length - tail.length;
-      if (this.#matchOne(file, tail, partial, tailStart, 0)) {
-        fileTailMatch = tail.length;
+  } else {
+    cdsExtractorLog(
+      "warn",
+      "No project to cache directory mappings created. Projects may not have compatible dependencies installed."
+    );
+  }
+  return projectCacheDirMap2;
+}
+function extractUniqueDependencyCombinations(projects) {
+  const combinations = /* @__PURE__ */ new Map();
+  for (const project of Array.from(projects.values())) {
+    if (!project.packageJson) {
+      continue;
+    }
+    const cdsVersion = project.packageJson.dependencies?.["@sap/cds"] ?? "latest";
+    const cdsDkVersion = project.packageJson.devDependencies?.["@sap/cds-dk"] ?? cdsVersion;
+    const cdsIndexerVersion = project.packageJson.dependencies?.["@sap/cds-indexer"] ?? project.packageJson.devDependencies?.["@sap/cds-indexer"] ?? void 0;
+    cdsExtractorLog(
+      "info",
+      `Resolving available dependency versions for project '${project.projectDir}' with dependencies: [@sap/cds@${cdsVersion}, @sap/cds-dk@${cdsDkVersion}]`
+    );
+    const resolvedVersions = resolveCdsVersions2(cdsVersion, cdsDkVersion);
+    const { resolvedCdsVersion, resolvedCdsDkVersion, ...rest } = resolvedVersions;
+    if (resolvedCdsVersion && resolvedCdsDkVersion) {
+      let statusMsg;
+      if (resolvedVersions.cdsExactMatch && resolvedVersions.cdsDkExactMatch) {
+        statusMsg = " (exact match)";
+      } else if (!resolvedVersions.isFallback) {
+        statusMsg = " (compatible versions)";
       } else {
-        if (file[file.length - 1] !== "" || fileIndex + tail.length === file.length) {
-          return false;
-        }
-        tailStart--;
-        if (!this.#matchOne(file, tail, partial, tailStart, 0)) {
-          return false;
-        }
-        fileTailMatch = tail.length + 1;
+        statusMsg = " (using fallback versions)";
       }
+      cdsExtractorLog(
+        "info",
+        `Resolved to: @sap/cds@${resolvedCdsVersion}, @sap/cds-dk@${resolvedCdsDkVersion}${statusMsg}`
+      );
+    } else {
+      cdsExtractorLog(
+        "error",
+        `Failed to resolve CDS dependencies: @sap/cds@${cdsVersion}, @sap/cds-dk@${cdsDkVersion}`
+      );
     }
-    if (!body.length) {
-      let sawSome = !!fileTailMatch;
-      for (let i2 = fileIndex; i2 < file.length - fileTailMatch; i2++) {
-        const f = String(file[i2]);
-        sawSome = true;
-        if (f === "." || f === ".." || !this.options.dot && f.startsWith(".")) {
-          return false;
-        }
-      }
-      return partial || sawSome;
+    const actualCdsVersion = resolvedCdsVersion ?? cdsVersion;
+    const actualCdsDkVersion = resolvedCdsDkVersion ?? cdsDkVersion;
+    const hashInput = cdsIndexerVersion ? `${actualCdsVersion}|${actualCdsDkVersion}|${cdsIndexerVersion}` : `${actualCdsVersion}|${actualCdsDkVersion}`;
+    const hash = (0, import_crypto.createHash)("sha256").update(hashInput).digest("hex");
+    if (!combinations.has(hash)) {
+      combinations.set(hash, {
+        cdsVersion,
+        cdsDkVersion,
+        cdsIndexerVersion,
+        hash,
+        resolvedCdsVersion: resolvedCdsVersion ?? void 0,
+        resolvedCdsDkVersion: resolvedCdsDkVersion ?? void 0,
+        ...rest
+      });
     }
-    const bodySegments = [[[], 0]];
-    let currentBody = bodySegments[0];
-    let nonGsParts = 0;
-    const nonGsPartsSums = [0];
-    for (const b of body) {
-      if (b === GLOBSTAR) {
-        nonGsPartsSums.push(nonGsParts);
-        currentBody = [[], 0];
-        bodySegments.push(currentBody);
-      } else {
-        currentBody[0].push(b);
-        nonGsParts++;
-      }
+  }
+  return Array.from(combinations.values());
+}
+function installDependenciesInCache(cacheDir, combination, cacheDirName, packageJsonPath, codeqlExePath2) {
+  const { resolvedCdsVersion, resolvedCdsDkVersion, isFallback, warning } = combination;
+  const nodeModulesExists = (0, import_fs7.existsSync)((0, import_path9.join)(cacheDir, "node_modules", "@sap", "cds")) && (0, import_fs7.existsSync)((0, import_path9.join)(cacheDir, "node_modules", "@sap", "cds-dk"));
+  if (nodeModulesExists) {
+    cdsExtractorLog(
+      "info",
+      `Using cached dependencies for @sap/cds@${resolvedCdsVersion} and @sap/cds-dk@${resolvedCdsDkVersion} from ${cacheDirName}`
+    );
+    if (isFallback && warning && packageJsonPath && codeqlExePath2) {
+      addDependencyVersionWarning(packageJsonPath, warning, codeqlExePath2);
     }
-    let i = bodySegments.length - 1;
-    const fileLength = file.length - fileTailMatch;
-    for (const b of bodySegments) {
-      b[1] = fileLength - (nonGsPartsSums[i--] + b[0].length);
+    return true;
+  }
+  if (!resolvedCdsVersion || !resolvedCdsDkVersion) {
+    cdsExtractorLog("error", "Cannot install dependencies: no compatible versions found");
+    return false;
+  }
+  cdsExtractorLog(
+    "info",
+    `Installing @sap/cds@${resolvedCdsVersion} and @sap/cds-dk@${resolvedCdsDkVersion} in cache directory: ${cacheDirName}`
+  );
+  if (isFallback && warning) {
+    cdsExtractorLog("warn", warning);
+  }
+  try {
+    (0, import_child_process7.execFileSync)(npmExecutable(), ["install", "--quiet", "--no-audit", "--no-fund"], {
+      cwd: cacheDir,
+      stdio: "inherit"
+    });
+    if (isFallback && warning && packageJsonPath && codeqlExePath2) {
+      addDependencyVersionWarning(packageJsonPath, warning, codeqlExePath2);
     }
-    return !!this.#matchGlobStarBodySections(file, bodySegments, fileIndex, 0, partial, 0, !!fileTailMatch);
+    return true;
+  } catch (err) {
+    const errorMessage = `Failed to install resolved dependencies in cache directory ${cacheDir}: ${err instanceof Error ? err.message : String(err)}`;
+    cdsExtractorLog("error", errorMessage);
+    return false;
   }
-  // return false for "nope, not matching"
-  // return null for "not matching, cannot keep trying"
-  #matchGlobStarBodySections(file, bodySegments, fileIndex, bodyIndex, partial, globStarDepth, sawTail) {
-    const bs2 = bodySegments[bodyIndex];
-    if (!bs2) {
-      for (let i = fileIndex; i < file.length; i++) {
-        sawTail = true;
-        const f = file[i];
-        if (f === "." || f === ".." || !this.options.dot && f.startsWith(".")) {
-          return false;
+}
+
+// src/packageManager/projectInstaller.ts
+var import_child_process8 = require("child_process");
+var import_path10 = require("path");
+function needsFullDependencyInstallation(project) {
+  if (project.retryStatus?.fullDependenciesInstalled) {
+    return false;
+  }
+  const hasFailedTasks = project.compilationTasks.some(
+    (task) => task.status === "failed" && !task.retryInfo?.hasBeenRetried
+  );
+  return hasFailedTasks && project.packageJson !== void 0;
+}
+function projectInstallDependencies(project, sourceRoot2) {
+  const startTime = Date.now();
+  const projectPath = (0, import_path10.join)(sourceRoot2, project.projectDir);
+  const result = {
+    success: false,
+    projectDir: projectPath,
+    warnings: [],
+    durationMs: 0,
+    timedOut: false
+  };
+  try {
+    if (!project.packageJson) {
+      result.error = "No package.json found for project";
+      return result;
+    }
+    cdsExtractorLog(
+      "info",
+      `Installing full dependencies for project ${project.projectDir} in project's node_modules`
+    );
+    try {
+      (0, import_child_process8.execFileSync)(
+        npmExecutable(),
+        ["install", "--ignore-scripts", "--quiet", "--no-audit", "--no-fund"],
+        {
+          cwd: projectPath,
+          stdio: "inherit",
+          timeout: 12e4
+          // 2-minute timeout
         }
+      );
+      result.success = true;
+      cdsExtractorLog(
+        "info",
+        `Successfully installed full dependencies for project ${project.projectDir}`
+      );
+    } catch (execError) {
+      if (execError instanceof Error && "signal" in execError && execError.signal === "SIGTERM") {
+        result.timedOut = true;
+        result.error = "Dependency installation timed out";
+      } else {
+        result.error = `npm install failed: ${String(execError)}`;
       }
-      return sawTail;
+      result.warnings.push(
+        `Dependency installation failed but will still attempt retry compilation: ${result.error}`
+      );
+      cdsExtractorLog("warn", result.warnings[0]);
     }
-    const [body, after] = bs2;
-    while (fileIndex <= after) {
-      const m = this.#matchOne(file.slice(0, fileIndex + body.length), body, partial, fileIndex, 0);
-      if (m && globStarDepth < this.maxGlobstarRecursion) {
-        const sub = this.#matchGlobStarBodySections(file, bodySegments, fileIndex + body.length, bodyIndex + 1, partial, globStarDepth + 1, sawTail);
-        if (sub !== false) {
-          return sub;
+  } catch (error) {
+    result.error = `Failed to install full dependencies: ${String(error)}`;
+    cdsExtractorLog("error", result.error);
+  } finally {
+    result.durationMs = Date.now() - startTime;
+  }
+  return result;
+}
+
+// src/cds/compiler/retry.ts
+function addCompilationDiagnosticsForFailedTasks(dependencyGraph2, codeqlExePath2, sourceRoot2) {
+  for (const project of dependencyGraph2.projects.values()) {
+    for (const task of project.compilationTasks) {
+      if (task.status === "failed") {
+        const shouldAddDiagnostic = task.retryInfo?.hasBeenRetried ?? !task.retryInfo;
+        if (shouldAddDiagnostic) {
+          for (const sourceFile of task.sourceFiles) {
+            addCompilationDiagnostic(
+              sourceFile,
+              task.errorSummary ?? "Compilation failed",
+              codeqlExePath2,
+              sourceRoot2
+            );
+          }
         }
       }
-      const f = file[fileIndex];
-      if (f === "." || f === ".." || !this.options.dot && f.startsWith(".")) {
-        return false;
-      }
-      fileIndex++;
     }
-    return partial || null;
   }
-  #matchOne(file, pattern, partial, fileIndex, patternIndex) {
-    let fi2;
-    let pi2;
-    let pl;
-    let fl;
-    for (fi2 = fileIndex, pi2 = patternIndex, fl = file.length, pl = pattern.length; fi2 < fl && pi2 < pl; fi2++, pi2++) {
-      this.debug("matchOne loop");
-      let p = pattern[pi2];
-      let f = file[fi2];
-      this.debug(pattern, p, f);
-      if (p === false || p === GLOBSTAR) {
-        return false;
+}
+function orchestrateRetryAttempts(dependencyGraph2, codeqlExePath2) {
+  const startTime = Date.now();
+  let dependencyInstallationStartTime = 0;
+  let dependencyInstallationEndTime = 0;
+  let retryCompilationStartTime = 0;
+  let retryCompilationEndTime = 0;
+  const result = {
+    success: true,
+    projectsWithRetries: [],
+    totalTasksRequiringRetry: 0,
+    totalSuccessfulRetries: 0,
+    totalFailedRetries: 0,
+    projectsWithSuccessfulDependencyInstallation: [],
+    projectsWithFailedDependencyInstallation: [],
+    retryDurationMs: 0,
+    dependencyInstallationDurationMs: 0,
+    retryCompilationDurationMs: 0
+  };
+  try {
+    cdsExtractorLog("info", "Identifying tasks requiring retry...");
+    const tasksRequiringRetry = identifyTasksRequiringRetry(dependencyGraph2);
+    if (tasksRequiringRetry.size === 0) {
+      cdsExtractorLog("info", "No tasks require retry - all compilations successful");
+      return result;
+    }
+    result.totalTasksRequiringRetry = Array.from(tasksRequiringRetry.values()).reduce(
+      (sum, tasks) => sum + tasks.length,
+      0
+    );
+    dependencyGraph2.retryStatus.totalTasksRequiringRetry = result.totalTasksRequiringRetry;
+    dependencyInstallationStartTime = Date.now();
+    for (const [projectDir, failedTasks] of tasksRequiringRetry) {
+      const project = dependencyGraph2.projects.get(projectDir);
+      if (!project) {
+        continue;
       }
-      let hit;
-      if (typeof p === "string") {
-        hit = f === p;
-        this.debug("string match", p, f, hit);
-      } else {
-        hit = p.test(f);
-        this.debug("pattern match", p, f, hit);
+      if (needsFullDependencyInstallation(project)) {
+        try {
+          const installResult = projectInstallDependencies(project, dependencyGraph2.sourceRootDir);
+          project.retryStatus ??= {
+            fullDependenciesInstalled: false,
+            tasksRequiringRetry: failedTasks.length,
+            tasksRetried: 0,
+            installationErrors: []
+          };
+          if (installResult.success) {
+            project.retryStatus.fullDependenciesInstalled = true;
+            result.projectsWithSuccessfulDependencyInstallation.push(projectDir);
+            dependencyGraph2.retryStatus.projectsWithFullDependencies.add(projectDir);
+          } else {
+            project.retryStatus.installationErrors = [
+              ...project.retryStatus.installationErrors ?? [],
+              installResult.error ?? "Unknown installation error"
+            ];
+            result.projectsWithFailedDependencyInstallation.push(projectDir);
+          }
+          if (installResult.warnings.length > 0) {
+            for (const warning of installResult.warnings) {
+              dependencyGraph2.errors.warnings.push({
+                phase: "retry_dependency_installation",
+                message: warning,
+                timestamp: /* @__PURE__ */ new Date(),
+                context: projectDir
+              });
+            }
+          }
+        } catch (error) {
+          const errorMessage = `Failed to install full dependencies for project ${projectDir}: ${String(error)}`;
+          cdsExtractorLog("error", errorMessage);
+          dependencyGraph2.errors.critical.push({
+            phase: "retry_dependency_installation",
+            message: errorMessage,
+            timestamp: /* @__PURE__ */ new Date()
+          });
+          result.projectsWithFailedDependencyInstallation.push(projectDir);
+        }
       }
-      if (!hit)
-        return false;
+      dependencyGraph2.retryStatus.projectsRequiringFullDependencies.add(projectDir);
     }
-    if (fi2 === fl && pi2 === pl) {
-      return true;
-    } else if (fi2 === fl) {
-      return partial;
-    } else if (pi2 === pl) {
-      return fi2 === fl - 1 && file[fi2] === "";
-    } else {
-      throw new Error("wtf?");
+    dependencyInstallationEndTime = Date.now();
+    result.dependencyInstallationDurationMs = dependencyInstallationEndTime - dependencyInstallationStartTime;
+    cdsExtractorLog("info", "Executing retry compilation attempts...");
+    retryCompilationStartTime = Date.now();
+    for (const [projectDir, failedTasks] of tasksRequiringRetry) {
+      const project = dependencyGraph2.projects.get(projectDir);
+      if (!project) {
+        continue;
+      }
+      const retryExecutionResult = retryCompilationTasksForProject(
+        failedTasks,
+        project,
+        dependencyGraph2
+      );
+      result.projectsWithRetries.push(projectDir);
+      result.totalSuccessfulRetries += retryExecutionResult.successfulRetries;
+      result.totalFailedRetries += retryExecutionResult.failedRetries;
+      if (project.retryStatus) {
+        project.retryStatus.tasksRetried = retryExecutionResult.retriedTasks.length;
+      }
     }
+    retryCompilationEndTime = Date.now();
+    result.retryCompilationDurationMs = retryCompilationEndTime - retryCompilationStartTime;
+    updateCdsDependencyGraphStatus(dependencyGraph2, dependencyGraph2.sourceRootDir);
+    updateDependencyGraphWithRetryResults(dependencyGraph2, result);
+    addCompilationDiagnosticsForFailedTasks(
+      dependencyGraph2,
+      codeqlExePath2,
+      dependencyGraph2.sourceRootDir
+    );
+    result.success = result.totalSuccessfulRetries > 0 || result.totalTasksRequiringRetry === 0;
+  } catch (error) {
+    const errorMessage = `Retry orchestration failed: ${String(error)}`;
+    cdsExtractorLog("error", errorMessage);
+    dependencyGraph2.errors.critical.push({
+      phase: "retry_orchestration",
+      message: errorMessage,
+      timestamp: /* @__PURE__ */ new Date()
+    });
+    result.success = false;
+  } finally {
+    result.retryDurationMs = Date.now() - startTime;
   }
-  braceExpand() {
-    return braceExpand(this.pattern, this.options);
-  }
-  parse(pattern) {
-    assertValidPattern(pattern);
-    const options = this.options;
-    if (pattern === "**")
-      return GLOBSTAR;
-    if (pattern === "")
-      return "";
-    let m;
-    let fastTest = null;
-    if (m = pattern.match(starRE)) {
-      fastTest = options.dot ? starTestDot : starTest;
-    } else if (m = pattern.match(starDotExtRE)) {
-      fastTest = (options.nocase ? options.dot ? starDotExtTestNocaseDot : starDotExtTestNocase : options.dot ? starDotExtTestDot : starDotExtTest)(m[1]);
-    } else if (m = pattern.match(qmarksRE)) {
-      fastTest = (options.nocase ? options.dot ? qmarksTestNocaseDot : qmarksTestNocase : options.dot ? qmarksTestDot : qmarksTest)(m);
-    } else if (m = pattern.match(starDotStarRE)) {
-      fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
-    } else if (m = pattern.match(dotStarRE)) {
-      fastTest = dotStarTest;
+  return result;
+}
+function retryCompilationTask(task, retryCommand, projectDir, dependencyGraph2) {
+  const startTime = /* @__PURE__ */ new Date();
+  const attemptId = `${task.id}_retry_${startTime.getTime()}`;
+  const cdsCommandString = retryCommand.originalCommand;
+  const attempt = {
+    id: attemptId,
+    cdsCommand: cdsCommandString,
+    cacheDir: projectDir,
+    timestamp: startTime,
+    result: {
+      success: false,
+      timestamp: startTime
     }
-    const re2 = AST.fromGlob(pattern, this.options).toMMPattern();
-    if (fastTest && typeof re2 === "object") {
-      Reflect.defineProperty(re2, "test", { value: fastTest });
+  };
+  try {
+    const primarySourceFile = task.sourceFiles[0];
+    const compilationResult = compileCdsToJson(
+      primarySourceFile,
+      dependencyGraph2.sourceRootDir,
+      cdsCommandString,
+      projectDir,
+      // Convert CDS projects to BasicCdsProject format expected by compileCdsToJson
+      new Map(
+        Array.from(dependencyGraph2.projects.entries()).map(([key, value]) => [
+          key,
+          {
+            cdsFiles: value.cdsFiles,
+            compilationTargets: value.compilationTargets,
+            expectedOutputFile: value.expectedOutputFile,
+            projectDir: value.projectDir,
+            dependencies: value.dependencies,
+            imports: value.imports,
+            packageJson: value.packageJson
+          }
+        ])
+      ),
+      task.projectDir
+    );
+    attempt.result = {
+      ...compilationResult,
+      timestamp: startTime
+    };
+  } catch (error) {
+    attempt.error = {
+      message: String(error),
+      stack: error instanceof Error ? error.stack : void 0
+    };
+  }
+  return attempt;
+}
+function retryCompilationTasksForProject(tasksToRetry, project, dependencyGraph2) {
+  const startTime = Date.now();
+  const result = {
+    projectDir: project.projectDir,
+    retriedTasks: [],
+    successfulRetries: 0,
+    failedRetries: 0,
+    fullDependenciesAvailable: Boolean(project.retryStatus?.fullDependenciesInstalled),
+    executionDurationMs: 0,
+    retryErrors: []
+  };
+  cdsExtractorLog(
+    "info",
+    `Retrying ${tasksToRetry.length} task(s) for project ${project.projectDir} using ${result.fullDependenciesAvailable ? "full" : "minimal"} dependencies`
+  );
+  for (const task of tasksToRetry) {
+    try {
+      task.retryInfo = {
+        hasBeenRetried: true,
+        retryReason: "Output validation failed",
+        fullDependenciesInstalled: result.fullDependenciesAvailable,
+        retryTimestamp: /* @__PURE__ */ new Date()
+      };
+      const retryAttempt = retryCompilationTask(
+        task,
+        task.retryCommand,
+        project.projectDir,
+        dependencyGraph2
+      );
+      task.retryInfo.retryAttempt = retryAttempt;
+      task.attempts.push(retryAttempt);
+      result.retriedTasks.push(task);
+      if (retryAttempt.result.success) {
+        task.status = "success";
+        result.successfulRetries++;
+        cdsExtractorLog("info", `Retry successful for task ${task.id}`);
+      } else {
+        task.status = "failed";
+        task.errorSummary = retryAttempt.error?.message ?? "Retry compilation failed";
+        result.failedRetries++;
+        result.retryErrors.push(task.errorSummary);
+        cdsExtractorLog("warn", `Retry failed for task ${task.id}: ${task.errorSummary}`);
+      }
+    } catch (error) {
+      const errorMessage = `Failed to retry task ${task.id}: ${String(error)}`;
+      result.retryErrors.push(errorMessage);
+      result.failedRetries++;
+      task.status = "failed";
+      task.errorSummary = errorMessage;
+      cdsExtractorLog("error", errorMessage);
     }
-    return re2;
   }
-  makeRe() {
-    if (this.regexp || this.regexp === false)
-      return this.regexp;
-    const set2 = this.set;
-    if (!set2.length) {
-      this.regexp = false;
-      return this.regexp;
+  result.executionDurationMs = Date.now() - startTime;
+  cdsExtractorLog(
+    "info",
+    `Retry execution completed for project ${project.projectDir}: ${result.successfulRetries} successful, ${result.failedRetries} failed`
+  );
+  return result;
+}
+function updateDependencyGraphWithRetryResults(dependencyGraph2, retryResults) {
+  dependencyGraph2.retryStatus.totalRetryAttempts = retryResults.totalSuccessfulRetries + retryResults.totalFailedRetries;
+}
+
+// src/cds/compiler/graph.ts
+function attemptCompilation(task, cdsCommand, cacheDir, dependencyGraph2) {
+  const startTime = /* @__PURE__ */ new Date();
+  const attemptId = `${task.id}_${startTime.getTime()}`;
+  const attempt = {
+    id: attemptId,
+    cdsCommand,
+    cacheDir,
+    timestamp: startTime,
+    result: {
+      success: false,
+      timestamp: startTime
     }
-    const options = this.options;
-    const twoStar = options.noglobstar ? star2 : options.dot ? twoStarDot : twoStarNoDot;
-    const flags = new Set(options.nocase ? ["i"] : []);
-    let re2 = set2.map((pattern) => {
-      const pp = pattern.map((p) => {
-        if (p instanceof RegExp) {
-          for (const f of p.flags.split(""))
-            flags.add(f);
-        }
-        return typeof p === "string" ? regExpEscape2(p) : p === GLOBSTAR ? GLOBSTAR : p._src;
-      });
-      pp.forEach((p, i) => {
-        const next = pp[i + 1];
-        const prev = pp[i - 1];
-        if (p !== GLOBSTAR || prev === GLOBSTAR) {
-          return;
-        }
-        if (prev === void 0) {
-          if (next !== void 0 && next !== GLOBSTAR) {
-            pp[i + 1] = "(?:\\/|" + twoStar + "\\/)?" + next;
-          } else {
-            pp[i] = twoStar;
+  };
+  try {
+    const primarySourceFile = task.sourceFiles[0];
+    const compilationResult = compileCdsToJson(
+      primarySourceFile,
+      dependencyGraph2.sourceRootDir,
+      cdsCommand,
+      cacheDir,
+      // Convert CDS projects to BasicCdsProject format expected by compileCdsToJson
+      new Map(
+        Array.from(dependencyGraph2.projects.entries()).map(([key, value]) => [
+          key,
+          {
+            cdsFiles: value.cdsFiles,
+            compilationTargets: value.compilationTargets,
+            expectedOutputFile: value.expectedOutputFile,
+            projectDir: value.projectDir,
+            dependencies: value.dependencies,
+            imports: value.imports,
+            packageJson: value.packageJson,
+            compilationConfig: value.compilationConfig
           }
-        } else if (next === void 0) {
-          pp[i - 1] = prev + "(?:\\/|\\/" + twoStar + ")?";
-        } else if (next !== GLOBSTAR) {
-          pp[i - 1] = prev + "(?:\\/|\\/" + twoStar + "\\/)" + next;
-          pp[i + 1] = GLOBSTAR;
-        }
-      });
-      const filtered = pp.filter((p) => p !== GLOBSTAR);
-      if (this.partial && filtered.length >= 1) {
-        const prefixes = [];
-        for (let i = 1; i <= filtered.length; i++) {
-          prefixes.push(filtered.slice(0, i).join("/"));
-        }
-        return "(?:" + prefixes.join("|") + ")";
-      }
-      return filtered.join("/");
-    }).join("|");
-    const [open, close] = set2.length > 1 ? ["(?:", ")"] : ["", ""];
-    re2 = "^" + open + re2 + close + "$";
-    if (this.partial) {
-      re2 = "^(?:\\/|" + open + re2.slice(1, -1) + close + ")$";
-    }
-    if (this.negate)
-      re2 = "^(?!" + re2 + ").+$";
-    try {
-      this.regexp = new RegExp(re2, [...flags].join(""));
-    } catch (ex) {
-      this.regexp = false;
+        ])
+      ),
+      task.projectDir
+    );
+    const endTime = /* @__PURE__ */ new Date();
+    attempt.result = {
+      ...compilationResult,
+      timestamp: endTime,
+      durationMs: endTime.getTime() - startTime.getTime(),
+      commandUsed: cdsCommand,
+      cacheDir
+    };
+    if (compilationResult.success && compilationResult.outputPath) {
+      dependencyGraph2.statusSummary.jsonFilesGenerated++;
     }
-    return this.regexp;
+  } catch (error) {
+    const endTime = /* @__PURE__ */ new Date();
+    attempt.error = {
+      message: String(error),
+      stack: error instanceof Error ? error.stack : void 0
+    };
+    attempt.result.timestamp = endTime;
+    attempt.result.durationMs = endTime.getTime() - startTime.getTime();
   }
-  slashSplit(p) {
-    if (this.preserveMultipleSlashes) {
-      return p.split("/");
-    } else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
-      return ["", ...p.split(/\/+/)];
-    } else {
-      return p.split(/\/+/);
-    }
+  task.attempts.push(attempt);
+  return attempt;
+}
+function createCompilationTask(type2, sourceFiles, expectedOutputFile, projectDir) {
+  const defaultPrimaryCommand = {
+    executable: "cds",
+    args: [],
+    originalCommand: "cds"
+  };
+  const defaultRetryCommand = {
+    executable: "npx",
+    args: ["cds"],
+    originalCommand: "npx cds"
+  };
+  return {
+    id: `${type2}_${projectDir}_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`,
+    type: type2,
+    status: "pending",
+    sourceFiles,
+    expectedOutputFile,
+    projectDir,
+    attempts: [],
+    dependencies: [],
+    primaryCommand: defaultPrimaryCommand,
+    retryCommand: defaultRetryCommand
+  };
+}
+function createCompilationConfig(cdsCommand, cacheDir) {
+  return {
+    cdsCommand,
+    cacheDir,
+    versionCompatibility: {
+      isCompatible: true
+      // Will be validated during planning
+    },
+    maxRetryAttempts: 3
+  };
+}
+function executeCompilationTask(task, project, dependencyGraph2, _codeqlExePath) {
+  task.status = "in_progress";
+  const config = project.enhancedCompilationConfig;
+  if (!config) {
+    throw new Error(`No compilation configuration found for project ${project.projectDir}`);
   }
-  match(f, partial = this.partial) {
-    this.debug("match", f, this.pattern);
-    if (this.comment) {
-      return false;
-    }
-    if (this.empty) {
-      return f === "";
-    }
-    if (f === "/" && partial) {
-      return true;
-    }
-    const options = this.options;
-    if (this.isWindows) {
-      f = f.split("\\").join("/");
-    }
-    const ff = this.slashSplit(f);
-    this.debug(this.pattern, "split", ff);
-    const set2 = this.set;
-    this.debug(this.pattern, "set", set2);
-    let filename = ff[ff.length - 1];
-    if (!filename) {
-      for (let i = ff.length - 2; !filename && i >= 0; i--) {
-        filename = ff[i];
-      }
-    }
-    for (let i = 0; i < set2.length; i++) {
-      const pattern = set2[i];
-      let file = ff;
-      if (options.matchBase && pattern.length === 1) {
-        file = [filename];
-      }
-      const hit = this.matchOne(file, pattern, partial);
-      if (hit) {
-        if (options.flipNegate) {
-          return true;
-        }
-        return !this.negate;
-      }
+  const compilationAttempt = attemptCompilation(
+    task,
+    config.cdsCommand,
+    config.cacheDir,
+    dependencyGraph2
+  );
+  if (compilationAttempt.result.success) {
+    task.status = "success";
+    return;
+  }
+  const lastError = compilationAttempt.error ? new Error(compilationAttempt.error.message) : new Error("Compilation failed");
+  task.status = "failed";
+  task.errorSummary = lastError?.message || "Compilation failed";
+  cdsExtractorLog("error", `Compilation failed for task ${task.id}: ${task.errorSummary}`);
+}
+function executeCompilationTasks(dependencyGraph2, codeqlExePath2) {
+  cdsExtractorLog("info", "Starting compilation execution for all projects...");
+  dependencyGraph2.currentPhase = "compiling";
+  const compilationStartTime = /* @__PURE__ */ new Date();
+  const allTasks = [];
+  for (const project of dependencyGraph2.projects.values()) {
+    for (const task of project.compilationTasks) {
+      allTasks.push({ task, project });
     }
-    if (options.flipNegate) {
-      return false;
+  }
+  cdsExtractorLog("info", `Executing ${allTasks.length} compilation task(s)...`);
+  for (const { task, project } of allTasks) {
+    try {
+      executeCompilationTask(task, project, dependencyGraph2, codeqlExePath2);
+    } catch (error) {
+      const errorMessage = `Failed to execute compilation task ${task.id}: ${String(error)}`;
+      cdsExtractorLog("error", errorMessage);
+      dependencyGraph2.errors.critical.push({
+        phase: "compiling",
+        message: errorMessage,
+        timestamp: /* @__PURE__ */ new Date(),
+        stack: error instanceof Error ? error.stack : void 0
+      });
+      task.status = "failed";
+      task.errorSummary = errorMessage;
+      dependencyGraph2.statusSummary.failedCompilations++;
     }
-    return this.negate;
   }
-  static defaults(def) {
-    return minimatch.defaults(def).Minimatch;
+  for (const project of dependencyGraph2.projects.values()) {
+    const allTasksCompleted = project.compilationTasks.every(
+      (task) => task.status === "success" || task.status === "failed"
+    );
+    if (allTasksCompleted) {
+      const hasFailedTasks = project.compilationTasks.some((task) => task.status === "failed");
+      project.status = hasFailedTasks ? "failed" : "completed";
+      project.timestamps.compilationCompleted = /* @__PURE__ */ new Date();
+    }
   }
-};
-minimatch.AST = AST;
-minimatch.Minimatch = Minimatch;
-minimatch.escape = escape;
-minimatch.unescape = unescape;
-
-// src/paths-ignore.ts
-var DEFAULT_CONFIG_RELATIVE_PATHS = [
-  ".github/codeql/codeql-config.yml",
-  ".github/codeql/codeql-config.yaml"
-];
-var patternsCache = /* @__PURE__ */ new Map();
-function findCodeqlConfigFile(sourceRoot2) {
-  const envConfigPath = process.env.CODEQL_CONFIG_PATH;
-  if (envConfigPath) {
-    const resolvedRoot = (0, import_path11.resolve)(sourceRoot2);
-    const fullPath = (0, import_path11.resolve)(resolvedRoot, envConfigPath);
-    const rel = (0, import_path11.relative)(resolvedRoot, fullPath);
-    if (rel.startsWith("..") || (0, import_path11.resolve)(resolvedRoot, rel) !== fullPath) {
+  const compilationEndTime = /* @__PURE__ */ new Date();
+  dependencyGraph2.statusSummary.performance.compilationDurationMs = compilationEndTime.getTime() - compilationStartTime.getTime();
+  cdsExtractorLog(
+    "info",
+    `Compilation execution completed. Success: ${dependencyGraph2.statusSummary.successfulCompilations}, Failed: ${dependencyGraph2.statusSummary.failedCompilations}`
+  );
+}
+function orchestrateCompilation(dependencyGraph2, projectCacheDirMap2, codeqlExePath2) {
+  try {
+    planCompilationTasks(dependencyGraph2, projectCacheDirMap2);
+    executeCompilationTasks(dependencyGraph2, codeqlExePath2);
+    updateCdsDependencyGraphStatus(dependencyGraph2, dependencyGraph2.sourceRootDir);
+    cdsExtractorLog("info", "Starting retry orchestration phase...");
+    const retryResults = orchestrateRetryAttempts(dependencyGraph2, codeqlExePath2);
+    updateCdsDependencyGraphStatus(dependencyGraph2, dependencyGraph2.sourceRootDir);
+    if (retryResults.totalTasksRequiringRetry > 0) {
       cdsExtractorLog(
-        "warn",
-        `CODEQL_CONFIG_PATH '${envConfigPath}' resolves outside the source root. Ignoring.`
+        "info",
+        `Retry phase completed: ${retryResults.totalTasksRequiringRetry} tasks retried, ${retryResults.totalSuccessfulRetries} successful, ${retryResults.totalFailedRetries} failed`
       );
-      return void 0;
-    }
-    if ((0, import_fs6.existsSync)(fullPath)) {
-      cdsExtractorLog("info", `Using CodeQL config file from CODEQL_CONFIG_PATH: ${fullPath}`);
-      return fullPath;
+    } else {
+      cdsExtractorLog("info", "Retry phase completed: no tasks required retry");
     }
-    cdsExtractorLog(
-      "warn",
-      `CODEQL_CONFIG_PATH is set to '${envConfigPath}', but no file exists at '${fullPath}'.`
-    );
-    return void 0;
+    const hasFailures = dependencyGraph2.statusSummary.failedCompilations > 0 || dependencyGraph2.errors.critical.length > 0;
+    dependencyGraph2.statusSummary.overallSuccess = !hasFailures;
+    dependencyGraph2.currentPhase = hasFailures ? "failed" : "completed";
+    const statusReport = generateStatusReport(dependencyGraph2);
+    cdsExtractorLog("info", "CDS Extractor Status Report : Post-Compilation...\n" + statusReport);
+  } catch (error) {
+    const errorMessage = `Compilation orchestration failed: ${String(error)}`;
+    cdsExtractorLog("error", errorMessage);
+    dependencyGraph2.errors.critical.push({
+      phase: "compiling",
+      message: errorMessage,
+      timestamp: /* @__PURE__ */ new Date(),
+      stack: error instanceof Error ? error.stack : void 0
+    });
+    dependencyGraph2.currentPhase = "failed";
+    dependencyGraph2.statusSummary.overallSuccess = false;
+    throw error;
   }
-  for (const configPath of DEFAULT_CONFIG_RELATIVE_PATHS) {
-    const fullPath = (0, import_path11.join)(sourceRoot2, configPath);
-    if ((0, import_fs6.existsSync)(fullPath)) {
-      return fullPath;
+}
+function planCompilationTasks(dependencyGraph2, projectCacheDirMap2) {
+  cdsExtractorLog("info", "Planning compilation tasks for all projects...");
+  dependencyGraph2.currentPhase = "compilation_planning";
+  for (const [projectDir, project] of dependencyGraph2.projects.entries()) {
+    try {
+      const cacheDir = projectCacheDirMap2.get(projectDir);
+      const commands = determineVersionAwareCdsCommands(
+        cacheDir,
+        dependencyGraph2.sourceRootDir,
+        projectDir,
+        dependencyGraph2
+      );
+      const cdsCommand = determineCdsCommand(cacheDir, dependencyGraph2.sourceRootDir);
+      const compilationConfig = createCompilationConfig(cdsCommand, cacheDir);
+      project.enhancedCompilationConfig = compilationConfig;
+      const task = createCompilationTask(
+        "project",
+        project.cdsFiles,
+        project.expectedOutputFile,
+        projectDir
+      );
+      task.primaryCommand = commands.primaryCommand;
+      task.retryCommand = commands.retryCommand;
+      project.compilationTasks = [task];
+      project.status = "compilation_planned";
+      project.timestamps.compilationStarted = /* @__PURE__ */ new Date();
+      cdsExtractorLog(
+        "info",
+        `Planned ${project.compilationTasks.length} compilation task(s) for project ${projectDir}`
+      );
+    } catch (error) {
+      const errorMessage = `Failed to plan compilation for project ${projectDir}: ${String(error)}`;
+      cdsExtractorLog("error", errorMessage);
+      dependencyGraph2.errors.critical.push({
+        phase: "compilation_planning",
+        message: errorMessage,
+        timestamp: /* @__PURE__ */ new Date(),
+        stack: error instanceof Error ? error.stack : void 0
+      });
+      project.status = "failed";
     }
   }
-  return void 0;
+  const totalTasks = Array.from(dependencyGraph2.projects.values()).reduce(
+    (sum, project) => sum + project.compilationTasks.length,
+    0
+  );
+  dependencyGraph2.statusSummary.totalCompilationTasks = totalTasks;
+  cdsExtractorLog("info", `Compilation planning completed. Total tasks: ${totalTasks}`);
 }
-function getPathsIgnorePatterns(sourceRoot2) {
-  const cached = patternsCache.get(sourceRoot2);
-  if (cached !== void 0) {
-    return cached;
-  }
-  const configPath = findCodeqlConfigFile(sourceRoot2);
-  if (!configPath) {
-    patternsCache.set(sourceRoot2, []);
-    return [];
+
+// src/cds/compiler/project.ts
+var import_path11 = require("path");
+
+// src/cds/indexer.ts
+var import_child_process9 = require("child_process");
+var import_path12 = require("path");
+var CDS_INDEXER_TIMEOUT_MS = 6e5;
+var CDS_INDEXER_PACKAGE = "@sap/cds-indexer";
+function projectUsesCdsIndexer(project) {
+  if (!project.packageJson) {
+    return false;
   }
+  const inDeps = project.packageJson.dependencies?.[CDS_INDEXER_PACKAGE] !== void 0;
+  const inDevDeps = project.packageJson.devDependencies?.[CDS_INDEXER_PACKAGE] !== void 0;
+  return inDeps || inDevDeps;
+}
+function runCdsIndexer(project, sourceRoot2, cacheDir) {
+  const projectAbsPath = (0, import_path12.join)(sourceRoot2, project.projectDir);
+  const startTime = Date.now();
+  const result = {
+    success: false,
+    projectDir: project.projectDir,
+    durationMs: 0,
+    timedOut: false
+  };
   try {
-    const content = (0, import_fs6.readFileSync)(configPath, "utf8");
-    const config = load(content);
-    if (!config || !Array.isArray(config["paths-ignore"])) {
-      patternsCache.set(sourceRoot2, []);
-      return [];
+    const nodePaths = [];
+    if (cacheDir) {
+      nodePaths.push((0, import_path12.join)(cacheDir, "node_modules"));
     }
-    const patterns = config["paths-ignore"].filter(
-      (p) => typeof p === "string" && p.length > 0
+    nodePaths.push((0, import_path12.join)(projectAbsPath, "node_modules"));
+    const env = {
+      ...process.env,
+      NODE_PATH: nodePaths.join(import_path12.delimiter)
+    };
+    cdsExtractorLog(
+      "info",
+      `Running ${CDS_INDEXER_PACKAGE} for project '${project.projectDir}'...`
     );
-    if (patterns.length > 0) {
-      cdsExtractorLog(
-        "info",
-        `Found ${patterns.length} paths-ignore pattern(s) in ${configPath}: ${patterns.join(", ")}`
-      );
+    const spawnResult = (0, import_child_process9.spawnSync)(npxExecutable(), ["--yes", CDS_INDEXER_PACKAGE], {
+      cwd: projectAbsPath,
+      env,
+      stdio: "pipe",
+      timeout: CDS_INDEXER_TIMEOUT_MS
+    });
+    result.durationMs = Date.now() - startTime;
+    if (spawnResult.signal === "SIGTERM" || spawnResult.signal === "SIGKILL") {
+      result.timedOut = true;
+      result.error = `${CDS_INDEXER_PACKAGE} timed out after ${CDS_INDEXER_TIMEOUT_MS}ms for project '${project.projectDir}'`;
+      cdsExtractorLog("warn", result.error);
+      return result;
     }
-    patternsCache.set(sourceRoot2, patterns);
-    return patterns;
+    if (spawnResult.error) {
+      result.error = `${CDS_INDEXER_PACKAGE} failed to start for project '${project.projectDir}': ${String(spawnResult.error)}`;
+      cdsExtractorLog("warn", result.error);
+      return result;
+    }
+    if (spawnResult.status !== 0) {
+      const stderr = spawnResult.stderr?.toString().trim() ?? "";
+      const stdout = spawnResult.stdout?.toString().trim() ?? "";
+      const output = stderr || stdout || "unknown error";
+      result.error = `${CDS_INDEXER_PACKAGE} failed for project '${project.projectDir}' (exit code ${spawnResult.status}): ${output}`;
+      cdsExtractorLog("warn", result.error);
+      return result;
+    }
+    result.success = true;
+    cdsExtractorLog(
+      "info",
+      `Successfully ran ${CDS_INDEXER_PACKAGE} for project '${project.projectDir}' (${result.durationMs}ms)`
+    );
   } catch (error) {
-    cdsExtractorLog("warn", `Failed to read CodeQL config file at ${configPath}: ${String(error)}`);
-    patternsCache.set(sourceRoot2, []);
-    return [];
+    result.durationMs = Date.now() - startTime;
+    result.error = `${CDS_INDEXER_PACKAGE} threw an unexpected error for project '${project.projectDir}': ${String(error)}`;
+    cdsExtractorLog("error", result.error);
   }
+  return result;
 }
-function shouldIgnorePath(relativePath, patterns) {
-  const matchOptions = { dot: true, windowsPathsNoEscape: true };
-  for (const raw of patterns) {
-    const pattern = raw.replace(/\/+$/, "");
-    if (minimatch(relativePath, pattern, matchOptions)) {
-      return true;
+function orchestrateCdsIndexer(dependencyGraph2, sourceRoot2, projectCacheDirMap2, codeqlExePath2) {
+  const summary = {
+    totalProjects: dependencyGraph2.projects.size,
+    projectsRequiringIndexer: 0,
+    successfulRuns: 0,
+    failedRuns: 0,
+    results: []
+  };
+  for (const [projectDir, project] of dependencyGraph2.projects.entries()) {
+    if (!projectUsesCdsIndexer(project)) {
+      continue;
     }
-    if (minimatch(relativePath, `${pattern}/**`, matchOptions)) {
-      return true;
+    summary.projectsRequiringIndexer++;
+    const cacheDir = projectCacheDirMap2.get(projectDir);
+    const result = runCdsIndexer(project, sourceRoot2, cacheDir);
+    summary.results.push(result);
+    if (result.success) {
+      summary.successfulRuns++;
+      const installResult = projectInstallDependencies(project, sourceRoot2);
+      if (!installResult.success) {
+        cdsExtractorLog(
+          "warn",
+          `Full dependency installation failed for project '${projectDir}' after successful cds-indexer run: ${installResult.error ?? "unknown error"}`
+        );
+      }
+    } else {
+      summary.failedRuns++;
+      if (codeqlExePath2) {
+        addCdsIndexerDiagnostic(
+          projectDir,
+          result.error ?? `${CDS_INDEXER_PACKAGE} failed for project '${projectDir}'`,
+          codeqlExePath2,
+          sourceRoot2
+        );
+      }
     }
   }
-  return false;
-}
-function filterIgnoredPaths(relativePaths, patterns) {
-  if (patterns.length === 0) {
-    return relativePaths;
+  if (summary.projectsRequiringIndexer > 0) {
+    cdsExtractorLog(
+      "info",
+      `CDS indexer summary: ${summary.projectsRequiringIndexer} project(s) required indexer, ${summary.successfulRuns} succeeded, ${summary.failedRuns} failed`
+    );
+  } else {
+    cdsExtractorLog("info", "No projects require @sap/cds-indexer.");
   }
-  return relativePaths.filter((p) => !shouldIgnorePath(p, patterns));
+  return summary;
 }
 
+// src/cds/parser/graph.ts
+var import_path14 = require("path");
+
 // src/cds/parser/functions.ts
+var import_fs8 = require("fs");
+var import_path13 = require("path");
 function determineCdsFilesForProjectDir(sourceRootDir, projectDir) {
   if (!sourceRootDir || !projectDir) {
     throw new Error(
@@ -9749,11 +9984,12 @@ function determineCdsFilesForProjectDir(sourceRootDir, projectDir) {
     );
   }
   try {
-    const cdsFiles = Ui((0, import_path12.join)(projectDir, "**/*.cds"), {
+    const cdsFiles = Ui((0, import_path13.join)(projectDir, "**/*.cds"), {
       nodir: true,
-      ignore: ["**/node_modules/**", "**/*.testproj/**"]
+      ignore: ["**/node_modules/**", "**/*.testproj/**"],
+      windowsPathsNoEscape: true
     });
-    const relativePaths = cdsFiles.map((file) => (0, import_path12.relative)(sourceRootDir, file));
+    const relativePaths = cdsFiles.map((file) => (0, import_path13.relative)(sourceRootDir, file));
     const pathsIgnorePatterns = getPathsIgnorePatterns(sourceRootDir);
     if (pathsIgnorePatterns.length > 0) {
       const filtered = filterIgnoredPaths(relativePaths, pathsIgnorePatterns);
@@ -9761,7 +9997,7 @@ function determineCdsFilesForProjectDir(sourceRootDir, projectDir) {
       if (ignoredCount > 0) {
         cdsExtractorLog(
           "info",
-          `Filtered ${ignoredCount} CDS file(s) matching paths-ignore patterns in project ${(0, import_path12.relative)(sourceRootDir, projectDir) || "."}`
+          `Filtered ${ignoredCount} CDS file(s) matching paths-ignore patterns in project ${(0, import_path13.relative)(sourceRootDir, projectDir) || "."}`
         );
       }
       return filtered;
@@ -9773,24 +10009,26 @@ function determineCdsFilesForProjectDir(sourceRootDir, projectDir) {
   }
 }
 function determineCdsProjectsUnderSourceDir(sourceRootDir) {
-  if (!sourceRootDir || !(0, import_fs7.existsSync)(sourceRootDir)) {
+  if (!sourceRootDir || !(0, import_fs8.existsSync)(sourceRootDir)) {
     throw new Error(`Source root directory '${sourceRootDir}' does not exist.`);
   }
   const foundProjects = /* @__PURE__ */ new Set();
-  const packageJsonFiles = Ui((0, import_path12.join)(sourceRootDir, "**/package.json"), {
+  const packageJsonFiles = Ui((0, import_path13.join)(sourceRootDir, "**/package.json"), {
     nodir: true,
-    ignore: ["**/node_modules/**", "**/*.testproj/**"]
+    ignore: ["**/node_modules/**", "**/*.testproj/**"],
+    windowsPathsNoEscape: true
   });
-  const cdsFiles = Ui((0, import_path12.join)(sourceRootDir, "**/*.cds"), {
+  const cdsFiles = Ui((0, import_path13.join)(sourceRootDir, "**/*.cds"), {
     nodir: true,
-    ignore: ["**/node_modules/**", "**/*.testproj/**"]
+    ignore: ["**/node_modules/**", "**/*.testproj/**"],
+    windowsPathsNoEscape: true
   });
   const candidateDirectories = /* @__PURE__ */ new Set();
   for (const packageJsonFile of packageJsonFiles) {
-    candidateDirectories.add((0, import_path12.dirname)(packageJsonFile));
+    candidateDirectories.add((0, import_path13.dirname)(packageJsonFile));
   }
   for (const cdsFile of cdsFiles) {
-    const cdsDir = (0, import_path12.dirname)(cdsFile);
+    const cdsDir = (0, import_path13.dirname)(cdsFile);
     const projectRoot = findProjectRootFromCdsFile(cdsDir, sourceRootDir);
     if (projectRoot) {
       candidateDirectories.add(projectRoot);
@@ -9800,14 +10038,14 @@ function determineCdsProjectsUnderSourceDir(sourceRootDir) {
   }
   for (const dir of candidateDirectories) {
     if (isLikelyCdsProject(dir)) {
-      const relativePath = (0, import_path12.relative)(sourceRootDir, dir);
+      const relativePath = (0, import_path13.relative)(sourceRootDir, dir);
       const projectDir = relativePath || ".";
       let shouldAdd = true;
       const existingProjects = Array.from(foundProjects);
       for (const existingProject of existingProjects) {
-        const existingAbsPath = (0, import_path12.join)(sourceRootDir, existingProject);
-        if (dir.startsWith(existingAbsPath + import_path12.sep)) {
-          const parentPackageJsonPath = (0, import_path12.join)(existingAbsPath, "package.json");
+        const existingAbsPath = (0, import_path13.join)(sourceRootDir, existingProject);
+        if (dir.startsWith(existingAbsPath + import_path13.sep)) {
+          const parentPackageJsonPath = (0, import_path13.join)(existingAbsPath, "package.json");
           const parentPackageJson = readPackageJsonFile(parentPackageJsonPath);
           const isParentMonorepo = parentPackageJson?.workspaces && Array.isArray(parentPackageJson.workspaces) && parentPackageJson.workspaces.length > 0;
           if (isParentMonorepo && (hasStandardCdsContent(existingAbsPath) || hasDirectCdsContent(existingAbsPath))) {
@@ -9817,8 +10055,8 @@ function determineCdsProjectsUnderSourceDir(sourceRootDir) {
           }
           break;
         }
-        if (existingAbsPath.startsWith(dir + import_path12.sep)) {
-          const currentPackageJsonPath = (0, import_path12.join)(dir, "package.json");
+        if (existingAbsPath.startsWith(dir + import_path13.sep)) {
+          const currentPackageJsonPath = (0, import_path13.join)(dir, "package.json");
           const currentPackageJson = readPackageJsonFile(currentPackageJsonPath);
           const isCurrentMonorepo = currentPackageJson?.workspaces && Array.isArray(currentPackageJson.workspaces) && currentPackageJson.workspaces.length > 0;
           if (!(isCurrentMonorepo && isLikelyCdsProject(existingAbsPath))) {
@@ -9834,10 +10072,10 @@ function determineCdsProjectsUnderSourceDir(sourceRootDir) {
   return Array.from(foundProjects).sort();
 }
 function extractCdsImports(filePath) {
-  if (!(0, import_fs7.existsSync)(filePath)) {
+  if (!(0, import_fs8.existsSync)(filePath)) {
     throw new Error(`File does not exist: ${filePath}`);
   }
-  const content = (0, import_fs7.readFileSync)(filePath, "utf8");
+  const content = (0, import_fs8.readFileSync)(filePath, "utf8");
   const imports = [];
   const usingRegex = /using\s+(?:{[^}]+}|[\w.]+(?:\s+as\s+[\w.]+)?)\s+from\s+['"`]([^'"`]+)['"`]\s*;/g;
   let match2;
@@ -9859,32 +10097,32 @@ function findProjectRootFromCdsFile(cdsFileDir, sourceRootDir) {
   let currentDir = cdsFileDir;
   while (currentDir.startsWith(sourceRootDir)) {
     if (isLikelyCdsProject(currentDir)) {
-      const currentDirName = (0, import_path12.basename)(currentDir);
+      const currentDirName = (0, import_path13.basename)(currentDir);
       const isStandardSubdir = ["srv", "db", "app"].includes(currentDirName);
       if (isStandardSubdir) {
-        const parentDir3 = (0, import_path12.dirname)(currentDir);
+        const parentDir3 = (0, import_path13.dirname)(currentDir);
         if (parentDir3 !== currentDir && parentDir3.startsWith(sourceRootDir) && !parentDir3.includes("node_modules") && !parentDir3.includes(".testproj") && isLikelyCdsProject(parentDir3)) {
           return parentDir3;
         }
       }
-      const parentDir2 = (0, import_path12.dirname)(currentDir);
+      const parentDir2 = (0, import_path13.dirname)(currentDir);
       if (parentDir2 !== currentDir && parentDir2.startsWith(sourceRootDir) && !parentDir2.includes("node_modules") && !parentDir2.includes(".testproj")) {
-        const hasDbDir2 = (0, import_fs7.existsSync)((0, import_path12.join)(parentDir2, "db")) && (0, import_fs7.statSync)((0, import_path12.join)(parentDir2, "db")).isDirectory();
-        const hasSrvDir2 = (0, import_fs7.existsSync)((0, import_path12.join)(parentDir2, "srv")) && (0, import_fs7.statSync)((0, import_path12.join)(parentDir2, "srv")).isDirectory();
-        const hasAppDir2 = (0, import_fs7.existsSync)((0, import_path12.join)(parentDir2, "app")) && (0, import_fs7.statSync)((0, import_path12.join)(parentDir2, "app")).isDirectory();
+        const hasDbDir2 = (0, import_fs8.existsSync)((0, import_path13.join)(parentDir2, "db")) && (0, import_fs8.statSync)((0, import_path13.join)(parentDir2, "db")).isDirectory();
+        const hasSrvDir2 = (0, import_fs8.existsSync)((0, import_path13.join)(parentDir2, "srv")) && (0, import_fs8.statSync)((0, import_path13.join)(parentDir2, "srv")).isDirectory();
+        const hasAppDir2 = (0, import_fs8.existsSync)((0, import_path13.join)(parentDir2, "app")) && (0, import_fs8.statSync)((0, import_path13.join)(parentDir2, "app")).isDirectory();
         if (hasDbDir2 && hasSrvDir2 || hasSrvDir2 && hasAppDir2) {
           return parentDir2;
         }
       }
       return currentDir;
     }
-    const hasDbDir = (0, import_fs7.existsSync)((0, import_path12.join)(currentDir, "db")) && (0, import_fs7.statSync)((0, import_path12.join)(currentDir, "db")).isDirectory();
-    const hasSrvDir = (0, import_fs7.existsSync)((0, import_path12.join)(currentDir, "srv")) && (0, import_fs7.statSync)((0, import_path12.join)(currentDir, "srv")).isDirectory();
-    const hasAppDir = (0, import_fs7.existsSync)((0, import_path12.join)(currentDir, "app")) && (0, import_fs7.statSync)((0, import_path12.join)(currentDir, "app")).isDirectory();
+    const hasDbDir = (0, import_fs8.existsSync)((0, import_path13.join)(currentDir, "db")) && (0, import_fs8.statSync)((0, import_path13.join)(currentDir, "db")).isDirectory();
+    const hasSrvDir = (0, import_fs8.existsSync)((0, import_path13.join)(currentDir, "srv")) && (0, import_fs8.statSync)((0, import_path13.join)(currentDir, "srv")).isDirectory();
+    const hasAppDir = (0, import_fs8.existsSync)((0, import_path13.join)(currentDir, "app")) && (0, import_fs8.statSync)((0, import_path13.join)(currentDir, "app")).isDirectory();
     if (hasDbDir && hasSrvDir || hasSrvDir && hasAppDir) {
       return currentDir;
     }
-    const parentDir = (0, import_path12.dirname)(currentDir);
+    const parentDir = (0, import_path13.dirname)(currentDir);
     if (parentDir === currentDir) {
       break;
     }
@@ -9905,7 +10143,7 @@ function isLikelyCdsProject(dir) {
       if (!hasCdsFiles) {
         return false;
       }
-      const packageJsonPath = (0, import_path12.join)(dir, "package.json");
+      const packageJsonPath = (0, import_path13.join)(dir, "package.json");
       const packageJson = readPackageJsonFile(packageJsonPath);
       if (packageJson?.workspaces && Array.isArray(packageJson.workspaces) && packageJson.workspaces.length > 0) {
         if (!hasCdsFiles) {
@@ -9921,10 +10159,13 @@ function isLikelyCdsProject(dir) {
   }
 }
 function hasStandardCdsContent(dir) {
-  const standardLocations = [(0, import_path12.join)(dir, "db"), (0, import_path12.join)(dir, "srv"), (0, import_path12.join)(dir, "app")];
+  const standardLocations = [(0, import_path13.join)(dir, "db"), (0, import_path13.join)(dir, "srv"), (0, import_path13.join)(dir, "app")];
   for (const location of standardLocations) {
-    if ((0, import_fs7.existsSync)(location) && (0, import_fs7.statSync)(location).isDirectory()) {
-      const cdsFiles = Ui((0, import_path12.join)(location, "**/*.cds"), { nodir: true });
+    if ((0, import_fs8.existsSync)(location) && (0, import_fs8.statSync)(location).isDirectory()) {
+      const cdsFiles = Ui((0, import_path13.join)(location, "**/*.cds"), {
+        nodir: true,
+        windowsPathsNoEscape: true
+      });
       if (cdsFiles.length > 0) {
         return true;
       }
@@ -9933,15 +10174,15 @@ function hasStandardCdsContent(dir) {
   return false;
 }
 function hasDirectCdsContent(dir) {
-  const directCdsFiles = Ui((0, import_path12.join)(dir, "*.cds"));
+  const directCdsFiles = Ui((0, import_path13.join)(dir, "*.cds"), { windowsPathsNoEscape: true });
   return directCdsFiles.length > 0;
 }
 function readPackageJsonFile(filePath) {
-  if (!(0, import_fs7.existsSync)(filePath)) {
+  if (!(0, import_fs8.existsSync)(filePath)) {
     return void 0;
   }
   try {
-    const content = (0, import_fs7.readFileSync)(filePath, "utf8");
+    const content = (0, import_fs8.readFileSync)(filePath, "utf8");
     const packageJson = JSON.parse(content);
     return packageJson;
   } catch (error) {
@@ -9953,36 +10194,36 @@ function determineCdsFilesToCompile(sourceRootDir, project) {
   if (!project.cdsFiles || project.cdsFiles.length === 0) {
     return {
       compilationTargets: [],
-      expectedOutputFile: (0, import_path12.join)(project.projectDir, modelCdsJsonFile)
+      expectedOutputFile: (0, import_path13.join)(project.projectDir, modelCdsJsonFile)
     };
   }
-  const absoluteProjectDir = (0, import_path12.join)(sourceRootDir, project.projectDir);
+  const absoluteProjectDir = (0, import_path13.join)(sourceRootDir, project.projectDir);
   const capDirectories = ["db", "srv", "app"];
-  const existingCapDirs = capDirectories.filter((dir) => (0, import_fs7.existsSync)((0, import_path12.join)(absoluteProjectDir, dir)));
+  const existingCapDirs = capDirectories.filter((dir) => (0, import_fs8.existsSync)((0, import_path13.join)(absoluteProjectDir, dir)));
   if (existingCapDirs.length > 0) {
     return {
       compilationTargets: existingCapDirs,
-      expectedOutputFile: (0, import_path12.join)(project.projectDir, modelCdsJsonFile)
+      expectedOutputFile: (0, import_path13.join)(project.projectDir, modelCdsJsonFile)
     };
   }
-  const rootCdsFiles = project.cdsFiles.filter((file) => (0, import_path12.dirname)((0, import_path12.join)(sourceRootDir, file)) === absoluteProjectDir).map((file) => (0, import_path12.basename)(file));
+  const rootCdsFiles = project.cdsFiles.filter((file) => (0, import_path13.dirname)((0, import_path13.join)(sourceRootDir, file)) === absoluteProjectDir).map((file) => (0, import_path13.basename)(file));
   if (rootCdsFiles.length > 0) {
     return {
       compilationTargets: rootCdsFiles,
-      expectedOutputFile: (0, import_path12.join)(project.projectDir, modelCdsJsonFile)
+      expectedOutputFile: (0, import_path13.join)(project.projectDir, modelCdsJsonFile)
     };
   }
   const compilationTargets = project.cdsFiles.map(
-    (file) => (0, import_path12.relative)(absoluteProjectDir, (0, import_path12.join)(sourceRootDir, file))
+    (file) => (0, import_path13.relative)(absoluteProjectDir, (0, import_path13.join)(sourceRootDir, file))
   );
   return {
     compilationTargets,
-    expectedOutputFile: (0, import_path12.join)(project.projectDir, modelCdsJsonFile)
+    expectedOutputFile: (0, import_path13.join)(project.projectDir, modelCdsJsonFile)
   };
 }
 function hasPackageJsonWithCapDeps(dir) {
   try {
-    const packageJsonPath = (0, import_path12.join)(dir, "package.json");
+    const packageJsonPath = (0, import_path13.join)(dir, "package.json");
     const packageJson = readPackageJsonFile(packageJsonPath);
     if (packageJson) {
       const dependencies = {
@@ -10013,7 +10254,7 @@ function buildBasicCdsProjectDependencyGraph(sourceRootDir) {
       cdsExtractorLog("info", `Skipping project '${projectDir}' \u2014 matches paths-ignore pattern`);
       continue;
     }
-    const absoluteProjectDir = (0, import_path13.join)(sourceRootDir, projectDir);
+    const absoluteProjectDir = (0, import_path14.join)(sourceRootDir, projectDir);
     const cdsFiles = determineCdsFilesForProjectDir(sourceRootDir, absoluteProjectDir);
     if (cdsFiles.length === 0) {
       cdsExtractorLog(
@@ -10022,14 +10263,14 @@ function buildBasicCdsProjectDependencyGraph(sourceRootDir) {
       );
       continue;
     }
-    const packageJsonPath = (0, import_path13.join)(absoluteProjectDir, "package.json");
+    const packageJsonPath = (0, import_path14.join)(absoluteProjectDir, "package.json");
     const packageJson = readPackageJsonFile(packageJsonPath);
     projectMap.set(projectDir, {
       projectDir,
       cdsFiles,
       compilationTargets: [],
       // Will be populated in the third pass
-      expectedOutputFile: (0, import_path13.join)(projectDir, modelCdsJsonFile),
+      expectedOutputFile: (0, import_path14.join)(projectDir, modelCdsJsonFile),
       packageJson,
       dependencies: [],
       imports: /* @__PURE__ */ new Map()
@@ -10038,18 +10279,18 @@ function buildBasicCdsProjectDependencyGraph(sourceRootDir) {
   cdsExtractorLog("info", "Analyzing dependencies between CDS projects...");
   for (const [projectDir, project] of projectMap.entries()) {
     for (const relativeFilePath of project.cdsFiles) {
-      const absoluteFilePath = (0, import_path13.join)(sourceRootDir, relativeFilePath);
+      const absoluteFilePath = (0, import_path14.join)(sourceRootDir, relativeFilePath);
       try {
         const imports = extractCdsImports(absoluteFilePath);
         const enrichedImports = [];
         for (const importInfo of imports) {
           const enrichedImport = { ...importInfo };
           if (importInfo.isRelative) {
-            const importedFilePath = (0, import_path13.resolve)((0, import_path13.dirname)(absoluteFilePath), importInfo.path);
+            const importedFilePath = (0, import_path14.resolve)((0, import_path14.dirname)(absoluteFilePath), importInfo.path);
             const normalizedImportedPath = importedFilePath.endsWith(".cds") ? importedFilePath : `${importedFilePath}.cds`;
             try {
-              const relativeToDirPath = (0, import_path13.dirname)(relativeFilePath);
-              const resolvedPath = (0, import_path13.resolve)((0, import_path13.join)(sourceRootDir, relativeToDirPath), importInfo.path);
+              const relativeToDirPath = (0, import_path14.dirname)(relativeFilePath);
+              const resolvedPath = (0, import_path14.resolve)((0, import_path14.join)(sourceRootDir, relativeToDirPath), importInfo.path);
               const normalizedResolvedPath = resolvedPath.endsWith(".cds") ? resolvedPath : `${resolvedPath}.cds`;
               if (normalizedResolvedPath.startsWith(sourceRootDir)) {
                 enrichedImport.resolvedPath = normalizedResolvedPath.substring(sourceRootDir.length).replace(/^[/\\]/, "");
@@ -10062,10 +10303,10 @@ function buildBasicCdsProjectDependencyGraph(sourceRootDir) {
             }
             for (const [otherProjectDir, otherProject] of projectMap.entries()) {
               if (otherProjectDir === projectDir) continue;
-              const otherProjectAbsoluteDir = (0, import_path13.join)(sourceRootDir, otherProjectDir);
+              const otherProjectAbsoluteDir = (0, import_path14.join)(sourceRootDir, otherProjectDir);
               const isInOtherProject = otherProject.cdsFiles.some((otherFile) => {
-                const otherAbsolutePath = (0, import_path13.join)(sourceRootDir, otherFile);
-                return otherAbsolutePath === normalizedImportedPath || normalizedImportedPath.startsWith(otherProjectAbsoluteDir + import_path13.sep);
+                const otherAbsolutePath = (0, import_path14.join)(sourceRootDir, otherFile);
+                return otherAbsolutePath === normalizedImportedPath || normalizedImportedPath.startsWith(otherProjectAbsoluteDir + import_path14.sep);
               });
               if (isInOtherProject) {
                 project.dependencies ??= [];
@@ -10108,8 +10349,8 @@ function buildBasicCdsProjectDependencyGraph(sourceRootDir) {
         "warn",
         `Error determining files to compile for project ${project.projectDir}: ${String(error)}`
       );
-      project.compilationTargets = project.cdsFiles.map((file) => (0, import_path13.basename)(file));
-      project.expectedOutputFile = (0, import_path13.join)(project.projectDir, modelCdsJsonFile);
+      project.compilationTargets = project.cdsFiles.map((file) => (0, import_path14.basename)(file));
+      project.expectedOutputFile = (0, import_path14.join)(project.projectDir, modelCdsJsonFile);
     }
   }
   return projectMap;
@@ -10239,226 +10480,6 @@ function buildCdsProjectDependencyGraph(sourceRootDir) {
 
 // src/codeql.ts
 var import_child_process10 = require("child_process");
-
-// src/environment.ts
-var import_child_process9 = require("child_process");
-var import_fs8 = require("fs");
-var import_os = require("os");
-var import_path14 = require("path");
-function getPlatformInfo() {
-  const osPlatform = (0, import_os.platform)();
-  const osPlatformArch = (0, import_os.arch)();
-  const isWindows = osPlatform === "win32";
-  const exeExtension = isWindows ? ".exe" : "";
-  return {
-    platform: osPlatform,
-    arch: osPlatformArch,
-    isWindows,
-    exeExtension
-  };
-}
-function getCodeQLExePath() {
-  const platformInfo2 = getPlatformInfo();
-  const codeqlExeName = platformInfo2.isWindows ? "codeql.exe" : "codeql";
-  const codeqlDist = process.env.CODEQL_DIST;
-  if (codeqlDist) {
-    const codeqlPathFromDist = (0, import_path14.resolve)((0, import_path14.join)(codeqlDist, codeqlExeName));
-    if ((0, import_fs8.existsSync)(codeqlPathFromDist)) {
-      cdsExtractorLog("info", `Using CodeQL executable from CODEQL_DIST: ${codeqlPathFromDist}`);
-      return codeqlPathFromDist;
-    } else {
-      cdsExtractorLog(
-        "error",
-        `CODEQL_DIST is set to '${codeqlDist}', but CodeQL executable was not found at '${codeqlPathFromDist}'. Please ensure this path is correct. Falling back to PATH-based discovery.`
-      );
-    }
-  }
-  cdsExtractorLog(
-    "info",
-    'CODEQL_DIST environment variable not set or invalid. Attempting to find CodeQL executable via system PATH using "codeql version --format=json".'
-  );
-  try {
-    const versionOutput = (0, import_child_process9.execFileSync)(codeqlExeName, ["version", "--format=json"], {
-      encoding: "utf8",
-      timeout: 5e3,
-      // 5 seconds timeout
-      stdio: "pipe"
-      // Suppress output to console
-    });
-    try {
-      const versionInfo = JSON.parse(versionOutput);
-      if (versionInfo && typeof versionInfo.unpackedLocation === "string" && versionInfo.unpackedLocation) {
-        const resolvedPathFromVersion = (0, import_path14.resolve)((0, import_path14.join)(versionInfo.unpackedLocation, codeqlExeName));
-        if ((0, import_fs8.existsSync)(resolvedPathFromVersion)) {
-          cdsExtractorLog(
-            "info",
-            `CodeQL executable found via 'codeql version --format=json' at: ${resolvedPathFromVersion}`
-          );
-          return resolvedPathFromVersion;
-        }
-        cdsExtractorLog(
-          "warn",
-          `'codeql version --format=json' provided unpackedLocation '${versionInfo.unpackedLocation}', but executable not found at '${resolvedPathFromVersion}'.`
-        );
-      } else {
-        cdsExtractorLog(
-          "warn",
-          "Could not determine CodeQL executable path from 'codeql version --format=json' output. 'unpackedLocation' field missing, empty, or invalid."
-        );
-      }
-    } catch (parseError) {
-      cdsExtractorLog(
-        "warn",
-        `Failed to parse 'codeql version --format=json' output: ${String(parseError)}. Output was: ${versionOutput}`
-      );
-    }
-  } catch (error) {
-    let errorMessage = `INFO: Failed to find CodeQL executable via 'codeql version --format=json'. Error: ${String(error)}`;
-    if (error && typeof error === "object" && "code" in error && error.code === "ENOENT") {
-      errorMessage += `
-INFO: The command '${codeqlExeName}' was not found in your system PATH.`;
-    }
-    cdsExtractorLog("info", errorMessage);
-  }
-  cdsExtractorLog(
-    "error",
-    'Failed to determine CodeQL executable path. Please ensure the CODEQL_DIST environment variable is set and points to a valid CodeQL distribution, or that the CodeQL CLI (codeql) is available in your system PATH and "codeql version --format=json" can provide its location.'
-  );
-  return "";
-}
-function getJavaScriptExtractorRoot(codeqlExePath2) {
-  let jsExtractorRoot = process.env.CODEQL_EXTRACTOR_JAVASCRIPT_ROOT ?? "";
-  if (jsExtractorRoot) {
-    cdsExtractorLog(
-      "info",
-      `Using JavaScript extractor root from environment variable CODEQL_EXTRACTOR_JAVASCRIPT_ROOT: ${jsExtractorRoot}`
-    );
-    return jsExtractorRoot;
-  }
-  if (!codeqlExePath2) {
-    cdsExtractorLog(
-      "warn",
-      "Cannot resolve JavaScript extractor root because the CodeQL executable path was not provided or found."
-    );
-    return "";
-  }
-  try {
-    jsExtractorRoot = (0, import_child_process9.execFileSync)(
-      codeqlExePath2,
-      ["resolve", "extractor", "--language=javascript"],
-      { stdio: "pipe" }
-      // Suppress output from the command itself
-    ).toString().trim();
-    if (jsExtractorRoot) {
-      cdsExtractorLog("info", `JavaScript extractor root resolved to: ${jsExtractorRoot}`);
-    } else {
-      cdsExtractorLog(
-        "warn",
-        `'codeql resolve extractor --language=javascript' using '${codeqlExePath2}' returned an empty path.`
-      );
-    }
-  } catch (error) {
-    cdsExtractorLog(
-      "error",
-      `Error resolving JavaScript extractor root using '${codeqlExePath2}': ${String(error)}`
-    );
-    jsExtractorRoot = "";
-  }
-  return jsExtractorRoot;
-}
-function setupJavaScriptExtractorEnv() {
-  process.env.CODEQL_EXTRACTOR_JAVASCRIPT_WIP_DATABASE = process.env.CODEQL_EXTRACTOR_CDS_WIP_DATABASE;
-  process.env.CODEQL_EXTRACTOR_JAVASCRIPT_DIAGNOSTIC_DIR = process.env.CODEQL_EXTRACTOR_CDS_DIAGNOSTIC_DIR;
-  process.env.CODEQL_EXTRACTOR_JAVASCRIPT_LOG_DIR = process.env.CODEQL_EXTRACTOR_CDS_LOG_DIR;
-  process.env.CODEQL_EXTRACTOR_JAVASCRIPT_SCRATCH_DIR = process.env.CODEQL_EXTRACTOR_CDS_SCRATCH_DIR;
-  process.env.CODEQL_EXTRACTOR_JAVASCRIPT_TRAP_DIR = process.env.CODEQL_EXTRACTOR_CDS_TRAP_DIR;
-  process.env.CODEQL_EXTRACTOR_JAVASCRIPT_SOURCE_ARCHIVE_DIR = process.env.CODEQL_EXTRACTOR_CDS_SOURCE_ARCHIVE_DIR;
-}
-function getAutobuildScriptPath(jsExtractorRoot) {
-  if (!jsExtractorRoot) return "";
-  const platformInfo2 = getPlatformInfo();
-  const autobuildScriptName = platformInfo2.isWindows ? "autobuild.cmd" : "autobuild.sh";
-  return (0, import_path14.resolve)((0, import_path14.join)(jsExtractorRoot, "tools", autobuildScriptName));
-}
-function configureLgtmIndexFilters() {
-  let excludeFilters = "";
-  if (process.env.LGTM_INDEX_FILTERS) {
-    cdsExtractorLog(
-      "info",
-      `Found $LGTM_INDEX_FILTERS already set to:
-${process.env.LGTM_INDEX_FILTERS}`
-    );
-    const allowedExcludePatterns = [(0, import_path14.join)("exclude:**", "*"), (0, import_path14.join)("exclude:**", "*.*")];
-    excludeFilters = "\n" + process.env.LGTM_INDEX_FILTERS.split("\n").filter(
-      (line) => line.startsWith("exclude") && !allowedExcludePatterns.some((pattern) => line.includes(pattern))
-    ).join("\n");
-  }
-  const lgtmIndexFiltersPatterns = [
-    (0, import_path14.join)("exclude:**", "*.*"),
-    (0, import_path14.join)("include:**", "*.cds.json"),
-    (0, import_path14.join)("include:**", "*.cds"),
-    (0, import_path14.join)("include:**", cdsExtractorMarkerFileName),
-    (0, import_path14.join)("exclude:**", "node_modules", "**", "*.*")
-  ].join("\n");
-  process.env.LGTM_INDEX_FILTERS = lgtmIndexFiltersPatterns + excludeFilters;
-  process.env.LGTM_INDEX_TYPESCRIPT = "NONE";
-  process.env.LGTM_INDEX_FILETYPES = ".cds:JSON";
-}
-function applyPathsIgnoreToLgtmFilters(sourceRoot2) {
-  const patterns = getPathsIgnorePatterns(sourceRoot2);
-  if (patterns.length === 0) {
-    return;
-  }
-  const excludeLines = patterns.map((p) => `exclude:${p}`).join("\n");
-  const current = process.env.LGTM_INDEX_FILTERS ?? "";
-  process.env.LGTM_INDEX_FILTERS = current + "\n" + excludeLines;
-  cdsExtractorLog(
-    "info",
-    `Applied ${patterns.length} paths-ignore pattern(s) to LGTM_INDEX_FILTERS`
-  );
-}
-function setupAndValidateEnvironment(sourceRoot2) {
-  const errorMessages2 = [];
-  const platformInfo2 = getPlatformInfo();
-  const codeqlExePath2 = getCodeQLExePath();
-  if (!codeqlExePath2) {
-    errorMessages2.push(
-      "Failed to find CodeQL executable. Ensure CODEQL_DIST is set and valid, or CodeQL CLI is in PATH."
-    );
-  }
-  if (!dirExists(sourceRoot2)) {
-    errorMessages2.push(`Project root directory '${sourceRoot2}' does not exist.`);
-  }
-  const jsExtractorRoot = getJavaScriptExtractorRoot(codeqlExePath2);
-  if (!jsExtractorRoot) {
-    if (codeqlExePath2) {
-      errorMessages2.push(
-        "Failed to determine JavaScript extractor root using the found CodeQL executable."
-      );
-    } else {
-      errorMessages2.push(
-        "Cannot determine JavaScript extractor root because CodeQL executable was not found."
-      );
-    }
-  }
-  if (jsExtractorRoot) {
-    process.env.CODEQL_EXTRACTOR_JAVASCRIPT_ROOT = jsExtractorRoot;
-    setupJavaScriptExtractorEnv();
-  }
-  const autobuildScriptPath2 = jsExtractorRoot ? getAutobuildScriptPath(jsExtractorRoot) : "";
-  return {
-    success: errorMessages2.length === 0,
-    errorMessages: errorMessages2,
-    codeqlExePath: codeqlExePath2,
-    // Will be '' if not found
-    jsExtractorRoot,
-    // Will be '' if not found
-    autobuildScriptPath: autobuildScriptPath2,
-    platformInfo: platformInfo2
-  };
-}
-
-// src/codeql.ts
 function runJavaScriptExtractor(sourceRoot2, autobuildScriptPath2, codeqlExePath2) {
   cdsExtractorLog(
     "info",
@@ -10641,7 +10662,8 @@ try {
       const allCdsFiles = Array.from(
         /* @__PURE__ */ new Set([
           ...Ui((0, import_path16.join)(sourceRoot, "**/*.cds"), {
-            ignore: ["**/node_modules/**", "**/.git/**"]
+            ignore: ["**/node_modules/**", "**/.git/**"],
+            windowsPathsNoEscape: true
           })
         ])
       );
diff --git a/extractors/cds/tools/dist/cds-extractor.bundle.js.map b/extractors/cds/tools/dist/cds-extractor.bundle.js.map
index 441beb62d..2a70f0949 100644
--- a/extractors/cds/tools/dist/cds-extractor.bundle.js.map
+++ b/extractors/cds/tools/dist/cds-extractor.bundle.js.map
@@ -1,7 +1,7 @@
 {
   "version": 3,
-  "sources": ["../cds-extractor.ts", "../node_modules/glob/node_modules/balanced-match/src/index.ts", "../node_modules/glob/node_modules/brace-expansion/src/index.ts", "../node_modules/glob/node_modules/minimatch/src/assert-valid-pattern.ts", "../node_modules/glob/node_modules/minimatch/src/brace-expressions.ts", "../node_modules/glob/node_modules/minimatch/src/unescape.ts", "../node_modules/glob/node_modules/minimatch/src/ast.ts", "../node_modules/glob/node_modules/minimatch/src/escape.ts", "../node_modules/glob/node_modules/minimatch/src/index.ts", "../node_modules/glob/src/glob.ts", "../node_modules/glob/node_modules/lru-cache/src/index.ts", "../node_modules/glob/node_modules/path-scurry/src/index.ts", "../node_modules/glob/node_modules/minipass/src/index.ts", "../node_modules/glob/src/pattern.ts", "../node_modules/glob/src/ignore.ts", "../node_modules/glob/src/processor.ts", "../node_modules/glob/src/walker.ts", "../node_modules/glob/src/has-magic.ts", "../node_modules/glob/src/index.ts", "../src/cds/compiler/command.ts", "../src/filesystem.ts", "../src/constants.ts", "../src/logging/cdsExtractorLog.ts", "../src/logging/statusReport.ts", "../src/cds/compiler/compile.ts", "../src/cds/compiler/version.ts", "../src/cds/compiler/validator.ts", "../src/diagnostics.ts", "../src/packageManager/cacheInstaller.ts", "../src/packageManager/versionResolver.ts", "../src/packageManager/projectInstaller.ts", "../src/cds/compiler/retry.ts", "../src/cds/compiler/graph.ts", "../src/cds/compiler/project.ts", "../src/cds/indexer.ts", "../src/cds/parser/graph.ts", "../src/cds/parser/functions.ts", "../src/paths-ignore.ts", "../node_modules/js-yaml/dist/js-yaml.mjs", "../node_modules/brace-expansion/node_modules/balanced-match/src/index.ts", "../node_modules/brace-expansion/src/index.ts", "../node_modules/minimatch/src/assert-valid-pattern.ts", "../node_modules/minimatch/src/brace-expressions.ts", "../node_modules/minimatch/src/unescape.ts", "../node_modules/minimatch/src/ast.ts", 
"../node_modules/minimatch/src/escape.ts", "../node_modules/minimatch/src/index.ts", "../src/codeql.ts", "../src/environment.ts", "../src/utils.ts"],
-  "sourcesContent": ["import { join } from 'path';\n\nimport { sync as globSync } from 'glob';\n\nimport { orchestrateCompilation } from './src/cds/compiler';\nimport { orchestrateCdsIndexer } from './src/cds/indexer';\nimport { buildCdsProjectDependencyGraph, type CdsDependencyGraph } from './src/cds/parser';\nimport { handleEarlyExit, runJavaScriptExtractionWithMarker } from './src/codeql';\nimport {\n  addCompilationDiagnostic,\n  addDependencyGraphDiagnostic,\n  addDependencyInstallationDiagnostic,\n  addEnvironmentSetupDiagnostic,\n  addNoCdsProjectsDiagnostic,\n} from './src/diagnostics';\nimport { setupAndValidateEnvironment } from './src/environment';\nimport {\n  cdsExtractorLog,\n  generateStatusReport,\n  logExtractorStart,\n  logExtractorStop,\n  logPerformanceMilestone,\n  logPerformanceTrackingStart,\n  logPerformanceTrackingStop,\n  setSourceRootDirectory,\n} from './src/logging';\nimport { cacheInstallDependencies } from './src/packageManager';\nimport { validateArguments } from './src/utils';\n\n// ============================================================================\n// Main Extraction Flow\n// ============================================================================\n\n// Validate arguments\nconst validationResult = validateArguments(process.argv);\nif (!validationResult.isValid) {\n  console.warn(validationResult.usageMessage);\n  console.log(\n    `CDS extractor terminated due to invalid arguments: ${validationResult.usageMessage}`,\n  );\n  console.log(`Completed run of the cds-extractor.js script for the CDS extractor.`);\n  process.exit(0);\n}\n\nconst { sourceRoot } = validationResult.args!;\n\n// Initialize logging\nsetSourceRootDirectory(sourceRoot);\nlogExtractorStart(sourceRoot);\n\n// Setup and validate environment\nlogPerformanceTrackingStart('Environment Setup');\nconst {\n  success: envSetupSuccess,\n  errorMessages,\n  codeqlExePath,\n  autobuildScriptPath,\n  platformInfo,\n} = 
setupAndValidateEnvironment(sourceRoot);\nlogPerformanceTrackingStop('Environment Setup');\n\nif (!envSetupSuccess) {\n  const codeqlExe = platformInfo.isWindows ? 'codeql.exe' : 'codeql';\n  const errorMessage = `'${codeqlExe} database index-files --language cds' terminated early due to: ${errorMessages.join(', ')}.`;\n  cdsExtractorLog('warn', errorMessage);\n\n  if (codeqlExePath) {\n    addEnvironmentSetupDiagnostic(sourceRoot, errorMessage, codeqlExePath);\n  }\n\n  logExtractorStop(\n    false,\n    'Warning: Environment setup failed, continuing with limited functionality',\n  );\n} else {\n  process.chdir(sourceRoot);\n}\n\ncdsExtractorLog(\n  'info',\n  `CodeQL CDS extractor using autobuild mode for scan of project source root directory '${sourceRoot}'.`,\n);\n\n// Build CDS project dependency graph\ncdsExtractorLog('info', 'Building CDS project dependency graph...');\nlet dependencyGraph: CdsDependencyGraph;\n\ntry {\n  logPerformanceTrackingStart('Dependency Graph Build');\n  dependencyGraph = buildCdsProjectDependencyGraph(sourceRoot);\n  logPerformanceTrackingStop('Dependency Graph Build');\n\n  logPerformanceMilestone(\n    'Dependency graph created',\n    `${dependencyGraph.projects.size} projects, ${dependencyGraph.statusSummary.totalCdsFiles} CDS files`,\n  );\n\n  // Log project details\n  if (dependencyGraph.projects.size > 0) {\n    for (const [projectDir, project] of dependencyGraph.projects.entries()) {\n      cdsExtractorLog(\n        'info',\n        `Project: ${projectDir}, Status: ${project.status}, CDS files: ${project.cdsFiles.length}, Compilation targets: ${project.compilationTargets.length}`,\n      );\n    }\n  } else {\n    // No CDS projects detected - try direct file search as diagnostic\n    cdsExtractorLog(\n      'error',\n      'No CDS projects were detected. 
This is an unrecoverable error as there is nothing to scan.',\n    );\n\n    try {\n      const allCdsFiles = Array.from(\n        new Set([\n          ...globSync(join(sourceRoot, '**/*.cds'), {\n            ignore: ['**/node_modules/**', '**/.git/**'],\n          }),\n        ]),\n      );\n      cdsExtractorLog(\n        'info',\n        `Direct search found ${allCdsFiles.length} CDS files in the source tree.`,\n      );\n\n      if (allCdsFiles.length > 0) {\n        cdsExtractorLog(\n          'info',\n          `Sample CDS files: ${allCdsFiles.slice(0, 5).join(', ')}${allCdsFiles.length > 5 ? ', ...' : ''}`,\n        );\n        cdsExtractorLog(\n          'error',\n          'CDS files were found but no projects were detected. This indicates a problem with project detection logic.',\n        );\n      } else {\n        cdsExtractorLog(\n          'info',\n          'No CDS files found in the source tree. This may be expected if the source does not contain CAP/CDS projects.',\n        );\n      }\n    } catch (globError) {\n      cdsExtractorLog('warn', `Could not perform direct CDS file search: ${String(globError)}`);\n    }\n\n    const warningMessage =\n      'No CDS projects were detected. 
This may be expected if the source does not contain CAP/CDS projects.';\n    if (codeqlExePath) {\n      addNoCdsProjectsDiagnostic(sourceRoot, warningMessage, codeqlExePath);\n    }\n\n    logExtractorStop(false, 'Warning: No CDS projects detected, skipping CDS-specific processing');\n    handleEarlyExit(\n      sourceRoot,\n      autobuildScriptPath || '',\n      codeqlExePath,\n      'JavaScript extraction completed (CDS processing was skipped)',\n    );\n  }\n} catch (error) {\n  const errorMessage = `Failed to build CDS dependency graph: ${String(error)}`;\n  cdsExtractorLog('error', errorMessage);\n\n  if (codeqlExePath) {\n    addDependencyGraphDiagnostic(sourceRoot, errorMessage, codeqlExePath);\n  }\n\n  logExtractorStop(\n    false,\n    'Warning: Dependency graph build failed, skipping CDS-specific processing',\n  );\n  handleEarlyExit(\n    sourceRoot,\n    autobuildScriptPath || '',\n    codeqlExePath,\n    'JavaScript extraction completed (CDS processing was skipped)',\n  );\n}\n\n// Install dependencies\nlogPerformanceTrackingStart('Dependency Installation');\nconst projectCacheDirMap = cacheInstallDependencies(dependencyGraph, sourceRoot, codeqlExePath);\nlogPerformanceTrackingStop('Dependency Installation');\n\nif (projectCacheDirMap.size === 0) {\n  cdsExtractorLog(\n    'error',\n    'No project cache directory mappings were created. This indicates that dependency installation failed for all discovered projects.',\n  );\n\n  if (dependencyGraph.projects.size > 0) {\n    const errorMessage = `Found ${dependencyGraph.projects.size} CDS projects but failed to install dependencies for any of them. 
Cannot proceed with compilation.`;\n    cdsExtractorLog('error', errorMessage);\n\n    if (codeqlExePath) {\n      addDependencyInstallationDiagnostic(sourceRoot, errorMessage, codeqlExePath);\n    }\n\n    logExtractorStop(\n      false,\n      'Warning: Dependency installation failed for all projects, continuing with limited functionality',\n    );\n  }\n\n  cdsExtractorLog(\n    'warn',\n    'No projects and no cache mappings - this should have been detected earlier.',\n  );\n}\n\n// Run cds-indexer for projects that use it (before compilation)\nlogPerformanceTrackingStart('CDS Indexer');\nconst cdsIndexerSummary = orchestrateCdsIndexer(\n  dependencyGraph,\n  sourceRoot,\n  projectCacheDirMap,\n  codeqlExePath,\n);\nlogPerformanceTrackingStop('CDS Indexer');\n\nif (cdsIndexerSummary.projectsRequiringIndexer > 0) {\n  logPerformanceMilestone(\n    'CDS indexer completed',\n    `${cdsIndexerSummary.successfulRuns} succeeded, ${cdsIndexerSummary.failedRuns} failed`,\n  );\n}\n\n// Collect all CDS files to process\nconst cdsFilePathsToProcess: string[] = [];\nfor (const project of dependencyGraph.projects.values()) {\n  cdsFilePathsToProcess.push(...project.cdsFiles);\n}\n\ncdsExtractorLog(\n  'info',\n  `Found ${cdsFilePathsToProcess.length} total CDS files, ${dependencyGraph.statusSummary.totalCdsFiles} CDS files in dependency graph`,\n);\n\n// Compile CDS files\nlogPerformanceTrackingStart('CDS Compilation');\ntry {\n  orchestrateCompilation(dependencyGraph, projectCacheDirMap, codeqlExePath);\n\n  if (!dependencyGraph.statusSummary.overallSuccess) {\n    cdsExtractorLog(\n      'error',\n      `Compilation completed with failures: ${dependencyGraph.statusSummary.failedCompilations} failed out of ${dependencyGraph.statusSummary.totalCompilationTasks} total tasks`,\n    );\n\n    for (const error of dependencyGraph.errors.critical) {\n      cdsExtractorLog('error', `Critical error in ${error.phase}: ${error.message}`);\n    }\n  }\n\n  
logPerformanceTrackingStop('CDS Compilation');\n  logPerformanceMilestone('CDS compilation completed');\n} catch (error) {\n  logPerformanceTrackingStop('CDS Compilation');\n  cdsExtractorLog('error', `Compilation orchestration failed: ${String(error)}`);\n\n  if (cdsFilePathsToProcess.length > 0) {\n    addCompilationDiagnostic(\n      cdsFilePathsToProcess[0],\n      `Compilation orchestration failed: ${String(error)}`,\n      codeqlExePath,\n      sourceRoot,\n    );\n  }\n}\n\n// Run JavaScript extraction with marker file handling\nconst extractionSuccess = runJavaScriptExtractionWithMarker(\n  sourceRoot,\n  autobuildScriptPath,\n  codeqlExePath,\n  dependencyGraph,\n);\n\nlogExtractorStop(\n  extractionSuccess,\n  extractionSuccess ? 'CDS extraction completed successfully' : 'JavaScript extractor failed',\n);\n\ncdsExtractorLog(\n  'info',\n  'CDS Extractor Status Report : Final...\\n' + generateStatusReport(dependencyGraph),\n);\n\nconsole.log(`Completed run of the cds-extractor.js script for the CDS extractor.`);\n", "export const balanced = (\n  a: string | RegExp,\n  b: string | RegExp,\n  str: string,\n) => {\n  const ma = a instanceof RegExp ? maybeMatch(a, str) : a\n  const mb = b instanceof RegExp ? maybeMatch(b, str) : b\n\n  const r = ma !== null && mb != null && range(ma, mb, str)\n\n  return (\n    r && {\n      start: r[0],\n      end: r[1],\n      pre: str.slice(0, r[0]),\n      body: str.slice(r[0] + ma.length, r[1]),\n      post: str.slice(r[1] + mb.length),\n    }\n  )\n}\n\nconst maybeMatch = (reg: RegExp, str: string) => {\n  const m = str.match(reg)\n  return m ? 
m[0] : null\n}\n\nexport const range = (\n  a: string,\n  b: string,\n  str: string,\n): undefined | [number, number] => {\n  let begs: number[],\n    beg: number | undefined,\n    left: number,\n    right: number | undefined = undefined,\n    result: undefined | [number, number]\n  let ai = str.indexOf(a)\n  let bi = str.indexOf(b, ai + 1)\n  let i = ai\n\n  if (ai >= 0 && bi > 0) {\n    if (a === b) {\n      return [ai, bi]\n    }\n    begs = []\n    left = str.length\n\n    while (i >= 0 && !result) {\n      if (i === ai) {\n        begs.push(i)\n        ai = str.indexOf(a, i + 1)\n      } else if (begs.length === 1) {\n        const r = begs.pop()\n        if (r !== undefined) result = [r, bi]\n      } else {\n        beg = begs.pop()\n        if (beg !== undefined && beg < left) {\n          left = beg\n          right = bi\n        }\n\n        bi = str.indexOf(b, i + 1)\n      }\n\n      i = ai < bi && ai >= 0 ? ai : bi\n    }\n\n    if (begs.length && right !== undefined) {\n      result = [left, right]\n    }\n  }\n\n  return result\n}\n", "import { balanced } from 'balanced-match'\n\nconst escSlash = '\\0SLASH' + Math.random() + '\\0'\nconst escOpen = '\\0OPEN' + Math.random() + '\\0'\nconst escClose = '\\0CLOSE' + Math.random() + '\\0'\nconst escComma = '\\0COMMA' + Math.random() + '\\0'\nconst escPeriod = '\\0PERIOD' + Math.random() + '\\0'\nconst escSlashPattern = new RegExp(escSlash, 'g')\nconst escOpenPattern = new RegExp(escOpen, 'g')\nconst escClosePattern = new RegExp(escClose, 'g')\nconst escCommaPattern = new RegExp(escComma, 'g')\nconst escPeriodPattern = new RegExp(escPeriod, 'g')\nconst slashPattern = /\\\\\\\\/g\nconst openPattern = /\\\\{/g\nconst closePattern = /\\\\}/g\nconst commaPattern = /\\\\,/g\nconst periodPattern = /\\\\./g\n\nexport const EXPANSION_MAX = 100_000\n\nfunction numeric(str: string) {\n  return !isNaN(str as any) ? 
parseInt(str, 10) : str.charCodeAt(0)\n}\n\nfunction escapeBraces(str: string) {\n  return str\n    .replace(slashPattern, escSlash)\n    .replace(openPattern, escOpen)\n    .replace(closePattern, escClose)\n    .replace(commaPattern, escComma)\n    .replace(periodPattern, escPeriod)\n}\n\nfunction unescapeBraces(str: string) {\n  return str\n    .replace(escSlashPattern, '\\\\')\n    .replace(escOpenPattern, '{')\n    .replace(escClosePattern, '}')\n    .replace(escCommaPattern, ',')\n    .replace(escPeriodPattern, '.')\n}\n\n/**\n * Basically just str.split(\",\"), but handling cases\n * where we have nested braced sections, which should be\n * treated as individual members, like {a,{b,c},d}\n */\nfunction parseCommaParts(str: string) {\n  if (!str) {\n    return ['']\n  }\n\n  const parts: string[] = []\n  const m = balanced('{', '}', str)\n\n  if (!m) {\n    return str.split(',')\n  }\n\n  const { pre, body, post } = m\n  const p = pre.split(',')\n\n  p[p.length - 1] += '{' + body + '}'\n  const postParts = parseCommaParts(post)\n  if (post.length) {\n    ;(p[p.length - 1] as string) += postParts.shift()\n    p.push.apply(p, postParts)\n  }\n\n  parts.push.apply(parts, p)\n\n  return parts\n}\n\nexport type BraceExpansionOptions = {\n  max?: number\n}\n\nexport function expand(str: string, options: BraceExpansionOptions = {}) {\n  if (!str) {\n    return []\n  }\n\n  const { max = EXPANSION_MAX } = options\n\n  // I don't know why Bash 4.3 does this, but it does.\n  // Anything starting with {} will have the first two bytes preserved\n  // but *only* at the top level, so {},a}b will not expand to anything,\n  // but a{},b}c will be expanded to [a}c,abc].\n  // One could argue that this is a bug in Bash, but since the goal of\n  // this module is to match Bash's rules, we escape a leading {}\n  if (str.slice(0, 2) === '{}') {\n    str = '\\\\{\\\\}' + str.slice(2)\n  }\n\n  return expand_(escapeBraces(str), max, true).map(unescapeBraces)\n}\n\nfunction 
embrace(str: string) {\n  return '{' + str + '}'\n}\n\nfunction isPadded(el: string) {\n  return /^-?0\\d/.test(el)\n}\n\nfunction lte(i: number, y: number) {\n  return i <= y\n}\n\nfunction gte(i: number, y: number) {\n  return i >= y\n}\n\nfunction expand_(str: string, max: number, isTop: boolean): string[] {\n  /** @type {string[]} */\n  const expansions: string[] = []\n\n  const m = balanced('{', '}', str)\n  if (!m) return [str]\n\n  // no need to expand pre, since it is guaranteed to be free of brace-sets\n  const pre = m.pre\n  const post: string[] = m.post.length ? expand_(m.post, max, false) : ['']\n\n  if (/\\$$/.test(m.pre)) {\n    for (let k = 0; k < post.length && k < max; k++) {\n      const expansion = pre + '{' + m.body + '}' + post[k]\n      expansions.push(expansion)\n    }\n  } else {\n    const isNumericSequence = /^-?\\d+\\.\\.-?\\d+(?:\\.\\.-?\\d+)?$/.test(m.body)\n    const isAlphaSequence = /^[a-zA-Z]\\.\\.[a-zA-Z](?:\\.\\.-?\\d+)?$/.test(\n      m.body,\n    )\n    const isSequence = isNumericSequence || isAlphaSequence\n    const isOptions = m.body.indexOf(',') >= 0\n    if (!isSequence && !isOptions) {\n      // {a},b}\n      if (m.post.match(/,(?!,).*\\}/)) {\n        str = m.pre + '{' + m.body + escClose + m.post\n        return expand_(str, max, true)\n      }\n      return [str]\n    }\n\n    let n: string[]\n    if (isSequence) {\n      n = m.body.split(/\\.\\./)\n    } else {\n      n = parseCommaParts(m.body)\n      if (n.length === 1 && n[0] !== undefined) {\n        // x{{a,b}}y ==> x{a}y x{b}y\n        n = expand_(n[0], max, false).map(embrace)\n        //XXX is this necessary? 
Can't seem to hit it in tests.\n        /* c8 ignore start */\n        if (n.length === 1) {\n          return post.map(p => m.pre + n[0] + p)\n        }\n        /* c8 ignore stop */\n      }\n    }\n\n    // at this point, n is the parts, and we know it's not a comma set\n    // with a single entry.\n    let N: string[]\n\n    if (isSequence && n[0] !== undefined && n[1] !== undefined) {\n      const x = numeric(n[0])\n      const y = numeric(n[1])\n      const width = Math.max(n[0].length, n[1].length)\n      let incr =\n        n.length === 3 && n[2] !== undefined ? Math.abs(numeric(n[2])) : 1\n      let test = lte\n      const reverse = y < x\n      if (reverse) {\n        incr *= -1\n        test = gte\n      }\n      const pad = n.some(isPadded)\n\n      N = []\n\n      for (let i = x; test(i, y); i += incr) {\n        let c\n        if (isAlphaSequence) {\n          c = String.fromCharCode(i)\n          if (c === '\\\\') {\n            c = ''\n          }\n        } else {\n          c = String(i)\n          if (pad) {\n            const need = width - c.length\n            if (need > 0) {\n              const z = new Array(need + 1).join('0')\n              if (i < 0) {\n                c = '-' + z + c.slice(1)\n              } else {\n                c = z + c\n              }\n            }\n          }\n        }\n        N.push(c)\n      }\n    } else {\n      N = []\n\n      for (let j = 0; j < n.length; j++) {\n        N.push.apply(N, expand_(n[j] as string, max, false))\n      }\n    }\n\n    for (let j = 0; j < N.length; j++) {\n      for (let k = 0; k < post.length && expansions.length < max; k++) {\n        const expansion = pre + N[j] + post[k]\n        if (!isTop || isSequence || expansion) {\n          expansions.push(expansion)\n        }\n      }\n    }\n  }\n\n  return expansions\n}\n", "const MAX_PATTERN_LENGTH = 1024 * 64\nexport const assertValidPattern: (pattern: any) => void = (\n  pattern: any,\n): asserts pattern is string => {\n  if 
(typeof pattern !== 'string') {\n    throw new TypeError('invalid pattern')\n  }\n\n  if (pattern.length > MAX_PATTERN_LENGTH) {\n    throw new TypeError('pattern is too long')\n  }\n}\n", "// translate the various posix character classes into unicode properties\n// this works across all unicode locales\n\n// { : [, /u flag required, negated]\nconst posixClasses: { [k: string]: [e: string, u: boolean, n?: boolean] } =\n  {\n    '[:alnum:]': ['\\\\p{L}\\\\p{Nl}\\\\p{Nd}', true],\n    '[:alpha:]': ['\\\\p{L}\\\\p{Nl}', true],\n    '[:ascii:]': ['\\\\x' + '00-\\\\x' + '7f', false],\n    '[:blank:]': ['\\\\p{Zs}\\\\t', true],\n    '[:cntrl:]': ['\\\\p{Cc}', true],\n    '[:digit:]': ['\\\\p{Nd}', true],\n    '[:graph:]': ['\\\\p{Z}\\\\p{C}', true, true],\n    '[:lower:]': ['\\\\p{Ll}', true],\n    '[:print:]': ['\\\\p{C}', true],\n    '[:punct:]': ['\\\\p{P}', true],\n    '[:space:]': ['\\\\p{Z}\\\\t\\\\r\\\\n\\\\v\\\\f', true],\n    '[:upper:]': ['\\\\p{Lu}', true],\n    '[:word:]': ['\\\\p{L}\\\\p{Nl}\\\\p{Nd}\\\\p{Pc}', true],\n    '[:xdigit:]': ['A-Fa-f0-9', false],\n  }\n\n// only need to escape a few things inside of brace expressions\n// escapes: [ \\ ] -\nconst braceEscape = (s: string) => s.replace(/[[\\]\\\\-]/g, '\\\\$&')\n// escape all regexp magic characters\nconst regexpEscape = (s: string) =>\n  s.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g, '\\\\$&')\n\n// everything has already been escaped, we just have to join\nconst rangesToString = (ranges: string[]): string => ranges.join('')\n\nexport type ParseClassResult = [\n  src: string,\n  uFlag: boolean,\n  consumed: number,\n  hasMagic: boolean,\n]\n\n// takes a glob string at a posix brace expression, and returns\n// an equivalent regular expression source, and boolean indicating\n// whether the /u flag needs to be applied, and the number of chars\n// consumed to parse the character class.\n// This also removes out of order ranges, and returns ($.) 
if the\n// entire class just no good.\nexport const parseClass = (\n  glob: string,\n  position: number,\n): ParseClassResult => {\n  const pos = position\n  /* c8 ignore start */\n  if (glob.charAt(pos) !== '[') {\n    throw new Error('not in a brace expression')\n  }\n  /* c8 ignore stop */\n  const ranges: string[] = []\n  const negs: string[] = []\n\n  let i = pos + 1\n  let sawStart = false\n  let uflag = false\n  let escaping = false\n  let negate = false\n  let endPos = pos\n  let rangeStart = ''\n  WHILE: while (i < glob.length) {\n    const c = glob.charAt(i)\n    if ((c === '!' || c === '^') && i === pos + 1) {\n      negate = true\n      i++\n      continue\n    }\n\n    if (c === ']' && sawStart && !escaping) {\n      endPos = i + 1\n      break\n    }\n\n    sawStart = true\n    if (c === '\\\\') {\n      if (!escaping) {\n        escaping = true\n        i++\n        continue\n      }\n      // escaped \\ char, fall through and treat like normal char\n    }\n    if (c === '[' && !escaping) {\n      // either a posix class, a collation equivalent, or just a [\n      for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {\n        if (glob.startsWith(cls, i)) {\n          // invalid, [a-[] is fine, but not [a-[:alpha]]\n          if (rangeStart) {\n            return ['$.', false, glob.length - pos, true]\n          }\n          i += cls.length\n          if (neg) negs.push(unip)\n          else ranges.push(unip)\n          uflag = uflag || u\n          continue WHILE\n        }\n      }\n    }\n\n    // now it's just a normal character, effectively\n    escaping = false\n    if (rangeStart) {\n      // throw this range away if it's not valid, but others\n      // can still match.\n      if (c > rangeStart) {\n        ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c))\n      } else if (c === rangeStart) {\n        ranges.push(braceEscape(c))\n      }\n      rangeStart = ''\n      i++\n      continue\n    }\n\n    // now might be the 
start of a range.\n    // can be either c-d or c-] or c] or c] at this point\n    if (glob.startsWith('-]', i + 1)) {\n      ranges.push(braceEscape(c + '-'))\n      i += 2\n      continue\n    }\n    if (glob.startsWith('-', i + 1)) {\n      rangeStart = c\n      i += 2\n      continue\n    }\n\n    // not the start of a range, just a single character\n    ranges.push(braceEscape(c))\n    i++\n  }\n\n  if (endPos < i) {\n    // didn't see the end of the class, not a valid class,\n    // but might still be valid as a literal match.\n    return ['', false, 0, false]\n  }\n\n  // if we got no ranges and no negates, then we have a range that\n  // cannot possibly match anything, and that poisons the whole glob\n  if (!ranges.length && !negs.length) {\n    return ['$.', false, glob.length - pos, true]\n  }\n\n  // if we got one positive range, and it's a single character, then that's\n  // not actually a magic pattern, it's just that one literal character.\n  // we should not treat that as \"magic\", we should just return the literal\n  // character. [_] is a perfectly valid way to escape glob magic chars.\n  if (\n    negs.length === 0 &&\n    ranges.length === 1 &&\n    /^\\\\?.$/.test(ranges[0]) &&\n    !negate\n  ) {\n    const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0]\n    return [regexpEscape(r), false, endPos - pos, false]\n  }\n\n  const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']'\n  const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']'\n  const comb =\n    ranges.length && negs.length ? '(' + sranges + '|' + snegs + ')'\n    : ranges.length ? 
sranges\n    : snegs\n\n  return [comb, uflag, endPos - pos, true]\n}\n", "import { MinimatchOptions } from './index.js'\n\n/**\n * Un-escape a string that has been escaped with {@link escape}.\n *\n * If the {@link MinimatchOptions.windowsPathsNoEscape} option is used, then\n * square-bracket escapes are removed, but not backslash escapes.\n *\n * For example, it will turn the string `'[*]'` into `*`, but it will not\n * turn `'\\\\*'` into `'*'`, because `\\` is a path separator in\n * `windowsPathsNoEscape` mode.\n *\n * When `windowsPathsNoEscape` is not set, then both square-bracket escapes and\n * backslash escapes are removed.\n *\n * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped\n * or unescaped.\n *\n * When `magicalBraces` is not set, escapes of braces (`{` and `}`) will not be\n * unescaped.\n */\n\nexport const unescape = (\n  s: string,\n  {\n    windowsPathsNoEscape = false,\n    magicalBraces = true,\n  }: Pick = {},\n) => {\n  if (magicalBraces) {\n    return windowsPathsNoEscape ?\n        s.replace(/\\[([^\\/\\\\])\\]/g, '$1')\n      : s\n          .replace(/((?!\\\\).|^)\\[([^\\/\\\\])\\]/g, '$1$2')\n          .replace(/\\\\([^\\/])/g, '$1')\n  }\n  return windowsPathsNoEscape ?\n      s.replace(/\\[([^\\/\\\\{}])\\]/g, '$1')\n    : s\n        .replace(/((?!\\\\).|^)\\[([^\\/\\\\{}])\\]/g, '$1$2')\n        .replace(/\\\\([^\\/{}])/g, '$1')\n}\n", "// parse a single path portion\n\nimport { parseClass } from './brace-expressions.js'\nimport { MinimatchOptions, MMRegExp } from './index.js'\nimport { unescape } from './unescape.js'\n\n// classes [] are handled by the parseClass method\n// for positive extglobs, we sub-parse the contents, and combine,\n// with the appropriate regexp close.\n// for negative extglobs, we sub-parse the contents, but then\n// have to include the rest of the pattern, then the parent, etc.,\n// as the thing that cannot be because RegExp negative lookaheads\n// are different from globs.\n//\n// 
So for example:\n// a@(i|w!(x|y)z|j)b => ^a(i|w((!?(x|y)zb).*)z|j)b$\n//   1   2 3   4 5 6      1   2    3   46      5 6\n//\n// Assembling the extglob requires not just the negated patterns themselves,\n// but also anything following the negative patterns up to the boundary\n// of the current pattern, plus anything following in the parent pattern.\n//\n//\n// So, first, we parse the string into an AST of extglobs, without turning\n// anything into regexps yet.\n//\n// ['a', {@ [['i'], ['w', {!['x', 'y']}, 'z'], ['j']]}, 'b']\n//\n// Then, for all the negative extglobs, we append whatever comes after in\n// each parent as their tail\n//\n// ['a', {@ [['i'], ['w', {!['x', 'y'], 'z', 'b'}, 'z'], ['j']]}, 'b']\n//\n// Lastly, we turn each of these pieces into a regexp, and join\n//\n//                                 v----- .* because there's more following,\n//                                 v    v  otherwise, .+ because it must be\n//                                 v    v  *something* there.\n// ['^a', {@ ['i', 'w(?:(!?(?:x|y).*zb$).*)z', 'j' ]}, 'b$']\n//   copy what follows into here--^^^^^\n// ['^a', '(?:i|w(?:(?!(?:x|y).*zb$).*)z|j)', 'b$']\n// ['^a(?:i|w(?:(?!(?:x|y).*zb$).*)z|j)b$']\n\nexport type ExtglobType = '!' | '?' | '+' | '*' | '@'\nconst types = new Set(['!', '?', '+', '*', '@'])\nconst isExtglobType = (c: string): c is ExtglobType =>\n  types.has(c as ExtglobType)\n\n// Patterns that get prepended to bind to the start of either the\n// entire string, or just a single path portion, to prevent dots\n// and/or traversal patterns, when needed.\n// Exts don't need the ^ or / bit, because the root binds that already.\nconst startNoTraversal = '(?!(?:^|/)\\\\.\\\\.?(?:$|/))'\nconst startNoDot = '(?!\\\\.)'\n\n// characters that indicate a start of pattern needs the \"no dots\" bit,\n// because a dot *might* be matched. 
( is not in the list, because in\n// the case of a child extglob, it will handle the prevention itself.\nconst addPatternStart = new Set(['[', '.'])\n// cases where traversal is A-OK, no dot prevention needed\nconst justDots = new Set(['..', '.'])\nconst reSpecials = new Set('().*{}+?[]^$\\\\!')\nconst regExpEscape = (s: string) =>\n  s.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g, '\\\\$&')\n\n// any single thing other than /\nconst qmark = '[^/]'\n\n// * => any number of characters\nconst star = qmark + '*?'\n// use + when we need to ensure that *something* matches, because the * is\n// the only thing in the path portion.\nconst starNoEmpty = qmark + '+?'\n\n// remove the \\ chars that we added if we end up doing a nonmagic compare\n// const deslash = (s: string) => s.replace(/\\\\(.)/g, '$1')\n\nexport class AST {\n  type: ExtglobType | null\n  readonly #root: AST\n\n  #hasMagic?: boolean\n  #uflag: boolean = false\n  #parts: (string | AST)[] = []\n  readonly #parent?: AST\n  readonly #parentIndex: number\n  #negs: AST[]\n  #filledNegs: boolean = false\n  #options: MinimatchOptions\n  #toString?: string\n  // set to true if it's an extglob with no children\n  // (which really means one child of '')\n  #emptyExt: boolean = false\n\n  constructor(\n    type: ExtglobType | null,\n    parent?: AST,\n    options: MinimatchOptions = {},\n  ) {\n    this.type = type\n    // extglobs are inherently magical\n    if (type) this.#hasMagic = true\n    this.#parent = parent\n    this.#root = this.#parent ? this.#parent.#root : this\n    this.#options = this.#root === this ? options : this.#root.#options\n    this.#negs = this.#root === this ? [] : this.#root.#negs\n    if (type === '!' && !this.#root.#filledNegs) this.#negs.push(this)\n    this.#parentIndex = this.#parent ? 
this.#parent.#parts.length : 0\n  }\n\n  get hasMagic(): boolean | undefined {\n    /* c8 ignore start */\n    if (this.#hasMagic !== undefined) return this.#hasMagic\n    /* c8 ignore stop */\n    for (const p of this.#parts) {\n      if (typeof p === 'string') continue\n      if (p.type || p.hasMagic) return (this.#hasMagic = true)\n    }\n    // note: will be undefined until we generate the regexp src and find out\n    return this.#hasMagic\n  }\n\n  // reconstructs the pattern\n  toString(): string {\n    if (this.#toString !== undefined) return this.#toString\n    if (!this.type) {\n      return (this.#toString = this.#parts.map(p => String(p)).join(''))\n    } else {\n      return (this.#toString =\n        this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')')\n    }\n  }\n\n  #fillNegs() {\n    /* c8 ignore start */\n    if (this !== this.#root) throw new Error('should only call on root')\n    if (this.#filledNegs) return this\n    /* c8 ignore stop */\n\n    // call toString() once to fill this out\n    this.toString()\n    this.#filledNegs = true\n    let n: AST | undefined\n    while ((n = this.#negs.pop())) {\n      if (n.type !== '!') continue\n      // walk up the tree, appending everthing that comes AFTER parentIndex\n      let p: AST | undefined = n\n      let pp = p.#parent\n      while (pp) {\n        for (\n          let i = p.#parentIndex + 1;\n          !pp.type && i < pp.#parts.length;\n          i++\n        ) {\n          for (const part of n.#parts) {\n            /* c8 ignore start */\n            if (typeof part === 'string') {\n              throw new Error('string part in extglob AST??')\n            }\n            /* c8 ignore stop */\n            part.copyIn(pp.#parts[i])\n          }\n        }\n        p = pp\n        pp = p.#parent\n      }\n    }\n    return this\n  }\n\n  push(...parts: (string | AST)[]) {\n    for (const p of parts) {\n      if (p === '') continue\n      /* c8 ignore start */\n      if (\n        
typeof p !== 'string' &&\n        !(p instanceof AST && p.#parent === this)\n      ) {\n        throw new Error('invalid part: ' + p)\n      }\n      /* c8 ignore stop */\n      this.#parts.push(p)\n    }\n  }\n\n  toJSON() {\n    const ret: any[] =\n      this.type === null ?\n        this.#parts\n          .slice()\n          .map(p => (typeof p === 'string' ? p : p.toJSON()))\n      : [this.type, ...this.#parts.map(p => (p as AST).toJSON())]\n    if (this.isStart() && !this.type) ret.unshift([])\n    if (\n      this.isEnd() &&\n      (this === this.#root ||\n        (this.#root.#filledNegs && this.#parent?.type === '!'))\n    ) {\n      ret.push({})\n    }\n    return ret\n  }\n\n  isStart(): boolean {\n    if (this.#root === this) return true\n    // if (this.type) return !!this.#parent?.isStart()\n    if (!this.#parent?.isStart()) return false\n    if (this.#parentIndex === 0) return true\n    // if everything AHEAD of this is a negation, then it's still the \"start\"\n    const p = this.#parent\n    for (let i = 0; i < this.#parentIndex; i++) {\n      const pp = p.#parts[i]\n      if (!(pp instanceof AST && pp.type === '!')) {\n        return false\n      }\n    }\n    return true\n  }\n\n  isEnd(): boolean {\n    if (this.#root === this) return true\n    if (this.#parent?.type === '!') return true\n    if (!this.#parent?.isEnd()) return false\n    if (!this.type) return this.#parent?.isEnd()\n    // if not root, it'll always have a parent\n    /* c8 ignore start */\n    const pl = this.#parent ? 
this.#parent.#parts.length : 0\n    /* c8 ignore stop */\n    return this.#parentIndex === pl - 1\n  }\n\n  copyIn(part: AST | string) {\n    if (typeof part === 'string') this.push(part)\n    else this.push(part.clone(this))\n  }\n\n  clone(parent: AST) {\n    const c = new AST(this.type, parent)\n    for (const p of this.#parts) {\n      c.copyIn(p)\n    }\n    return c\n  }\n\n  static #parseAST(\n    str: string,\n    ast: AST,\n    pos: number,\n    opt: MinimatchOptions,\n  ): number {\n    let escaping = false\n    let inBrace = false\n    let braceStart = -1\n    let braceNeg = false\n    if (ast.type === null) {\n      // outside of a extglob, append until we find a start\n      let i = pos\n      let acc = ''\n      while (i < str.length) {\n        const c = str.charAt(i++)\n        // still accumulate escapes at this point, but we do ignore\n        // starts that are escaped\n        if (escaping || c === '\\\\') {\n          escaping = !escaping\n          acc += c\n          continue\n        }\n\n        if (inBrace) {\n          if (i === braceStart + 1) {\n            if (c === '^' || c === '!') {\n              braceNeg = true\n            }\n          } else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {\n            inBrace = false\n          }\n          acc += c\n          continue\n        } else if (c === '[') {\n          inBrace = true\n          braceStart = i\n          braceNeg = false\n          acc += c\n          continue\n        }\n\n        if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {\n          ast.push(acc)\n          acc = ''\n          const ext = new AST(c, ast)\n          i = AST.#parseAST(str, ext, i, opt)\n          ast.push(ext)\n          continue\n        }\n        acc += c\n      }\n      ast.push(acc)\n      return i\n    }\n\n    // some kind of extglob, pos is at the (\n    // find the next | or )\n    let i = pos + 1\n    let part = new AST(null, ast)\n    const parts: AST[] = []\n    
let acc = ''\n    while (i < str.length) {\n      const c = str.charAt(i++)\n      // still accumulate escapes at this point, but we do ignore\n      // starts that are escaped\n      if (escaping || c === '\\\\') {\n        escaping = !escaping\n        acc += c\n        continue\n      }\n\n      if (inBrace) {\n        if (i === braceStart + 1) {\n          if (c === '^' || c === '!') {\n            braceNeg = true\n          }\n        } else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {\n          inBrace = false\n        }\n        acc += c\n        continue\n      } else if (c === '[') {\n        inBrace = true\n        braceStart = i\n        braceNeg = false\n        acc += c\n        continue\n      }\n\n      if (isExtglobType(c) && str.charAt(i) === '(') {\n        part.push(acc)\n        acc = ''\n        const ext = new AST(c, part)\n        part.push(ext)\n        i = AST.#parseAST(str, ext, i, opt)\n        continue\n      }\n      if (c === '|') {\n        part.push(acc)\n        acc = ''\n        parts.push(part)\n        part = new AST(null, ast)\n        continue\n      }\n      if (c === ')') {\n        if (acc === '' && ast.#parts.length === 0) {\n          ast.#emptyExt = true\n        }\n        part.push(acc)\n        acc = ''\n        ast.push(...parts, part)\n        return i\n      }\n      acc += c\n    }\n\n    // unfinished extglob\n    // if we got here, it was a malformed extglob! 
not an extglob, but\n    // maybe something else in there.\n    ast.type = null\n    ast.#hasMagic = undefined\n    ast.#parts = [str.substring(pos - 1)]\n    return i\n  }\n\n  static fromGlob(pattern: string, options: MinimatchOptions = {}) {\n    const ast = new AST(null, undefined, options)\n    AST.#parseAST(pattern, ast, 0, options)\n    return ast\n  }\n\n  // returns the regular expression if there's magic, or the unescaped\n  // string if not.\n  toMMPattern(): MMRegExp | string {\n    // should only be called on root\n    /* c8 ignore start */\n    if (this !== this.#root) return this.#root.toMMPattern()\n    /* c8 ignore stop */\n    const glob = this.toString()\n    const [re, body, hasMagic, uflag] = this.toRegExpSource()\n    // if we're in nocase mode, and not nocaseMagicOnly, then we do\n    // still need a regular expression if we have to case-insensitively\n    // match capital/lowercase characters.\n    const anyMagic =\n      hasMagic ||\n      this.#hasMagic ||\n      (this.#options.nocase &&\n        !this.#options.nocaseMagicOnly &&\n        glob.toUpperCase() !== glob.toLowerCase())\n    if (!anyMagic) {\n      return body\n    }\n\n    const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '')\n    return Object.assign(new RegExp(`^${re}$`, flags), {\n      _src: re,\n      _glob: glob,\n    })\n  }\n\n  get options() {\n    return this.#options\n  }\n\n  // returns the string match, the regexp source, whether there's magic\n  // in the regexp (so a regular expression is required) and whether or\n  // not the uflag is needed for the regular expression (for posix classes)\n  // TODO: instead of injecting the start/end at this point, just return\n  // the BODY of the regexp, along with the start/end portions suitable\n  // for binding the start/end in either a joined full-path makeRe context\n  // (where we bind to (^|/), or a standalone matchPart context (where\n  // we bind to ^, and not /).  
Otherwise slashes get duped!\n  //\n  // In part-matching mode, the start is:\n  // - if not isStart: nothing\n  // - if traversal possible, but not allowed: ^(?!\\.\\.?$)\n  // - if dots allowed or not possible: ^\n  // - if dots possible and not allowed: ^(?!\\.)\n  // end is:\n  // - if not isEnd(): nothing\n  // - else: $\n  //\n  // In full-path matching mode, we put the slash at the START of the\n  // pattern, so start is:\n  // - if first pattern: same as part-matching mode\n  // - if not isStart(): nothing\n  // - if traversal possible, but not allowed: /(?!\\.\\.?(?:$|/))\n  // - if dots allowed or not possible: /\n  // - if dots possible and not allowed: /(?!\\.)\n  // end is:\n  // - if last pattern, same as part-matching mode\n  // - else nothing\n  //\n  // Always put the (?:$|/) on negated tails, though, because that has to be\n  // there to bind the end of the negated pattern portion, and it's easier to\n  // just stick it in now rather than try to inject it later in the middle of\n  // the pattern.\n  //\n  // We can just always return the same end, and leave it up to the caller\n  // to know whether it's going to be used joined or in parts.\n  // And, if the start is adjusted slightly, can do the same there:\n  // - if not isStart: nothing\n  // - if traversal possible, but not allowed: (?:/|^)(?!\\.\\.?$)\n  // - if dots allowed or not possible: (?:/|^)\n  // - if dots possible and not allowed: (?:/|^)(?!\\.)\n  //\n  // But it's better to have a simpler binding without a conditional, for\n  // performance, so probably better to return both start options.\n  //\n  // Then the caller just ignores the end if it's not the first pattern,\n  // and the start always gets applied.\n  //\n  // But that's always going to be $ if it's the ending pattern, or nothing,\n  // so the caller can just attach $ at the end of the pattern when building.\n  //\n  // So the todo is:\n  // - better detect what kind of start is needed\n  // - return both flavors of 
starting pattern\n  // - attach $ at the end of the pattern when creating the actual RegExp\n  //\n  // Ah, but wait, no, that all only applies to the root when the first pattern\n  // is not an extglob. If the first pattern IS an extglob, then we need all\n  // that dot prevention biz to live in the extglob portions, because eg\n  // +(*|.x*) can match .xy but not .yx.\n  //\n  // So, return the two flavors if it's #root and the first child is not an\n  // AST, otherwise leave it to the child AST to handle it, and there,\n  // use the (?:^|/) style of start binding.\n  //\n  // Even simplified further:\n  // - Since the start for a join is eg /(?!\\.) and the start for a part\n  // is ^(?!\\.), we can just prepend (?!\\.) to the pattern (either root\n  // or start or whatever) and prepend ^ or / at the Regexp construction.\n  toRegExpSource(\n    allowDot?: boolean,\n  ): [re: string, body: string, hasMagic: boolean, uflag: boolean] {\n    const dot = allowDot ?? !!this.#options.dot\n    if (this.#root === this) this.#fillNegs()\n    if (!this.type) {\n      const noEmpty =\n        this.isStart() &&\n        this.isEnd() &&\n        !this.#parts.some(s => typeof s !== 'string')\n      const src = this.#parts\n        .map(p => {\n          const [re, _, hasMagic, uflag] =\n            typeof p === 'string' ?\n              AST.#parseGlob(p, this.#hasMagic, noEmpty)\n            : p.toRegExpSource(allowDot)\n          this.#hasMagic = this.#hasMagic || hasMagic\n          this.#uflag = this.#uflag || uflag\n          return re\n        })\n        .join('')\n\n      let start = ''\n      if (this.isStart()) {\n        if (typeof this.#parts[0] === 'string') {\n          // this is the string that will match the start of the pattern,\n          // so we need to protect against dots and such.\n\n          // '.' and '..' cannot match unless the pattern is that exactly,\n          // even if it starts with . 
or dot:true is set.\n          const dotTravAllowed =\n            this.#parts.length === 1 && justDots.has(this.#parts[0])\n          if (!dotTravAllowed) {\n            const aps = addPatternStart\n            // check if we have a possibility of matching . or ..,\n            // and prevent that.\n            const needNoTrav =\n              // dots are allowed, and the pattern starts with [ or .\n              (dot && aps.has(src.charAt(0))) ||\n              // the pattern starts with \\., and then [ or .\n              (src.startsWith('\\\\.') && aps.has(src.charAt(2))) ||\n              // the pattern starts with \\.\\., and then [ or .\n              (src.startsWith('\\\\.\\\\.') && aps.has(src.charAt(4)))\n            // no need to prevent dots if it can't match a dot, or if a\n            // sub-pattern will be preventing it anyway.\n            const needNoDot = !dot && !allowDot && aps.has(src.charAt(0))\n\n            start =\n              needNoTrav ? startNoTraversal\n              : needNoDot ? startNoDot\n              : ''\n          }\n        }\n      }\n\n      // append the \"end of path portion\" pattern to negation tails\n      let end = ''\n      if (\n        this.isEnd() &&\n        this.#root.#filledNegs &&\n        this.#parent?.type === '!'\n      ) {\n        end = '(?:$|\\\\/)'\n      }\n      const final = start + src + end\n      return [\n        final,\n        unescape(src),\n        (this.#hasMagic = !!this.#hasMagic),\n        this.#uflag,\n      ]\n    }\n\n    // We need to calculate the body *twice* if it's a repeat pattern\n    // at the start, once in nodot mode, then again in dot mode, so a\n    // pattern like *(?) can match 'x.y'\n\n    const repeated = this.type === '*' || this.type === '+'\n    // some kind of extglob\n    const start = this.type === '!' ? 
'(?:(?!(?:' : '(?:'\n    let body = this.#partsToRegExp(dot)\n\n    if (this.isStart() && this.isEnd() && !body && this.type !== '!') {\n      // invalid extglob, has to at least be *something* present, if it's\n      // the entire path portion.\n      const s = this.toString()\n      this.#parts = [s]\n      this.type = null\n      this.#hasMagic = undefined\n      return [s, unescape(this.toString()), false, false]\n    }\n\n    // XXX abstract out this map method\n    let bodyDotAllowed =\n      !repeated || allowDot || dot || !startNoDot ?\n        ''\n      : this.#partsToRegExp(true)\n    if (bodyDotAllowed === body) {\n      bodyDotAllowed = ''\n    }\n    if (bodyDotAllowed) {\n      body = `(?:${body})(?:${bodyDotAllowed})*?`\n    }\n\n    // an empty !() is exactly equivalent to a starNoEmpty\n    let final = ''\n    if (this.type === '!' && this.#emptyExt) {\n      final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty\n    } else {\n      const close =\n        this.type === '!' ?\n          // !() must match something,but !(x) can match ''\n          '))' +\n          (this.isStart() && !dot && !allowDot ? startNoDot : '') +\n          star +\n          ')'\n        : this.type === '@' ? ')'\n        : this.type === '?' ? ')?'\n        : this.type === '+' && bodyDotAllowed ? ')'\n        : this.type === '*' && bodyDotAllowed ? 
`)?`\n        : `)${this.type}`\n      final = start + body + close\n    }\n    return [\n      final,\n      unescape(body),\n      (this.#hasMagic = !!this.#hasMagic),\n      this.#uflag,\n    ]\n  }\n\n  #partsToRegExp(dot: boolean) {\n    return this.#parts\n      .map(p => {\n        // extglob ASTs should only contain parent ASTs\n        /* c8 ignore start */\n        if (typeof p === 'string') {\n          throw new Error('string type in extglob ast??')\n        }\n        /* c8 ignore stop */\n        // can ignore hasMagic, because extglobs are already always magic\n        const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot)\n        this.#uflag = this.#uflag || uflag\n        return re\n      })\n      .filter(p => !(this.isStart() && this.isEnd()) || !!p)\n      .join('|')\n  }\n\n  static #parseGlob(\n    glob: string,\n    hasMagic: boolean | undefined,\n    noEmpty: boolean = false,\n  ): [re: string, body: string, hasMagic: boolean, uflag: boolean] {\n    let escaping = false\n    let re = ''\n    let uflag = false\n    // multiple stars that aren't globstars coalesce into one *\n    let inStar = false\n    for (let i = 0; i < glob.length; i++) {\n      const c = glob.charAt(i)\n      if (escaping) {\n        escaping = false\n        re += (reSpecials.has(c) ? '\\\\' : '') + c\n        continue\n      }\n      if (c === '*') {\n        if (inStar) continue\n        inStar = true\n        re += noEmpty && /^[*]+$/.test(glob) ? 
starNoEmpty : star\n        hasMagic = true\n        continue\n      } else {\n        inStar = false\n      }\n      if (c === '\\\\') {\n        if (i === glob.length - 1) {\n          re += '\\\\\\\\'\n        } else {\n          escaping = true\n        }\n        continue\n      }\n      if (c === '[') {\n        const [src, needUflag, consumed, magic] = parseClass(glob, i)\n        if (consumed) {\n          re += src\n          uflag = uflag || needUflag\n          i += consumed - 1\n          hasMagic = hasMagic || magic\n          continue\n        }\n      }\n      if (c === '?') {\n        re += qmark\n        hasMagic = true\n        continue\n      }\n      re += regExpEscape(c)\n    }\n    return [re, unescape(glob), !!hasMagic, uflag]\n  }\n}\n", "import { MinimatchOptions } from './index.js'\n\n/**\n * Escape all magic characters in a glob pattern.\n *\n * If the {@link MinimatchOptions.windowsPathsNoEscape}\n * option is used, then characters are escaped by wrapping in `[]`, because\n * a magic character wrapped in a character class can only be satisfied by\n * that exact character.  In this mode, `\\` is _not_ escaped, because it is\n * not interpreted as a magic character, but instead as a path separator.\n *\n * If the {@link MinimatchOptions.magicalBraces} option is used,\n * then braces (`{` and `}`) will be escaped.\n */\nexport const escape = (\n  s: string,\n  {\n    windowsPathsNoEscape = false,\n    magicalBraces = false,\n  }: Pick = {},\n) => {\n  // don't need to escape +@! because we escape the parens\n  // that make those magic, and escaping ! as [!] 
isn't valid,\n  // because [!]] is a valid glob class meaning not ']'.\n  if (magicalBraces) {\n    return windowsPathsNoEscape ?\n        s.replace(/[?*()[\\]{}]/g, '[$&]')\n      : s.replace(/[?*()[\\]\\\\{}]/g, '\\\\$&')\n  }\n  return windowsPathsNoEscape ?\n      s.replace(/[?*()[\\]]/g, '[$&]')\n    : s.replace(/[?*()[\\]\\\\]/g, '\\\\$&')\n}\n", "import { expand } from 'brace-expansion'\nimport { assertValidPattern } from './assert-valid-pattern.js'\nimport { AST, ExtglobType } from './ast.js'\nimport { escape } from './escape.js'\nimport { unescape } from './unescape.js'\n\nexport type Platform =\n  | 'aix'\n  | 'android'\n  | 'darwin'\n  | 'freebsd'\n  | 'haiku'\n  | 'linux'\n  | 'openbsd'\n  | 'sunos'\n  | 'win32'\n  | 'cygwin'\n  | 'netbsd'\n\nexport interface MinimatchOptions {\n  /** do not expand `{x,y}` style braces */\n  nobrace?: boolean\n  /** do not treat patterns starting with `#` as a comment */\n  nocomment?: boolean\n  /** do not treat patterns starting with `!` as a negation */\n  nonegate?: boolean\n  /** print LOTS of debugging output */\n  debug?: boolean\n  /** treat `**` the same as `*` */\n  noglobstar?: boolean\n  /** do not expand extglobs like `+(a|b)` */\n  noext?: boolean\n  /** return the pattern if nothing matches */\n  nonull?: boolean\n  /** treat `\\\\` as a path separator, not an escape character */\n  windowsPathsNoEscape?: boolean\n  /**\n   * inverse of {@link MinimatchOptions.windowsPathsNoEscape}\n   * @deprecated\n   */\n  allowWindowsEscape?: boolean\n  /**\n   * Compare a partial path to a pattern. As long as the parts\n   * of the path that are present are not contradicted by the\n   * pattern, it will be treated as a match. 
This is useful in\n   * applications where you're walking through a folder structure,\n   * and don't yet have the full path, but want to ensure that you\n   * do not walk down paths that can never be a match.\n   */\n  partial?: boolean\n  /** allow matches that start with `.` even if the pattern does not */\n  dot?: boolean\n  /** ignore case */\n  nocase?: boolean\n  /** ignore case only in wildcard patterns */\n  nocaseMagicOnly?: boolean\n  /** consider braces to be \"magic\" for the purpose of `hasMagic` */\n  magicalBraces?: boolean\n  /**\n   * If set, then patterns without slashes will be matched\n   * against the basename of the path if it contains slashes.\n   * For example, `a?b` would match the path `/xyz/123/acb`, but\n   * not `/xyz/acb/123`.\n   */\n  matchBase?: boolean\n  /** invert the results of negated matches */\n  flipNegate?: boolean\n  /** do not collapse multiple `/` into a single `/` */\n  preserveMultipleSlashes?: boolean\n  /**\n   * A number indicating the level of optimization that should be done\n   * to the pattern prior to parsing and using it for matches.\n   */\n  optimizationLevel?: number\n  /** operating system platform */\n  platform?: Platform\n  /**\n   * When a pattern starts with a UNC path or drive letter, and in\n   * `nocase:true` mode, do not convert the root portions of the\n   * pattern into a case-insensitive regular expression, and instead\n   * leave them as strings.\n   *\n   * This is the default when the platform is `win32` and\n   * `nocase:true` is set.\n   */\n  windowsNoMagicRoot?: boolean\n  /**\n   * max number of `{...}` patterns to expand. 
Default 100_000.\n   */\n  braceExpandMax?: number\n}\n\nexport const minimatch = (\n  p: string,\n  pattern: string,\n  options: MinimatchOptions = {},\n) => {\n  assertValidPattern(pattern)\n\n  // shortcut: comments match nothing.\n  if (!options.nocomment && pattern.charAt(0) === '#') {\n    return false\n  }\n\n  return new Minimatch(pattern, options).match(p)\n}\n\n// Optimized checking for the most common glob patterns.\nconst starDotExtRE = /^\\*+([^+@!?\\*\\[\\(]*)$/\nconst starDotExtTest = (ext: string) => (f: string) =>\n  !f.startsWith('.') && f.endsWith(ext)\nconst starDotExtTestDot = (ext: string) => (f: string) => f.endsWith(ext)\nconst starDotExtTestNocase = (ext: string) => {\n  ext = ext.toLowerCase()\n  return (f: string) => !f.startsWith('.') && f.toLowerCase().endsWith(ext)\n}\nconst starDotExtTestNocaseDot = (ext: string) => {\n  ext = ext.toLowerCase()\n  return (f: string) => f.toLowerCase().endsWith(ext)\n}\nconst starDotStarRE = /^\\*+\\.\\*+$/\nconst starDotStarTest = (f: string) =>\n  !f.startsWith('.') && f.includes('.')\nconst starDotStarTestDot = (f: string) =>\n  f !== '.' && f !== '..' && f.includes('.')\nconst dotStarRE = /^\\.\\*+$/\nconst dotStarTest = (f: string) =>\n  f !== '.' && f !== '..' && f.startsWith('.')\nconst starRE = /^\\*+$/\nconst starTest = (f: string) => f.length !== 0 && !f.startsWith('.')\nconst starTestDot = (f: string) =>\n  f.length !== 0 && f !== '.' 
&& f !== '..'\nconst qmarksRE = /^\\?+([^+@!?\\*\\[\\(]*)?$/\nconst qmarksTestNocase = ([$0, ext = '']: RegExpMatchArray) => {\n  const noext = qmarksTestNoExt([$0])\n  if (!ext) return noext\n  ext = ext.toLowerCase()\n  return (f: string) => noext(f) && f.toLowerCase().endsWith(ext)\n}\nconst qmarksTestNocaseDot = ([$0, ext = '']: RegExpMatchArray) => {\n  const noext = qmarksTestNoExtDot([$0])\n  if (!ext) return noext\n  ext = ext.toLowerCase()\n  return (f: string) => noext(f) && f.toLowerCase().endsWith(ext)\n}\nconst qmarksTestDot = ([$0, ext = '']: RegExpMatchArray) => {\n  const noext = qmarksTestNoExtDot([$0])\n  return !ext ? noext : (f: string) => noext(f) && f.endsWith(ext)\n}\nconst qmarksTest = ([$0, ext = '']: RegExpMatchArray) => {\n  const noext = qmarksTestNoExt([$0])\n  return !ext ? noext : (f: string) => noext(f) && f.endsWith(ext)\n}\nconst qmarksTestNoExt = ([$0]: RegExpMatchArray) => {\n  const len = $0.length\n  return (f: string) => f.length === len && !f.startsWith('.')\n}\nconst qmarksTestNoExtDot = ([$0]: RegExpMatchArray) => {\n  const len = $0.length\n  return (f: string) => f.length === len && f !== '.' && f !== '..'\n}\n\n/* c8 ignore start */\nconst defaultPlatform: Platform = (\n  typeof process === 'object' && process ?\n    (typeof process.env === 'object' &&\n      process.env &&\n      process.env.__MINIMATCH_TESTING_PLATFORM__) ||\n    process.platform\n  : 'posix') as Platform\n\nexport type Sep = '\\\\' | '/'\n\nconst path: { [k: string]: { sep: Sep } } = {\n  win32: { sep: '\\\\' },\n  posix: { sep: '/' },\n}\n/* c8 ignore stop */\n\nexport const sep =\n  defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep\nminimatch.sep = sep\n\nexport const GLOBSTAR = Symbol('globstar **')\nminimatch.GLOBSTAR = GLOBSTAR\n\n// any single thing other than /\n// don't need to escape / when using new RegExp()\nconst qmark = '[^/]'\n\n// * => any number of characters\nconst star = qmark + '*?'\n\n// ** when dots are allowed.  
Anything goes, except .. and .\n// not (^ or / followed by one or two dots followed by $ or /),\n// followed by anything, any number of times.\nconst twoStarDot = '(?:(?!(?:\\\\/|^)(?:\\\\.{1,2})($|\\\\/)).)*?'\n\n// not a ^ or / followed by a dot,\n// followed by anything, any number of times.\nconst twoStarNoDot = '(?:(?!(?:\\\\/|^)\\\\.).)*?'\n\nexport const filter =\n  (pattern: string, options: MinimatchOptions = {}) =>\n  (p: string) =>\n    minimatch(p, pattern, options)\nminimatch.filter = filter\n\nconst ext = (a: MinimatchOptions, b: MinimatchOptions = {}) =>\n  Object.assign({}, a, b)\n\nexport const defaults = (def: MinimatchOptions): typeof minimatch => {\n  if (!def || typeof def !== 'object' || !Object.keys(def).length) {\n    return minimatch\n  }\n\n  const orig = minimatch\n\n  const m = (p: string, pattern: string, options: MinimatchOptions = {}) =>\n    orig(p, pattern, ext(def, options))\n\n  return Object.assign(m, {\n    Minimatch: class Minimatch extends orig.Minimatch {\n      constructor(pattern: string, options: MinimatchOptions = {}) {\n        super(pattern, ext(def, options))\n      }\n      static defaults(options: MinimatchOptions) {\n        return orig.defaults(ext(def, options)).Minimatch\n      }\n    },\n\n    AST: class AST extends orig.AST {\n      /* c8 ignore start */\n      constructor(\n        type: ExtglobType | null,\n        parent?: AST,\n        options: MinimatchOptions = {},\n      ) {\n        super(type, parent, ext(def, options))\n      }\n      /* c8 ignore stop */\n\n      static fromGlob(pattern: string, options: MinimatchOptions = {}) {\n        return orig.AST.fromGlob(pattern, ext(def, options))\n      }\n    },\n\n    unescape: (\n      s: string,\n      options: Pick<\n        MinimatchOptions,\n        'windowsPathsNoEscape' | 'magicalBraces'\n      > = {},\n    ) => orig.unescape(s, ext(def, options)),\n\n    escape: (\n      s: string,\n      options: Pick<\n        MinimatchOptions,\n        
'windowsPathsNoEscape' | 'magicalBraces'\n      > = {},\n    ) => orig.escape(s, ext(def, options)),\n\n    filter: (pattern: string, options: MinimatchOptions = {}) =>\n      orig.filter(pattern, ext(def, options)),\n\n    defaults: (options: MinimatchOptions) =>\n      orig.defaults(ext(def, options)),\n\n    makeRe: (pattern: string, options: MinimatchOptions = {}) =>\n      orig.makeRe(pattern, ext(def, options)),\n\n    braceExpand: (pattern: string, options: MinimatchOptions = {}) =>\n      orig.braceExpand(pattern, ext(def, options)),\n\n    match: (\n      list: string[],\n      pattern: string,\n      options: MinimatchOptions = {},\n    ) => orig.match(list, pattern, ext(def, options)),\n\n    sep: orig.sep,\n    GLOBSTAR: GLOBSTAR as typeof GLOBSTAR,\n  })\n}\nminimatch.defaults = defaults\n\n// Brace expansion:\n// a{b,c}d -> abd acd\n// a{b,}c -> abc ac\n// a{0..3}d -> a0d a1d a2d a3d\n// a{b,c{d,e}f}g -> abg acdfg acefg\n// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg\n//\n// Invalid sets are not expanded.\n// a{2..}b -> a{2..}b\n// a{b}c -> a{b}c\nexport const braceExpand = (\n  pattern: string,\n  options: MinimatchOptions = {},\n) => {\n  assertValidPattern(pattern)\n\n  // Thanks to Yeting Li  for\n  // improving this regexp to avoid a ReDOS vulnerability.\n  if (options.nobrace || !/\\{(?:(?!\\{).)*\\}/.test(pattern)) {\n    // shortcut. 
no need to expand.\n    return [pattern]\n  }\n\n  return expand(pattern, { max: options.braceExpandMax })\n}\nminimatch.braceExpand = braceExpand\n\n// parse a component of the expanded set.\n// At this point, no pattern may contain \"/\" in it\n// so we're going to return a 2d array, where each entry is the full\n// pattern, split on '/', and then turned into a regular expression.\n// A regexp is made at the end which joins each array with an\n// escaped /, and another full one which joins each regexp with |.\n//\n// Following the lead of Bash 4.1, note that \"**\" only has special meaning\n// when it is the *only* thing in a path portion.  Otherwise, any series\n// of * is equivalent to a single *.  Globstar behavior is enabled by\n// default, and can be disabled by setting options.noglobstar.\n\nexport const makeRe = (pattern: string, options: MinimatchOptions = {}) =>\n  new Minimatch(pattern, options).makeRe()\nminimatch.makeRe = makeRe\n\nexport const match = (\n  list: string[],\n  pattern: string,\n  options: MinimatchOptions = {},\n) => {\n  const mm = new Minimatch(pattern, options)\n  list = list.filter(f => mm.match(f))\n  if (mm.options.nonull && !list.length) {\n    list.push(pattern)\n  }\n  return list\n}\nminimatch.match = match\n\n// replace stuff like \\* with *\nconst globMagic = /[?*]|[+@!]\\(.*?\\)|\\[|\\]/\nconst regExpEscape = (s: string) =>\n  s.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g, '\\\\$&')\n\nexport type MMRegExp = RegExp & {\n  _src?: string\n  _glob?: string\n}\n\nexport type ParseReturnFiltered = string | MMRegExp | typeof GLOBSTAR\nexport type ParseReturn = ParseReturnFiltered | false\n\nexport class Minimatch {\n  options: MinimatchOptions\n  set: ParseReturnFiltered[][]\n  pattern: string\n\n  windowsPathsNoEscape: boolean\n  nonegate: boolean\n  negate: boolean\n  comment: boolean\n  empty: boolean\n  preserveMultipleSlashes: boolean\n  partial: boolean\n  globSet: string[]\n  globParts: string[][]\n  nocase: boolean\n\n  
isWindows: boolean\n  platform: Platform\n  windowsNoMagicRoot: boolean\n\n  regexp: false | null | MMRegExp\n  constructor(pattern: string, options: MinimatchOptions = {}) {\n    assertValidPattern(pattern)\n\n    options = options || {}\n    this.options = options\n    this.pattern = pattern\n    this.platform = options.platform || defaultPlatform\n    this.isWindows = this.platform === 'win32'\n    // avoid the annoying deprecation flag lol\n    const awe = ('allowWindow' + 'sEscape') as keyof MinimatchOptions\n    this.windowsPathsNoEscape =\n      !!options.windowsPathsNoEscape || options[awe] === false\n    if (this.windowsPathsNoEscape) {\n      this.pattern = this.pattern.replace(/\\\\/g, '/')\n    }\n    this.preserveMultipleSlashes = !!options.preserveMultipleSlashes\n    this.regexp = null\n    this.negate = false\n    this.nonegate = !!options.nonegate\n    this.comment = false\n    this.empty = false\n    this.partial = !!options.partial\n    this.nocase = !!this.options.nocase\n    this.windowsNoMagicRoot =\n      options.windowsNoMagicRoot !== undefined ?\n        options.windowsNoMagicRoot\n      : !!(this.isWindows && this.nocase)\n\n    this.globSet = []\n    this.globParts = []\n    this.set = []\n\n    // make the set of regexps etc.\n    this.make()\n  }\n\n  hasMagic(): boolean {\n    if (this.options.magicalBraces && this.set.length > 1) {\n      return true\n    }\n    for (const pattern of this.set) {\n      for (const part of pattern) {\n        if (typeof part !== 'string') return true\n      }\n    }\n    return false\n  }\n\n  debug(..._: any[]) {}\n\n  make() {\n    const pattern = this.pattern\n    const options = this.options\n\n    // empty patterns and comments match nothing.\n    if (!options.nocomment && pattern.charAt(0) === '#') {\n      this.comment = true\n      return\n    }\n\n    if (!pattern) {\n      this.empty = true\n      return\n    }\n\n    // step 1: figure out negation, etc.\n    this.parseNegate()\n\n    // step 
2: expand braces\n    this.globSet = [...new Set(this.braceExpand())]\n\n    if (options.debug) {\n      this.debug = (...args: any[]) => console.error(...args)\n    }\n\n    this.debug(this.pattern, this.globSet)\n\n    // step 3: now we have a set, so turn each one into a series of\n    // path-portion matching patterns.\n    // These will be regexps, except in the case of \"**\", which is\n    // set to the GLOBSTAR object for globstar behavior,\n    // and will not contain any / characters\n    //\n    // First, we preprocess to make the glob pattern sets a bit simpler\n    // and deduped.  There are some perf-killing patterns that can cause\n    // problems with a glob walk, but we can simplify them down a bit.\n    const rawGlobParts = this.globSet.map(s => this.slashSplit(s))\n    this.globParts = this.preprocess(rawGlobParts)\n    this.debug(this.pattern, this.globParts)\n\n    // glob --> regexps\n    let set = this.globParts.map((s, _, __) => {\n      if (this.isWindows && this.windowsNoMagicRoot) {\n        // check if it's a drive or unc path.\n        const isUNC =\n          s[0] === '' &&\n          s[1] === '' &&\n          (s[2] === '?' || !globMagic.test(s[2])) &&\n          !globMagic.test(s[3])\n        const isDrive = /^[a-z]:/i.test(s[0])\n        if (isUNC) {\n          return [\n            ...s.slice(0, 4),\n            ...s.slice(4).map(ss => this.parse(ss)),\n          ]\n        } else if (isDrive) {\n          return [s[0], ...s.slice(1).map(ss => this.parse(ss))]\n        }\n      }\n      return s.map(ss => this.parse(ss))\n    })\n\n    this.debug(this.pattern, set)\n\n    // filter out everything that didn't compile properly.\n    this.set = set.filter(\n      s => s.indexOf(false) === -1,\n    ) as ParseReturnFiltered[][]\n\n    // do not treat the ? 
in UNC paths as magic\n    if (this.isWindows) {\n      for (let i = 0; i < this.set.length; i++) {\n        const p = this.set[i]\n        if (\n          p[0] === '' &&\n          p[1] === '' &&\n          this.globParts[i][2] === '?' &&\n          typeof p[3] === 'string' &&\n          /^[a-z]:$/i.test(p[3])\n        ) {\n          p[2] = '?'\n        }\n      }\n    }\n\n    this.debug(this.pattern, this.set)\n  }\n\n  // various transforms to equivalent pattern sets that are\n  // faster to process in a filesystem walk.  The goal is to\n  // eliminate what we can, and push all ** patterns as far\n  // to the right as possible, even if it increases the number\n  // of patterns that we have to process.\n  preprocess(globParts: string[][]) {\n    // if we're not in globstar mode, then turn ** into *\n    if (this.options.noglobstar) {\n      for (let i = 0; i < globParts.length; i++) {\n        for (let j = 0; j < globParts[i].length; j++) {\n          if (globParts[i][j] === '**') {\n            globParts[i][j] = '*'\n          }\n        }\n      }\n    }\n\n    const { optimizationLevel = 1 } = this.options\n\n    if (optimizationLevel >= 2) {\n      // aggressive optimization for the purpose of fs walking\n      globParts = this.firstPhasePreProcess(globParts)\n      globParts = this.secondPhasePreProcess(globParts)\n    } else if (optimizationLevel >= 1) {\n      // just basic optimizations to remove some .. 
parts\n      globParts = this.levelOneOptimize(globParts)\n    } else {\n      // just collapse multiple ** portions into one\n      globParts = this.adjascentGlobstarOptimize(globParts)\n    }\n\n    return globParts\n  }\n\n  // just get rid of adjascent ** portions\n  adjascentGlobstarOptimize(globParts: string[][]) {\n    return globParts.map(parts => {\n      let gs: number = -1\n      while (-1 !== (gs = parts.indexOf('**', gs + 1))) {\n        let i = gs\n        while (parts[i + 1] === '**') {\n          i++\n        }\n        if (i !== gs) {\n          parts.splice(gs, i - gs)\n        }\n      }\n      return parts\n    })\n  }\n\n  // get rid of adjascent ** and resolve .. portions\n  levelOneOptimize(globParts: string[][]) {\n    return globParts.map(parts => {\n      parts = parts.reduce((set: string[], part) => {\n        const prev = set[set.length - 1]\n        if (part === '**' && prev === '**') {\n          return set\n        }\n        if (part === '..') {\n          if (prev && prev !== '..' && prev !== '.' && prev !== '**') {\n            set.pop()\n            return set\n          }\n        }\n        set.push(part)\n        return set\n      }, [])\n      return parts.length === 0 ? [''] : parts\n    })\n  }\n\n  levelTwoFileOptimize(parts: string | string[]) {\n    if (!Array.isArray(parts)) {\n      parts = this.slashSplit(parts)\n    }\n    let didSomething: boolean = false\n    do {\n      didSomething = false\n      // 
// -> 
/\n      if (!this.preserveMultipleSlashes) {\n        for (let i = 1; i < parts.length - 1; i++) {\n          const p = parts[i]\n          // don't squeeze out UNC patterns\n          if (i === 1 && p === '' && parts[0] === '') continue\n          if (p === '.' || p === '') {\n            didSomething = true\n            parts.splice(i, 1)\n            i--\n          }\n        }\n        if (\n          parts[0] === '.' &&\n          parts.length === 2 &&\n          (parts[1] === '.' || parts[1] === '')\n        ) {\n          didSomething = true\n          parts.pop()\n        }\n      }\n\n      // 
/

/../ ->

/\n      let dd: number = 0\n      while (-1 !== (dd = parts.indexOf('..', dd + 1))) {\n        const p = parts[dd - 1]\n        if (p && p !== '.' && p !== '..' && p !== '**') {\n          didSomething = true\n          parts.splice(dd - 1, 2)\n          dd -= 2\n        }\n      }\n    } while (didSomething)\n    return parts.length === 0 ? [''] : parts\n  }\n\n  // First phase: single-pattern processing\n  // 
 is 1 or more portions\n  //  is 1 or more portions\n  // 

is any portion other than ., .., '', or **\n // is . or ''\n //\n // **/.. is *brutal* for filesystem walking performance, because\n // it effectively resets the recursive walk each time it occurs,\n // and ** cannot be reduced out by a .. pattern part like a regexp\n // or most strings (other than .., ., and '') can be.\n //\n //

/**/../

/

/ -> {

/../

/

/,

/**/

/

/}\n //

// -> 
/\n  // 
/

/../ ->

/\n  // **/**/ -> **/\n  //\n  // **/*/ -> */**/ <== not valid because ** doesn't follow\n  // this WOULD be allowed if ** did follow symlinks, or * didn't\n  firstPhasePreProcess(globParts: string[][]) {\n    let didSomething = false\n    do {\n      didSomething = false\n      // 
/**/../

/

/ -> {

/../

/

/,

/**/

/

/}\n for (let parts of globParts) {\n let gs: number = -1\n while (-1 !== (gs = parts.indexOf('**', gs + 1))) {\n let gss: number = gs\n while (parts[gss + 1] === '**') {\n //

/**/**/ -> 
/**/\n            gss++\n          }\n          // eg, if gs is 2 and gss is 4, that means we have 3 **\n          // parts, and can remove 2 of them.\n          if (gss > gs) {\n            parts.splice(gs + 1, gss - gs)\n          }\n\n          let next = parts[gs + 1]\n          const p = parts[gs + 2]\n          const p2 = parts[gs + 3]\n          if (next !== '..') continue\n          if (\n            !p ||\n            p === '.' ||\n            p === '..' ||\n            !p2 ||\n            p2 === '.' ||\n            p2 === '..'\n          ) {\n            continue\n          }\n          didSomething = true\n          // edit parts in place, and push the new one\n          parts.splice(gs, 1)\n          const other = parts.slice(0)\n          other[gs] = '**'\n          globParts.push(other)\n          gs--\n        }\n\n        // 
// -> 
/\n        if (!this.preserveMultipleSlashes) {\n          for (let i = 1; i < parts.length - 1; i++) {\n            const p = parts[i]\n            // don't squeeze out UNC patterns\n            if (i === 1 && p === '' && parts[0] === '') continue\n            if (p === '.' || p === '') {\n              didSomething = true\n              parts.splice(i, 1)\n              i--\n            }\n          }\n          if (\n            parts[0] === '.' &&\n            parts.length === 2 &&\n            (parts[1] === '.' || parts[1] === '')\n          ) {\n            didSomething = true\n            parts.pop()\n          }\n        }\n\n        // 
/

/../ ->

/\n        let dd: number = 0\n        while (-1 !== (dd = parts.indexOf('..', dd + 1))) {\n          const p = parts[dd - 1]\n          if (p && p !== '.' && p !== '..' && p !== '**') {\n            didSomething = true\n            const needDot = dd === 1 && parts[dd + 1] === '**'\n            const splin = needDot ? ['.'] : []\n            parts.splice(dd - 1, 2, ...splin)\n            if (parts.length === 0) parts.push('')\n            dd -= 2\n          }\n        }\n      }\n    } while (didSomething)\n\n    return globParts\n  }\n\n  // second phase: multi-pattern dedupes\n  // {
/*/,
/

/} ->

/*/\n  // {
/,
/} -> 
/\n  // {
/**/,
/} -> 
/**/\n  //\n  // {
/**/,
/**/

/} ->

/**/\n  // ^-- not valid because ** doens't follow symlinks\n  secondPhasePreProcess(globParts: string[][]): string[][] {\n    for (let i = 0; i < globParts.length - 1; i++) {\n      for (let j = i + 1; j < globParts.length; j++) {\n        const matched = this.partsMatch(\n          globParts[i],\n          globParts[j],\n          !this.preserveMultipleSlashes,\n        )\n        if (matched) {\n          globParts[i] = []\n          globParts[j] = matched\n          break\n        }\n      }\n    }\n    return globParts.filter(gs => gs.length)\n  }\n\n  partsMatch(\n    a: string[],\n    b: string[],\n    emptyGSMatch: boolean = false,\n  ): false | string[] {\n    let ai = 0\n    let bi = 0\n    let result: string[] = []\n    let which: string = ''\n    while (ai < a.length && bi < b.length) {\n      if (a[ai] === b[bi]) {\n        result.push(which === 'b' ? b[bi] : a[ai])\n        ai++\n        bi++\n      } else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {\n        result.push(a[ai])\n        ai++\n      } else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {\n        result.push(b[bi])\n        bi++\n      } else if (\n        a[ai] === '*' &&\n        b[bi] &&\n        (this.options.dot || !b[bi].startsWith('.')) &&\n        b[bi] !== '**'\n      ) {\n        if (which === 'b') return false\n        which = 'a'\n        result.push(a[ai])\n        ai++\n        bi++\n      } else if (\n        b[bi] === '*' &&\n        a[ai] &&\n        (this.options.dot || !a[ai].startsWith('.')) &&\n        a[ai] !== '**'\n      ) {\n        if (which === 'a') return false\n        which = 'b'\n        result.push(b[bi])\n        ai++\n        bi++\n      } else {\n        return false\n      }\n    }\n    // if we fall out of the loop, it means they two are identical\n    // as long as their lengths match\n    return a.length === b.length && result\n  }\n\n  parseNegate() {\n    if (this.nonegate) return\n\n    const pattern = this.pattern\n 
   let negate = false\n    let negateOffset = 0\n\n    for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {\n      negate = !negate\n      negateOffset++\n    }\n\n    if (negateOffset) this.pattern = pattern.slice(negateOffset)\n    this.negate = negate\n  }\n\n  // set partial to true to test if, for example,\n  // \"/a/b\" matches the start of \"/*/b/*/d\"\n  // Partial means, if you run out of file before you run\n  // out of pattern, then that's fine, as long as all\n  // the parts match.\n  matchOne(\n    file: string[],\n    pattern: ParseReturn[],\n    partial: boolean = false,\n  ) {\n    const options = this.options\n\n    // UNC paths like //?/X:/... can match X:/... and vice versa\n    // Drive letters in absolute drive or unc paths are always compared\n    // case-insensitively.\n    if (this.isWindows) {\n      const fileDrive =\n        typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0])\n      const fileUNC =\n        !fileDrive &&\n        file[0] === '' &&\n        file[1] === '' &&\n        file[2] === '?' &&\n        /^[a-z]:$/i.test(file[3])\n\n      const patternDrive =\n        typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0])\n      const patternUNC =\n        !patternDrive &&\n        pattern[0] === '' &&\n        pattern[1] === '' &&\n        pattern[2] === '?' &&\n        typeof pattern[3] === 'string' &&\n        /^[a-z]:$/i.test(pattern[3])\n\n      const fdi =\n        fileUNC ? 3\n        : fileDrive ? 0\n        : undefined\n      const pdi =\n        patternUNC ? 3\n        : patternDrive ? 
0\n        : undefined\n      if (typeof fdi === 'number' && typeof pdi === 'number') {\n        const [fd, pd]: [string, string] = [\n          file[fdi],\n          pattern[pdi] as string,\n        ]\n        if (fd.toLowerCase() === pd.toLowerCase()) {\n          pattern[pdi] = fd\n          if (pdi > fdi) {\n            pattern = pattern.slice(pdi)\n          } else if (fdi > pdi) {\n            file = file.slice(fdi)\n          }\n        }\n      }\n    }\n\n    // resolve and reduce . and .. portions in the file as well.\n    // don't need to do the second phase, because it's only one string[]\n    const { optimizationLevel = 1 } = this.options\n    if (optimizationLevel >= 2) {\n      file = this.levelTwoFileOptimize(file)\n    }\n\n    this.debug('matchOne', this, { file, pattern })\n    this.debug('matchOne', file.length, pattern.length)\n\n    for (\n      var fi = 0, pi = 0, fl = file.length, pl = pattern.length;\n      fi < fl && pi < pl;\n      fi++, pi++\n    ) {\n      this.debug('matchOne loop')\n      var p = pattern[pi]\n      var f = file[fi]\n\n      this.debug(pattern, p, f)\n\n      // should be impossible.\n      // some invalid regexp stuff in the set.\n      /* c8 ignore start */\n      if (p === false) {\n        return false\n      }\n      /* c8 ignore stop */\n\n      if (p === GLOBSTAR) {\n        this.debug('GLOBSTAR', [pattern, p, f])\n\n        // \"**\"\n        // a/**/b/**/c would match the following:\n        // a/b/x/y/z/c\n        // a/x/y/z/b/c\n        // a/b/x/b/x/c\n        // a/b/c\n        // To do this, take the rest of the pattern after\n        // the **, and see if it would match the file remainder.\n        // If so, return success.\n        // If not, the ** \"swallows\" a segment, and try again.\n        // This is recursively awful.\n        //\n        // a/**/b/**/c matching a/b/x/y/z/c\n        // - a matches a\n        // - doublestar\n        //   - matchOne(b/x/y/z/c, b/**/c)\n        //     - b matches 
b\n        //     - doublestar\n        //       - matchOne(x/y/z/c, c) -> no\n        //       - matchOne(y/z/c, c) -> no\n        //       - matchOne(z/c, c) -> no\n        //       - matchOne(c, c) yes, hit\n        var fr = fi\n        var pr = pi + 1\n        if (pr === pl) {\n          this.debug('** at the end')\n          // a ** at the end will just swallow the rest.\n          // We have found a match.\n          // however, it will not swallow /.x, unless\n          // options.dot is set.\n          // . and .. are *never* matched by **, for explosively\n          // exponential reasons.\n          for (; fi < fl; fi++) {\n            if (\n              file[fi] === '.' ||\n              file[fi] === '..' ||\n              (!options.dot && file[fi].charAt(0) === '.')\n            )\n              return false\n          }\n          return true\n        }\n\n        // ok, let's see if we can swallow whatever we can.\n        while (fr < fl) {\n          var swallowee = file[fr]\n\n          this.debug('\\nglobstar while', file, fr, pattern, pr, swallowee)\n\n          // XXX remove this slice.  Just pass the start index.\n          if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {\n            this.debug('globstar found match!', fr, fl, swallowee)\n            // found a match.\n            return true\n          } else {\n            // can't swallow \".\" or \"..\" ever.\n            // can only swallow \".foo\" when explicitly asked.\n            if (\n              swallowee === '.' ||\n              swallowee === '..' 
||\n              (!options.dot && swallowee.charAt(0) === '.')\n            ) {\n              this.debug('dot detected!', file, fr, pattern, pr)\n              break\n            }\n\n            // ** swallows a segment, and continue.\n            this.debug('globstar swallow a segment, and continue')\n            fr++\n          }\n        }\n\n        // no match was found.\n        // However, in partial mode, we can't say this is necessarily over.\n        /* c8 ignore start */\n        if (partial) {\n          // ran out of file\n          this.debug('\\n>>> no match, partial?', file, fr, pattern, pr)\n          if (fr === fl) {\n            return true\n          }\n        }\n        /* c8 ignore stop */\n        return false\n      }\n\n      // something other than **\n      // non-magic patterns just have to match exactly\n      // patterns with magic have been turned into regexps.\n      let hit: boolean\n      if (typeof p === 'string') {\n        hit = f === p\n        this.debug('string match', p, f, hit)\n      } else {\n        hit = p.test(f)\n        this.debug('pattern match', p, f, hit)\n      }\n\n      if (!hit) return false\n    }\n\n    // Note: ending in / means that we'll get a final \"\"\n    // at the end of the pattern.  This can only match a\n    // corresponding \"\" at the end of the file.\n    // If the file ends in /, then it can only match a\n    // a pattern that ends in /, unless the pattern just\n    // doesn't have any more for it. But, a/b/ should *not*\n    // match \"a/b/*\", even though \"\" matches against the\n    // [^/]*? 
pattern, except in partial mode, where it might\n    // simply not be reached yet.\n    // However, a/b/ should still satisfy a/*\n\n    // now either we fell off the end of the pattern, or we're done.\n    if (fi === fl && pi === pl) {\n      // ran out of pattern and filename at the same time.\n      // an exact hit!\n      return true\n    } else if (fi === fl) {\n      // ran out of file, but still had pattern left.\n      // this is ok if we're doing the match as part of\n      // a glob fs traversal.\n      return partial\n    } else if (pi === pl) {\n      // ran out of pattern, still have file left.\n      // this is only acceptable if we're on the very last\n      // empty segment of a file with a trailing slash.\n      // a/* should match a/b/\n      return fi === fl - 1 && file[fi] === ''\n\n      /* c8 ignore start */\n    } else {\n      // should be unreachable.\n      throw new Error('wtf?')\n    }\n    /* c8 ignore stop */\n  }\n\n  braceExpand() {\n    return braceExpand(this.pattern, this.options)\n  }\n\n  parse(pattern: string): ParseReturn {\n    assertValidPattern(pattern)\n\n    const options = this.options\n\n    // shortcuts\n    if (pattern === '**') return GLOBSTAR\n    if (pattern === '') return ''\n\n    // far and away, the most common glob pattern parts are\n    // *, *.*, and *.  Add a fast check method for those.\n    let m: RegExpMatchArray | null\n    let fastTest: null | ((f: string) => boolean) = null\n    if ((m = pattern.match(starRE))) {\n      fastTest = options.dot ? starTestDot : starTest\n    } else if ((m = pattern.match(starDotExtRE))) {\n      fastTest = (\n        options.nocase ?\n          options.dot ?\n            starDotExtTestNocaseDot\n          : starDotExtTestNocase\n        : options.dot ? 
starDotExtTestDot\n        : starDotExtTest)(m[1])\n    } else if ((m = pattern.match(qmarksRE))) {\n      fastTest = (\n        options.nocase ?\n          options.dot ?\n            qmarksTestNocaseDot\n          : qmarksTestNocase\n        : options.dot ? qmarksTestDot\n        : qmarksTest)(m)\n    } else if ((m = pattern.match(starDotStarRE))) {\n      fastTest = options.dot ? starDotStarTestDot : starDotStarTest\n    } else if ((m = pattern.match(dotStarRE))) {\n      fastTest = dotStarTest\n    }\n\n    const re = AST.fromGlob(pattern, this.options).toMMPattern()\n    if (fastTest && typeof re === 'object') {\n      // Avoids overriding in frozen environments\n      Reflect.defineProperty(re, 'test', { value: fastTest })\n    }\n    return re\n  }\n\n  makeRe() {\n    if (this.regexp || this.regexp === false) return this.regexp\n\n    // at this point, this.set is a 2d array of partial\n    // pattern strings, or \"**\".\n    //\n    // It's better to use .match().  This function shouldn't\n    // be used, really, but it's pretty convenient sometimes,\n    // when you just want to work with a regex.\n    const set = this.set\n\n    if (!set.length) {\n      this.regexp = false\n      return this.regexp\n    }\n    const options = this.options\n\n    const twoStar =\n      options.noglobstar ? star\n      : options.dot ? twoStarDot\n      : twoStarNoDot\n    const flags = new Set(options.nocase ? ['i'] : [])\n\n    // regexpify non-globstar patterns\n    // if ** is only item, then we just do one twoStar\n    // if ** is first, and there are more, prepend (\\/|twoStar\\/)? 
to next\n    // if ** is last, append (\\/twoStar|) to previous\n    // if ** is in the middle, append (\\/|\\/twoStar\\/) to previous\n    // then filter out GLOBSTAR symbols\n    let re = set\n      .map(pattern => {\n        const pp: (string | typeof GLOBSTAR)[] = pattern.map(p => {\n          if (p instanceof RegExp) {\n            for (const f of p.flags.split('')) flags.add(f)\n          }\n          return (\n            typeof p === 'string' ? regExpEscape(p)\n            : p === GLOBSTAR ? GLOBSTAR\n            : p._src\n          )\n        }) as (string | typeof GLOBSTAR)[]\n        pp.forEach((p, i) => {\n          const next = pp[i + 1]\n          const prev = pp[i - 1]\n          if (p !== GLOBSTAR || prev === GLOBSTAR) {\n            return\n          }\n          if (prev === undefined) {\n            if (next !== undefined && next !== GLOBSTAR) {\n              pp[i + 1] = '(?:\\\\/|' + twoStar + '\\\\/)?' + next\n            } else {\n              pp[i] = twoStar\n            }\n          } else if (next === undefined) {\n            pp[i - 1] = prev + '(?:\\\\/|\\\\/' + twoStar + ')?'\n          } else if (next !== GLOBSTAR) {\n            pp[i - 1] = prev + '(?:\\\\/|\\\\/' + twoStar + '\\\\/)' + next\n            pp[i + 1] = GLOBSTAR\n          }\n        })\n        const filtered = pp.filter(p => p !== GLOBSTAR)\n\n        // For partial matches, we need to make the pattern match\n        // any prefix of the full path. 
We do this by generating\n        // alternative patterns that match progressively longer prefixes.\n        if (this.partial && filtered.length >= 1) {\n          const prefixes: string[] = []\n          for (let i = 1; i <= filtered.length; i++) {\n            prefixes.push(filtered.slice(0, i).join('/'))\n          }\n          return '(?:' + prefixes.join('|') + ')'\n        }\n\n        return filtered.join('/')\n      })\n      .join('|')\n\n    // need to wrap in parens if we had more than one thing with |,\n    // otherwise only the first will be anchored to ^ and the last to $\n    const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', '']\n    // must match entire pattern\n    // ending in a * or ** will make it less strict.\n    re = '^' + open + re + close + '$'\n\n    // In partial mode, '/' should always match as it's a valid prefix for any pattern\n    if (this.partial) {\n      re = '^(?:\\\\/|' + open + re.slice(1, -1) + close + ')$'\n    }\n\n    // can match anything, as long as it's not this.\n    if (this.negate) re = '^(?!' + re + ').+$'\n\n    try {\n      this.regexp = new RegExp(re, [...flags].join(''))\n      /* c8 ignore start */\n    } catch (ex) {\n      // should be impossible\n      this.regexp = false\n    }\n    /* c8 ignore stop */\n    return this.regexp\n  }\n\n  slashSplit(p: string) {\n    // if p starts with // on windows, we preserve that\n    // so that UNC paths aren't broken.  
Otherwise, any number of\n    // / characters are coalesced into one, unless\n    // preserveMultipleSlashes is set to true.\n    if (this.preserveMultipleSlashes) {\n      return p.split('/')\n    } else if (this.isWindows && /^\\/\\/[^\\/]+/.test(p)) {\n      // add an extra '' for the one we lose\n      return ['', ...p.split(/\\/+/)]\n    } else {\n      return p.split(/\\/+/)\n    }\n  }\n\n  match(f: string, partial = this.partial) {\n    this.debug('match', f, this.pattern)\n    // short-circuit in the case of busted things.\n    // comments, etc.\n    if (this.comment) {\n      return false\n    }\n    if (this.empty) {\n      return f === ''\n    }\n\n    if (f === '/' && partial) {\n      return true\n    }\n\n    const options = this.options\n\n    // windows: need to use /, not \\\n    if (this.isWindows) {\n      f = f.split('\\\\').join('/')\n    }\n\n    // treat the test path as a set of pathparts.\n    const ff = this.slashSplit(f)\n    this.debug(this.pattern, 'split', ff)\n\n    // just ONE of the pattern sets in this.set needs to match\n    // in order for it to be valid.  If negating, then just one\n    // match means that we have failed.\n    // Either way, return on the first hit.\n\n    const set = this.set\n    this.debug(this.pattern, 'set', set)\n\n    // Find the basename of the path by looking for the last non-empty segment\n    let filename: string = ff[ff.length - 1]\n    if (!filename) {\n      for (let i = ff.length - 2; !filename && i >= 0; i--) {\n        filename = ff[i]\n      }\n    }\n\n    for (let i = 0; i < set.length; i++) {\n      const pattern = set[i]\n      let file = ff\n      if (options.matchBase && pattern.length === 1) {\n        file = [filename]\n      }\n      const hit = this.matchOne(file, pattern, partial)\n      if (hit) {\n        if (options.flipNegate) {\n          return true\n        }\n        return !this.negate\n      }\n    }\n\n    // didn't get any hits.  
this is success if it's a negative\n    // pattern, failure otherwise.\n    if (options.flipNegate) {\n      return false\n    }\n    return this.negate\n  }\n\n  static defaults(def: MinimatchOptions) {\n    return minimatch.defaults(def).Minimatch\n  }\n}\n/* c8 ignore start */\nexport { AST } from './ast.js'\nexport { escape } from './escape.js'\nexport { unescape } from './unescape.js'\n/* c8 ignore stop */\nminimatch.AST = AST\nminimatch.Minimatch = Minimatch\nminimatch.escape = escape\nminimatch.unescape = unescape\n", "import { Minimatch, MinimatchOptions } from 'minimatch'\nimport { Minipass } from 'minipass'\nimport { fileURLToPath } from 'node:url'\nimport {\n  FSOption,\n  Path,\n  PathScurry,\n  PathScurryDarwin,\n  PathScurryPosix,\n  PathScurryWin32,\n} from 'path-scurry'\nimport { IgnoreLike } from './ignore.js'\nimport { Pattern } from './pattern.js'\nimport { GlobStream, GlobWalker } from './walker.js'\n\nexport type MatchSet = Minimatch['set']\nexport type GlobParts = Exclude\n\n// if no process global, just call it linux.\n// so we default to case-sensitive, / separators\nconst defaultPlatform: NodeJS.Platform =\n  (\n    typeof process === 'object' &&\n    process &&\n    typeof process.platform === 'string'\n  ) ?\n    process.platform\n  : 'linux'\n\n/**\n * A `GlobOptions` object may be provided to any of the exported methods, and\n * must be provided to the `Glob` constructor.\n *\n * All options are optional, boolean, and false by default, unless otherwise\n * noted.\n *\n * All resolved options are added to the Glob object as properties.\n *\n * If you are running many `glob` operations, you can pass a Glob object as the\n * `options` argument to a subsequent operation to share the previously loaded\n * cache.\n */\nexport interface GlobOptions {\n  /**\n   * Set to `true` to always receive absolute paths for\n   * matched files. 
Set to `false` to always return relative paths.\n   *\n   * When this option is not set, absolute paths are returned for patterns\n   * that are absolute, and otherwise paths are returned that are relative\n   * to the `cwd` setting.\n   *\n   * This does _not_ make an extra system call to get\n   * the realpath, it only does string path resolution.\n   *\n   * Conflicts with {@link withFileTypes}\n   */\n  absolute?: boolean\n\n  /**\n   * Set to false to enable {@link windowsPathsNoEscape}\n   *\n   * @deprecated\n   */\n  allowWindowsEscape?: boolean\n\n  /**\n   * The current working directory in which to search. Defaults to\n   * `process.cwd()`.\n   *\n   * May be eiher a string path or a `file://` URL object or string.\n   */\n  cwd?: string | URL\n\n  /**\n   * Include `.dot` files in normal matches and `globstar`\n   * matches. Note that an explicit dot in a portion of the pattern\n   * will always match dot files.\n   */\n  dot?: boolean\n\n  /**\n   * Prepend all relative path strings with `./` (or `.\\` on Windows).\n   *\n   * Without this option, returned relative paths are \"bare\", so instead of\n   * returning `'./foo/bar'`, they are returned as `'foo/bar'`.\n   *\n   * Relative patterns starting with `'../'` are not prepended with `./`, even\n   * if this option is set.\n   */\n  dotRelative?: boolean\n\n  /**\n   * Follow symlinked directories when expanding `**`\n   * patterns. 
This can result in a lot of duplicate references in\n   * the presence of cyclic links, and make performance quite bad.\n   *\n   * By default, a `**` in a pattern will follow 1 symbolic link if\n   * it is not the first item in the pattern, or none if it is the\n   * first item in the pattern, following the same behavior as Bash.\n   */\n  follow?: boolean\n\n  /**\n   * string or string[], or an object with `ignored` and `childrenIgnored`\n   * methods.\n   *\n   * If a string or string[] is provided, then this is treated as a glob\n   * pattern or array of glob patterns to exclude from matches. To ignore all\n   * children within a directory, as well as the entry itself, append `'/**'`\n   * to the ignore pattern.\n   *\n   * **Note** `ignore` patterns are _always_ in `dot:true` mode, regardless of\n   * any other settings.\n   *\n   * If an object is provided that has `ignored(path)` and/or\n   * `childrenIgnored(path)` methods, then these methods will be called to\n   * determine whether any Path is a match or if its children should be\n   * traversed, respectively.\n   */\n  ignore?: string | string[] | IgnoreLike\n\n  /**\n   * Treat brace expansion like `{a,b}` as a \"magic\" pattern. Has no\n   * effect if {@link nobrace} is set.\n   *\n   * Only has effect on the {@link hasMagic} function.\n   */\n  magicalBraces?: boolean\n\n  /**\n   * Add a `/` character to directory matches. Note that this requires\n   * additional stat calls in some cases.\n   */\n  mark?: boolean\n\n  /**\n   * Perform a basename-only match if the pattern does not contain any slash\n   * characters. That is, `*.js` would be treated as equivalent to\n   * `**\\/*.js`, matching all js files in all directories.\n   */\n  matchBase?: boolean\n\n  /**\n   * Limit the directory traversal to a given depth below the cwd.\n   * Note that this does NOT prevent traversal to sibling folders,\n   * root patterns, and so on. 
It only limits the maximum folder depth\n   * that the walk will descend, relative to the cwd.\n   */\n  maxDepth?: number\n\n  /**\n   * Do not expand `{a,b}` and `{1..3}` brace sets.\n   */\n  nobrace?: boolean\n\n  /**\n   * Perform a case-insensitive match. This defaults to `true` on macOS and\n   * Windows systems, and `false` on all others.\n   *\n   * **Note** `nocase` should only be explicitly set when it is\n   * known that the filesystem's case sensitivity differs from the\n   * platform default. If set `true` on case-sensitive file\n   * systems, or `false` on case-insensitive file systems, then the\n   * walk may return more or less results than expected.\n   */\n  nocase?: boolean\n\n  /**\n   * Do not match directories, only files. (Note: to match\n   * _only_ directories, put a `/` at the end of the pattern.)\n   */\n  nodir?: boolean\n\n  /**\n   * Do not match \"extglob\" patterns such as `+(a|b)`.\n   */\n  noext?: boolean\n\n  /**\n   * Do not match `**` against multiple filenames. (Ie, treat it as a normal\n   * `*` instead.)\n   *\n   * Conflicts with {@link matchBase}\n   */\n  noglobstar?: boolean\n\n  /**\n   * Defaults to value of `process.platform` if available, or `'linux'` if\n   * not. Setting `platform:'win32'` on non-Windows systems may cause strange\n   * behavior.\n   */\n  platform?: NodeJS.Platform\n\n  /**\n   * Set to true to call `fs.realpath` on all of the\n   * results. In the case of an entry that cannot be resolved, the\n   * entry is omitted. 
This incurs a slight performance penalty, of\n   * course, because of the added system calls.\n   */\n  realpath?: boolean\n\n  /**\n   *\n   * A string path resolved against the `cwd` option, which\n   * is used as the starting point for absolute patterns that start\n   * with `/`, (but not drive letters or UNC paths on Windows).\n   *\n   * Note that this _doesn't_ necessarily limit the walk to the\n   * `root` directory, and doesn't affect the cwd starting point for\n   * non-absolute patterns. A pattern containing `..` will still be\n   * able to traverse out of the root directory, if it is not an\n   * actual root directory on the filesystem, and any non-absolute\n   * patterns will be matched in the `cwd`. For example, the\n   * pattern `/../*` with `{root:'/some/path'}` will return all\n   * files in `/some`, not all files in `/some/path`. The pattern\n   * `*` with `{root:'/some/path'}` will return all the entries in\n   * the cwd, not the entries in `/some/path`.\n   *\n   * To start absolute and non-absolute patterns in the same\n   * path, you can use `{root:''}`. However, be aware that on\n   * Windows systems, a pattern like `x:/*` or `//host/share/*` will\n   * _always_ start in the `x:/` or `//host/share` directory,\n   * regardless of the `root` setting.\n   */\n  root?: string\n\n  /**\n   * A [PathScurry](http://npm.im/path-scurry) object used\n   * to traverse the file system. If the `nocase` option is set\n   * explicitly, then any provided `scurry` object must match this\n   * setting.\n   */\n  scurry?: PathScurry\n\n  /**\n   * Call `lstat()` on all entries, whether required or not to determine\n   * if it's a valid match. When used with {@link withFileTypes}, this means\n   * that matches will include data such as modified time, permissions, and\n   * so on.  
Note that this will incur a performance cost due to the added\n   * system calls.\n   */\n  stat?: boolean\n\n  /**\n   * An AbortSignal which will cancel the Glob walk when\n   * triggered.\n   */\n  signal?: AbortSignal\n\n  /**\n   * Use `\\\\` as a path separator _only_, and\n   *  _never_ as an escape character. If set, all `\\\\` characters are\n   *  replaced with `/` in the pattern.\n   *\n   *  Note that this makes it **impossible** to match against paths\n   *  containing literal glob pattern characters, but allows matching\n   *  with patterns constructed using `path.join()` and\n   *  `path.resolve()` on Windows platforms, mimicking the (buggy!)\n   *  behavior of Glob v7 and before on Windows. Please use with\n   *  caution, and be mindful of [the caveat below about Windows\n   *  paths](#windows). (For legacy reasons, this is also set if\n   *  `allowWindowsEscape` is set to the exact value `false`.)\n   */\n  windowsPathsNoEscape?: boolean\n\n  /**\n   * Return [PathScurry](http://npm.im/path-scurry)\n   * `Path` objects instead of strings. These are similar to a\n   * NodeJS `Dirent` object, but with additional methods and\n   * properties.\n   *\n   * Conflicts with {@link absolute}\n   */\n  withFileTypes?: boolean\n\n  /**\n   * An fs implementation to override some or all of the defaults.  See\n   * http://npm.im/path-scurry for details about what can be overridden.\n   */\n  fs?: FSOption\n\n  /**\n   * Just passed along to Minimatch.  Note that this makes all pattern\n   * matching operations slower and *extremely* noisy.\n   */\n  debug?: boolean\n\n  /**\n   * Return `/` delimited paths, even on Windows.\n   *\n   * On posix systems, this has no effect.  But, on Windows, it means that\n   * paths will be `/` delimited, and absolute paths will be their full\n   * resolved UNC forms, eg instead of `'C:\\\\foo\\\\bar'`, it would return\n   * `'//?/C:/foo/bar'`\n   */\n  posix?: boolean\n\n  /**\n   * Do not match any children of any matches. 
For example, the pattern\n   * `**\\/foo` would match `a/foo`, but not `a/foo/b/foo` in this mode.\n   *\n   * This is especially useful for cases like \"find all `node_modules`\n   * folders, but not the ones in `node_modules`\".\n   *\n   * In order to support this, the `Ignore` implementation must support an\n   * `add(pattern: string)` method. If using the default `Ignore` class, then\n   * this is fine, but if this is set to `false`, and a custom `Ignore` is\n   * provided that does not have an `add()` method, then it will throw an\n   * error.\n   *\n   * **Caveat** It *only* ignores matches that would be a descendant of a\n   * previous match, and only if that descendant is matched *after* the\n   * ancestor is encountered. Since the file system walk happens in\n   * indeterminate order, it's possible that a match will already be added\n   * before its ancestor, if multiple or braced patterns are used.\n   *\n   * For example:\n   *\n   * ```ts\n   * const results = await glob([\n   *   // likely to match first, since it's just a stat\n   *   'a/b/c/d/e/f',\n   *\n   *   // this pattern is more complicated! It must to various readdir()\n   *   // calls and test the results against a regular expression, and that\n   *   // is certainly going to take a little bit longer.\n   *   //\n   *   // So, later on, it encounters a match at 'a/b/c/d/e', but it's too\n   *   // late to ignore a/b/c/d/e/f, because it's already been emitted.\n   *   'a/[bdf]/?/[a-z]/*',\n   * ], { includeChildMatches: false })\n   * ```\n   *\n   * It's best to only set this to `false` if you can be reasonably sure that\n   * no components of the pattern will potentially match one another's file\n   * system descendants, or if the occasional included child entry will not\n   * cause problems.\n   *\n   * @default true\n   */\n  includeChildMatches?: boolean\n\n  /**\n   * max number of `{...}` patterns to expand. 
Default `1_000`.\n   *\n   * Note: this is much less than minimatch's default of `100_000`,\n   * because Glob has higher memory requirements due to walking\n   * the file system tree.\n   */\n  braceExpandMax?: number\n}\n\nexport type GlobOptionsWithFileTypesTrue = GlobOptions & {\n  withFileTypes: true\n  // string options not relevant if returning Path objects.\n  absolute?: undefined\n  mark?: undefined\n  posix?: undefined\n}\n\nexport type GlobOptionsWithFileTypesFalse = GlobOptions & {\n  withFileTypes?: false\n}\n\nexport type GlobOptionsWithFileTypesUnset = GlobOptions & {\n  withFileTypes?: undefined\n}\n\nexport type Result =\n  Opts extends GlobOptionsWithFileTypesTrue ? Path\n  : Opts extends GlobOptionsWithFileTypesFalse ? string\n  : Opts extends GlobOptionsWithFileTypesUnset ? string\n  : string | Path\nexport type Results = Result[]\n\nexport type FileTypes =\n  Opts extends GlobOptionsWithFileTypesTrue ? true\n  : Opts extends GlobOptionsWithFileTypesFalse ? false\n  : Opts extends GlobOptionsWithFileTypesUnset ? 
false\n  : boolean\n\n/**\n * An object that can perform glob pattern traversals.\n */\nexport class Glob implements GlobOptions {\n  absolute?: boolean\n  cwd: string\n  root?: string\n  dot: boolean\n  dotRelative: boolean\n  follow: boolean\n  ignore?: string | string[] | IgnoreLike\n  magicalBraces: boolean\n  mark?: boolean\n  matchBase: boolean\n  maxDepth: number\n  nobrace: boolean\n  nocase: boolean\n  nodir: boolean\n  noext: boolean\n  noglobstar: boolean\n  pattern: string[]\n  platform: NodeJS.Platform\n  realpath: boolean\n  scurry: PathScurry\n  stat: boolean\n  signal?: AbortSignal\n  windowsPathsNoEscape: boolean\n  withFileTypes: FileTypes\n  includeChildMatches: boolean\n\n  /**\n   * The options provided to the constructor.\n   */\n  opts: Opts\n\n  /**\n   * An array of parsed immutable {@link Pattern} objects.\n   */\n  patterns: Pattern[]\n\n  /**\n   * All options are stored as properties on the `Glob` object.\n   *\n   * See {@link GlobOptions} for full options descriptions.\n   *\n   * Note that a previous `Glob` object can be passed as the\n   * `GlobOptions` to another `Glob` instantiation to re-use settings\n   * and caches with a new pattern.\n   *\n   * Traversal functions can be called multiple times to run the walk\n   * again.\n   */\n  constructor(pattern: string | string[], opts: Opts) {\n    /* c8 ignore start */\n    if (!opts) throw new TypeError('glob options required')\n    /* c8 ignore stop */\n    this.withFileTypes = !!opts.withFileTypes as FileTypes\n    this.signal = opts.signal\n    this.follow = !!opts.follow\n    this.dot = !!opts.dot\n    this.dotRelative = !!opts.dotRelative\n    this.nodir = !!opts.nodir\n    this.mark = !!opts.mark\n    if (!opts.cwd) {\n      this.cwd = ''\n    } else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {\n      opts.cwd = fileURLToPath(opts.cwd)\n    }\n    this.cwd = opts.cwd || ''\n    this.root = opts.root\n    this.magicalBraces = !!opts.magicalBraces\n    
this.nobrace = !!opts.nobrace\n    this.noext = !!opts.noext\n    this.realpath = !!opts.realpath\n    this.absolute = opts.absolute\n    this.includeChildMatches = opts.includeChildMatches !== false\n\n    this.noglobstar = !!opts.noglobstar\n    this.matchBase = !!opts.matchBase\n    this.maxDepth =\n      typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity\n    this.stat = !!opts.stat\n    this.ignore = opts.ignore\n\n    if (this.withFileTypes && this.absolute !== undefined) {\n      throw new Error('cannot set absolute and withFileTypes:true')\n    }\n\n    if (typeof pattern === 'string') {\n      pattern = [pattern]\n    }\n\n    this.windowsPathsNoEscape =\n      !!opts.windowsPathsNoEscape ||\n      (opts as { allowWindowsEscape?: boolean }).allowWindowsEscape ===\n        false\n\n    if (this.windowsPathsNoEscape) {\n      pattern = pattern.map(p => p.replace(/\\\\/g, '/'))\n    }\n\n    if (this.matchBase) {\n      if (opts.noglobstar) {\n        throw new TypeError('base matching requires globstar')\n      }\n      pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`))\n    }\n\n    this.pattern = pattern\n\n    this.platform = opts.platform || defaultPlatform\n    this.opts = { ...opts, platform: this.platform }\n    if (opts.scurry) {\n      this.scurry = opts.scurry\n      if (\n        opts.nocase !== undefined &&\n        opts.nocase !== opts.scurry.nocase\n      ) {\n        throw new Error('nocase option contradicts provided scurry option')\n      }\n    } else {\n      const Scurry =\n        opts.platform === 'win32' ? PathScurryWin32\n        : opts.platform === 'darwin' ? PathScurryDarwin\n        : opts.platform ? 
PathScurryPosix\n        : PathScurry\n      this.scurry = new Scurry(this.cwd, {\n        nocase: opts.nocase,\n        fs: opts.fs,\n      })\n    }\n    this.nocase = this.scurry.nocase\n\n    // If you do nocase:true on a case-sensitive file system, then\n    // we need to use regexps instead of strings for non-magic\n    // path portions, because statting `aBc` won't return results\n    // for the file `AbC` for example.\n    const nocaseMagicOnly =\n      this.platform === 'darwin' || this.platform === 'win32'\n\n    const mmo: MinimatchOptions = {\n      braceExpandMax: 10_000,\n      ...opts,\n      dot: this.dot,\n      matchBase: this.matchBase,\n      nobrace: this.nobrace,\n      // default nocase based on platform\n      nocase: this.nocase,\n      nocaseMagicOnly,\n      nocomment: true,\n      noext: this.noext,\n      nonegate: true,\n      optimizationLevel: 2,\n      platform: this.platform,\n      windowsPathsNoEscape: this.windowsPathsNoEscape,\n      debug: !!this.opts.debug,\n    }\n\n    const mms = this.pattern.map(p => new Minimatch(p, mmo))\n    const [matchSet, globParts] = mms.reduce(\n      (set: [MatchSet, GlobParts], m) => {\n        set[0].push(...m.set)\n        set[1].push(...m.globParts)\n        return set\n      },\n      [[], []],\n    )\n    this.patterns = matchSet.map((set, i) => {\n      const g = globParts[i]\n      /* c8 ignore start */\n      if (!g) throw new Error('invalid pattern object')\n      /* c8 ignore stop */\n      return new Pattern(set, g, 0, this.platform)\n    })\n  }\n\n  /**\n   * Returns a Promise that resolves to the results array.\n   */\n  async walk(): Promise>\n  async walk(): Promise<(string | Path)[]> {\n    // Walkers always return array of Path objects, so we just have to\n    // coerce them into the right shape.  
It will have already called\n    // realpath() if the option was set to do so, so we know that's cached.\n    // start out knowing the cwd, at least\n    return [\n      ...(await new GlobWalker(this.patterns, this.scurry.cwd, {\n        ...this.opts,\n        maxDepth:\n          this.maxDepth !== Infinity ?\n            this.maxDepth + this.scurry.cwd.depth()\n          : Infinity,\n        platform: this.platform,\n        nocase: this.nocase,\n        includeChildMatches: this.includeChildMatches,\n      }).walk()),\n    ]\n  }\n\n  /**\n   * synchronous {@link Glob.walk}\n   */\n  walkSync(): Results\n  walkSync(): (string | Path)[] {\n    return [\n      ...new GlobWalker(this.patterns, this.scurry.cwd, {\n        ...this.opts,\n        maxDepth:\n          this.maxDepth !== Infinity ?\n            this.maxDepth + this.scurry.cwd.depth()\n          : Infinity,\n        platform: this.platform,\n        nocase: this.nocase,\n        includeChildMatches: this.includeChildMatches,\n      }).walkSync(),\n    ]\n  }\n\n  /**\n   * Stream results asynchronously.\n   */\n  stream(): Minipass, Result>\n  stream(): Minipass {\n    return new GlobStream(this.patterns, this.scurry.cwd, {\n      ...this.opts,\n      maxDepth:\n        this.maxDepth !== Infinity ?\n          this.maxDepth + this.scurry.cwd.depth()\n        : Infinity,\n      platform: this.platform,\n      nocase: this.nocase,\n      includeChildMatches: this.includeChildMatches,\n    }).stream()\n  }\n\n  /**\n   * Stream results synchronously.\n   */\n  streamSync(): Minipass, Result>\n  streamSync(): Minipass {\n    return new GlobStream(this.patterns, this.scurry.cwd, {\n      ...this.opts,\n      maxDepth:\n        this.maxDepth !== Infinity ?\n          this.maxDepth + this.scurry.cwd.depth()\n        : Infinity,\n      platform: this.platform,\n      nocase: this.nocase,\n      includeChildMatches: this.includeChildMatches,\n    }).streamSync()\n  }\n\n  /**\n   * Default sync iteration function. 
Returns a Generator that\n   * iterates over the results.\n   */\n  iterateSync(): Generator, void, void> {\n    return this.streamSync()[Symbol.iterator]()\n  }\n  [Symbol.iterator]() {\n    return this.iterateSync()\n  }\n\n  /**\n   * Default async iteration function. Returns an AsyncGenerator that\n   * iterates over the results.\n   */\n  iterate(): AsyncGenerator, void, void> {\n    return this.stream()[Symbol.asyncIterator]()\n  }\n  [Symbol.asyncIterator]() {\n    return this.iterate()\n  }\n}\n", "/**\n * @module LRUCache\n */\n\n// module-private names and types\n// this provides the default Perf object source.\n// it can be passed in via configuration to override it\n// for a single LRU object.\nexport type Perf = { now: () => number }\nconst defaultPerf: Perf =\n  (\n    typeof performance === 'object' &&\n    performance &&\n    typeof performance.now === 'function'\n  ) ?\n    performance\n  : Date\n\nconst warned = new Set()\n\n// either a function or a class\ntype ForC = ((...a: any[]) => any) | { new (...a: any[]): any }\n\n/* c8 ignore start */\nconst PROCESS = (\n  typeof process === 'object' && !!process ?\n    process\n  : {}) as { [k: string]: any }\n/* c8 ignore start */\n\nconst emitWarning = (\n  msg: string,\n  type: string,\n  code: string,\n  fn: ForC,\n) => {\n  typeof PROCESS.emitWarning === 'function' ?\n    PROCESS.emitWarning(msg, type, code, fn)\n  : console.error(`[${code}] ${type}: ${msg}`)\n}\n\nlet AC = globalThis.AbortController\nlet AS = globalThis.AbortSignal\n\n/* c8 ignore start */\nif (typeof AC === 'undefined') {\n  //@ts-ignore\n  AS = class AbortSignal {\n    onabort?: (...a: any[]) => any\n    _onabort: ((...a: any[]) => any)[] = []\n    reason?: any\n    aborted: boolean = false\n    addEventListener(_: string, fn: (...a: any[]) => any) {\n      this._onabort.push(fn)\n    }\n  }\n  //@ts-ignore\n  AC = class AbortController {\n    constructor() {\n      warnACPolyfill()\n    }\n    signal = new AS()\n    
abort(reason: any) {\n      if (this.signal.aborted) return\n      //@ts-ignore\n      this.signal.reason = reason\n      //@ts-ignore\n      this.signal.aborted = true\n      //@ts-ignore\n      for (const fn of this.signal._onabort) {\n        fn(reason)\n      }\n      this.signal.onabort?.(reason)\n    }\n  }\n  let printACPolyfillWarning =\n    PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1'\n  const warnACPolyfill = () => {\n    if (!printACPolyfillWarning) return\n    printACPolyfillWarning = false\n    emitWarning(\n      'AbortController is not defined. If using lru-cache in ' +\n        'node 14, load an AbortController polyfill from the ' +\n        '`node-abort-controller` package. A minimal polyfill is ' +\n        'provided for use by LRUCache.fetch(), but it should not be ' +\n        'relied upon in other contexts (eg, passing it to other APIs that ' +\n        'use AbortController/AbortSignal might have undesirable effects). ' +\n        'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.',\n      'NO_ABORT_CONTROLLER',\n      'ENOTSUP',\n      warnACPolyfill,\n    )\n  }\n}\n/* c8 ignore stop */\n\nconst shouldWarn = (code: string) => !warned.has(code)\n\nconst TYPE = Symbol('type')\nexport type PosInt = number & { [TYPE]: 'Positive Integer' }\nexport type Index = number & { [TYPE]: 'LRUCache Index' }\n\nconst isPosInt = (n: any): n is PosInt =>\n  n && n === Math.floor(n) && n > 0 && isFinite(n)\n\nexport type UintArray = Uint8Array | Uint16Array | Uint32Array\nexport type NumberArray = UintArray | number[]\n\n/* c8 ignore start */\n// This is a little bit ridiculous, tbh.\n// The maximum array length is 2^32-1 or thereabouts on most JS impls.\n// And well before that point, you're caching the entire world, I mean,\n// that's ~32GB of just integers for the next/prev links, plus whatever\n// else to hold that many keys and values.  
Just filling the memory with\n// zeroes at init time is brutal when you get that big.\n// But why not be complete?\n// Maybe in the future, these limits will have expanded.\nconst getUintArray = (max: number) =>\n  !isPosInt(max) ? null\n  : max <= Math.pow(2, 8) ? Uint8Array\n  : max <= Math.pow(2, 16) ? Uint16Array\n  : max <= Math.pow(2, 32) ? Uint32Array\n  : max <= Number.MAX_SAFE_INTEGER ? ZeroArray\n  : null\n/* c8 ignore stop */\n\nclass ZeroArray extends Array {\n  constructor(size: number) {\n    super(size)\n    this.fill(0)\n  }\n}\nexport type { ZeroArray }\nexport type { Stack }\n\nexport type StackLike = Stack | Index[]\nclass Stack {\n  heap: NumberArray\n  length: number\n  // private constructor\n  static #constructing: boolean = false\n  static create(max: number): StackLike {\n    const HeapCls = getUintArray(max)\n    if (!HeapCls) return []\n    Stack.#constructing = true\n    const s = new Stack(max, HeapCls)\n    Stack.#constructing = false\n    return s\n  }\n  constructor(max: number, HeapCls: { new (n: number): NumberArray }) {\n    /* c8 ignore start */\n    if (!Stack.#constructing) {\n      throw new TypeError('instantiate Stack using Stack.create(n)')\n    }\n    /* c8 ignore stop */\n    this.heap = new HeapCls(max)\n    this.length = 0\n  }\n  push(n: Index) {\n    this.heap[this.length++] = n\n  }\n  pop(): Index {\n    return this.heap[--this.length] as Index\n  }\n}\n\n/**\n * Promise representing an in-progress {@link LRUCache#fetch} call\n */\nexport type BackgroundFetch = Promise & {\n  __returned: BackgroundFetch | undefined\n  __abortController: AbortController\n  __staleWhileFetching: V | undefined\n}\n\nexport type DisposeTask = [\n  value: V,\n  key: K,\n  reason: LRUCache.DisposeReason,\n]\n\nexport namespace LRUCache {\n  /**\n   * An integer greater than 0, reflecting the calculated size of items\n   */\n  export type Size = number\n\n  /**\n   * Integer greater than 0, representing some number of milliseconds, or 
the\n   * time at which a TTL started counting from.\n   */\n  export type Milliseconds = number\n\n  /**\n   * An integer greater than 0, reflecting a number of items\n   */\n  export type Count = number\n\n  /**\n   * The reason why an item was removed from the cache, passed\n   * to the {@link Disposer} methods.\n   *\n   * - `evict`: The item was evicted because it is the least recently used,\n   *   and the cache is full.\n   * - `set`: A new value was set, overwriting the old value being disposed.\n   * - `delete`: The item was explicitly deleted, either by calling\n   *   {@link LRUCache#delete}, {@link LRUCache#clear}, or\n   *   {@link LRUCache#set} with an undefined value.\n   * - `expire`: The item was removed due to exceeding its TTL.\n   * - `fetch`: A {@link OptionsBase#fetchMethod} operation returned\n   *   `undefined` or was aborted, causing the item to be deleted.\n   */\n  export type DisposeReason =\n    | 'evict'\n    | 'set'\n    | 'delete'\n    | 'expire'\n    | 'fetch'\n  /**\n   * A method called upon item removal, passed as the\n   * {@link OptionsBase.dispose} and/or\n   * {@link OptionsBase.disposeAfter} options.\n   */\n  export type Disposer = (\n    value: V,\n    key: K,\n    reason: DisposeReason,\n  ) => void\n\n  /**\n   * The reason why an item was added to the cache, passed\n   * to the {@link Inserter} methods.\n   *\n   * - `add`: the item was not found in the cache, and was added\n   * - `update`: the item was in the cache, with the same value provided\n   * - `replace`: the item was in the cache, and replaced\n   */\n  export type InsertReason = 'add' | 'update' | 'replace'\n\n  /**\n   * A method called upon item insertion, passed as the\n   * {@link OptionsBase.insert}\n   */\n  export type Inserter = (\n    value: V,\n    key: K,\n    reason: InsertReason,\n  ) => void\n\n  /**\n   * A function that returns the effective calculated size\n   * of an entry in the cache.\n   */\n  export type SizeCalculator = (value: V, key: 
K) => Size\n\n  /**\n   * Options provided to the\n   * {@link OptionsBase.fetchMethod} function.\n   */\n  export interface FetcherOptions {\n    signal: AbortSignal\n    options: FetcherFetchOptions\n    /**\n     * Object provided in the {@link FetchOptions.context} option to\n     * {@link LRUCache#fetch}\n     */\n    context: FC\n  }\n\n  /**\n   * Occasionally, it may be useful to track the internal behavior of the\n   * cache, particularly for logging, debugging, or for behavior within the\n   * `fetchMethod`. To do this, you can pass a `status` object to the\n   * {@link LRUCache#fetch}, {@link LRUCache#get}, {@link LRUCache#set},\n   * {@link LRUCache#memo}, and {@link LRUCache#has} methods.\n   *\n   * The `status` option should be a plain JavaScript object. The following\n   * fields will be set on it appropriately, depending on the situation.\n   */\n  export interface Status {\n    /**\n     * The status of a set() operation.\n     *\n     * - add: the item was not found in the cache, and was added\n     * - update: the item was in the cache, with the same value provided\n     * - replace: the item was in the cache, and replaced\n     * - miss: the item was not added to the cache for some reason\n     */\n    set?: 'add' | 'update' | 'replace' | 'miss'\n\n    /**\n     * the ttl stored for the item, or undefined if ttls are not used.\n     */\n    ttl?: Milliseconds\n\n    /**\n     * the start time for the item, or undefined if ttls are not used.\n     */\n    start?: Milliseconds\n\n    /**\n     * The timestamp used for TTL calculation\n     */\n    now?: Milliseconds\n\n    /**\n     * the remaining ttl for the item, or undefined if ttls are not used.\n     */\n    remainingTTL?: Milliseconds\n\n    /**\n     * The calculated size for the item, if sizes are used.\n     */\n    entrySize?: Size\n\n    /**\n     * The total calculated size of the cache, if sizes are used.\n     */\n    totalCalculatedSize?: Size\n\n    /**\n     * A flag indicating 
that the item was not stored, due to exceeding the\n     * {@link OptionsBase.maxEntrySize}\n     */\n    maxEntrySizeExceeded?: true\n\n    /**\n     * The old value, specified in the case of `set:'update'` or\n     * `set:'replace'`\n     */\n    oldValue?: V\n\n    /**\n     * The results of a {@link LRUCache#has} operation\n     *\n     * - hit: the item was found in the cache\n     * - stale: the item was found in the cache, but is stale\n     * - miss: the item was not found in the cache\n     */\n    has?: 'hit' | 'stale' | 'miss'\n\n    /**\n     * The status of a {@link LRUCache#fetch} operation.\n     * Note that this can change as the underlying fetch() moves through\n     * various states.\n     *\n     * - inflight: there is another fetch() for this key which is in process\n     * - get: there is no {@link OptionsBase.fetchMethod}, so\n     *   {@link LRUCache#get} was called.\n     * - miss: the item is not in cache, and will be fetched.\n     * - hit: the item is in the cache, and was resolved immediately.\n     * - stale: the item is in the cache, but stale.\n     * - refresh: the item is in the cache, and not stale, but\n     *   {@link FetchOptions.forceRefresh} was specified.\n     */\n    fetch?: 'get' | 'inflight' | 'miss' | 'hit' | 'stale' | 'refresh'\n\n    /**\n     * The {@link OptionsBase.fetchMethod} was called\n     */\n    fetchDispatched?: true\n\n    /**\n     * The cached value was updated after a successful call to\n     * {@link OptionsBase.fetchMethod}\n     */\n    fetchUpdated?: true\n\n    /**\n     * The reason for a fetch() rejection.  
Either the error raised by the\n     * {@link OptionsBase.fetchMethod}, or the reason for an\n     * AbortSignal.\n     */\n    fetchError?: Error\n\n    /**\n     * The fetch received an abort signal\n     */\n    fetchAborted?: true\n\n    /**\n     * The abort signal received was ignored, and the fetch was allowed to\n     * continue.\n     */\n    fetchAbortIgnored?: true\n\n    /**\n     * The fetchMethod promise resolved successfully\n     */\n    fetchResolved?: true\n\n    /**\n     * The fetchMethod promise was rejected\n     */\n    fetchRejected?: true\n\n    /**\n     * The status of a {@link LRUCache#get} operation.\n     *\n     * - fetching: The item is currently being fetched.  If a previous value\n     *   is present and allowed, that will be returned.\n     * - stale: The item is in the cache, and is stale.\n     * - hit: the item is in the cache\n     * - miss: the item is not in the cache\n     */\n    get?: 'stale' | 'hit' | 'miss'\n\n    /**\n     * A fetch or get operation returned a stale value.\n     */\n    returnedStale?: true\n  }\n\n  /**\n   * options which override the options set in the LRUCache constructor\n   * when calling {@link LRUCache#fetch}.\n   *\n   * This is the union of {@link GetOptions} and {@link SetOptions}, plus\n   * {@link OptionsBase.noDeleteOnFetchRejection},\n   * {@link OptionsBase.allowStaleOnFetchRejection},\n   * {@link FetchOptions.forceRefresh}, and\n   * {@link FetcherOptions.context}\n   *\n   * Any of these may be modified in the {@link OptionsBase.fetchMethod}\n   * function, but the {@link GetOptions} fields will of course have no\n   * effect, as the {@link LRUCache#get} call already happened by the time\n   * the fetchMethod is called.\n   */\n  export interface FetcherFetchOptions\n    extends Pick<\n      OptionsBase,\n      | 'allowStale'\n      | 'updateAgeOnGet'\n      | 'noDeleteOnStaleGet'\n      | 'sizeCalculation'\n      | 'ttl'\n      | 'noDisposeOnSet'\n      | 'noUpdateTTL'\n      | 
'noDeleteOnFetchRejection'\n      | 'allowStaleOnFetchRejection'\n      | 'ignoreFetchAbort'\n      | 'allowStaleOnFetchAbort'\n    > {\n    status?: Status\n    size?: Size\n  }\n\n  /**\n   * Options that may be passed to the {@link LRUCache#fetch} method.\n   */\n  export interface FetchOptions\n    extends FetcherFetchOptions {\n    /**\n     * Set to true to force a re-load of the existing data, even if it\n     * is not yet stale.\n     */\n    forceRefresh?: boolean\n    /**\n     * Context provided to the {@link OptionsBase.fetchMethod} as\n     * the {@link FetcherOptions.context} param.\n     *\n     * If the FC type is specified as unknown (the default),\n     * undefined or void, then this is optional.  Otherwise, it will\n     * be required.\n     */\n    context?: FC\n    signal?: AbortSignal\n    status?: Status\n  }\n  /**\n   * Options provided to {@link LRUCache#fetch} when the FC type is something\n   * other than `unknown`, `undefined`, or `void`\n   */\n  export interface FetchOptionsWithContext\n    extends FetchOptions {\n    context: FC\n  }\n  /**\n   * Options provided to {@link LRUCache#fetch} when the FC type is\n   * `undefined` or `void`\n   */\n  export interface FetchOptionsNoContext\n    extends FetchOptions {\n    context?: undefined\n  }\n\n  export interface MemoOptions\n    extends Pick<\n      OptionsBase,\n      | 'allowStale'\n      | 'updateAgeOnGet'\n      | 'noDeleteOnStaleGet'\n      | 'sizeCalculation'\n      | 'ttl'\n      | 'noDisposeOnSet'\n      | 'noUpdateTTL'\n      | 'noDeleteOnFetchRejection'\n      | 'allowStaleOnFetchRejection'\n      | 'ignoreFetchAbort'\n      | 'allowStaleOnFetchAbort'\n    > {\n    /**\n     * Set to true to force a re-load of the existing data, even if it\n     * is not yet stale.\n     */\n    forceRefresh?: boolean\n    /**\n     * Context provided to the {@link OptionsBase.memoMethod} as\n     * the {@link MemoizerOptions.context} param.\n     *\n     * If the FC type is specified as 
unknown (the default),\n     * undefined or void, then this is optional.  Otherwise, it will\n     * be required.\n     */\n    context?: FC\n    status?: Status\n  }\n  /**\n   * Options provided to {@link LRUCache#memo} when the FC type is something\n   * other than `unknown`, `undefined`, or `void`\n   */\n  export interface MemoOptionsWithContext\n    extends MemoOptions {\n    context: FC\n  }\n  /**\n   * Options provided to {@link LRUCache#memo} when the FC type is\n   * `undefined` or `void`\n   */\n  export interface MemoOptionsNoContext\n    extends MemoOptions {\n    context?: undefined\n  }\n\n  /**\n   * Options provided to the\n   * {@link OptionsBase.memoMethod} function.\n   */\n  export interface MemoizerOptions {\n    options: MemoizerMemoOptions\n    /**\n     * Object provided in the {@link MemoOptions.context} option to\n     * {@link LRUCache#memo}\n     */\n    context: FC\n  }\n\n  /**\n   * options which override the options set in the LRUCache constructor\n   * when calling {@link LRUCache#memo}.\n   *\n   * This is the union of {@link GetOptions} and {@link SetOptions}, plus\n   * {@link MemoOptions.forceRefresh}, and\n   * {@link MemoOptions.context}\n   *\n   * Any of these may be modified in the {@link OptionsBase.memoMethod}\n   * function, but the {@link GetOptions} fields will of course have no\n   * effect, as the {@link LRUCache#get} call already happened by the time\n   * the memoMethod is called.\n   */\n  export interface MemoizerMemoOptions\n    extends Pick<\n      OptionsBase,\n      | 'allowStale'\n      | 'updateAgeOnGet'\n      | 'noDeleteOnStaleGet'\n      | 'sizeCalculation'\n      | 'ttl'\n      | 'noDisposeOnSet'\n      | 'noUpdateTTL'\n    > {\n    status?: Status\n    size?: Size\n    start?: Milliseconds\n  }\n\n  /**\n   * Options that may be passed to the {@link LRUCache#has} method.\n   */\n  export interface HasOptions\n    extends Pick, 'updateAgeOnHas'> {\n    status?: Status\n  }\n\n  /**\n   * Options that 
may be passed to the {@link LRUCache#get} method.\n   */\n  export interface GetOptions\n    extends Pick<\n      OptionsBase,\n      'allowStale' | 'updateAgeOnGet' | 'noDeleteOnStaleGet'\n    > {\n    status?: Status\n  }\n\n  /**\n   * Options that may be passed to the {@link LRUCache#peek} method.\n   */\n  export interface PeekOptions\n    extends Pick, 'allowStale'> {}\n\n  /**\n   * Options that may be passed to the {@link LRUCache#set} method.\n   */\n  export interface SetOptions\n    extends Pick<\n      OptionsBase,\n      'sizeCalculation' | 'ttl' | 'noDisposeOnSet' | 'noUpdateTTL'\n    > {\n    /**\n     * If size tracking is enabled, then setting an explicit size\n     * in the {@link LRUCache#set} call will prevent calling the\n     * {@link OptionsBase.sizeCalculation} function.\n     */\n    size?: Size\n    /**\n     * If TTL tracking is enabled, then setting an explicit start\n     * time in the {@link LRUCache#set} call will override the\n     * default time from `performance.now()` or `Date.now()`.\n     *\n     * Note that it must be a valid value for whichever time-tracking\n     * method is in use.\n     */\n    start?: Milliseconds\n    status?: Status\n  }\n\n  /**\n   * The type signature for the {@link OptionsBase.fetchMethod} option.\n   */\n  export type Fetcher = (\n    key: K,\n    staleValue: V | undefined,\n    options: FetcherOptions,\n  ) => Promise | V | undefined | void\n\n  /**\n   * the type signature for the {@link OptionsBase.memoMethod} option.\n   */\n  export type Memoizer = (\n    key: K,\n    staleValue: V | undefined,\n    options: MemoizerOptions,\n  ) => V\n\n  /**\n   * Options which may be passed to the {@link LRUCache} constructor.\n   *\n   * Most of these may be overridden in the various options that use\n   * them.\n   *\n   * Despite all being technically optional, the constructor requires that\n   * a cache is at minimum limited by one or more of {@link OptionsBase.max},\n   * {@link OptionsBase.ttl}, or 
{@link OptionsBase.maxSize}.\n   *\n   * If {@link OptionsBase.ttl} is used alone, then it is strongly advised\n   * (and in fact required by the type definitions here) that the cache\n   * also set {@link OptionsBase.ttlAutopurge}, to prevent potentially\n   * unbounded storage.\n   *\n   * All options are also available on the {@link LRUCache} instance, making\n   * it safe to pass an LRUCache instance as the options argumemnt to\n   * make another empty cache of the same type.\n   *\n   * Some options are marked as read-only, because changing them after\n   * instantiation is not safe. Changing any of the other options will of\n   * course only have an effect on subsequent method calls.\n   */\n  export interface OptionsBase {\n    /**\n     * The maximum number of items to store in the cache before evicting\n     * old entries. This is read-only on the {@link LRUCache} instance,\n     * and may not be overridden.\n     *\n     * If set, then storage space will be pre-allocated at construction\n     * time, and the cache will perform significantly faster.\n     *\n     * Note that significantly fewer items may be stored, if\n     * {@link OptionsBase.maxSize} and/or {@link OptionsBase.ttl} are also\n     * set.\n     *\n     * **It is strongly recommended to set a `max` to prevent unbounded growth\n     * of the cache.**\n     */\n    max?: Count\n\n    /**\n     * Max time in milliseconds for items to live in cache before they are\n     * considered stale.  
Note that stale items are NOT preemptively removed by\n     * default, and MAY live in the cache, contributing to its LRU max, long\n     * after they have expired, unless {@link OptionsBase.ttlAutopurge} is\n     * set.\n     *\n     * If set to `0` (the default value), then that means \"do not track\n     * TTL\", not \"expire immediately\".\n     *\n     * Also, as this cache is optimized for LRU/MRU operations, some of\n     * the staleness/TTL checks will reduce performance, as they will incur\n     * overhead by deleting items.\n     *\n     * This is not primarily a TTL cache, and does not make strong TTL\n     * guarantees. There is no pre-emptive pruning of expired items, but you\n     * _may_ set a TTL on the cache, and it will treat expired items as missing\n     * when they are fetched, and delete them.\n     *\n     * Optional, but must be a non-negative integer in ms if specified.\n     *\n     * This may be overridden by passing an options object to `cache.set()`.\n     *\n     * At least one of `max`, `maxSize`, or `TTL` is required. 
This must be a\n     * positive integer if set.\n     *\n     * Even if ttl tracking is enabled, **it is strongly recommended to set a\n     * `max` to prevent unbounded growth of the cache.**\n     *\n     * If ttl tracking is enabled, and `max` and `maxSize` are not set,\n     * and `ttlAutopurge` is not set, then a warning will be emitted\n     * cautioning about the potential for unbounded memory consumption.\n     * (The TypeScript definitions will also discourage this.)\n     */\n    ttl?: Milliseconds\n\n    /**\n     * Minimum amount of time in ms in which to check for staleness.\n     * Defaults to 1, which means that the current time is checked\n     * at most once per millisecond.\n     *\n     * Set to 0 to check the current time every time staleness is tested.\n     * (This reduces performance, and is theoretically unnecessary.)\n     *\n     * Setting this to a higher value will improve performance somewhat\n     * while using ttl tracking, albeit at the expense of keeping stale\n     * items around a bit longer than their TTLs would indicate.\n     *\n     * @default 1\n     */\n    ttlResolution?: Milliseconds\n\n    /**\n     * Preemptively remove stale items from the cache.\n     *\n     * Note that this may *significantly* degrade performance, especially if\n     * the cache is storing a large number of items. It is almost always best\n     * to just leave the stale items in the cache, and let them fall out as new\n     * items are added.\n     *\n     * Note that this means that {@link OptionsBase.allowStale} is a bit\n     * pointless, as stale items will be deleted almost as soon as they\n     * expire.\n     *\n     * Use with caution!\n     */\n    ttlAutopurge?: boolean\n\n    /**\n     * When using time-expiring entries with `ttl`, setting this to `true` will\n     * make each item's age reset to 0 whenever it is retrieved from cache with\n     * {@link LRUCache#get}, causing it to not expire. 
(It can still fall out\n     * of cache based on recency of use, of course.)\n     *\n     * Has no effect if {@link OptionsBase.ttl} is not set.\n     *\n     * This may be overridden by passing an options object to `cache.get()`.\n     */\n    updateAgeOnGet?: boolean\n\n    /**\n     * When using time-expiring entries with `ttl`, setting this to `true` will\n     * make each item's age reset to 0 whenever its presence in the cache is\n     * checked with {@link LRUCache#has}, causing it to not expire. (It can\n     * still fall out of cache based on recency of use, of course.)\n     *\n     * Has no effect if {@link OptionsBase.ttl} is not set.\n     */\n    updateAgeOnHas?: boolean\n\n    /**\n     * Allow {@link LRUCache#get} and {@link LRUCache#fetch} calls to return\n     * stale data, if available.\n     *\n     * By default, if you set `ttl`, stale items will only be deleted from the\n     * cache when you `get(key)`. That is, it's not preemptively pruning items,\n     * unless {@link OptionsBase.ttlAutopurge} is set.\n     *\n     * If you set `allowStale:true`, it'll return the stale value *as well as*\n     * deleting it. If you don't set this, then it'll return `undefined` when\n     * you try to get a stale entry.\n     *\n     * Note that when a stale entry is fetched, _even if it is returned due to\n     * `allowStale` being set_, it is removed from the cache immediately. 
You\n     * can suppress this behavior by setting\n     * {@link OptionsBase.noDeleteOnStaleGet}, either in the constructor, or in\n     * the options provided to {@link LRUCache#get}.\n     *\n     * This may be overridden by passing an options object to `cache.get()`.\n     * The `cache.has()` method will always return `false` for stale items.\n     *\n     * Only relevant if a ttl is set.\n     */\n    allowStale?: boolean\n\n    /**\n     * Function that is called on items when they are dropped from the\n     * cache, as `dispose(value, key, reason)`.\n     *\n     * This can be handy if you want to close file descriptors or do\n     * other cleanup tasks when items are no longer stored in the cache.\n     *\n     * **NOTE**: It is called _before_ the item has been fully removed\n     * from the cache, so if you want to put it right back in, you need\n     * to wait until the next tick. If you try to add it back in during\n     * the `dispose()` function call, it will break things in subtle and\n     * weird ways.\n     *\n     * Unlike several other options, this may _not_ be overridden by\n     * passing an option to `set()`, for performance reasons.\n     *\n     * The `reason` will be one of the following strings, corresponding\n     * to the reason for the item's deletion:\n     *\n     * - `evict` Item was evicted to make space for a new addition\n     * - `set` Item was overwritten by a new value\n     * - `expire` Item expired its TTL\n     * - `fetch` Item was deleted due to a failed or aborted fetch, or a\n     *   fetchMethod returning `undefined.\n     * - `delete` Item was removed by explicit `cache.delete(key)`,\n     *   `cache.clear()`, or `cache.set(key, undefined)`.\n     */\n    dispose?: Disposer\n\n    /**\n     * Function that is called when new items are inserted into the cache,\n     * as `onInsert(value, key, reason)`.\n     *\n     * This can be useful if you need to perform actions when an item is\n     * added, such as logging or 
tracking insertions.\n     *\n     * Unlike some other options, this may _not_ be overridden by passing\n     * an option to `set()`, for performance and consistency reasons.\n     */\n    onInsert?: Inserter\n\n    /**\n     * The same as {@link OptionsBase.dispose}, but called *after* the entry\n     * is completely removed and the cache is once again in a clean state.\n     *\n     * It is safe to add an item right back into the cache at this point.\n     * However, note that it is *very* easy to inadvertently create infinite\n     * recursion this way.\n     */\n    disposeAfter?: Disposer\n\n    /**\n     * Set to true to suppress calling the\n     * {@link OptionsBase.dispose} function if the entry key is\n     * still accessible within the cache.\n     *\n     * This may be overridden by passing an options object to\n     * {@link LRUCache#set}.\n     *\n     * Only relevant if `dispose` or `disposeAfter` are set.\n     */\n    noDisposeOnSet?: boolean\n\n    /**\n     * Boolean flag to tell the cache to not update the TTL when setting a new\n     * value for an existing key (ie, when updating a value rather than\n     * inserting a new value).  Note that the TTL value is _always_ set (if\n     * provided) when adding a new entry into the cache.\n     *\n     * Has no effect if a {@link OptionsBase.ttl} is not set.\n     *\n     * May be passed as an option to {@link LRUCache#set}.\n     */\n    noUpdateTTL?: boolean\n\n    /**\n     * Set to a positive integer to track the sizes of items added to the\n     * cache, and automatically evict items in order to stay below this size.\n     * Note that this may result in fewer than `max` items being stored.\n     *\n     * Attempting to add an item to the cache whose calculated size is greater\n     * that this amount will be a no-op. 
The item will not be cached, and no\n     * other items will be evicted.\n     *\n     * Optional, must be a positive integer if provided.\n     *\n     * Sets `maxEntrySize` to the same value, unless a different value is\n     * provided for `maxEntrySize`.\n     *\n     * At least one of `max`, `maxSize`, or `TTL` is required. This must be a\n     * positive integer if set.\n     *\n     * Even if size tracking is enabled, **it is strongly recommended to set a\n     * `max` to prevent unbounded growth of the cache.**\n     *\n     * Note also that size tracking can negatively impact performance,\n     * though for most cases, only minimally.\n     */\n    maxSize?: Size\n\n    /**\n     * The maximum allowed size for any single item in the cache.\n     *\n     * If a larger item is passed to {@link LRUCache#set} or returned by a\n     * {@link OptionsBase.fetchMethod} or {@link OptionsBase.memoMethod}, then\n     * it will not be stored in the cache.\n     *\n     * Attempting to add an item whose calculated size is greater than\n     * this amount will not cache the item or evict any old items, but\n     * WILL delete an existing value if one is already present.\n     *\n     * Optional, must be a positive integer if provided. 
Defaults to\n     * the value of `maxSize` if provided.\n     */\n    maxEntrySize?: Size\n\n    /**\n     * A function that returns a number indicating the item's size.\n     *\n     * Requires {@link OptionsBase.maxSize} to be set.\n     *\n     * If not provided, and {@link OptionsBase.maxSize} or\n     * {@link OptionsBase.maxEntrySize} are set, then all\n     * {@link LRUCache#set} calls **must** provide an explicit\n     * {@link SetOptions.size} or sizeCalculation param.\n     */\n    sizeCalculation?: SizeCalculator\n\n    /**\n     * Method that provides the implementation for {@link LRUCache#fetch}\n     *\n     * ```ts\n     * fetchMethod(key, staleValue, { signal, options, context })\n     * ```\n     *\n     * If `fetchMethod` is not provided, then `cache.fetch(key)` is equivalent\n     * to `Promise.resolve(cache.get(key))`.\n     *\n     * If at any time, `signal.aborted` is set to `true`, or if the\n     * `signal.onabort` method is called, or if it emits an `'abort'` event\n     * which you can listen to with `addEventListener`, then that means that\n     * the fetch should be abandoned. This may be passed along to async\n     * functions aware of AbortController/AbortSignal behavior.\n     *\n     * The `fetchMethod` should **only** return `undefined` or a Promise\n     * resolving to `undefined` if the AbortController signaled an `abort`\n     * event. In all other cases, it should return or resolve to a value\n     * suitable for adding to the cache.\n     *\n     * The `options` object is a union of the options that may be provided to\n     * `set()` and `get()`. 
If they are modified, then that will result in\n     * modifying the settings to `cache.set()` when the value is resolved, and\n     * in the case of\n     * {@link OptionsBase.noDeleteOnFetchRejection} and\n     * {@link OptionsBase.allowStaleOnFetchRejection}, the handling of\n     * `fetchMethod` failures.\n     *\n     * For example, a DNS cache may update the TTL based on the value returned\n     * from a remote DNS server by changing `options.ttl` in the `fetchMethod`.\n     */\n    fetchMethod?: Fetcher\n\n    /**\n     * Method that provides the implementation for {@link LRUCache#memo}\n     */\n    memoMethod?: Memoizer\n\n    /**\n     * Set to true to suppress the deletion of stale data when a\n     * {@link OptionsBase.fetchMethod} returns a rejected promise.\n     */\n    noDeleteOnFetchRejection?: boolean\n\n    /**\n     * Do not delete stale items when they are retrieved with\n     * {@link LRUCache#get}.\n     *\n     * Note that the `get` return value will still be `undefined`\n     * unless {@link OptionsBase.allowStale} is true.\n     *\n     * When using time-expiring entries with `ttl`, by default stale\n     * items will be removed from the cache when the key is accessed\n     * with `cache.get()`.\n     *\n     * Setting this option will cause stale items to remain in the cache, until\n     * they are explicitly deleted with `cache.delete(key)`, or retrieved with\n     * `noDeleteOnStaleGet` set to `false`.\n     *\n     * This may be overridden by passing an options object to `cache.get()`.\n     *\n     * Only relevant if a ttl is used.\n     */\n    noDeleteOnStaleGet?: boolean\n\n    /**\n     * Set to true to allow returning stale data when a\n     * {@link OptionsBase.fetchMethod} throws an error or returns a rejected\n     * promise.\n     *\n     * This differs from using {@link OptionsBase.allowStale} in that stale\n     * data will ONLY be returned in the case that the {@link LRUCache#fetch}\n     * fails, not any other times.\n    
 *\n     * If a `fetchMethod` fails, and there is no stale value available, the\n     * `fetch()` will resolve to `undefined`. Ie, all `fetchMethod` errors are\n     * suppressed.\n     *\n     * Implies `noDeleteOnFetchRejection`.\n     *\n     * This may be set in calls to `fetch()`, or defaulted on the constructor,\n     * or overridden by modifying the options object in the `fetchMethod`.\n     */\n    allowStaleOnFetchRejection?: boolean\n\n    /**\n     * Set to true to return a stale value from the cache when the\n     * `AbortSignal` passed to the {@link OptionsBase.fetchMethod} dispatches\n     * an `'abort'` event, whether user-triggered, or due to internal cache\n     * behavior.\n     *\n     * Unless {@link OptionsBase.ignoreFetchAbort} is also set, the underlying\n     * {@link OptionsBase.fetchMethod} will still be considered canceled, and\n     * any value it returns will be ignored and not cached.\n     *\n     * Caveat: since fetches are aborted when a new value is explicitly\n     * set in the cache, this can lead to fetch returning a stale value,\n     * since that was the fallback value _at the moment the `fetch()` was\n     * initiated_, even though the new updated value is now present in\n     * the cache.\n     *\n     * For example:\n     *\n     * ```ts\n     * const cache = new LRUCache({\n     *   ttl: 100,\n     *   fetchMethod: async (url, oldValue, { signal }) =>  {\n     *     const res = await fetch(url, { signal })\n     *     return await res.json()\n     *   }\n     * })\n     * cache.set('https://example.com/', { some: 'data' })\n     * // 100ms go by...\n     * const result = cache.fetch('https://example.com/')\n     * cache.set('https://example.com/', { other: 'thing' })\n     * console.log(await result) // { some: 'data' }\n     * console.log(cache.get('https://example.com/')) // { other: 'thing' }\n     * ```\n     */\n    allowStaleOnFetchAbort?: boolean\n\n    /**\n     * Set to true to ignore the `abort` event emitted by 
the `AbortSignal`\n     * object passed to {@link OptionsBase.fetchMethod}, and still cache the\n     * resulting resolution value, as long as it is not `undefined`.\n     *\n     * When used on its own, this means aborted {@link LRUCache#fetch} calls\n     * are not immediately resolved or rejected when they are aborted, and\n     * instead take the full time to await.\n     *\n     * When used with {@link OptionsBase.allowStaleOnFetchAbort}, aborted\n     * {@link LRUCache#fetch} calls will resolve immediately to their stale\n     * cached value or `undefined`, and will continue to process and eventually\n     * update the cache when they resolve, as long as the resulting value is\n     * not `undefined`, thus supporting a \"return stale on timeout while\n     * refreshing\" mechanism by passing `AbortSignal.timeout(n)` as the signal.\n     *\n     * For example:\n     *\n     * ```ts\n     * const c = new LRUCache({\n     *   ttl: 100,\n     *   ignoreFetchAbort: true,\n     *   allowStaleOnFetchAbort: true,\n     *   fetchMethod: async (key, oldValue, { signal }) => {\n     *     // note: do NOT pass the signal to fetch()!\n     *     // let's say this fetch can take a long time.\n     *     const res = await fetch(`https://slow-backend-server/${key}`)\n     *     return await res.json()\n     *   },\n     * })\n     *\n     * // this will return the stale value after 100ms, while still\n     * // updating in the background for next time.\n     * const val = await c.fetch('key', { signal: AbortSignal.timeout(100) })\n     * ```\n     *\n     * **Note**: regardless of this setting, an `abort` event _is still\n     * emitted on the `AbortSignal` object_, so may result in invalid results\n     * when passed to other underlying APIs that use AbortSignals.\n     *\n     * This may be overridden in the {@link OptionsBase.fetchMethod} or the\n     * call to {@link LRUCache#fetch}.\n     */\n    ignoreFetchAbort?: boolean\n\n    /**\n     * In some cases, you may want 
to swap out the performance/Date object\n     * used for TTL tracking. This should almost certainly NOT be done in\n     * production environments!\n     *\n     * This value defaults to `global.performance` if it has a `now()` method,\n     * or the `global.Date` object otherwise.\n     */\n    perf?: Perf\n  }\n\n  export interface OptionsMaxLimit\n    extends OptionsBase {\n    max: Count\n  }\n  export interface OptionsTTLLimit\n    extends OptionsBase {\n    ttl: Milliseconds\n    ttlAutopurge: boolean\n  }\n  export interface OptionsSizeLimit\n    extends OptionsBase {\n    maxSize: Size\n  }\n\n  /**\n   * The valid safe options for the {@link LRUCache} constructor\n   */\n  export type Options =\n    | OptionsMaxLimit\n    | OptionsSizeLimit\n    | OptionsTTLLimit\n\n  /**\n   * Entry objects used by {@link LRUCache#load} and {@link LRUCache#dump},\n   * and returned by {@link LRUCache#info}.\n   */\n  export interface Entry {\n    value: V\n    ttl?: Milliseconds\n    size?: Size\n    start?: Milliseconds\n  }\n}\n\n/**\n * Default export, the thing you're using this module to get.\n *\n * The `K` and `V` types define the key and value types, respectively. The\n * optional `FC` type defines the type of the `context` object passed to\n * `cache.fetch()` and `cache.memo()`.\n *\n * Keys and values **must not** be `null` or `undefined`.\n *\n * All properties from the options object (with the exception of `max`,\n * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are\n * added as normal public members. 
(The listed options are read-only getters.)\n *\n * Changing any of these will alter the defaults for subsequent method calls.\n */\nexport class LRUCache {\n  // options that cannot be changed without disaster\n  readonly #max: LRUCache.Count\n  readonly #maxSize: LRUCache.Size\n  readonly #dispose?: LRUCache.Disposer\n  readonly #onInsert?: LRUCache.Inserter\n  readonly #disposeAfter?: LRUCache.Disposer\n  readonly #fetchMethod?: LRUCache.Fetcher\n  readonly #memoMethod?: LRUCache.Memoizer\n  readonly #perf: Perf\n\n  /**\n   * {@link LRUCache.OptionsBase.perf}\n   */\n  get perf() {\n    return this.#perf\n  }\n\n  /**\n   * {@link LRUCache.OptionsBase.ttl}\n   */\n  ttl: LRUCache.Milliseconds\n\n  /**\n   * {@link LRUCache.OptionsBase.ttlResolution}\n   */\n  ttlResolution: LRUCache.Milliseconds\n  /**\n   * {@link LRUCache.OptionsBase.ttlAutopurge}\n   */\n  ttlAutopurge: boolean\n  /**\n   * {@link LRUCache.OptionsBase.updateAgeOnGet}\n   */\n  updateAgeOnGet: boolean\n  /**\n   * {@link LRUCache.OptionsBase.updateAgeOnHas}\n   */\n  updateAgeOnHas: boolean\n  /**\n   * {@link LRUCache.OptionsBase.allowStale}\n   */\n  allowStale: boolean\n\n  /**\n   * {@link LRUCache.OptionsBase.noDisposeOnSet}\n   */\n  noDisposeOnSet: boolean\n  /**\n   * {@link LRUCache.OptionsBase.noUpdateTTL}\n   */\n  noUpdateTTL: boolean\n  /**\n   * {@link LRUCache.OptionsBase.maxEntrySize}\n   */\n  maxEntrySize: LRUCache.Size\n  /**\n   * {@link LRUCache.OptionsBase.sizeCalculation}\n   */\n  sizeCalculation?: LRUCache.SizeCalculator\n  /**\n   * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}\n   */\n  noDeleteOnFetchRejection: boolean\n  /**\n   * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}\n   */\n  noDeleteOnStaleGet: boolean\n  /**\n   * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}\n   */\n  allowStaleOnFetchAbort: boolean\n  /**\n   * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}\n   */\n  allowStaleOnFetchRejection: boolean\n  /**\n   * {@link 
LRUCache.OptionsBase.ignoreFetchAbort}\n   */\n  ignoreFetchAbort: boolean\n\n  // computed properties\n  #size: LRUCache.Count\n  #calculatedSize: LRUCache.Size\n  #keyMap: Map\n  #keyList: (K | undefined)[]\n  #valList: (V | BackgroundFetch | undefined)[]\n  #next: NumberArray\n  #prev: NumberArray\n  #head: Index\n  #tail: Index\n  #free: StackLike\n  #disposed?: DisposeTask[]\n  #sizes?: ZeroArray\n  #starts?: ZeroArray\n  #ttls?: ZeroArray\n  #autopurgeTimers?: (undefined | ReturnType)[]\n\n  #hasDispose: boolean\n  #hasFetchMethod: boolean\n  #hasDisposeAfter: boolean\n  #hasOnInsert: boolean\n\n  /**\n   * Do not call this method unless you need to inspect the\n   * inner workings of the cache.  If anything returned by this\n   * object is modified in any way, strange breakage may occur.\n   *\n   * These fields are private for a reason!\n   *\n   * @internal\n   */\n  static unsafeExposeInternals<\n    K extends {},\n    V extends {},\n    FC extends unknown = unknown,\n  >(c: LRUCache) {\n    return {\n      // properties\n      starts: c.#starts,\n      ttls: c.#ttls,\n      autopurgeTimers: c.#autopurgeTimers,\n      sizes: c.#sizes,\n      keyMap: c.#keyMap as Map,\n      keyList: c.#keyList,\n      valList: c.#valList,\n      next: c.#next,\n      prev: c.#prev,\n      get head() {\n        return c.#head\n      },\n      get tail() {\n        return c.#tail\n      },\n      free: c.#free,\n      // methods\n      isBackgroundFetch: (p: any) => c.#isBackgroundFetch(p),\n      backgroundFetch: (\n        k: K,\n        index: number | undefined,\n        options: LRUCache.FetchOptions,\n        context: any,\n      ): BackgroundFetch =>\n        c.#backgroundFetch(\n          k,\n          index as Index | undefined,\n          options,\n          context,\n        ),\n      moveToTail: (index: number): void => c.#moveToTail(index as Index),\n      indexes: (options?: { allowStale: boolean }) => c.#indexes(options),\n      rindexes: (options?: { 
allowStale: boolean }) =>\n        c.#rindexes(options),\n      isStale: (index: number | undefined) => c.#isStale(index as Index),\n    }\n  }\n\n  // Protected read-only members\n\n  /**\n   * {@link LRUCache.OptionsBase.max} (read-only)\n   */\n  get max(): LRUCache.Count {\n    return this.#max\n  }\n  /**\n   * {@link LRUCache.OptionsBase.maxSize} (read-only)\n   */\n  get maxSize(): LRUCache.Count {\n    return this.#maxSize\n  }\n  /**\n   * The total computed size of items in the cache (read-only)\n   */\n  get calculatedSize(): LRUCache.Size {\n    return this.#calculatedSize\n  }\n  /**\n   * The number of items stored in the cache (read-only)\n   */\n  get size(): LRUCache.Count {\n    return this.#size\n  }\n  /**\n   * {@link LRUCache.OptionsBase.fetchMethod} (read-only)\n   */\n  get fetchMethod(): LRUCache.Fetcher | undefined {\n    return this.#fetchMethod\n  }\n  get memoMethod(): LRUCache.Memoizer | undefined {\n    return this.#memoMethod\n  }\n  /**\n   * {@link LRUCache.OptionsBase.dispose} (read-only)\n   */\n  get dispose() {\n    return this.#dispose\n  }\n  /**\n   * {@link LRUCache.OptionsBase.onInsert} (read-only)\n   */\n  get onInsert() {\n    return this.#onInsert\n  }\n  /**\n   * {@link LRUCache.OptionsBase.disposeAfter} (read-only)\n   */\n  get disposeAfter() {\n    return this.#disposeAfter\n  }\n\n  constructor(options: LRUCache.Options | LRUCache) {\n    const {\n      max = 0,\n      ttl,\n      ttlResolution = 1,\n      ttlAutopurge,\n      updateAgeOnGet,\n      updateAgeOnHas,\n      allowStale,\n      dispose,\n      onInsert,\n      disposeAfter,\n      noDisposeOnSet,\n      noUpdateTTL,\n      maxSize = 0,\n      maxEntrySize = 0,\n      sizeCalculation,\n      fetchMethod,\n      memoMethod,\n      noDeleteOnFetchRejection,\n      noDeleteOnStaleGet,\n      allowStaleOnFetchRejection,\n      allowStaleOnFetchAbort,\n      ignoreFetchAbort,\n      perf,\n    } = options\n\n    if (perf !== undefined) {\n      if (typeof 
perf?.now !== 'function') {\n        throw new TypeError(\n          'perf option must have a now() method if specified',\n        )\n      }\n    }\n\n    this.#perf = perf ?? defaultPerf\n\n    if (max !== 0 && !isPosInt(max)) {\n      throw new TypeError('max option must be a nonnegative integer')\n    }\n\n    const UintArray = max ? getUintArray(max) : Array\n    if (!UintArray) {\n      throw new Error('invalid max value: ' + max)\n    }\n\n    this.#max = max\n    this.#maxSize = maxSize\n    this.maxEntrySize = maxEntrySize || this.#maxSize\n    this.sizeCalculation = sizeCalculation\n    if (this.sizeCalculation) {\n      if (!this.#maxSize && !this.maxEntrySize) {\n        throw new TypeError(\n          'cannot set sizeCalculation without setting maxSize or maxEntrySize',\n        )\n      }\n      if (typeof this.sizeCalculation !== 'function') {\n        throw new TypeError('sizeCalculation set to non-function')\n      }\n    }\n\n    if (memoMethod !== undefined && typeof memoMethod !== 'function') {\n      throw new TypeError('memoMethod must be a function if defined')\n    }\n    this.#memoMethod = memoMethod\n\n    if (fetchMethod !== undefined && typeof fetchMethod !== 'function') {\n      throw new TypeError('fetchMethod must be a function if specified')\n    }\n    this.#fetchMethod = fetchMethod\n    this.#hasFetchMethod = !!fetchMethod\n\n    this.#keyMap = new Map()\n    this.#keyList = new Array(max).fill(undefined)\n    this.#valList = new Array(max).fill(undefined)\n    this.#next = new UintArray(max)\n    this.#prev = new UintArray(max)\n    this.#head = 0 as Index\n    this.#tail = 0 as Index\n    this.#free = Stack.create(max)\n    this.#size = 0\n    this.#calculatedSize = 0\n\n    if (typeof dispose === 'function') {\n      this.#dispose = dispose\n    }\n    if (typeof onInsert === 'function') {\n      this.#onInsert = onInsert\n    }\n    if (typeof disposeAfter === 'function') {\n      this.#disposeAfter = disposeAfter\n      
this.#disposed = []\n    } else {\n      this.#disposeAfter = undefined\n      this.#disposed = undefined\n    }\n    this.#hasDispose = !!this.#dispose\n    this.#hasOnInsert = !!this.#onInsert\n    this.#hasDisposeAfter = !!this.#disposeAfter\n\n    this.noDisposeOnSet = !!noDisposeOnSet\n    this.noUpdateTTL = !!noUpdateTTL\n    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection\n    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection\n    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort\n    this.ignoreFetchAbort = !!ignoreFetchAbort\n\n    // NB: maxEntrySize is set to maxSize if it's set\n    if (this.maxEntrySize !== 0) {\n      if (this.#maxSize !== 0) {\n        if (!isPosInt(this.#maxSize)) {\n          throw new TypeError(\n            'maxSize must be a positive integer if specified',\n          )\n        }\n      }\n      if (!isPosInt(this.maxEntrySize)) {\n        throw new TypeError(\n          'maxEntrySize must be a positive integer if specified',\n        )\n      }\n      this.#initializeSizeTracking()\n    }\n\n    this.allowStale = !!allowStale\n    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet\n    this.updateAgeOnGet = !!updateAgeOnGet\n    this.updateAgeOnHas = !!updateAgeOnHas\n    this.ttlResolution =\n      isPosInt(ttlResolution) || ttlResolution === 0 ? 
ttlResolution : 1\n    this.ttlAutopurge = !!ttlAutopurge\n    this.ttl = ttl || 0\n    if (this.ttl) {\n      if (!isPosInt(this.ttl)) {\n        throw new TypeError('ttl must be a positive integer if specified')\n      }\n      this.#initializeTTLTracking()\n    }\n\n    // do not allow completely unbounded caches\n    if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {\n      throw new TypeError(\n        'At least one of max, maxSize, or ttl is required',\n      )\n    }\n    if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {\n      const code = 'LRU_CACHE_UNBOUNDED'\n      if (shouldWarn(code)) {\n        warned.add(code)\n        const msg =\n          'TTL caching without ttlAutopurge, max, or maxSize can ' +\n          'result in unbounded memory consumption.'\n        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)\n      }\n    }\n  }\n\n  /**\n   * Return the number of ms left in the item's TTL. If item is not in cache,\n   * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.\n   */\n  getRemainingTTL(key: K) {\n    return this.#keyMap.has(key) ? Infinity : 0\n  }\n\n  #initializeTTLTracking() {\n    const ttls = new ZeroArray(this.#max)\n    const starts = new ZeroArray(this.#max)\n    this.#ttls = ttls\n    this.#starts = starts\n    const purgeTimers =\n      this.ttlAutopurge ?\n        new Array>(this.#max)\n      : undefined\n    this.#autopurgeTimers = purgeTimers\n\n    this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {\n      starts[index] = ttl !== 0 ? 
start : 0\n      ttls[index] = ttl\n      // clear out the purge timer if we're setting TTL to 0, and\n      // previously had a ttl purge timer running, so it doesn't\n      // fire unnecessarily.\n      if (purgeTimers?.[index]) {\n        clearTimeout(purgeTimers[index])\n        purgeTimers[index] = undefined\n      }\n      if (ttl !== 0 && purgeTimers) {\n        const t = setTimeout(() => {\n          if (this.#isStale(index)) {\n            this.#delete(this.#keyList[index] as K, 'expire')\n          }\n        }, ttl + 1)\n        // unref() not supported on all platforms\n        /* c8 ignore start */\n        if (t.unref) {\n          t.unref()\n        }\n        /* c8 ignore stop */\n        purgeTimers[index] = t\n      }\n    }\n\n    this.#updateItemAge = index => {\n      starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0\n    }\n\n    this.#statusTTL = (status, index) => {\n      if (ttls[index]) {\n        const ttl = ttls[index]\n        const start = starts[index]\n        /* c8 ignore next */\n        if (!ttl || !start) return\n        status.ttl = ttl\n        status.start = start\n        status.now = cachedNow || getNow()\n        const age = status.now - start\n        status.remainingTTL = ttl - age\n      }\n    }\n\n    // debounce calls to perf.now() to 1s so we're not hitting\n    // that costly call repeatedly.\n    let cachedNow = 0\n    const getNow = () => {\n      const n = this.#perf.now()\n      if (this.ttlResolution > 0) {\n        cachedNow = n\n        const t = setTimeout(() => (cachedNow = 0), this.ttlResolution)\n        // not available on all platforms\n        /* c8 ignore start */\n        if (t.unref) {\n          t.unref()\n        }\n        /* c8 ignore stop */\n      }\n      return n\n    }\n\n    this.getRemainingTTL = key => {\n      const index = this.#keyMap.get(key)\n      if (index === undefined) {\n        return 0\n      }\n      const ttl = ttls[index]\n      const start = starts[index]\n      
if (!ttl || !start) {\n        return Infinity\n      }\n      const age = (cachedNow || getNow()) - start\n      return ttl - age\n    }\n\n    this.#isStale = index => {\n      const s = starts[index]\n      const t = ttls[index]\n      return !!t && !!s && (cachedNow || getNow()) - s > t\n    }\n  }\n\n  // conditionally set private methods related to TTL\n  #updateItemAge: (index: Index) => void = () => {}\n  #statusTTL: (status: LRUCache.Status, index: Index) => void = () => {}\n  #setItemTTL: (\n    index: Index,\n    ttl: LRUCache.Milliseconds,\n    start?: LRUCache.Milliseconds,\n    // ignore because we never call this if we're not already in TTL mode\n    /* c8 ignore start */\n  ) => void = () => {}\n  /* c8 ignore stop */\n\n  #isStale: (index: Index) => boolean = () => false\n\n  #initializeSizeTracking() {\n    const sizes = new ZeroArray(this.#max)\n    this.#calculatedSize = 0\n    this.#sizes = sizes\n    this.#removeItemSize = index => {\n      this.#calculatedSize -= sizes[index] as number\n      sizes[index] = 0\n    }\n    this.#requireSize = (k, v, size, sizeCalculation) => {\n      // provisionally accept background fetches.\n      // actual value size will be checked when they return.\n      if (this.#isBackgroundFetch(v)) {\n        return 0\n      }\n      if (!isPosInt(size)) {\n        if (sizeCalculation) {\n          if (typeof sizeCalculation !== 'function') {\n            throw new TypeError('sizeCalculation must be a function')\n          }\n          size = sizeCalculation(v, k)\n          if (!isPosInt(size)) {\n            throw new TypeError(\n              'sizeCalculation return invalid (expect positive integer)',\n            )\n          }\n        } else {\n          throw new TypeError(\n            'invalid size value (must be positive integer). 
' +\n              'When maxSize or maxEntrySize is used, sizeCalculation ' +\n              'or size must be set.',\n          )\n        }\n      }\n      return size\n    }\n    this.#addItemSize = (\n      index: Index,\n      size: LRUCache.Size,\n      status?: LRUCache.Status,\n    ) => {\n      sizes[index] = size\n      if (this.#maxSize) {\n        const maxSize = this.#maxSize - (sizes[index] as number)\n        while (this.#calculatedSize > maxSize) {\n          this.#evict(true)\n        }\n      }\n      this.#calculatedSize += sizes[index] as number\n      if (status) {\n        status.entrySize = size\n        status.totalCalculatedSize = this.#calculatedSize\n      }\n    }\n  }\n\n  #removeItemSize: (index: Index) => void = _i => {}\n  #addItemSize: (\n    index: Index,\n    size: LRUCache.Size,\n    status?: LRUCache.Status,\n  ) => void = (_i, _s, _st) => {}\n  #requireSize: (\n    k: K,\n    v: V | BackgroundFetch,\n    size?: LRUCache.Size,\n    sizeCalculation?: LRUCache.SizeCalculator,\n  ) => LRUCache.Size = (\n    _k: K,\n    _v: V | BackgroundFetch,\n    size?: LRUCache.Size,\n    sizeCalculation?: LRUCache.SizeCalculator,\n  ) => {\n    if (size || sizeCalculation) {\n      throw new TypeError(\n        'cannot set size without setting maxSize or maxEntrySize on cache',\n      )\n    }\n    return 0\n  };\n\n  *#indexes({ allowStale = this.allowStale } = {}) {\n    if (this.#size) {\n      for (let i = this.#tail; true; ) {\n        if (!this.#isValidIndex(i)) {\n          break\n        }\n        if (allowStale || !this.#isStale(i)) {\n          yield i\n        }\n        if (i === this.#head) {\n          break\n        } else {\n          i = this.#prev[i] as Index\n        }\n      }\n    }\n  }\n\n  *#rindexes({ allowStale = this.allowStale } = {}) {\n    if (this.#size) {\n      for (let i = this.#head; true; ) {\n        if (!this.#isValidIndex(i)) {\n          break\n        }\n        if (allowStale || !this.#isStale(i)) {\n   
       yield i\n        }\n        if (i === this.#tail) {\n          break\n        } else {\n          i = this.#next[i] as Index\n        }\n      }\n    }\n  }\n\n  #isValidIndex(index: Index) {\n    return (\n      index !== undefined &&\n      this.#keyMap.get(this.#keyList[index] as K) === index\n    )\n  }\n\n  /**\n   * Return a generator yielding `[key, value]` pairs,\n   * in order from most recently used to least recently used.\n   */\n  *entries() {\n    for (const i of this.#indexes()) {\n      if (\n        this.#valList[i] !== undefined &&\n        this.#keyList[i] !== undefined &&\n        !this.#isBackgroundFetch(this.#valList[i])\n      ) {\n        yield [this.#keyList[i], this.#valList[i]] as [K, V]\n      }\n    }\n  }\n\n  /**\n   * Inverse order version of {@link LRUCache.entries}\n   *\n   * Return a generator yielding `[key, value]` pairs,\n   * in order from least recently used to most recently used.\n   */\n  *rentries() {\n    for (const i of this.#rindexes()) {\n      if (\n        this.#valList[i] !== undefined &&\n        this.#keyList[i] !== undefined &&\n        !this.#isBackgroundFetch(this.#valList[i])\n      ) {\n        yield [this.#keyList[i], this.#valList[i]]\n      }\n    }\n  }\n\n  /**\n   * Return a generator yielding the keys in the cache,\n   * in order from most recently used to least recently used.\n   */\n  *keys() {\n    for (const i of this.#indexes()) {\n      const k = this.#keyList[i]\n      if (k !== undefined && !this.#isBackgroundFetch(this.#valList[i])) {\n        yield k\n      }\n    }\n  }\n\n  /**\n   * Inverse order version of {@link LRUCache.keys}\n   *\n   * Return a generator yielding the keys in the cache,\n   * in order from least recently used to most recently used.\n   */\n  *rkeys() {\n    for (const i of this.#rindexes()) {\n      const k = this.#keyList[i]\n      if (k !== undefined && !this.#isBackgroundFetch(this.#valList[i])) {\n        yield k\n      }\n    }\n  }\n\n  /**\n   * Return a 
generator yielding the values in the cache,\n   * in order from most recently used to least recently used.\n   */\n  *values() {\n    for (const i of this.#indexes()) {\n      const v = this.#valList[i]\n      if (v !== undefined && !this.#isBackgroundFetch(this.#valList[i])) {\n        yield this.#valList[i] as V\n      }\n    }\n  }\n\n  /**\n   * Inverse order version of {@link LRUCache.values}\n   *\n   * Return a generator yielding the values in the cache,\n   * in order from least recently used to most recently used.\n   */\n  *rvalues() {\n    for (const i of this.#rindexes()) {\n      const v = this.#valList[i]\n      if (v !== undefined && !this.#isBackgroundFetch(this.#valList[i])) {\n        yield this.#valList[i]\n      }\n    }\n  }\n\n  /**\n   * Iterating over the cache itself yields the same results as\n   * {@link LRUCache.entries}\n   */\n  [Symbol.iterator]() {\n    return this.entries()\n  }\n\n  /**\n   * A String value that is used in the creation of the default string\n   * description of an object. Called by the built-in method\n   * `Object.prototype.toString`.\n   */\n  [Symbol.toStringTag] = 'LRUCache'\n\n  /**\n   * Find a value for which the supplied fn method returns a truthy value,\n   * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.\n   */\n  find(\n    fn: (v: V, k: K, self: LRUCache) => boolean,\n    getOptions: LRUCache.GetOptions = {},\n  ) {\n    for (const i of this.#indexes()) {\n      const v = this.#valList[i]\n      const value = this.#isBackgroundFetch(v) ? 
v.__staleWhileFetching : v\n      if (value === undefined) continue\n      if (fn(value, this.#keyList[i] as K, this)) {\n        return this.get(this.#keyList[i] as K, getOptions)\n      }\n    }\n  }\n\n  /**\n   * Call the supplied function on each item in the cache, in order from most\n   * recently used to least recently used.\n   *\n   * `fn` is called as `fn(value, key, cache)`.\n   *\n   * If `thisp` is provided, function will be called in the `this`-context of\n   * the provided object, or the cache if no `thisp` object is provided.\n   *\n   * Does not update age or recenty of use, or iterate over stale values.\n   */\n  forEach(\n    fn: (v: V, k: K, self: LRUCache) => any,\n    thisp: any = this,\n  ) {\n    for (const i of this.#indexes()) {\n      const v = this.#valList[i]\n      const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v\n      if (value === undefined) continue\n      fn.call(thisp, value, this.#keyList[i] as K, this)\n    }\n  }\n\n  /**\n   * The same as {@link LRUCache.forEach} but items are iterated over in\n   * reverse order.  (ie, less recently used items are iterated over first.)\n   */\n  rforEach(\n    fn: (v: V, k: K, self: LRUCache) => any,\n    thisp: any = this,\n  ) {\n    for (const i of this.#rindexes()) {\n      const v = this.#valList[i]\n      const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v\n      if (value === undefined) continue\n      fn.call(thisp, value, this.#keyList[i] as K, this)\n    }\n  }\n\n  /**\n   * Delete any stale entries. Returns true if anything was removed,\n   * false otherwise.\n   */\n  purgeStale() {\n    let deleted = false\n    for (const i of this.#rindexes({ allowStale: true })) {\n      if (this.#isStale(i)) {\n        this.#delete(this.#keyList[i] as K, 'expire')\n        deleted = true\n      }\n    }\n    return deleted\n  }\n\n  /**\n   * Get the extended info about a given entry, to get its value, size, and\n   * TTL info simultaneously. 
Returns `undefined` if the key is not present.\n   *\n   * Unlike {@link LRUCache#dump}, which is designed to be portable and survive\n   * serialization, the `start` value is always the current timestamp, and the\n   * `ttl` is a calculated remaining time to live (negative if expired).\n   *\n   * Always returns stale values, if their info is found in the cache, so be\n   * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})\n   * if relevant.\n   */\n  info(key: K): LRUCache.Entry | undefined {\n    const i = this.#keyMap.get(key)\n    if (i === undefined) return undefined\n    const v = this.#valList[i]\n    /* c8 ignore start - this isn't tested for the info function,\n     * but it's the same logic as found in other places. */\n    const value: V | undefined =\n      this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v\n    if (value === undefined) return undefined\n    /* c8 ignore end */\n    const entry: LRUCache.Entry = { value }\n    if (this.#ttls && this.#starts) {\n      const ttl = this.#ttls[i]\n      const start = this.#starts[i]\n      if (ttl && start) {\n        const remain = ttl - (this.#perf.now() - start)\n        entry.ttl = remain\n        entry.start = Date.now()\n      }\n    }\n    if (this.#sizes) {\n      entry.size = this.#sizes[i]\n    }\n    return entry\n  }\n\n  /**\n   * Return an array of [key, {@link LRUCache.Entry}] tuples which can be\n   * passed to {@link LRUCache#load}.\n   *\n   * The `start` fields are calculated relative to a portable `Date.now()`\n   * timestamp, even if `performance.now()` is available.\n   *\n   * Stale entries are always included in the `dump`, even if\n   * {@link LRUCache.OptionsBase.allowStale} is false.\n   *\n   * Note: this returns an actual array, not a generator, so it can be more\n   * easily passed around.\n   */\n  dump() {\n    const arr: [K, LRUCache.Entry][] = []\n    for (const i of this.#indexes({ allowStale: true })) {\n      const key = this.#keyList[i]\n   
   const v = this.#valList[i]\n      const value: V | undefined =\n        this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v\n      if (value === undefined || key === undefined) continue\n      const entry: LRUCache.Entry = { value }\n      if (this.#ttls && this.#starts) {\n        entry.ttl = this.#ttls[i]\n        // always dump the start relative to a portable timestamp\n        // it's ok for this to be a bit slow, it's a rare operation.\n        const age = this.#perf.now() - (this.#starts[i] as number)\n        entry.start = Math.floor(Date.now() - age)\n      }\n      if (this.#sizes) {\n        entry.size = this.#sizes[i]\n      }\n      arr.unshift([key, entry])\n    }\n    return arr\n  }\n\n  /**\n   * Reset the cache and load in the items in entries in the order listed.\n   *\n   * The shape of the resulting cache may be different if the same options are\n   * not used in both caches.\n   *\n   * The `start` fields are assumed to be calculated relative to a portable\n   * `Date.now()` timestamp, even if `performance.now()` is available.\n   */\n  load(arr: [K, LRUCache.Entry][]) {\n    this.clear()\n    for (const [key, entry] of arr) {\n      if (entry.start) {\n        // entry.start is a portable timestamp, but we may be using\n        // node's performance.now(), so calculate the offset, so that\n        // we get the intended remaining TTL, no matter how long it's\n        // been on ice.\n        //\n        // it's ok for this to be a bit slow, it's a rare operation.\n        const age = Date.now() - entry.start\n        entry.start = this.#perf.now() - age\n      }\n      this.set(key, entry.value, entry)\n    }\n  }\n\n  /**\n   * Add a value to the cache.\n   *\n   * Note: if `undefined` is specified as a value, this is an alias for\n   * {@link LRUCache#delete}\n   *\n   * Fields on the {@link LRUCache.SetOptions} options param will override\n   * their corresponding values in the constructor options for the scope\n   * of this single 
`set()` operation.\n   *\n   * If `start` is provided, then that will set the effective start\n   * time for the TTL calculation. Note that this must be a previous\n   * value of `performance.now()` if supported, or a previous value of\n   * `Date.now()` if not.\n   *\n   * Options object may also include `size`, which will prevent\n   * calling the `sizeCalculation` function and just use the specified\n   * number if it is a positive integer, and `noDisposeOnSet` which\n   * will prevent calling a `dispose` function in the case of\n   * overwrites.\n   *\n   * If the `size` (or return value of `sizeCalculation`) for a given\n   * entry is greater than `maxEntrySize`, then the item will not be\n   * added to the cache.\n   *\n   * Will update the recency of the entry.\n   *\n   * If the value is `undefined`, then this is an alias for\n   * `cache.delete(key)`. `undefined` is never stored in the cache.\n   */\n  set(\n    k: K,\n    v: V | BackgroundFetch | undefined,\n    setOptions: LRUCache.SetOptions = {},\n  ) {\n    if (v === undefined) {\n      this.delete(k)\n      return this\n    }\n    const {\n      ttl = this.ttl,\n      start,\n      noDisposeOnSet = this.noDisposeOnSet,\n      sizeCalculation = this.sizeCalculation,\n      status,\n    } = setOptions\n    let { noUpdateTTL = this.noUpdateTTL } = setOptions\n\n    const size = this.#requireSize(\n      k,\n      v,\n      setOptions.size || 0,\n      sizeCalculation,\n    )\n    // if the item doesn't fit, don't do anything\n    // NB: maxEntrySize set to maxSize by default\n    if (this.maxEntrySize && size > this.maxEntrySize) {\n      if (status) {\n        status.set = 'miss'\n        status.maxEntrySizeExceeded = true\n      }\n      // have to delete, in case something is there already.\n      this.#delete(k, 'set')\n      return this\n    }\n    let index = this.#size === 0 ? 
undefined : this.#keyMap.get(k)\n    if (index === undefined) {\n      // addition\n      index = (\n        this.#size === 0 ? this.#tail\n        : this.#free.length !== 0 ? this.#free.pop()\n        : this.#size === this.#max ? this.#evict(false)\n        : this.#size) as Index\n      this.#keyList[index] = k\n      this.#valList[index] = v\n      this.#keyMap.set(k, index)\n      this.#next[this.#tail] = index\n      this.#prev[index] = this.#tail\n      this.#tail = index\n      this.#size++\n      this.#addItemSize(index, size, status)\n      if (status) status.set = 'add'\n      noUpdateTTL = false\n      if (this.#hasOnInsert) {\n        this.#onInsert?.(v as V, k, 'add')\n      }\n    } else {\n      // update\n      this.#moveToTail(index)\n      const oldVal = this.#valList[index] as V | BackgroundFetch\n      if (v !== oldVal) {\n        if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {\n          oldVal.__abortController.abort(new Error('replaced'))\n          const { __staleWhileFetching: s } = oldVal\n          if (s !== undefined && !noDisposeOnSet) {\n            if (this.#hasDispose) {\n              this.#dispose?.(s as V, k, 'set')\n            }\n            if (this.#hasDisposeAfter) {\n              this.#disposed?.push([s as V, k, 'set'])\n            }\n          }\n        } else if (!noDisposeOnSet) {\n          if (this.#hasDispose) {\n            this.#dispose?.(oldVal as V, k, 'set')\n          }\n          if (this.#hasDisposeAfter) {\n            this.#disposed?.push([oldVal as V, k, 'set'])\n          }\n        }\n        this.#removeItemSize(index)\n        this.#addItemSize(index, size, status)\n        this.#valList[index] = v\n        if (status) {\n          status.set = 'replace'\n          const oldValue =\n            oldVal && this.#isBackgroundFetch(oldVal) ?\n              oldVal.__staleWhileFetching\n            : oldVal\n          if (oldValue !== undefined) status.oldValue = oldValue\n        }\n      } 
else if (status) {\n        status.set = 'update'\n      }\n\n      if (this.#hasOnInsert) {\n        this.onInsert?.(v as V, k, v === oldVal ? 'update' : 'replace')\n      }\n    }\n    if (ttl !== 0 && !this.#ttls) {\n      this.#initializeTTLTracking()\n    }\n    if (this.#ttls) {\n      if (!noUpdateTTL) {\n        this.#setItemTTL(index, ttl, start)\n      }\n      if (status) this.#statusTTL(status, index)\n    }\n    if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {\n      const dt = this.#disposed\n      let task: DisposeTask | undefined\n      while ((task = dt?.shift())) {\n        this.#disposeAfter?.(...task)\n      }\n    }\n    return this\n  }\n\n  /**\n   * Evict the least recently used item, returning its value or\n   * `undefined` if cache is empty.\n   */\n  pop(): V | undefined {\n    try {\n      while (this.#size) {\n        const val = this.#valList[this.#head]\n        this.#evict(true)\n        if (this.#isBackgroundFetch(val)) {\n          if (val.__staleWhileFetching) {\n            return val.__staleWhileFetching\n          }\n        } else if (val !== undefined) {\n          return val\n        }\n      }\n    } finally {\n      if (this.#hasDisposeAfter && this.#disposed) {\n        const dt = this.#disposed\n        let task: DisposeTask | undefined\n        while ((task = dt?.shift())) {\n          this.#disposeAfter?.(...task)\n        }\n      }\n    }\n  }\n\n  #evict(free: boolean) {\n    const head = this.#head\n    const k = this.#keyList[head] as K\n    const v = this.#valList[head] as V\n    if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {\n      v.__abortController.abort(new Error('evicted'))\n    } else if (this.#hasDispose || this.#hasDisposeAfter) {\n      if (this.#hasDispose) {\n        this.#dispose?.(v, k, 'evict')\n      }\n      if (this.#hasDisposeAfter) {\n        this.#disposed?.push([v, k, 'evict'])\n      }\n    }\n    this.#removeItemSize(head)\n    if (this.#autopurgeTimers?.[head]) 
{\n      clearTimeout(this.#autopurgeTimers[head])\n      this.#autopurgeTimers[head] = undefined\n    }\n    // if we aren't about to use the index, then null these out\n    if (free) {\n      this.#keyList[head] = undefined\n      this.#valList[head] = undefined\n      this.#free.push(head)\n    }\n    if (this.#size === 1) {\n      this.#head = this.#tail = 0 as Index\n      this.#free.length = 0\n    } else {\n      this.#head = this.#next[head] as Index\n    }\n    this.#keyMap.delete(k)\n    this.#size--\n    return head\n  }\n\n  /**\n   * Check if a key is in the cache, without updating the recency of use.\n   * Will return false if the item is stale, even though it is technically\n   * in the cache.\n   *\n   * Check if a key is in the cache, without updating the recency of\n   * use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set\n   * to `true` in either the options or the constructor.\n   *\n   * Will return `false` if the item is stale, even though it is technically in\n   * the cache. 
The difference can be determined (if it matters) by using a\n   * `status` argument, and inspecting the `has` field.\n   *\n   * Will not update item age unless\n   * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.\n   */\n  has(k: K, hasOptions: LRUCache.HasOptions = {}) {\n    const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions\n    const index = this.#keyMap.get(k)\n    if (index !== undefined) {\n      const v = this.#valList[index]\n      if (\n        this.#isBackgroundFetch(v) &&\n        v.__staleWhileFetching === undefined\n      ) {\n        return false\n      }\n      if (!this.#isStale(index)) {\n        if (updateAgeOnHas) {\n          this.#updateItemAge(index)\n        }\n        if (status) {\n          status.has = 'hit'\n          this.#statusTTL(status, index)\n        }\n        return true\n      } else if (status) {\n        status.has = 'stale'\n        this.#statusTTL(status, index)\n      }\n    } else if (status) {\n      status.has = 'miss'\n    }\n    return false\n  }\n\n  /**\n   * Like {@link LRUCache#get} but doesn't update recency or delete stale\n   * items.\n   *\n   * Returns `undefined` if the item is stale, unless\n   * {@link LRUCache.OptionsBase.allowStale} is set.\n   */\n  peek(k: K, peekOptions: LRUCache.PeekOptions = {}) {\n    const { allowStale = this.allowStale } = peekOptions\n    const index = this.#keyMap.get(k)\n    if (index === undefined || (!allowStale && this.#isStale(index))) {\n      return\n    }\n    const v = this.#valList[index]\n    // either stale and allowed, or forcing a refresh of non-stale value\n    return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v\n  }\n\n  #backgroundFetch(\n    k: K,\n    index: Index | undefined,\n    options: LRUCache.FetchOptions,\n    context: any,\n  ): BackgroundFetch {\n    const v = index === undefined ? 
undefined : this.#valList[index]\n    if (this.#isBackgroundFetch(v)) {\n      return v\n    }\n\n    const ac = new AC()\n    const { signal } = options\n    // when/if our AC signals, then stop listening to theirs.\n    signal?.addEventListener('abort', () => ac.abort(signal.reason), {\n      signal: ac.signal,\n    })\n\n    const fetchOpts = {\n      signal: ac.signal,\n      options,\n      context,\n    }\n\n    const cb = (v: V | undefined, updateCache = false): V | undefined => {\n      const { aborted } = ac.signal\n      const ignoreAbort = options.ignoreFetchAbort && v !== undefined\n      const proceed = options.ignoreFetchAbort ||\n        !!(options.allowStaleOnFetchAbort && v !== undefined)\n      if (options.status) {\n        if (aborted && !updateCache) {\n          options.status.fetchAborted = true\n          options.status.fetchError = ac.signal.reason\n          if (ignoreAbort) options.status.fetchAbortIgnored = true\n        } else {\n          options.status.fetchResolved = true\n        }\n      }\n      if (aborted && !ignoreAbort && !updateCache) {\n        return fetchFail(ac.signal.reason, proceed)\n      }\n      // either we didn't abort, and are still here, or we did, and ignored\n      const bf = p as BackgroundFetch\n      // if nothing else has been written there but we're set to update the\n      // cache and ignore the abort, or if it's still pending on this specific\n      // background request, then write it to the cache.\n      const vl = this.#valList[index as Index]\n      if (vl === p || (ignoreAbort && updateCache && vl === undefined)) {\n        if (v === undefined) {\n          if (bf.__staleWhileFetching !== undefined) {\n            this.#valList[index as Index] = bf.__staleWhileFetching\n          } else {\n            this.#delete(k, 'fetch')\n          }\n        } else {\n          if (options.status) options.status.fetchUpdated = true\n          this.set(k, v, fetchOpts.options)\n        }\n      }\n      return 
v\n    }\n\n    const eb = (er: any) => {\n      if (options.status) {\n        options.status.fetchRejected = true\n        options.status.fetchError = er\n      }\n      // do not pass go, do not collect $200\n      return fetchFail(er, false)\n    }\n\n    const fetchFail = (er: any, proceed: boolean): V | undefined => {\n      const { aborted } = ac.signal\n      const allowStaleAborted = aborted && options.allowStaleOnFetchAbort\n      const allowStale =\n        allowStaleAborted || options.allowStaleOnFetchRejection\n      const noDelete = allowStale || options.noDeleteOnFetchRejection\n      const bf = p as BackgroundFetch\n      if (this.#valList[index as Index] === p) {\n        // if we allow stale on fetch rejections, then we need to ensure that\n        // the stale value is not removed from the cache when the fetch fails.\n        const del = !noDelete ||\n          !proceed && bf.__staleWhileFetching === undefined\n        if (del) {\n          this.#delete(k, 'fetch')\n        } else if (!allowStaleAborted) {\n          // still replace the *promise* with the stale value,\n          // since we are done with the promise at this point.\n          // leave it untouched if we're still waiting for an\n          // aborted background fetch that hasn't yet returned.\n          this.#valList[index as Index] = bf.__staleWhileFetching\n        }\n      }\n      if (allowStale) {\n        if (options.status && bf.__staleWhileFetching !== undefined) {\n          options.status.returnedStale = true\n        }\n        return bf.__staleWhileFetching\n      } else if (bf.__returned === bf) {\n        throw er\n      }\n    }\n\n    const pcall = (\n      res: (v: V | undefined) => void,\n      rej: (e: any) => void,\n    ) => {\n      const fmp = this.#fetchMethod?.(k, v, fetchOpts)\n      if (fmp && fmp instanceof Promise) {\n        fmp.then(v => res(v === undefined ? 
undefined : v), rej)\n      }\n      // ignored, we go until we finish, regardless.\n      // defer check until we are actually aborting,\n      // so fetchMethod can override.\n      ac.signal.addEventListener('abort', () => {\n        if (!options.ignoreFetchAbort || options.allowStaleOnFetchAbort) {\n          res(undefined)\n          // when it eventually resolves, update the cache.\n          if (options.allowStaleOnFetchAbort) {\n            res = v => cb(v, true)\n          }\n        }\n      })\n    }\n\n    if (options.status) options.status.fetchDispatched = true\n    const p = new Promise(pcall).then(cb, eb)\n    const bf: BackgroundFetch = Object.assign(p, {\n      __abortController: ac,\n      __staleWhileFetching: v,\n      __returned: undefined,\n    })\n\n    if (index === undefined) {\n      // internal, don't expose status.\n      this.set(k, bf, { ...fetchOpts.options, status: undefined })\n      index = this.#keyMap.get(k)\n    } else {\n      this.#valList[index] = bf\n    }\n    return bf\n  }\n\n  #isBackgroundFetch(p: any): p is BackgroundFetch {\n    if (!this.#hasFetchMethod) return false\n    const b = p as BackgroundFetch\n    return (\n      !!b &&\n      b instanceof Promise &&\n      b.hasOwnProperty('__staleWhileFetching') &&\n      b.__abortController instanceof AC\n    )\n  }\n\n  /**\n   * Make an asynchronous cached fetch using the\n   * {@link LRUCache.OptionsBase.fetchMethod} function.\n   *\n   * If the value is in the cache and not stale, then the returned\n   * Promise resolves to the value.\n   *\n   * If not in the cache, or beyond its TTL staleness, then\n   * `fetchMethod(key, staleValue, { options, signal, context })` is\n   * called, and the value returned will be added to the cache once\n   * resolved.\n   *\n   * If called with `allowStale`, and an asynchronous fetch is\n   * currently in progress to reload a stale value, then the former\n   * stale value will be returned.\n   *\n   * If called with `forceRefresh`, 
then the cached item will be\n   * re-fetched, even if it is not stale. However, if `allowStale` is also\n   * set, then the old value will still be returned. This is useful\n   * in cases where you want to force a reload of a cached value. If\n   * a background fetch is already in progress, then `forceRefresh`\n   * has no effect.\n   *\n   * If multiple fetches for the same key are issued, then they will all be\n   * coalesced into a single call to fetchMethod.\n   *\n   * Note that this means that handling options such as\n   * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort},\n   * {@link LRUCache.FetchOptions.signal},\n   * and {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} will be\n   * determined by the FIRST fetch() call for a given key.\n   *\n   * This is a known (fixable) shortcoming which will be addresed on when\n   * someone complains about it, as the fix would involve added complexity and\n   * may not be worth the costs for this edge case.\n   *\n   * If {@link LRUCache.OptionsBase.fetchMethod} is not specified, then this is\n   * effectively an alias for `Promise.resolve(cache.get(key))`.\n   *\n   * When the fetch method resolves to a value, if the fetch has not\n   * been aborted due to deletion, eviction, or being overwritten,\n   * then it is added to the cache using the options provided.\n   *\n   * If the key is evicted or deleted before the `fetchMethod`\n   * resolves, then the AbortSignal passed to the `fetchMethod` will\n   * receive an `abort` event, and the promise returned by `fetch()`\n   * will reject with the reason for the abort.\n   *\n   * If a `signal` is passed to the `fetch()` call, then aborting the\n   * signal will abort the fetch and cause the `fetch()` promise to\n   * reject with the reason provided.\n   *\n   * **Setting `context`**\n   *\n   * If an `FC` type is set to a type other than `unknown`, `void`, or\n   * `undefined` in the {@link LRUCache} constructor, then all\n   * calls to `cache.fetch()` 
_must_ provide a `context` option. If\n   * set to `undefined` or `void`, then calls to fetch _must not_\n   * provide a `context` option.\n   *\n   * The `context` param allows you to provide arbitrary data that\n   * might be relevant in the course of fetching the data. It is only\n   * relevant for the course of a single `fetch()` operation, and\n   * discarded afterwards.\n   *\n   * **Note: `fetch()` calls are inflight-unique**\n   *\n   * If you call `fetch()` multiple times with the same key value,\n   * then every call after the first will resolve on the same\n   * promise1,\n   * _even if they have different settings that would otherwise change\n   * the behavior of the fetch_, such as `noDeleteOnFetchRejection`\n   * or `ignoreFetchAbort`.\n   *\n   * In most cases, this is not a problem (in fact, only fetching\n   * something once is what you probably want, if you're caching in\n   * the first place). If you are changing the fetch() options\n   * dramatically between runs, there's a good chance that you might\n   * be trying to fit divergent semantics into a single object, and\n   * would be better off with multiple cache instances.\n   *\n   * **1**: Ie, they're not the \"same Promise\", but they resolve at\n   * the same time, because they're both waiting on the same\n   * underlying fetchMethod response.\n   */\n\n  fetch(\n    k: K,\n    fetchOptions: unknown extends FC ? LRUCache.FetchOptions\n    : FC extends undefined | void ? LRUCache.FetchOptionsNoContext\n    : LRUCache.FetchOptionsWithContext,\n  ): Promise\n\n  // this overload not allowed if context is required\n  fetch(\n    k: unknown extends FC ? K\n    : FC extends undefined | void ? K\n    : never,\n    fetchOptions?: unknown extends FC ? LRUCache.FetchOptions\n    : FC extends undefined | void ? 
LRUCache.FetchOptionsNoContext\n    : never,\n  ): Promise\n\n  async fetch(\n    k: K,\n    fetchOptions: LRUCache.FetchOptions = {},\n  ): Promise {\n    const {\n      // get options\n      allowStale = this.allowStale,\n      updateAgeOnGet = this.updateAgeOnGet,\n      noDeleteOnStaleGet = this.noDeleteOnStaleGet,\n      // set options\n      ttl = this.ttl,\n      noDisposeOnSet = this.noDisposeOnSet,\n      size = 0,\n      sizeCalculation = this.sizeCalculation,\n      noUpdateTTL = this.noUpdateTTL,\n      // fetch exclusive options\n      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,\n      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,\n      ignoreFetchAbort = this.ignoreFetchAbort,\n      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,\n      context,\n      forceRefresh = false,\n      status,\n      signal,\n    } = fetchOptions\n\n    if (!this.#hasFetchMethod) {\n      if (status) status.fetch = 'get'\n      return this.get(k, {\n        allowStale,\n        updateAgeOnGet,\n        noDeleteOnStaleGet,\n        status,\n      })\n    }\n\n    const options = {\n      allowStale,\n      updateAgeOnGet,\n      noDeleteOnStaleGet,\n      ttl,\n      noDisposeOnSet,\n      size,\n      sizeCalculation,\n      noUpdateTTL,\n      noDeleteOnFetchRejection,\n      allowStaleOnFetchRejection,\n      allowStaleOnFetchAbort,\n      ignoreFetchAbort,\n      status,\n      signal,\n    }\n\n    let index = this.#keyMap.get(k)\n    if (index === undefined) {\n      if (status) status.fetch = 'miss'\n      const p = this.#backgroundFetch(k, index, options, context)\n      return (p.__returned = p)\n    } else {\n      // in cache, maybe already fetching\n      const v = this.#valList[index]\n      if (this.#isBackgroundFetch(v)) {\n        const stale = allowStale && v.__staleWhileFetching !== undefined\n        if (status) {\n          status.fetch = 'inflight'\n          if (stale) status.returnedStale = true\n        }\n       
 return stale ? v.__staleWhileFetching : (v.__returned = v)\n      }\n\n      // if we force a refresh, that means do NOT serve the cached value,\n      // unless we are already in the process of refreshing the cache.\n      const isStale = this.#isStale(index)\n      if (!forceRefresh && !isStale) {\n        if (status) status.fetch = 'hit'\n        this.#moveToTail(index)\n        if (updateAgeOnGet) {\n          this.#updateItemAge(index)\n        }\n        if (status) this.#statusTTL(status, index)\n        return v\n      }\n\n      // ok, it is stale or a forced refresh, and not already fetching.\n      // refresh the cache.\n      const p = this.#backgroundFetch(k, index, options, context)\n      const hasStale = p.__staleWhileFetching !== undefined\n      const staleVal = hasStale && allowStale\n      if (status) {\n        status.fetch = isStale ? 'stale' : 'refresh'\n        if (staleVal && isStale) status.returnedStale = true\n      }\n      return staleVal ? p.__staleWhileFetching : (p.__returned = p)\n    }\n  }\n\n  /**\n   * In some cases, `cache.fetch()` may resolve to `undefined`, either because\n   * a {@link LRUCache.OptionsBase#fetchMethod} was not provided (turning\n   * `cache.fetch(k)` into just an async wrapper around `cache.get(k)`) or\n   * because `ignoreFetchAbort` was specified (either to the constructor or\n   * in the {@link LRUCache.FetchOptions}). Also, the\n   * {@link LRUCache.OptionsBase.fetchMethod} may return `undefined` or `void`, making\n   * the test even more complicated.\n   *\n   * Because inferring the cases where `undefined` might be returned are so\n   * cumbersome, but testing for `undefined` can also be annoying, this method\n   * can be used, which will reject if `this.fetch()` resolves to undefined.\n   */\n  forceFetch(\n    k: K,\n    fetchOptions: unknown extends FC ? LRUCache.FetchOptions\n    : FC extends undefined | void ? 
LRUCache.FetchOptionsNoContext\n    : LRUCache.FetchOptionsWithContext,\n  ): Promise\n  // this overload not allowed if context is required\n  forceFetch(\n    k: unknown extends FC ? K\n    : FC extends undefined | void ? K\n    : never,\n    fetchOptions?: unknown extends FC ? LRUCache.FetchOptions\n    : FC extends undefined | void ? LRUCache.FetchOptionsNoContext\n    : never,\n  ): Promise\n  async forceFetch(\n    k: K,\n    fetchOptions: LRUCache.FetchOptions = {},\n  ): Promise {\n    const v = await this.fetch(\n      k,\n      fetchOptions as unknown extends FC ? LRUCache.FetchOptions\n      : FC extends undefined | void ? LRUCache.FetchOptionsNoContext\n      : LRUCache.FetchOptionsWithContext,\n    )\n    if (v === undefined) throw new Error('fetch() returned undefined')\n    return v\n  }\n\n  /**\n   * If the key is found in the cache, then this is equivalent to\n   * {@link LRUCache#get}. If not, in the cache, then calculate the value using\n   * the {@link LRUCache.OptionsBase.memoMethod}, and add it to the cache.\n   *\n   * If an `FC` type is set to a type other than `unknown`, `void`, or\n   * `undefined` in the LRUCache constructor, then all calls to `cache.memo()`\n   * _must_ provide a `context` option. If set to `undefined` or `void`, then\n   * calls to memo _must not_ provide a `context` option.\n   *\n   * The `context` param allows you to provide arbitrary data that might be\n   * relevant in the course of fetching the data. It is only relevant for the\n   * course of a single `memo()` operation, and discarded afterwards.\n   */\n  memo(\n    k: K,\n    memoOptions: unknown extends FC ? LRUCache.MemoOptions\n    : FC extends undefined | void ? LRUCache.MemoOptionsNoContext\n    : LRUCache.MemoOptionsWithContext,\n  ): V\n  // this overload not allowed if context is required\n  memo(\n    k: unknown extends FC ? K\n    : FC extends undefined | void ? K\n    : never,\n    memoOptions?: unknown extends FC ? 
LRUCache.MemoOptions\n    : FC extends undefined | void ? LRUCache.MemoOptionsNoContext\n    : never,\n  ): V\n  memo(k: K, memoOptions: LRUCache.MemoOptions = {}) {\n    const memoMethod = this.#memoMethod\n    if (!memoMethod) {\n      throw new Error('no memoMethod provided to constructor')\n    }\n    const { context, forceRefresh, ...options } = memoOptions\n    const v = this.get(k, options)\n    if (!forceRefresh && v !== undefined) return v\n    const vv = memoMethod(k, v, {\n      options,\n      context,\n    } as LRUCache.MemoizerOptions)\n    this.set(k, vv, options)\n    return vv\n  }\n\n  /**\n   * Return a value from the cache. Will update the recency of the cache\n   * entry found.\n   *\n   * If the key is not found, get() will return `undefined`.\n   */\n  get(k: K, getOptions: LRUCache.GetOptions = {}) {\n    const {\n      allowStale = this.allowStale,\n      updateAgeOnGet = this.updateAgeOnGet,\n      noDeleteOnStaleGet = this.noDeleteOnStaleGet,\n      status,\n    } = getOptions\n    const index = this.#keyMap.get(k)\n    if (index !== undefined) {\n      const value = this.#valList[index]\n      const fetching = this.#isBackgroundFetch(value)\n      if (status) this.#statusTTL(status, index)\n      if (this.#isStale(index)) {\n        if (status) status.get = 'stale'\n        // delete only if not an in-flight background fetch\n        if (!fetching) {\n          if (!noDeleteOnStaleGet) {\n            this.#delete(k, 'expire')\n          }\n          if (status && allowStale) status.returnedStale = true\n          return allowStale ? value : undefined\n        } else {\n          if (\n            status &&\n            allowStale &&\n            value.__staleWhileFetching !== undefined\n          ) {\n            status.returnedStale = true\n          }\n          return allowStale ? 
value.__staleWhileFetching : undefined\n        }\n      } else {\n        if (status) status.get = 'hit'\n        // if we're currently fetching it, we don't actually have it yet\n        // it's not stale, which means this isn't a staleWhileRefetching.\n        // If it's not stale, and fetching, AND has a __staleWhileFetching\n        // value, then that means the user fetched with {forceRefresh:true},\n        // so it's safe to return that value.\n        if (fetching) {\n          return value.__staleWhileFetching\n        }\n        this.#moveToTail(index)\n        if (updateAgeOnGet) {\n          this.#updateItemAge(index)\n        }\n        return value\n      }\n    } else if (status) {\n      status.get = 'miss'\n    }\n  }\n\n  #connect(p: Index, n: Index) {\n    this.#prev[n] = p\n    this.#next[p] = n\n  }\n\n  #moveToTail(index: Index): void {\n    // if tail already, nothing to do\n    // if head, move head to next[index]\n    // else\n    //   move next[prev[index]] to next[index] (head has no prev)\n    //   move prev[next[index]] to prev[index]\n    // prev[index] = tail\n    // next[tail] = index\n    // tail = index\n    if (index !== this.#tail) {\n      if (index === this.#head) {\n        this.#head = this.#next[index] as Index\n      } else {\n        this.#connect(\n          this.#prev[index] as Index,\n          this.#next[index] as Index,\n        )\n      }\n      this.#connect(this.#tail, index)\n      this.#tail = index\n    }\n  }\n\n  /**\n   * Deletes a key out of the cache.\n   *\n   * Returns true if the key was deleted, false otherwise.\n   */\n  delete(k: K) {\n    return this.#delete(k, 'delete')\n  }\n\n  #delete(k: K, reason: LRUCache.DisposeReason) {\n    let deleted = false\n    if (this.#size !== 0) {\n      const index = this.#keyMap.get(k)\n      if (index !== undefined) {\n        if (this.#autopurgeTimers?.[index]) {\n          clearTimeout(this.#autopurgeTimers?.[index])\n          this.#autopurgeTimers[index] = 
undefined\n        }\n        deleted = true\n        if (this.#size === 1) {\n          this.#clear(reason)\n        } else {\n          this.#removeItemSize(index)\n          const v = this.#valList[index]\n          if (this.#isBackgroundFetch(v)) {\n            v.__abortController.abort(new Error('deleted'))\n          } else if (this.#hasDispose || this.#hasDisposeAfter) {\n            if (this.#hasDispose) {\n              this.#dispose?.(v as V, k, reason)\n            }\n            if (this.#hasDisposeAfter) {\n              this.#disposed?.push([v as V, k, reason])\n            }\n          }\n          this.#keyMap.delete(k)\n          this.#keyList[index] = undefined\n          this.#valList[index] = undefined\n          if (index === this.#tail) {\n            this.#tail = this.#prev[index] as Index\n          } else if (index === this.#head) {\n            this.#head = this.#next[index] as Index\n          } else {\n            const pi = this.#prev[index] as number\n            this.#next[pi] = this.#next[index] as number\n            const ni = this.#next[index] as number\n            this.#prev[ni] = this.#prev[index] as number\n          }\n          this.#size--\n          this.#free.push(index)\n        }\n      }\n    }\n    if (this.#hasDisposeAfter && this.#disposed?.length) {\n      const dt = this.#disposed\n      let task: DisposeTask | undefined\n      while ((task = dt?.shift())) {\n        this.#disposeAfter?.(...task)\n      }\n    }\n    return deleted\n  }\n\n  /**\n   * Clear the cache entirely, throwing away all values.\n   */\n  clear() {\n    return this.#clear('delete')\n  }\n  #clear(reason: LRUCache.DisposeReason) {\n    for (const index of this.#rindexes({ allowStale: true })) {\n      const v = this.#valList[index]\n      if (this.#isBackgroundFetch(v)) {\n        v.__abortController.abort(new Error('deleted'))\n      } else {\n        const k = this.#keyList[index]\n        if (this.#hasDispose) {\n          
this.#dispose?.(v as V, k as K, reason)\n        }\n        if (this.#hasDisposeAfter) {\n          this.#disposed?.push([v as V, k as K, reason])\n        }\n      }\n    }\n\n    this.#keyMap.clear()\n    this.#valList.fill(undefined)\n    this.#keyList.fill(undefined)\n    if (this.#ttls && this.#starts) {\n      this.#ttls.fill(0)\n      this.#starts.fill(0)\n      for (const t of this.#autopurgeTimers ?? []) {\n        if (t !== undefined) clearTimeout(t)\n      }\n      this.#autopurgeTimers?.fill(undefined)\n    }\n    if (this.#sizes) {\n      this.#sizes.fill(0)\n    }\n    this.#head = 0 as Index\n    this.#tail = 0 as Index\n    this.#free.length = 0\n    this.#calculatedSize = 0\n    this.#size = 0\n    if (this.#hasDisposeAfter && this.#disposed) {\n      const dt = this.#disposed\n      let task: DisposeTask | undefined\n      while ((task = dt?.shift())) {\n        this.#disposeAfter?.(...task)\n      }\n    }\n  }\n}\n", "import { LRUCache } from 'lru-cache'\nimport { posix, win32 } from 'node:path'\n\nimport { fileURLToPath } from 'node:url'\n\nimport {\n  lstatSync,\n  readdir as readdirCB,\n  readdirSync,\n  readlinkSync,\n  realpathSync as rps,\n} from 'fs'\nimport * as actualFS from 'node:fs'\n\nconst realpathSync = rps.native\n// TODO: test perf of fs/promises realpath vs realpathCB,\n// since the promises one uses realpath.native\n\nimport { lstat, readdir, readlink, realpath } from 'node:fs/promises'\n\nimport { Minipass } from 'minipass'\nimport type { Dirent, Stats } from 'node:fs'\n\n/**\n * An object that will be used to override the default `fs`\n * methods.  
Any methods that are not overridden will use Node's\n * built-in implementations.\n *\n * - lstatSync\n * - readdir (callback `withFileTypes` Dirent variant, used for\n *   readdirCB and most walks)\n * - readdirSync\n * - readlinkSync\n * - realpathSync\n * - promises: Object containing the following async methods:\n *   - lstat\n *   - readdir (Dirent variant only)\n *   - readlink\n *   - realpath\n */\nexport interface FSOption {\n  lstatSync?: (path: string) => Stats\n  readdir?: (\n    path: string,\n    options: { withFileTypes: true },\n    cb: (er: NodeJS.ErrnoException | null, entries?: Dirent[]) => any,\n  ) => void\n  readdirSync?: (\n    path: string,\n    options: { withFileTypes: true },\n  ) => Dirent[]\n  readlinkSync?: (path: string) => string\n  realpathSync?: (path: string) => string\n  promises?: {\n    lstat?: (path: string) => Promise\n    readdir?: (\n      path: string,\n      options: { withFileTypes: true },\n    ) => Promise\n    readlink?: (path: string) => Promise\n    realpath?: (path: string) => Promise\n    [k: string]: any\n  }\n  [k: string]: any\n}\n\ninterface FSValue {\n  lstatSync: (path: string) => Stats\n  readdir: (\n    path: string,\n    options: { withFileTypes: true },\n    cb: (er: NodeJS.ErrnoException | null, entries?: Dirent[]) => any,\n  ) => void\n  readdirSync: (path: string, options: { withFileTypes: true }) => Dirent[]\n  readlinkSync: (path: string) => string\n  realpathSync: (path: string) => string\n  promises: {\n    lstat: (path: string) => Promise\n    readdir: (\n      path: string,\n      options: { withFileTypes: true },\n    ) => Promise\n    readlink: (path: string) => Promise\n    realpath: (path: string) => Promise\n    [k: string]: any\n  }\n  [k: string]: any\n}\n\nconst defaultFS: FSValue = {\n  lstatSync,\n  readdir: readdirCB,\n  readdirSync,\n  readlinkSync,\n  realpathSync,\n  promises: {\n    lstat,\n    readdir,\n    readlink,\n    realpath,\n  },\n}\n\n// if they just gave us 
require('fs') then use our default\nconst fsFromOption = (fsOption?: FSOption): FSValue =>\n  !fsOption || fsOption === defaultFS || fsOption === actualFS ?\n    defaultFS\n  : {\n      ...defaultFS,\n      ...fsOption,\n      promises: {\n        ...defaultFS.promises,\n        ...(fsOption.promises || {}),\n      },\n    }\n\n// turn something like //?/c:/ into c:\\\nconst uncDriveRegexp = /^\\\\\\\\\\?\\\\([a-z]:)\\\\?$/i\nconst uncToDrive = (rootPath: string): string =>\n  rootPath.replace(/\\//g, '\\\\').replace(uncDriveRegexp, '$1\\\\')\n\n// windows paths are separated by either / or \\\nconst eitherSep = /[\\\\\\/]/\n\nconst UNKNOWN = 0 // may not even exist, for all we know\nconst IFIFO = 0b0001\nconst IFCHR = 0b0010\nconst IFDIR = 0b0100\nconst IFBLK = 0b0110\nconst IFREG = 0b1000\nconst IFLNK = 0b1010\nconst IFSOCK = 0b1100\nconst IFMT = 0b1111\n\nexport type Type =\n  | 'Unknown'\n  | 'FIFO'\n  | 'CharacterDevice'\n  | 'Directory'\n  | 'BlockDevice'\n  | 'File'\n  | 'SymbolicLink'\n  | 'Socket'\n\n// mask to unset low 4 bits\nconst IFMT_UNKNOWN = ~IFMT\n\n// set after successfully calling readdir() and getting entries.\nconst READDIR_CALLED = 0b0000_0001_0000\n// set after a successful lstat()\nconst LSTAT_CALLED = 0b0000_0010_0000\n// set if an entry (or one of its parents) is definitely not a dir\nconst ENOTDIR = 0b0000_0100_0000\n// set if an entry (or one of its parents) does not exist\n// (can also be set on lstat errors like EACCES or ENAMETOOLONG)\nconst ENOENT = 0b0000_1000_0000\n// cannot have child entries -- also verify &IFMT is either IFDIR or IFLNK\n// set if we fail to readlink\nconst ENOREADLINK = 0b0001_0000_0000\n// set if we know realpath() will fail\nconst ENOREALPATH = 0b0010_0000_0000\n\nconst ENOCHILD = ENOTDIR | ENOENT | ENOREALPATH\nconst TYPEMASK = 0b0011_1111_1111\n\nconst entToType = (s: Dirent | Stats) =>\n  s.isFile() ? IFREG\n  : s.isDirectory() ? IFDIR\n  : s.isSymbolicLink() ? IFLNK\n  : s.isCharacterDevice() ? 
IFCHR\n  : s.isBlockDevice() ? IFBLK\n  : s.isSocket() ? IFSOCK\n  : s.isFIFO() ? IFIFO\n  : UNKNOWN\n\n// normalize unicode path names\nconst normalizeCache = new LRUCache({ max: 2 ** 12 })\nconst normalize = (s: string) => {\n  const c = normalizeCache.get(s)\n  if (c) return c\n  const n = s.normalize('NFKD')\n  normalizeCache.set(s, n)\n  return n\n}\n\nconst normalizeNocaseCache = new LRUCache({ max: 2 ** 12 })\nconst normalizeNocase = (s: string) => {\n  const c = normalizeNocaseCache.get(s)\n  if (c) return c\n  const n = normalize(s.toLowerCase())\n  normalizeNocaseCache.set(s, n)\n  return n\n}\n\n/**\n * Options that may be provided to the Path constructor\n */\nexport interface PathOpts {\n  fullpath?: string\n  relative?: string\n  relativePosix?: string\n  parent?: PathBase\n  /**\n   * See {@link FSOption}\n   */\n  fs?: FSOption\n}\n\n/**\n * An LRUCache for storing resolved path strings or Path objects.\n * @internal\n */\nexport class ResolveCache extends LRUCache {\n  constructor() {\n    super({ max: 256 })\n  }\n}\n\n// In order to prevent blowing out the js heap by allocating hundreds of\n// thousands of Path entries when walking extremely large trees, the \"children\"\n// in this tree are represented by storing an array of Path entries in an\n// LRUCache, indexed by the parent.  At any time, Path.children() may return an\n// empty array, indicating that it doesn't know about any of its children, and\n// thus has to rebuild that cache.  
This is fine, it just means that we don't\n// benefit as much from having the cached entries, but huge directory walks\n// don't blow out the stack, and smaller ones are still as fast as possible.\n//\n//It does impose some complexity when building up the readdir data, because we\n//need to pass a reference to the children array that we started with.\n\n/**\n * an LRUCache for storing child entries.\n * @internal\n */\nexport class ChildrenCache extends LRUCache {\n  constructor(maxSize: number = 16 * 1024) {\n    super({\n      maxSize,\n      // parent + children\n      sizeCalculation: a => a.length + 1,\n    })\n  }\n}\n\n/**\n * Array of Path objects, plus a marker indicating the first provisional entry\n *\n * @internal\n */\nexport type Children = PathBase[] & { provisional: number }\n\nconst setAsCwd = Symbol('PathScurry setAsCwd')\n\n/**\n * Path objects are sort of like a super-powered\n * {@link https://nodejs.org/docs/latest/api/fs.html#class-fsdirent fs.Dirent}\n *\n * Each one represents a single filesystem entry on disk, which may or may not\n * exist. It includes methods for reading various types of information via\n * lstat, readlink, and readdir, and caches all information to the greatest\n * degree possible.\n *\n * Note that fs operations that would normally throw will instead return an\n * \"empty\" value. This is in order to prevent excessive overhead from error\n * stack traces.\n */\nexport abstract class PathBase implements Dirent {\n  /**\n   * the basename of this path\n   *\n   * **Important**: *always* test the path name against any test string\n   * usingthe {@link isNamed} method, and not by directly comparing this\n   * string. 
Otherwise, unicode path strings that the system sees as identical\n   * will not be properly treated as the same path, leading to incorrect\n   * behavior and possible security issues.\n   */\n  name: string\n  /**\n   * the Path entry corresponding to the path root.\n   *\n   * @internal\n   */\n  root: PathBase\n  /**\n   * All roots found within the current PathScurry family\n   *\n   * @internal\n   */\n  roots: { [k: string]: PathBase }\n  /**\n   * a reference to the parent path, or undefined in the case of root entries\n   *\n   * @internal\n   */\n  parent?: PathBase\n  /**\n   * boolean indicating whether paths are compared case-insensitively\n   * @internal\n   */\n  nocase: boolean\n\n  /**\n   * boolean indicating that this path is the current working directory\n   * of the PathScurry collection that contains it.\n   */\n  isCWD: boolean = false\n\n  /**\n   * the string or regexp used to split paths. On posix, it is `'/'`, and on\n   * windows it is a RegExp matching either `'/'` or `'\\\\'`\n   */\n  abstract splitSep: string | RegExp\n  /**\n   * The path separator string to use when joining paths\n   */\n  abstract sep: string\n\n  // potential default fs override\n  #fs: FSValue\n\n  // Stats fields\n  #dev?: number\n  get dev() {\n    return this.#dev\n  }\n  #mode?: number\n  get mode() {\n    return this.#mode\n  }\n  #nlink?: number\n  get nlink() {\n    return this.#nlink\n  }\n  #uid?: number\n  get uid() {\n    return this.#uid\n  }\n  #gid?: number\n  get gid() {\n    return this.#gid\n  }\n  #rdev?: number\n  get rdev() {\n    return this.#rdev\n  }\n  #blksize?: number\n  get blksize() {\n    return this.#blksize\n  }\n  #ino?: number\n  get ino() {\n    return this.#ino\n  }\n  #size?: number\n  get size() {\n    return this.#size\n  }\n  #blocks?: number\n  get blocks() {\n    return this.#blocks\n  }\n  #atimeMs?: number\n  get atimeMs() {\n    return this.#atimeMs\n  }\n  #mtimeMs?: number\n  get mtimeMs() {\n    return 
this.#mtimeMs\n  }\n  #ctimeMs?: number\n  get ctimeMs() {\n    return this.#ctimeMs\n  }\n  #birthtimeMs?: number\n  get birthtimeMs() {\n    return this.#birthtimeMs\n  }\n  #atime?: Date\n  get atime() {\n    return this.#atime\n  }\n  #mtime?: Date\n  get mtime() {\n    return this.#mtime\n  }\n  #ctime?: Date\n  get ctime() {\n    return this.#ctime\n  }\n  #birthtime?: Date\n  get birthtime() {\n    return this.#birthtime\n  }\n\n  #matchName: string\n  #depth?: number\n  #fullpath?: string\n  #fullpathPosix?: string\n  #relative?: string\n  #relativePosix?: string\n  #type: number\n  #children: ChildrenCache\n  #linkTarget?: PathBase\n  #realpath?: PathBase\n\n  /**\n   * This property is for compatibility with the Dirent class as of\n   * Node v20, where Dirent['parentPath'] refers to the path of the\n   * directory that was passed to readdir. For root entries, it's the path\n   * to the entry itself.\n   */\n  get parentPath(): string {\n    return (this.parent || this).fullpath()\n  }\n\n  /* c8 ignore start */\n  /**\n   * Deprecated alias for Dirent['parentPath'] Somewhat counterintuitively,\n   * this property refers to the *parent* path, not the path object itself.\n   *\n   * @deprecated\n   */\n  get path(): string {\n    return this.parentPath\n  }\n  /* c8 ignore stop */\n\n  /**\n   * Do not create new Path objects directly.  They should always be accessed\n   * via the PathScurry class or other methods on the Path class.\n   *\n   * @internal\n   */\n  constructor(\n    name: string,\n    type: number = UNKNOWN,\n    root: PathBase | undefined,\n    roots: { [k: string]: PathBase },\n    nocase: boolean,\n    children: ChildrenCache,\n    opts: PathOpts,\n  ) {\n    this.name = name\n    this.#matchName = nocase ? 
normalizeNocase(name) : normalize(name)\n    this.#type = type & TYPEMASK\n    this.nocase = nocase\n    this.roots = roots\n    this.root = root || this\n    this.#children = children\n    this.#fullpath = opts.fullpath\n    this.#relative = opts.relative\n    this.#relativePosix = opts.relativePosix\n    this.parent = opts.parent\n    if (this.parent) {\n      this.#fs = this.parent.#fs\n    } else {\n      this.#fs = fsFromOption(opts.fs)\n    }\n  }\n\n  /**\n   * Returns the depth of the Path object from its root.\n   *\n   * For example, a path at `/foo/bar` would have a depth of 2.\n   */\n  depth(): number {\n    if (this.#depth !== undefined) return this.#depth\n    if (!this.parent) return (this.#depth = 0)\n    return (this.#depth = this.parent.depth() + 1)\n  }\n\n  /**\n   * @internal\n   */\n  abstract getRootString(path: string): string\n  /**\n   * @internal\n   */\n  abstract getRoot(rootPath: string): PathBase\n  /**\n   * @internal\n   */\n  abstract newChild(name: string, type?: number, opts?: PathOpts): PathBase\n\n  /**\n   * @internal\n   */\n  childrenCache() {\n    return this.#children\n  }\n\n  /**\n   * Get the Path object referenced by the string path, resolved from this Path\n   */\n  resolve(path?: string): PathBase {\n    if (!path) {\n      return this\n    }\n    const rootPath = this.getRootString(path)\n    const dir = path.substring(rootPath.length)\n    const dirParts = dir.split(this.splitSep)\n    const result: PathBase =\n      rootPath ?\n        this.getRoot(rootPath).#resolveParts(dirParts)\n      : this.#resolveParts(dirParts)\n    return result\n  }\n\n  #resolveParts(dirParts: string[]) {\n    let p: PathBase = this\n    for (const part of dirParts) {\n      p = p.child(part)\n    }\n    return p\n  }\n\n  /**\n   * Returns the cached children Path objects, if still available.  
If they\n   * have fallen out of the cache, then returns an empty array, and resets the\n   * READDIR_CALLED bit, so that future calls to readdir() will require an fs\n   * lookup.\n   *\n   * @internal\n   */\n  children(): Children {\n    const cached = this.#children.get(this)\n    if (cached) {\n      return cached\n    }\n    const children: Children = Object.assign([], { provisional: 0 })\n    this.#children.set(this, children)\n    this.#type &= ~READDIR_CALLED\n    return children\n  }\n\n  /**\n   * Resolves a path portion and returns or creates the child Path.\n   *\n   * Returns `this` if pathPart is `''` or `'.'`, or `parent` if pathPart is\n   * `'..'`.\n   *\n   * This should not be called directly.  If `pathPart` contains any path\n   * separators, it will lead to unsafe undefined behavior.\n   *\n   * Use `Path.resolve()` instead.\n   *\n   * @internal\n   */\n  child(pathPart: string, opts?: PathOpts): PathBase {\n    if (pathPart === '' || pathPart === '.') {\n      return this\n    }\n    if (pathPart === '..') {\n      return this.parent || this\n    }\n\n    // find the child\n    const children = this.children()\n    const name =\n      this.nocase ? normalizeNocase(pathPart) : normalize(pathPart)\n    for (const p of children) {\n      if (p.#matchName === name) {\n        return p\n      }\n    }\n\n    // didn't find it, create provisional child, since it might not\n    // actually exist.  If we know the parent isn't a dir, then\n    // in fact it CAN'T exist.\n    const s = this.parent ? this.sep : ''\n    const fullpath =\n      this.#fullpath ? 
this.#fullpath + s + pathPart : undefined\n    const pchild = this.newChild(pathPart, UNKNOWN, {\n      ...opts,\n      parent: this,\n      fullpath,\n    })\n\n    if (!this.canReaddir()) {\n      pchild.#type |= ENOENT\n    }\n\n    // don't have to update provisional, because if we have real children,\n    // then provisional is set to children.length, otherwise a lower number\n    children.push(pchild)\n    return pchild\n  }\n\n  /**\n   * The relative path from the cwd. If it does not share an ancestor with\n   * the cwd, then this ends up being equivalent to the fullpath()\n   */\n  relative(): string {\n    if (this.isCWD) return ''\n    if (this.#relative !== undefined) {\n      return this.#relative\n    }\n    const name = this.name\n    const p = this.parent\n    if (!p) {\n      return (this.#relative = this.name)\n    }\n    const pv = p.relative()\n    return pv + (!pv || !p.parent ? '' : this.sep) + name\n  }\n\n  /**\n   * The relative path from the cwd, using / as the path separator.\n   * If it does not share an ancestor with\n   * the cwd, then this ends up being equivalent to the fullpathPosix()\n   * On posix systems, this is identical to relative().\n   */\n  relativePosix(): string {\n    if (this.sep === '/') return this.relative()\n    if (this.isCWD) return ''\n    if (this.#relativePosix !== undefined) return this.#relativePosix\n    const name = this.name\n    const p = this.parent\n    if (!p) {\n      return (this.#relativePosix = this.fullpathPosix())\n    }\n    const pv = p.relativePosix()\n    return pv + (!pv || !p.parent ? '' : '/') + name\n  }\n\n  /**\n   * The fully resolved path string for this Path entry\n   */\n  fullpath(): string {\n    if (this.#fullpath !== undefined) {\n      return this.#fullpath\n    }\n    const name = this.name\n    const p = this.parent\n    if (!p) {\n      return (this.#fullpath = this.name)\n    }\n    const pv = p.fullpath()\n    const fp = pv + (!p.parent ? 
'' : this.sep) + name\n    return (this.#fullpath = fp)\n  }\n\n  /**\n   * On platforms other than windows, this is identical to fullpath.\n   *\n   * On windows, this is overridden to return the forward-slash form of the\n   * full UNC path.\n   */\n  fullpathPosix(): string {\n    if (this.#fullpathPosix !== undefined) return this.#fullpathPosix\n    if (this.sep === '/') return (this.#fullpathPosix = this.fullpath())\n    if (!this.parent) {\n      const p = this.fullpath().replace(/\\\\/g, '/')\n      if (/^[a-z]:\\//i.test(p)) {\n        return (this.#fullpathPosix = `//?/${p}`)\n      } else {\n        return (this.#fullpathPosix = p)\n      }\n    }\n    const p = this.parent\n    const pfpp = p.fullpathPosix()\n    const fpp = pfpp + (!pfpp || !p.parent ? '' : '/') + this.name\n    return (this.#fullpathPosix = fpp)\n  }\n\n  /**\n   * Is the Path of an unknown type?\n   *\n   * Note that we might know *something* about it if there has been a previous\n   * filesystem operation, for example that it does not exist, or is not a\n   * link, or whether it has child entries.\n   */\n  isUnknown(): boolean {\n    return (this.#type & IFMT) === UNKNOWN\n  }\n\n  isType(type: Type): boolean {\n    return this[`is${type}`]()\n  }\n\n  getType(): Type {\n    return (\n      this.isUnknown() ? 'Unknown'\n      : this.isDirectory() ? 'Directory'\n      : this.isFile() ? 'File'\n      : this.isSymbolicLink() ? 'SymbolicLink'\n      : this.isFIFO() ? 'FIFO'\n      : this.isCharacterDevice() ? 'CharacterDevice'\n      : this.isBlockDevice() ? 'BlockDevice'\n      : /* c8 ignore start */ this.isSocket() ? 
'Socket'\n      : 'Unknown'\n    )\n    /* c8 ignore stop */\n  }\n\n  /**\n   * Is the Path a regular file?\n   */\n  isFile(): boolean {\n    return (this.#type & IFMT) === IFREG\n  }\n\n  /**\n   * Is the Path a directory?\n   */\n  isDirectory(): boolean {\n    return (this.#type & IFMT) === IFDIR\n  }\n\n  /**\n   * Is the path a character device?\n   */\n  isCharacterDevice(): boolean {\n    return (this.#type & IFMT) === IFCHR\n  }\n\n  /**\n   * Is the path a block device?\n   */\n  isBlockDevice(): boolean {\n    return (this.#type & IFMT) === IFBLK\n  }\n\n  /**\n   * Is the path a FIFO pipe?\n   */\n  isFIFO(): boolean {\n    return (this.#type & IFMT) === IFIFO\n  }\n\n  /**\n   * Is the path a socket?\n   */\n  isSocket(): boolean {\n    return (this.#type & IFMT) === IFSOCK\n  }\n\n  /**\n   * Is the path a symbolic link?\n   */\n  isSymbolicLink(): boolean {\n    return (this.#type & IFLNK) === IFLNK\n  }\n\n  /**\n   * Return the entry if it has been subject of a successful lstat, or\n   * undefined otherwise.\n   *\n   * Does not read the filesystem, so an undefined result *could* simply\n   * mean that we haven't called lstat on it.\n   */\n  lstatCached(): PathBase | undefined {\n    return this.#type & LSTAT_CALLED ? this : undefined\n  }\n\n  /**\n   * Return the cached link target if the entry has been the subject of a\n   * successful readlink, or undefined otherwise.\n   *\n   * Does not read the filesystem, so an undefined result *could* just mean we\n   * don't have any cached data. Only use it if you are very sure that a\n   * readlink() has been called at some point.\n   */\n  readlinkCached(): PathBase | undefined {\n    return this.#linkTarget\n  }\n\n  /**\n   * Returns the cached realpath target if the entry has been the subject\n   * of a successful realpath, or undefined otherwise.\n   *\n   * Does not read the filesystem, so an undefined result *could* just mean we\n   * don't have any cached data. 
Only use it if you are very sure that a\n   * realpath() has been called at some point.\n   */\n  realpathCached(): PathBase | undefined {\n    return this.#realpath\n  }\n\n  /**\n   * Returns the cached child Path entries array if the entry has been the\n   * subject of a successful readdir(), or [] otherwise.\n   *\n   * Does not read the filesystem, so an empty array *could* just mean we\n   * don't have any cached data. Only use it if you are very sure that a\n   * readdir() has been called recently enough to still be valid.\n   */\n  readdirCached(): PathBase[] {\n    const children = this.children()\n    return children.slice(0, children.provisional)\n  }\n\n  /**\n   * Return true if it's worth trying to readlink.  Ie, we don't (yet) have\n   * any indication that readlink will definitely fail.\n   *\n   * Returns false if the path is known to not be a symlink, if a previous\n   * readlink failed, or if the entry does not exist.\n   */\n  canReadlink(): boolean {\n    if (this.#linkTarget) return true\n    if (!this.parent) return false\n    // cases where it cannot possibly succeed\n    const ifmt = this.#type & IFMT\n    return !(\n      (ifmt !== UNKNOWN && ifmt !== IFLNK) ||\n      this.#type & ENOREADLINK ||\n      this.#type & ENOENT\n    )\n  }\n\n  /**\n   * Return true if readdir has previously been successfully called on this\n   * path, indicating that cachedReaddir() is likely valid.\n   */\n  calledReaddir(): boolean {\n    return !!(this.#type & READDIR_CALLED)\n  }\n\n  /**\n   * Returns true if the path is known to not exist. That is, a previous lstat\n   * or readdir failed to verify its existence when that would have been\n   * expected, or a parent entry was marked either enoent or enotdir.\n   */\n  isENOENT(): boolean {\n    return !!(this.#type & ENOENT)\n  }\n\n  /**\n   * Return true if the path is a match for the given path name.  
This handles\n   * case sensitivity and unicode normalization.\n   *\n   * Note: even on case-sensitive systems, it is **not** safe to test the\n   * equality of the `.name` property to determine whether a given pathname\n   * matches, due to unicode normalization mismatches.\n   *\n   * Always use this method instead of testing the `path.name` property\n   * directly.\n   */\n  isNamed(n: string): boolean {\n    return !this.nocase ?\n        this.#matchName === normalize(n)\n      : this.#matchName === normalizeNocase(n)\n  }\n\n  /**\n   * Return the Path object corresponding to the target of a symbolic link.\n   *\n   * If the Path is not a symbolic link, or if the readlink call fails for any\n   * reason, `undefined` is returned.\n   *\n   * Result is cached, and thus may be outdated if the filesystem is mutated.\n   */\n  async readlink(): Promise {\n    const target = this.#linkTarget\n    if (target) {\n      return target\n    }\n    if (!this.canReadlink()) {\n      return undefined\n    }\n    /* c8 ignore start */\n    // already covered by the canReadlink test, here for ts grumples\n    if (!this.parent) {\n      return undefined\n    }\n    /* c8 ignore stop */\n    try {\n      const read = await this.#fs.promises.readlink(this.fullpath())\n      const linkTarget = (await this.parent.realpath())?.resolve(read)\n      if (linkTarget) {\n        return (this.#linkTarget = linkTarget)\n      }\n    } catch (er) {\n      this.#readlinkFail((er as NodeJS.ErrnoException).code)\n      return undefined\n    }\n  }\n\n  /**\n   * Synchronous {@link PathBase.readlink}\n   */\n  readlinkSync(): PathBase | undefined {\n    const target = this.#linkTarget\n    if (target) {\n      return target\n    }\n    if (!this.canReadlink()) {\n      return undefined\n    }\n    /* c8 ignore start */\n    // already covered by the canReadlink test, here for ts grumples\n    if (!this.parent) {\n      return undefined\n    }\n    /* c8 ignore stop */\n    try {\n      const 
read = this.#fs.readlinkSync(this.fullpath())\n      const linkTarget = this.parent.realpathSync()?.resolve(read)\n      if (linkTarget) {\n        return (this.#linkTarget = linkTarget)\n      }\n    } catch (er) {\n      this.#readlinkFail((er as NodeJS.ErrnoException).code)\n      return undefined\n    }\n  }\n\n  #readdirSuccess(children: Children) {\n    // succeeded, mark readdir called bit\n    this.#type |= READDIR_CALLED\n    // mark all remaining provisional children as ENOENT\n    for (let p = children.provisional; p < children.length; p++) {\n      const c = children[p]\n      if (c) c.#markENOENT()\n    }\n  }\n\n  #markENOENT() {\n    // mark as UNKNOWN and ENOENT\n    if (this.#type & ENOENT) return\n    this.#type = (this.#type | ENOENT) & IFMT_UNKNOWN\n    this.#markChildrenENOENT()\n  }\n\n  #markChildrenENOENT() {\n    // all children are provisional and do not exist\n    const children = this.children()\n    children.provisional = 0\n    for (const p of children) {\n      p.#markENOENT()\n    }\n  }\n\n  #markENOREALPATH() {\n    this.#type |= ENOREALPATH\n    this.#markENOTDIR()\n  }\n\n  // save the information when we know the entry is not a dir\n  #markENOTDIR() {\n    // entry is not a directory, so any children can't exist.\n    // this *should* be impossible, since any children created\n    // after it's been marked ENOTDIR should be marked ENOENT,\n    // so it won't even get to this point.\n    /* c8 ignore start */\n    if (this.#type & ENOTDIR) return\n    /* c8 ignore stop */\n    let t = this.#type\n    // this could happen if we stat a dir, then delete it,\n    // then try to read it or one of its children.\n    if ((t & IFMT) === IFDIR) t &= IFMT_UNKNOWN\n    this.#type = t | ENOTDIR\n    this.#markChildrenENOENT()\n  }\n\n  #readdirFail(code: string = '') {\n    // markENOTDIR and markENOENT also set provisional=0\n    if (code === 'ENOTDIR' || code === 'EPERM') {\n      this.#markENOTDIR()\n    } else if (code === 'ENOENT') {\n  
    this.#markENOENT()\n    } else {\n      this.children().provisional = 0\n    }\n  }\n\n  #lstatFail(code: string = '') {\n    // Windows just raises ENOENT in this case, disable for win CI\n    /* c8 ignore start */\n    if (code === 'ENOTDIR') {\n      // already know it has a parent by this point\n      const p = this.parent as PathBase\n      p.#markENOTDIR()\n    } else if (code === 'ENOENT') {\n      /* c8 ignore stop */\n      this.#markENOENT()\n    }\n  }\n\n  #readlinkFail(code: string = '') {\n    let ter = this.#type\n    ter |= ENOREADLINK\n    if (code === 'ENOENT') ter |= ENOENT\n    // windows gets a weird error when you try to readlink a file\n    if (code === 'EINVAL' || code === 'UNKNOWN') {\n      // exists, but not a symlink, we don't know WHAT it is, so remove\n      // all IFMT bits.\n      ter &= IFMT_UNKNOWN\n    }\n    this.#type = ter\n    // windows just gets ENOENT in this case.  We do cover the case,\n    // just disabled because it's impossible on Windows CI\n    /* c8 ignore start */\n    if (code === 'ENOTDIR' && this.parent) {\n      this.parent.#markENOTDIR()\n    }\n    /* c8 ignore stop */\n  }\n\n  #readdirAddChild(e: Dirent, c: Children) {\n    return (\n      this.#readdirMaybePromoteChild(e, c) ||\n      this.#readdirAddNewChild(e, c)\n    )\n  }\n\n  #readdirAddNewChild(e: Dirent, c: Children): PathBase {\n    // alloc new entry at head, so it's never provisional\n    const type = entToType(e)\n    const child = this.newChild(e.name, type, { parent: this })\n    const ifmt = child.#type & IFMT\n    if (ifmt !== IFDIR && ifmt !== IFLNK && ifmt !== UNKNOWN) {\n      child.#type |= ENOTDIR\n    }\n    c.unshift(child)\n    c.provisional++\n    return child\n  }\n\n  #readdirMaybePromoteChild(e: Dirent, c: Children): PathBase | undefined {\n    for (let p = c.provisional; p < c.length; p++) {\n      const pchild = c[p]\n      const name =\n        this.nocase ? 
normalizeNocase(e.name) : normalize(e.name)\n      if (name !== pchild!.#matchName) {\n        continue\n      }\n\n      return this.#readdirPromoteChild(e, pchild!, p, c)\n    }\n  }\n\n  #readdirPromoteChild(\n    e: Dirent,\n    p: PathBase,\n    index: number,\n    c: Children,\n  ): PathBase {\n    const v = p.name\n    // retain any other flags, but set ifmt from dirent\n    p.#type = (p.#type & IFMT_UNKNOWN) | entToType(e)\n    // case sensitivity fixing when we learn the true name.\n    if (v !== e.name) p.name = e.name\n\n    // just advance provisional index (potentially off the list),\n    // otherwise we have to splice/pop it out and re-insert at head\n    if (index !== c.provisional) {\n      if (index === c.length - 1) c.pop()\n      else c.splice(index, 1)\n      c.unshift(p)\n    }\n    c.provisional++\n    return p\n  }\n\n  /**\n   * Call lstat() on this Path, and update all known information that can be\n   * determined.\n   *\n   * Note that unlike `fs.lstat()`, the returned value does not contain some\n   * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that\n   * information is required, you will need to call `fs.lstat` yourself.\n   *\n   * If the Path refers to a nonexistent file, or if the lstat call fails for\n   * any reason, `undefined` is returned.  
Otherwise the updated Path object is\n   * returned.\n   *\n   * Results are cached, and thus may be out of date if the filesystem is\n   * mutated.\n   */\n  async lstat(): Promise {\n    if ((this.#type & ENOENT) === 0) {\n      try {\n        this.#applyStat(await this.#fs.promises.lstat(this.fullpath()))\n        return this\n      } catch (er) {\n        this.#lstatFail((er as NodeJS.ErrnoException).code)\n      }\n    }\n  }\n\n  /**\n   * synchronous {@link PathBase.lstat}\n   */\n  lstatSync(): PathBase | undefined {\n    if ((this.#type & ENOENT) === 0) {\n      try {\n        this.#applyStat(this.#fs.lstatSync(this.fullpath()))\n        return this\n      } catch (er) {\n        this.#lstatFail((er as NodeJS.ErrnoException).code)\n      }\n    }\n  }\n\n  #applyStat(st: Stats) {\n    const {\n      atime,\n      atimeMs,\n      birthtime,\n      birthtimeMs,\n      blksize,\n      blocks,\n      ctime,\n      ctimeMs,\n      dev,\n      gid,\n      ino,\n      mode,\n      mtime,\n      mtimeMs,\n      nlink,\n      rdev,\n      size,\n      uid,\n    } = st\n    this.#atime = atime\n    this.#atimeMs = atimeMs\n    this.#birthtime = birthtime\n    this.#birthtimeMs = birthtimeMs\n    this.#blksize = blksize\n    this.#blocks = blocks\n    this.#ctime = ctime\n    this.#ctimeMs = ctimeMs\n    this.#dev = dev\n    this.#gid = gid\n    this.#ino = ino\n    this.#mode = mode\n    this.#mtime = mtime\n    this.#mtimeMs = mtimeMs\n    this.#nlink = nlink\n    this.#rdev = rdev\n    this.#size = size\n    this.#uid = uid\n    const ifmt = entToType(st)\n    // retain any other flags, but set the ifmt\n    this.#type = (this.#type & IFMT_UNKNOWN) | ifmt | LSTAT_CALLED\n    if (ifmt !== UNKNOWN && ifmt !== IFDIR && ifmt !== IFLNK) {\n      this.#type |= ENOTDIR\n    }\n  }\n\n  #onReaddirCB: ((\n    er: NodeJS.ErrnoException | null,\n    entries: Path[],\n  ) => any)[] = []\n  #readdirCBInFlight: boolean = false\n  #callOnReaddirCB(children: Path[]) {\n    
this.#readdirCBInFlight = false\n    const cbs = this.#onReaddirCB.slice()\n    this.#onReaddirCB.length = 0\n    cbs.forEach(cb => cb(null, children))\n  }\n\n  /**\n   * Standard node-style callback interface to get list of directory entries.\n   *\n   * If the Path cannot or does not contain any children, then an empty array\n   * is returned.\n   *\n   * Results are cached, and thus may be out of date if the filesystem is\n   * mutated.\n   *\n   * @param cb The callback called with (er, entries).  Note that the `er`\n   * param is somewhat extraneous, as all readdir() errors are handled and\n   * simply result in an empty set of entries being returned.\n   * @param allowZalgo Boolean indicating that immediately known results should\n   * *not* be deferred with `queueMicrotask`. Defaults to `false`. Release\n   * zalgo at your peril, the dark pony lord is devious and unforgiving.\n   */\n  readdirCB(\n    cb: (er: NodeJS.ErrnoException | null, entries: PathBase[]) => any,\n    allowZalgo: boolean = false,\n  ): void {\n    if (!this.canReaddir()) {\n      if (allowZalgo) cb(null, [])\n      else queueMicrotask(() => cb(null, []))\n      return\n    }\n\n    const children = this.children()\n    if (this.calledReaddir()) {\n      const c = children.slice(0, children.provisional)\n      if (allowZalgo) cb(null, c)\n      else queueMicrotask(() => cb(null, c))\n      return\n    }\n\n    // don't have to worry about zalgo at this point.\n    this.#onReaddirCB.push(cb)\n    if (this.#readdirCBInFlight) {\n      return\n    }\n    this.#readdirCBInFlight = true\n\n    // else read the directory, fill up children\n    // de-provisionalize any provisional children.\n    const fullpath = this.fullpath()\n    this.#fs.readdir(fullpath, { withFileTypes: true }, (er, entries) => {\n      if (er) {\n        this.#readdirFail((er as NodeJS.ErrnoException).code)\n        children.provisional = 0\n      } else {\n        // if we didn't get an error, we always get entries.\n  
      //@ts-ignore\n        for (const e of entries) {\n          this.#readdirAddChild(e, children)\n        }\n        this.#readdirSuccess(children)\n      }\n      this.#callOnReaddirCB(children.slice(0, children.provisional))\n      return\n    })\n  }\n\n  #asyncReaddirInFlight?: Promise\n\n  /**\n   * Return an array of known child entries.\n   *\n   * If the Path cannot or does not contain any children, then an empty array\n   * is returned.\n   *\n   * Results are cached, and thus may be out of date if the filesystem is\n   * mutated.\n   */\n  async readdir(): Promise {\n    if (!this.canReaddir()) {\n      return []\n    }\n\n    const children = this.children()\n    if (this.calledReaddir()) {\n      return children.slice(0, children.provisional)\n    }\n\n    // else read the directory, fill up children\n    // de-provisionalize any provisional children.\n    const fullpath = this.fullpath()\n    if (this.#asyncReaddirInFlight) {\n      await this.#asyncReaddirInFlight\n    } else {\n      /* c8 ignore start */\n      let resolve: () => void = () => {}\n      /* c8 ignore stop */\n      this.#asyncReaddirInFlight = new Promise(\n        res => (resolve = res),\n      )\n      try {\n        for (const e of await this.#fs.promises.readdir(fullpath, {\n          withFileTypes: true,\n        })) {\n          this.#readdirAddChild(e, children)\n        }\n        this.#readdirSuccess(children)\n      } catch (er) {\n        this.#readdirFail((er as NodeJS.ErrnoException).code)\n        children.provisional = 0\n      }\n      this.#asyncReaddirInFlight = undefined\n      resolve()\n    }\n    return children.slice(0, children.provisional)\n  }\n\n  /**\n   * synchronous {@link PathBase.readdir}\n   */\n  readdirSync(): PathBase[] {\n    if (!this.canReaddir()) {\n      return []\n    }\n\n    const children = this.children()\n    if (this.calledReaddir()) {\n      return children.slice(0, children.provisional)\n    }\n\n    // else read the directory, 
fill up children\n    // de-provisionalize any provisional children.\n    const fullpath = this.fullpath()\n    try {\n      for (const e of this.#fs.readdirSync(fullpath, {\n        withFileTypes: true,\n      })) {\n        this.#readdirAddChild(e, children)\n      }\n      this.#readdirSuccess(children)\n    } catch (er) {\n      this.#readdirFail((er as NodeJS.ErrnoException).code)\n      children.provisional = 0\n    }\n    return children.slice(0, children.provisional)\n  }\n\n  canReaddir() {\n    if (this.#type & ENOCHILD) return false\n    const ifmt = IFMT & this.#type\n    // we always set ENOTDIR when setting IFMT, so should be impossible\n    /* c8 ignore start */\n    if (!(ifmt === UNKNOWN || ifmt === IFDIR || ifmt === IFLNK)) {\n      return false\n    }\n    /* c8 ignore stop */\n    return true\n  }\n\n  shouldWalk(\n    dirs: Set,\n    walkFilter?: (e: PathBase) => boolean,\n  ): boolean {\n    return (\n      (this.#type & IFDIR) === IFDIR &&\n      !(this.#type & ENOCHILD) &&\n      !dirs.has(this) &&\n      (!walkFilter || walkFilter(this))\n    )\n  }\n\n  /**\n   * Return the Path object corresponding to path as resolved\n   * by realpath(3).\n   *\n   * If the realpath call fails for any reason, `undefined` is returned.\n   *\n   * Result is cached, and thus may be outdated if the filesystem is mutated.\n   * On success, returns a Path object.\n   */\n  async realpath(): Promise {\n    if (this.#realpath) return this.#realpath\n    if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type) return undefined\n    try {\n      const rp = await this.#fs.promises.realpath(this.fullpath())\n      return (this.#realpath = this.resolve(rp))\n    } catch (_) {\n      this.#markENOREALPATH()\n    }\n  }\n\n  /**\n   * Synchronous {@link realpath}\n   */\n  realpathSync(): PathBase | undefined {\n    if (this.#realpath) return this.#realpath\n    if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type) return undefined\n    try {\n      const rp = 
this.#fs.realpathSync(this.fullpath())\n      return (this.#realpath = this.resolve(rp))\n    } catch (_) {\n      this.#markENOREALPATH()\n    }\n  }\n\n  /**\n   * Internal method to mark this Path object as the scurry cwd,\n   * called by {@link PathScurry#chdir}\n   *\n   * @internal\n   */\n  [setAsCwd](oldCwd: PathBase): void {\n    if (oldCwd === this) return\n    oldCwd.isCWD = false\n    this.isCWD = true\n\n    const changed = new Set([])\n    let rp = []\n    let p: PathBase = this\n    while (p && p.parent) {\n      changed.add(p)\n      p.#relative = rp.join(this.sep)\n      p.#relativePosix = rp.join('/')\n      p = p.parent\n      rp.push('..')\n    }\n    // now un-memoize parents of old cwd\n    p = oldCwd\n    while (p && p.parent && !changed.has(p)) {\n      p.#relative = undefined\n      p.#relativePosix = undefined\n      p = p.parent\n    }\n  }\n}\n\n/**\n * Path class used on win32 systems\n *\n * Uses `'\\\\'` as the path separator for returned paths, either `'\\\\'` or `'/'`\n * as the path separator for parsing paths.\n */\nexport class PathWin32 extends PathBase {\n  /**\n   * Separator for generating path strings.\n   */\n  sep: '\\\\' = '\\\\'\n  /**\n   * Separator for parsing path strings.\n   */\n  splitSep: RegExp = eitherSep\n\n  /**\n   * Do not create new Path objects directly.  
They should always be accessed\n   * via the PathScurry class or other methods on the Path class.\n   *\n   * @internal\n   */\n  constructor(\n    name: string,\n    type: number = UNKNOWN,\n    root: PathBase | undefined,\n    roots: { [k: string]: PathBase },\n    nocase: boolean,\n    children: ChildrenCache,\n    opts: PathOpts,\n  ) {\n    super(name, type, root, roots, nocase, children, opts)\n  }\n\n  /**\n   * @internal\n   */\n  newChild(name: string, type: number = UNKNOWN, opts: PathOpts = {}) {\n    return new PathWin32(\n      name,\n      type,\n      this.root,\n      this.roots,\n      this.nocase,\n      this.childrenCache(),\n      opts,\n    )\n  }\n\n  /**\n   * @internal\n   */\n  getRootString(path: string): string {\n    return win32.parse(path).root\n  }\n\n  /**\n   * @internal\n   */\n  getRoot(rootPath: string): PathBase {\n    rootPath = uncToDrive(rootPath.toUpperCase())\n    if (rootPath === this.root.name) {\n      return this.root\n    }\n    // ok, not that one, check if it matches another we know about\n    for (const [compare, root] of Object.entries(this.roots)) {\n      if (this.sameRoot(rootPath, compare)) {\n        return (this.roots[rootPath] = root)\n      }\n    }\n    // otherwise, have to create a new one.\n    return (this.roots[rootPath] = new PathScurryWin32(\n      rootPath,\n      this,\n    ).root)\n  }\n\n  /**\n   * @internal\n   */\n  sameRoot(rootPath: string, compare: string = this.root.name): boolean {\n    // windows can (rarely) have case-sensitive filesystem, but\n    // UNC and drive letters are always case-insensitive, and canonically\n    // represented uppercase.\n    rootPath = rootPath\n      .toUpperCase()\n      .replace(/\\//g, '\\\\')\n      .replace(uncDriveRegexp, '$1\\\\')\n    return rootPath === compare\n  }\n}\n\n/**\n * Path class used on all posix systems.\n *\n * Uses `'/'` as the path separator.\n */\nexport class PathPosix extends PathBase {\n  /**\n   * separator for parsing path 
strings\n   */\n  splitSep: '/' = '/'\n  /**\n   * separator for generating path strings\n   */\n  sep: '/' = '/'\n\n  /**\n   * Do not create new Path objects directly.  They should always be accessed\n   * via the PathScurry class or other methods on the Path class.\n   *\n   * @internal\n   */\n  constructor(\n    name: string,\n    type: number = UNKNOWN,\n    root: PathBase | undefined,\n    roots: { [k: string]: PathBase },\n    nocase: boolean,\n    children: ChildrenCache,\n    opts: PathOpts,\n  ) {\n    super(name, type, root, roots, nocase, children, opts)\n  }\n\n  /**\n   * @internal\n   */\n  getRootString(path: string): string {\n    return path.startsWith('/') ? '/' : ''\n  }\n\n  /**\n   * @internal\n   */\n  getRoot(_rootPath: string): PathBase {\n    return this.root\n  }\n\n  /**\n   * @internal\n   */\n  newChild(name: string, type: number = UNKNOWN, opts: PathOpts = {}) {\n    return new PathPosix(\n      name,\n      type,\n      this.root,\n      this.roots,\n      this.nocase,\n      this.childrenCache(),\n      opts,\n    )\n  }\n}\n\n/**\n * Options that may be provided to the PathScurry constructor\n */\nexport interface PathScurryOpts {\n  /**\n   * perform case-insensitive path matching. 
Default based on platform\n   * subclass.\n   */\n  nocase?: boolean\n  /**\n   * Number of Path entries to keep in the cache of Path child references.\n   *\n   * Setting this higher than 65536 will dramatically increase the data\n   * consumption and construction time overhead of each PathScurry.\n   *\n   * Setting this value to 256 or lower will significantly reduce the data\n   * consumption and construction time overhead, but may also reduce resolve()\n   * and readdir() performance on large filesystems.\n   *\n   * Default `16384`.\n   */\n  childrenCacheSize?: number\n  /**\n   * An object that overrides the built-in functions from the fs and\n   * fs/promises modules.\n   *\n   * See {@link FSOption}\n   */\n  fs?: FSOption\n}\n\n/**\n * The base class for all PathScurry classes, providing the interface for path\n * resolution and filesystem operations.\n *\n * Typically, you should *not* instantiate this class directly, but rather one\n * of the platform-specific classes, or the exported {@link PathScurry} which\n * defaults to the current platform.\n */\nexport abstract class PathScurryBase {\n  /**\n   * The root Path entry for the current working directory of this Scurry\n   */\n  root: PathBase\n  /**\n   * The string path for the root of this Scurry's current working directory\n   */\n  rootPath: string\n  /**\n   * A collection of all roots encountered, referenced by rootPath\n   */\n  roots: { [k: string]: PathBase }\n  /**\n   * The Path entry corresponding to this PathScurry's current working directory.\n   */\n  cwd: PathBase\n  #resolveCache: ResolveCache\n  #resolvePosixCache: ResolveCache\n  #children: ChildrenCache\n  /**\n   * Perform path comparisons case-insensitively.\n   *\n   * Defaults true on Darwin and Windows systems, false elsewhere.\n   */\n  nocase: boolean\n\n  /**\n   * The path separator used for parsing paths\n   *\n   * `'/'` on Posix systems, either `'/'` or `'\\\\'` on Windows\n   */\n  abstract sep: string | RegExp\n\n  
#fs: FSValue\n\n  /**\n   * This class should not be instantiated directly.\n   *\n   * Use PathScurryWin32, PathScurryDarwin, PathScurryPosix, or PathScurry\n   *\n   * @internal\n   */\n  constructor(\n    cwd: URL | string = process.cwd(),\n    pathImpl: typeof win32 | typeof posix,\n    sep: string | RegExp,\n    {\n      nocase,\n      childrenCacheSize = 16 * 1024,\n      fs = defaultFS,\n    }: PathScurryOpts = {},\n  ) {\n    this.#fs = fsFromOption(fs)\n    if (cwd instanceof URL || cwd.startsWith('file://')) {\n      cwd = fileURLToPath(cwd)\n    }\n    // resolve and split root, and then add to the store.\n    // this is the only time we call path.resolve()\n    const cwdPath = pathImpl.resolve(cwd)\n    this.roots = Object.create(null)\n    this.rootPath = this.parseRootPath(cwdPath)\n    this.#resolveCache = new ResolveCache()\n    this.#resolvePosixCache = new ResolveCache()\n    this.#children = new ChildrenCache(childrenCacheSize)\n\n    const split = cwdPath.substring(this.rootPath.length).split(sep)\n    // resolve('/') leaves '', splits to [''], we don't want that.\n    if (split.length === 1 && !split[0]) {\n      split.pop()\n    }\n    /* c8 ignore start */\n    if (nocase === undefined) {\n      throw new TypeError(\n        'must provide nocase setting to PathScurryBase ctor',\n      )\n    }\n    /* c8 ignore stop */\n    this.nocase = nocase\n    this.root = this.newRoot(this.#fs)\n    this.roots[this.rootPath] = this.root\n    let prev: PathBase = this.root\n    let len = split.length - 1\n    const joinSep = pathImpl.sep\n    let abs = this.rootPath\n    let sawFirst = false\n    for (const part of split) {\n      const l = len--\n      prev = prev.child(part, {\n        relative: new Array(l).fill('..').join(joinSep),\n        relativePosix: new Array(l).fill('..').join('/'),\n        fullpath: (abs += (sawFirst ? 
'' : joinSep) + part),\n      })\n      sawFirst = true\n    }\n    this.cwd = prev\n  }\n\n  /**\n   * Get the depth of a provided path, string, or the cwd\n   */\n  depth(path: Path | string = this.cwd): number {\n    if (typeof path === 'string') {\n      path = this.cwd.resolve(path)\n    }\n    return path.depth()\n  }\n\n  /**\n   * Parse the root portion of a path string\n   *\n   * @internal\n   */\n  abstract parseRootPath(dir: string): string\n  /**\n   * create a new Path to use as root during construction.\n   *\n   * @internal\n   */\n  abstract newRoot(fs: FSValue): PathBase\n  /**\n   * Determine whether a given path string is absolute\n   */\n  abstract isAbsolute(p: string): boolean\n\n  /**\n   * Return the cache of child entries.  Exposed so subclasses can create\n   * child Path objects in a platform-specific way.\n   *\n   * @internal\n   */\n  childrenCache() {\n    return this.#children\n  }\n\n  /**\n   * Resolve one or more path strings to a resolved string\n   *\n   * Same interface as require('path').resolve.\n   *\n   * Much faster than path.resolve() when called multiple times for the same\n   * path, because the resolved Path objects are cached.  Much slower\n   * otherwise.\n   */\n  resolve(...paths: string[]): string {\n    // first figure out the minimum number of paths we have to test\n    // we always start at cwd, but any absolutes will bump the start\n    let r = ''\n    for (let i = paths.length - 1; i >= 0; i--) {\n      const p = paths[i]\n      if (!p || p === '.') continue\n      r = r ? `${p}/${r}` : p\n      if (this.isAbsolute(p)) {\n        break\n      }\n    }\n    const cached = this.#resolveCache.get(r)\n    if (cached !== undefined) {\n      return cached\n    }\n    const result = this.cwd.resolve(r).fullpath()\n    this.#resolveCache.set(r, result)\n    return result\n  }\n\n  /**\n   * Resolve one or more path strings to a resolved string, returning\n   * the posix path.  
Identical to .resolve() on posix systems, but on\n   * windows will return a forward-slash separated UNC path.\n   *\n   * Same interface as require('path').resolve.\n   *\n   * Much faster than path.resolve() when called multiple times for the same\n   * path, because the resolved Path objects are cached.  Much slower\n   * otherwise.\n   */\n  resolvePosix(...paths: string[]): string {\n    // first figure out the minimum number of paths we have to test\n    // we always start at cwd, but any absolutes will bump the start\n    let r = ''\n    for (let i = paths.length - 1; i >= 0; i--) {\n      const p = paths[i]\n      if (!p || p === '.') continue\n      r = r ? `${p}/${r}` : p\n      if (this.isAbsolute(p)) {\n        break\n      }\n    }\n    const cached = this.#resolvePosixCache.get(r)\n    if (cached !== undefined) {\n      return cached\n    }\n    const result = this.cwd.resolve(r).fullpathPosix()\n    this.#resolvePosixCache.set(r, result)\n    return result\n  }\n\n  /**\n   * find the relative path from the cwd to the supplied path string or entry\n   */\n  relative(entry: PathBase | string = this.cwd): string {\n    if (typeof entry === 'string') {\n      entry = this.cwd.resolve(entry)\n    }\n    return entry.relative()\n  }\n\n  /**\n   * find the relative path from the cwd to the supplied path string or\n   * entry, using / as the path delimiter, even on Windows.\n   */\n  relativePosix(entry: PathBase | string = this.cwd): string {\n    if (typeof entry === 'string') {\n      entry = this.cwd.resolve(entry)\n    }\n    return entry.relativePosix()\n  }\n\n  /**\n   * Return the basename for the provided string or Path object\n   */\n  basename(entry: PathBase | string = this.cwd): string {\n    if (typeof entry === 'string') {\n      entry = this.cwd.resolve(entry)\n    }\n    return entry.name\n  }\n\n  /**\n   * Return the dirname for the provided string or Path object\n   */\n  dirname(entry: PathBase | string = this.cwd): string {\n    if 
(typeof entry === 'string') {\n      entry = this.cwd.resolve(entry)\n    }\n    return (entry.parent || entry).fullpath()\n  }\n\n  /**\n   * Return an array of known child entries.\n   *\n   * First argument may be either a string, or a Path object.\n   *\n   * If the Path cannot or does not contain any children, then an empty array\n   * is returned.\n   *\n   * Results are cached, and thus may be out of date if the filesystem is\n   * mutated.\n   *\n   * Unlike `fs.readdir()`, the `withFileTypes` option defaults to `true`. Set\n   * `{ withFileTypes: false }` to return strings.\n   */\n\n  readdir(): Promise\n  readdir(opts: { withFileTypes: true }): Promise\n  readdir(opts: { withFileTypes: false }): Promise\n  readdir(opts: { withFileTypes: boolean }): Promise\n  readdir(entry: PathBase | string): Promise\n  readdir(\n    entry: PathBase | string,\n    opts: { withFileTypes: true },\n  ): Promise\n  readdir(\n    entry: PathBase | string,\n    opts: { withFileTypes: false },\n  ): Promise\n  readdir(\n    entry: PathBase | string,\n    opts: { withFileTypes: boolean },\n  ): Promise\n  async readdir(\n    entry: PathBase | string | { withFileTypes: boolean } = this.cwd,\n    opts: { withFileTypes: boolean } = {\n      withFileTypes: true,\n    },\n  ): Promise {\n    if (typeof entry === 'string') {\n      entry = this.cwd.resolve(entry)\n    } else if (!(entry instanceof PathBase)) {\n      opts = entry\n      entry = this.cwd\n    }\n    const { withFileTypes } = opts\n    if (!entry.canReaddir()) {\n      return []\n    } else {\n      const p = await entry.readdir()\n      return withFileTypes ? 
p : p.map(e => e.name)\n    }\n  }\n\n  /**\n   * synchronous {@link PathScurryBase.readdir}\n   */\n  readdirSync(): PathBase[]\n  readdirSync(opts: { withFileTypes: true }): PathBase[]\n  readdirSync(opts: { withFileTypes: false }): string[]\n  readdirSync(opts: { withFileTypes: boolean }): PathBase[] | string[]\n  readdirSync(entry: PathBase | string): PathBase[]\n  readdirSync(\n    entry: PathBase | string,\n    opts: { withFileTypes: true },\n  ): PathBase[]\n  readdirSync(\n    entry: PathBase | string,\n    opts: { withFileTypes: false },\n  ): string[]\n  readdirSync(\n    entry: PathBase | string,\n    opts: { withFileTypes: boolean },\n  ): PathBase[] | string[]\n  readdirSync(\n    entry: PathBase | string | { withFileTypes: boolean } = this.cwd,\n    opts: { withFileTypes: boolean } = {\n      withFileTypes: true,\n    },\n  ): PathBase[] | string[] {\n    if (typeof entry === 'string') {\n      entry = this.cwd.resolve(entry)\n    } else if (!(entry instanceof PathBase)) {\n      opts = entry\n      entry = this.cwd\n    }\n    const { withFileTypes = true } = opts\n    if (!entry.canReaddir()) {\n      return []\n    } else if (withFileTypes) {\n      return entry.readdirSync()\n    } else {\n      return entry.readdirSync().map(e => e.name)\n    }\n  }\n\n  /**\n   * Call lstat() on the string or Path object, and update all known\n   * information that can be determined.\n   *\n   * Note that unlike `fs.lstat()`, the returned value does not contain some\n   * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that\n   * information is required, you will need to call `fs.lstat` yourself.\n   *\n   * If the Path refers to a nonexistent file, or if the lstat call fails for\n   * any reason, `undefined` is returned.  
Otherwise the updated Path object is\n   * returned.\n   *\n   * Results are cached, and thus may be out of date if the filesystem is\n   * mutated.\n   */\n  async lstat(\n    entry: string | PathBase = this.cwd,\n  ): Promise {\n    if (typeof entry === 'string') {\n      entry = this.cwd.resolve(entry)\n    }\n    return entry.lstat()\n  }\n\n  /**\n   * synchronous {@link PathScurryBase.lstat}\n   */\n  lstatSync(entry: string | PathBase = this.cwd): PathBase | undefined {\n    if (typeof entry === 'string') {\n      entry = this.cwd.resolve(entry)\n    }\n    return entry.lstatSync()\n  }\n\n  /**\n   * Return the Path object or string path corresponding to the target of a\n   * symbolic link.\n   *\n   * If the path is not a symbolic link, or if the readlink call fails for any\n   * reason, `undefined` is returned.\n   *\n   * Result is cached, and thus may be outdated if the filesystem is mutated.\n   *\n   * `{withFileTypes}` option defaults to `false`.\n   *\n   * On success, returns a Path object if `withFileTypes` option is true,\n   * otherwise a string.\n   */\n  readlink(): Promise\n  readlink(opt: { withFileTypes: false }): Promise\n  readlink(opt: { withFileTypes: true }): Promise\n  readlink(opt: {\n    withFileTypes: boolean\n  }): Promise\n  readlink(\n    entry: string | PathBase,\n    opt?: { withFileTypes: false },\n  ): Promise\n  readlink(\n    entry: string | PathBase,\n    opt: { withFileTypes: true },\n  ): Promise\n  readlink(\n    entry: string | PathBase,\n    opt: { withFileTypes: boolean },\n  ): Promise\n  async readlink(\n    entry: string | PathBase | { withFileTypes: boolean } = this.cwd,\n    { withFileTypes }: { withFileTypes: boolean } = {\n      withFileTypes: false,\n    },\n  ): Promise {\n    if (typeof entry === 'string') {\n      entry = this.cwd.resolve(entry)\n    } else if (!(entry instanceof PathBase)) {\n      withFileTypes = entry.withFileTypes\n      entry = this.cwd\n    }\n    const e = await entry.readlink()\n  
  return withFileTypes ? e : e?.fullpath()\n  }\n\n  /**\n   * synchronous {@link PathScurryBase.readlink}\n   */\n  readlinkSync(): string | undefined\n  readlinkSync(opt: { withFileTypes: false }): string | undefined\n  readlinkSync(opt: { withFileTypes: true }): PathBase | undefined\n  readlinkSync(opt: {\n    withFileTypes: boolean\n  }): PathBase | string | undefined\n  readlinkSync(\n    entry: string | PathBase,\n    opt?: { withFileTypes: false },\n  ): string | undefined\n  readlinkSync(\n    entry: string | PathBase,\n    opt: { withFileTypes: true },\n  ): PathBase | undefined\n  readlinkSync(\n    entry: string | PathBase,\n    opt: { withFileTypes: boolean },\n  ): string | PathBase | undefined\n  readlinkSync(\n    entry: string | PathBase | { withFileTypes: boolean } = this.cwd,\n    { withFileTypes }: { withFileTypes: boolean } = {\n      withFileTypes: false,\n    },\n  ): string | PathBase | undefined {\n    if (typeof entry === 'string') {\n      entry = this.cwd.resolve(entry)\n    } else if (!(entry instanceof PathBase)) {\n      withFileTypes = entry.withFileTypes\n      entry = this.cwd\n    }\n    const e = entry.readlinkSync()\n    return withFileTypes ? 
e : e?.fullpath()\n  }\n\n  /**\n   * Return the Path object or string path corresponding to path as resolved\n   * by realpath(3).\n   *\n   * If the realpath call fails for any reason, `undefined` is returned.\n   *\n   * Result is cached, and thus may be outdated if the filesystem is mutated.\n   *\n   * `{withFileTypes}` option defaults to `false`.\n   *\n   * On success, returns a Path object if `withFileTypes` option is true,\n   * otherwise a string.\n   */\n  realpath(): Promise\n  realpath(opt: { withFileTypes: false }): Promise\n  realpath(opt: { withFileTypes: true }): Promise\n  realpath(opt: {\n    withFileTypes: boolean\n  }): Promise\n  realpath(\n    entry: string | PathBase,\n    opt?: { withFileTypes: false },\n  ): Promise\n  realpath(\n    entry: string | PathBase,\n    opt: { withFileTypes: true },\n  ): Promise\n  realpath(\n    entry: string | PathBase,\n    opt: { withFileTypes: boolean },\n  ): Promise\n  async realpath(\n    entry: string | PathBase | { withFileTypes: boolean } = this.cwd,\n    { withFileTypes }: { withFileTypes: boolean } = {\n      withFileTypes: false,\n    },\n  ): Promise {\n    if (typeof entry === 'string') {\n      entry = this.cwd.resolve(entry)\n    } else if (!(entry instanceof PathBase)) {\n      withFileTypes = entry.withFileTypes\n      entry = this.cwd\n    }\n    const e = await entry.realpath()\n    return withFileTypes ? 
e : e?.fullpath()\n  }\n\n  realpathSync(): string | undefined\n  realpathSync(opt: { withFileTypes: false }): string | undefined\n  realpathSync(opt: { withFileTypes: true }): PathBase | undefined\n  realpathSync(opt: {\n    withFileTypes: boolean\n  }): PathBase | string | undefined\n  realpathSync(\n    entry: string | PathBase,\n    opt?: { withFileTypes: false },\n  ): string | undefined\n  realpathSync(\n    entry: string | PathBase,\n    opt: { withFileTypes: true },\n  ): PathBase | undefined\n  realpathSync(\n    entry: string | PathBase,\n    opt: { withFileTypes: boolean },\n  ): string | PathBase | undefined\n  realpathSync(\n    entry: string | PathBase | { withFileTypes: boolean } = this.cwd,\n    { withFileTypes }: { withFileTypes: boolean } = {\n      withFileTypes: false,\n    },\n  ): string | PathBase | undefined {\n    if (typeof entry === 'string') {\n      entry = this.cwd.resolve(entry)\n    } else if (!(entry instanceof PathBase)) {\n      withFileTypes = entry.withFileTypes\n      entry = this.cwd\n    }\n    const e = entry.realpathSync()\n    return withFileTypes ? 
e : e?.fullpath()\n  }\n\n  /**\n   * Asynchronously walk the directory tree, returning an array of\n   * all path strings or Path objects found.\n   *\n   * Note that this will be extremely memory-hungry on large filesystems.\n   * In such cases, it may be better to use the stream or async iterator\n   * walk implementation.\n   */\n  walk(): Promise\n  walk(\n    opts: WalkOptionsWithFileTypesTrue | WalkOptionsWithFileTypesUnset,\n  ): Promise\n  walk(opts: WalkOptionsWithFileTypesFalse): Promise\n  walk(opts: WalkOptions): Promise\n  walk(entry: string | PathBase): Promise\n  walk(\n    entry: string | PathBase,\n    opts: WalkOptionsWithFileTypesTrue | WalkOptionsWithFileTypesUnset,\n  ): Promise\n  walk(\n    entry: string | PathBase,\n    opts: WalkOptionsWithFileTypesFalse,\n  ): Promise\n  walk(\n    entry: string | PathBase,\n    opts: WalkOptions,\n  ): Promise\n  async walk(\n    entry: string | PathBase | WalkOptions = this.cwd,\n    opts: WalkOptions = {},\n  ): Promise {\n    if (typeof entry === 'string') {\n      entry = this.cwd.resolve(entry)\n    } else if (!(entry instanceof PathBase)) {\n      opts = entry\n      entry = this.cwd\n    }\n    const {\n      withFileTypes = true,\n      follow = false,\n      filter,\n      walkFilter,\n    } = opts\n    const results: (string | PathBase)[] = []\n    if (!filter || filter(entry)) {\n      results.push(withFileTypes ? 
entry : entry.fullpath())\n    }\n    const dirs = new Set()\n    const walk = (\n      dir: PathBase,\n      cb: (er?: NodeJS.ErrnoException) => void,\n    ) => {\n      dirs.add(dir)\n      dir.readdirCB((er, entries) => {\n        /* c8 ignore start */\n        if (er) {\n          return cb(er)\n        }\n        /* c8 ignore stop */\n        let len = entries.length\n        if (!len) return cb()\n        const next = () => {\n          if (--len === 0) {\n            cb()\n          }\n        }\n        for (const e of entries) {\n          if (!filter || filter(e)) {\n            results.push(withFileTypes ? e : e.fullpath())\n          }\n          if (follow && e.isSymbolicLink()) {\n            e.realpath()\n              .then(r => (r?.isUnknown() ? r.lstat() : r))\n              .then(r =>\n                r?.shouldWalk(dirs, walkFilter) ? walk(r, next) : next(),\n              )\n          } else {\n            if (e.shouldWalk(dirs, walkFilter)) {\n              walk(e, next)\n            } else {\n              next()\n            }\n          }\n        }\n      }, true) // zalgooooooo\n    }\n\n    const start = entry\n    return new Promise((res, rej) => {\n      walk(start, er => {\n        /* c8 ignore start */\n        if (er) return rej(er)\n        /* c8 ignore stop */\n        res(results as PathBase[] | string[])\n      })\n    })\n  }\n\n  /**\n   * Synchronously walk the directory tree, returning an array of\n   * all path strings or Path objects found.\n   *\n   * Note that this will be extremely memory-hungry on large filesystems.\n   * In such cases, it may be better to use the stream or async iterator\n   * walk implementation.\n   */\n  walkSync(): PathBase[]\n  walkSync(\n    opts: WalkOptionsWithFileTypesTrue | WalkOptionsWithFileTypesUnset,\n  ): PathBase[]\n  walkSync(opts: WalkOptionsWithFileTypesFalse): string[]\n  walkSync(opts: WalkOptions): string[] | PathBase[]\n  walkSync(entry: string | PathBase): PathBase[]\n  
walkSync(\n    entry: string | PathBase,\n    opts: WalkOptionsWithFileTypesUnset | WalkOptionsWithFileTypesTrue,\n  ): PathBase[]\n  walkSync(\n    entry: string | PathBase,\n    opts: WalkOptionsWithFileTypesFalse,\n  ): string[]\n  walkSync(\n    entry: string | PathBase,\n    opts: WalkOptions,\n  ): PathBase[] | string[]\n  walkSync(\n    entry: string | PathBase | WalkOptions = this.cwd,\n    opts: WalkOptions = {},\n  ): PathBase[] | string[] {\n    if (typeof entry === 'string') {\n      entry = this.cwd.resolve(entry)\n    } else if (!(entry instanceof PathBase)) {\n      opts = entry\n      entry = this.cwd\n    }\n    const {\n      withFileTypes = true,\n      follow = false,\n      filter,\n      walkFilter,\n    } = opts\n    const results: (string | PathBase)[] = []\n    if (!filter || filter(entry)) {\n      results.push(withFileTypes ? entry : entry.fullpath())\n    }\n    const dirs = new Set([entry])\n    for (const dir of dirs) {\n      const entries = dir.readdirSync()\n      for (const e of entries) {\n        if (!filter || filter(e)) {\n          results.push(withFileTypes ? e : e.fullpath())\n        }\n        let r: PathBase | undefined = e\n        if (e.isSymbolicLink()) {\n          if (!(follow && (r = e.realpathSync()))) continue\n          if (r.isUnknown()) r.lstatSync()\n        }\n        if (r.shouldWalk(dirs, walkFilter)) {\n          dirs.add(r)\n        }\n      }\n    }\n    return results as string[] | PathBase[]\n  }\n\n  /**\n   * Support for `for await`\n   *\n   * Alias for {@link PathScurryBase.iterate}\n   *\n   * Note: As of Node 19, this is very slow, compared to other methods of\n   * walking.  
Consider using {@link PathScurryBase.stream} if memory overhead\n   * and backpressure are concerns, or {@link PathScurryBase.walk} if not.\n   */\n  [Symbol.asyncIterator]() {\n    return this.iterate()\n  }\n\n  /**\n   * Async generator form of {@link PathScurryBase.walk}\n   *\n   * Note: As of Node 19, this is very slow, compared to other methods of\n   * walking, especially if most/all of the directory tree has been previously\n   * walked.  Consider using {@link PathScurryBase.stream} if memory overhead\n   * and backpressure are concerns, or {@link PathScurryBase.walk} if not.\n   */\n  iterate(): AsyncGenerator\n  iterate(\n    opts: WalkOptionsWithFileTypesTrue | WalkOptionsWithFileTypesUnset,\n  ): AsyncGenerator\n  iterate(\n    opts: WalkOptionsWithFileTypesFalse,\n  ): AsyncGenerator\n  iterate(opts: WalkOptions): AsyncGenerator\n  iterate(entry: string | PathBase): AsyncGenerator\n  iterate(\n    entry: string | PathBase,\n    opts: WalkOptionsWithFileTypesTrue | WalkOptionsWithFileTypesUnset,\n  ): AsyncGenerator\n  iterate(\n    entry: string | PathBase,\n    opts: WalkOptionsWithFileTypesFalse,\n  ): AsyncGenerator\n  iterate(\n    entry: string | PathBase,\n    opts: WalkOptions,\n  ): AsyncGenerator\n  iterate(\n    entry: string | PathBase | WalkOptions = this.cwd,\n    options: WalkOptions = {},\n  ): AsyncGenerator {\n    // iterating async over the stream is significantly more performant,\n    // especially in the warm-cache scenario, because it buffers up directory\n    // entries in the background instead of waiting for a yield for each one.\n    if (typeof entry === 'string') {\n      entry = this.cwd.resolve(entry)\n    } else if (!(entry instanceof PathBase)) {\n      options = entry\n      entry = this.cwd\n    }\n    return this.stream(entry, options)[Symbol.asyncIterator]()\n  }\n\n  /**\n   * Iterating over a PathScurry performs a synchronous walk.\n   *\n   * Alias for {@link PathScurryBase.iterateSync}\n   */\n  
[Symbol.iterator]() {\n    return this.iterateSync()\n  }\n\n  iterateSync(): Generator\n  iterateSync(\n    opts: WalkOptionsWithFileTypesTrue | WalkOptionsWithFileTypesUnset,\n  ): Generator\n  iterateSync(\n    opts: WalkOptionsWithFileTypesFalse,\n  ): Generator\n  iterateSync(opts: WalkOptions): Generator\n  iterateSync(entry: string | PathBase): Generator\n  iterateSync(\n    entry: string | PathBase,\n    opts: WalkOptionsWithFileTypesTrue | WalkOptionsWithFileTypesUnset,\n  ): Generator\n  iterateSync(\n    entry: string | PathBase,\n    opts: WalkOptionsWithFileTypesFalse,\n  ): Generator\n  iterateSync(\n    entry: string | PathBase,\n    opts: WalkOptions,\n  ): Generator\n  *iterateSync(\n    entry: string | PathBase | WalkOptions = this.cwd,\n    opts: WalkOptions = {},\n  ): Generator {\n    if (typeof entry === 'string') {\n      entry = this.cwd.resolve(entry)\n    } else if (!(entry instanceof PathBase)) {\n      opts = entry\n      entry = this.cwd\n    }\n    const {\n      withFileTypes = true,\n      follow = false,\n      filter,\n      walkFilter,\n    } = opts\n    if (!filter || filter(entry)) {\n      yield withFileTypes ? entry : entry.fullpath()\n    }\n    const dirs = new Set([entry])\n    for (const dir of dirs) {\n      const entries = dir.readdirSync()\n      for (const e of entries) {\n        if (!filter || filter(e)) {\n          yield withFileTypes ? 
e : e.fullpath()\n        }\n        let r: PathBase | undefined = e\n        if (e.isSymbolicLink()) {\n          if (!(follow && (r = e.realpathSync()))) continue\n          if (r.isUnknown()) r.lstatSync()\n        }\n        if (r.shouldWalk(dirs, walkFilter)) {\n          dirs.add(r)\n        }\n      }\n    }\n  }\n\n  /**\n   * Stream form of {@link PathScurryBase.walk}\n   *\n   * Returns a Minipass stream that emits {@link PathBase} objects by default,\n   * or strings if `{ withFileTypes: false }` is set in the options.\n   */\n  stream(): Minipass\n  stream(\n    opts: WalkOptionsWithFileTypesTrue | WalkOptionsWithFileTypesUnset,\n  ): Minipass\n  stream(opts: WalkOptionsWithFileTypesFalse): Minipass\n  stream(opts: WalkOptions): Minipass\n  stream(entry: string | PathBase): Minipass\n  stream(\n    entry: string | PathBase,\n    opts: WalkOptionsWithFileTypesUnset | WalkOptionsWithFileTypesTrue,\n  ): Minipass\n  stream(\n    entry: string | PathBase,\n    opts: WalkOptionsWithFileTypesFalse,\n  ): Minipass\n  stream(\n    entry: string | PathBase,\n    opts: WalkOptions,\n  ): Minipass | Minipass\n  stream(\n    entry: string | PathBase | WalkOptions = this.cwd,\n    opts: WalkOptions = {},\n  ): Minipass | Minipass {\n    if (typeof entry === 'string') {\n      entry = this.cwd.resolve(entry)\n    } else if (!(entry instanceof PathBase)) {\n      opts = entry\n      entry = this.cwd\n    }\n    const {\n      withFileTypes = true,\n      follow = false,\n      filter,\n      walkFilter,\n    } = opts\n    const results = new Minipass({ objectMode: true })\n    if (!filter || filter(entry)) {\n      results.write(withFileTypes ? 
entry : entry.fullpath())\n    }\n    const dirs = new Set()\n    const queue: PathBase[] = [entry]\n    let processing = 0\n    const process = () => {\n      let paused = false\n      while (!paused) {\n        const dir = queue.shift()\n        if (!dir) {\n          if (processing === 0) results.end()\n          return\n        }\n\n        processing++\n        dirs.add(dir)\n\n        const onReaddir = (\n          er: null | NodeJS.ErrnoException,\n          entries: PathBase[],\n          didRealpaths: boolean = false,\n        ) => {\n          /* c8 ignore start */\n          if (er) return results.emit('error', er)\n          /* c8 ignore stop */\n          if (follow && !didRealpaths) {\n            const promises: Promise[] = []\n            for (const e of entries) {\n              if (e.isSymbolicLink()) {\n                promises.push(\n                  e\n                    .realpath()\n                    .then((r: PathBase | undefined) =>\n                      r?.isUnknown() ? r.lstat() : r,\n                    ),\n                )\n              }\n            }\n            if (promises.length) {\n              Promise.all(promises).then(() =>\n                onReaddir(null, entries, true),\n              )\n              return\n            }\n          }\n\n          for (const e of entries) {\n            if (e && (!filter || filter(e))) {\n              if (!results.write(withFileTypes ? 
e : e.fullpath())) {\n                paused = true\n              }\n            }\n          }\n\n          processing--\n          for (const e of entries) {\n            const r = e.realpathCached() || e\n            if (r.shouldWalk(dirs, walkFilter)) {\n              queue.push(r)\n            }\n          }\n          if (paused && !results.flowing) {\n            results.once('drain', process)\n          } else if (!sync) {\n            process()\n          }\n        }\n\n        // zalgo containment\n        let sync = true\n        dir.readdirCB(onReaddir, true)\n        sync = false\n      }\n    }\n    process()\n    return results as Minipass | Minipass\n  }\n\n  /**\n   * Synchronous form of {@link PathScurryBase.stream}\n   *\n   * Returns a Minipass stream that emits {@link PathBase} objects by default,\n   * or strings if `{ withFileTypes: false }` is set in the options.\n   *\n   * Will complete the walk in a single tick if the stream is consumed fully.\n   * Otherwise, will pause as needed for stream backpressure.\n   */\n  streamSync(): Minipass\n  streamSync(\n    opts: WalkOptionsWithFileTypesTrue | WalkOptionsWithFileTypesUnset,\n  ): Minipass\n  streamSync(opts: WalkOptionsWithFileTypesFalse): Minipass\n  streamSync(opts: WalkOptions): Minipass\n  streamSync(entry: string | PathBase): Minipass\n  streamSync(\n    entry: string | PathBase,\n    opts: WalkOptionsWithFileTypesUnset | WalkOptionsWithFileTypesTrue,\n  ): Minipass\n  streamSync(\n    entry: string | PathBase,\n    opts: WalkOptionsWithFileTypesFalse,\n  ): Minipass\n  streamSync(\n    entry: string | PathBase,\n    opts: WalkOptions,\n  ): Minipass | Minipass\n  streamSync(\n    entry: string | PathBase | WalkOptions = this.cwd,\n    opts: WalkOptions = {},\n  ): Minipass | Minipass {\n    if (typeof entry === 'string') {\n      entry = this.cwd.resolve(entry)\n    } else if (!(entry instanceof PathBase)) {\n      opts = entry\n      entry = this.cwd\n    }\n    const {\n      
withFileTypes = true,\n      follow = false,\n      filter,\n      walkFilter,\n    } = opts\n    const results = new Minipass({ objectMode: true })\n    const dirs = new Set()\n    if (!filter || filter(entry)) {\n      results.write(withFileTypes ? entry : entry.fullpath())\n    }\n    const queue: PathBase[] = [entry]\n    let processing = 0\n    const process = () => {\n      let paused = false\n      while (!paused) {\n        const dir = queue.shift()\n        if (!dir) {\n          if (processing === 0) results.end()\n          return\n        }\n        processing++\n        dirs.add(dir)\n\n        const entries = dir.readdirSync()\n        for (const e of entries) {\n          if (!filter || filter(e)) {\n            if (!results.write(withFileTypes ? e : e.fullpath())) {\n              paused = true\n            }\n          }\n        }\n        processing--\n        for (const e of entries) {\n          let r: PathBase | undefined = e\n          if (e.isSymbolicLink()) {\n            if (!(follow && (r = e.realpathSync()))) continue\n            if (r.isUnknown()) r.lstatSync()\n          }\n          if (r.shouldWalk(dirs, walkFilter)) {\n            queue.push(r)\n          }\n        }\n      }\n      if (paused && !results.flowing) results.once('drain', process)\n    }\n    process()\n    return results as Minipass | Minipass\n  }\n\n  chdir(path: string | Path = this.cwd) {\n    const oldCwd = this.cwd\n    this.cwd = typeof path === 'string' ? this.cwd.resolve(path) : path\n    this.cwd[setAsCwd](oldCwd)\n  }\n}\n\n/**\n * Options provided to all walk methods.\n */\nexport interface WalkOptions {\n  /**\n   * Return results as {@link PathBase} objects rather than strings.\n   * When set to false, results are fully resolved paths, as returned by\n   * {@link PathBase.fullpath}.\n   * @default true\n   */\n  withFileTypes?: boolean\n\n  /**\n   *  Attempt to read directory entries from symbolic links. 
Otherwise, only\n   *  actual directories are traversed. Regardless of this setting, a given\n   *  target path will only ever be walked once, meaning that a symbolic link\n   *  to a previously traversed directory will never be followed.\n   *\n   *  Setting this imposes a slight performance penalty, because `readlink`\n   *  must be called on all symbolic links encountered, in order to avoid\n   *  infinite cycles.\n   * @default false\n   */\n  follow?: boolean\n\n  /**\n   * Only return entries where the provided function returns true.\n   *\n   * This will not prevent directories from being traversed, even if they do\n   * not pass the filter, though it will prevent directories themselves from\n   * being included in the result set.  See {@link walkFilter}\n   *\n   * Asynchronous functions are not supported here.\n   *\n   * By default, if no filter is provided, all entries and traversed\n   * directories are included.\n   */\n  filter?: (entry: PathBase) => boolean\n\n  /**\n   * Only traverse directories (and in the case of {@link follow} being set to\n   * true, symbolic links to directories) if the provided function returns\n   * true.\n   *\n   * This will not prevent directories from being included in the result set,\n   * even if they do not pass the supplied filter function.  See {@link filter}\n   * to do that.\n   *\n   * Asynchronous functions are not supported here.\n   */\n  walkFilter?: (entry: PathBase) => boolean\n}\n\nexport type WalkOptionsWithFileTypesUnset = WalkOptions & {\n  withFileTypes?: undefined\n}\nexport type WalkOptionsWithFileTypesTrue = WalkOptions & {\n  withFileTypes: true\n}\nexport type WalkOptionsWithFileTypesFalse = WalkOptions & {\n  withFileTypes: false\n}\n\n/**\n * Windows implementation of {@link PathScurryBase}\n *\n * Defaults to case insensitve, uses `'\\\\'` to generate path strings.  
Uses\n * {@link PathWin32} for Path objects.\n */\nexport class PathScurryWin32 extends PathScurryBase {\n  /**\n   * separator for generating path strings\n   */\n  sep: '\\\\' = '\\\\'\n\n  constructor(\n    cwd: URL | string = process.cwd(),\n    opts: PathScurryOpts = {},\n  ) {\n    const { nocase = true } = opts\n    super(cwd, win32, '\\\\', { ...opts, nocase })\n    this.nocase = nocase\n    for (let p: PathBase | undefined = this.cwd; p; p = p.parent) {\n      p.nocase = this.nocase\n    }\n  }\n\n  /**\n   * @internal\n   */\n  parseRootPath(dir: string): string {\n    // if the path starts with a single separator, it's not a UNC, and we'll\n    // just get separator as the root, and driveFromUNC will return \\\n    // In that case, mount \\ on the root from the cwd.\n    return win32.parse(dir).root.toUpperCase()\n  }\n\n  /**\n   * @internal\n   */\n  newRoot(fs: FSValue) {\n    return new PathWin32(\n      this.rootPath,\n      IFDIR,\n      undefined,\n      this.roots,\n      this.nocase,\n      this.childrenCache(),\n      { fs },\n    )\n  }\n\n  /**\n   * Return true if the provided path string is an absolute path\n   */\n  isAbsolute(p: string): boolean {\n    return (\n      p.startsWith('/') || p.startsWith('\\\\') || /^[a-z]:(\\/|\\\\)/i.test(p)\n    )\n  }\n}\n\n/**\n * {@link PathScurryBase} implementation for all posix systems other than Darwin.\n *\n * Defaults to case-sensitive matching, uses `'/'` to generate path strings.\n *\n * Uses {@link PathPosix} for Path objects.\n */\nexport class PathScurryPosix extends PathScurryBase {\n  /**\n   * separator for generating path strings\n   */\n  sep: '/' = '/'\n  constructor(\n    cwd: URL | string = process.cwd(),\n    opts: PathScurryOpts = {},\n  ) {\n    const { nocase = false } = opts\n    super(cwd, posix, '/', { ...opts, nocase })\n    this.nocase = nocase\n  }\n\n  /**\n   * @internal\n   */\n  parseRootPath(_dir: string): string {\n    return '/'\n  }\n\n  /**\n   * @internal\n   */\n 
 newRoot(fs: FSValue) {\n    return new PathPosix(\n      this.rootPath,\n      IFDIR,\n      undefined,\n      this.roots,\n      this.nocase,\n      this.childrenCache(),\n      { fs },\n    )\n  }\n\n  /**\n   * Return true if the provided path string is an absolute path\n   */\n  isAbsolute(p: string): boolean {\n    return p.startsWith('/')\n  }\n}\n\n/**\n * {@link PathScurryBase} implementation for Darwin (macOS) systems.\n *\n * Defaults to case-insensitive matching, uses `'/'` for generating path\n * strings.\n *\n * Uses {@link PathPosix} for Path objects.\n */\nexport class PathScurryDarwin extends PathScurryPosix {\n  constructor(\n    cwd: URL | string = process.cwd(),\n    opts: PathScurryOpts = {},\n  ) {\n    const { nocase = true } = opts\n    super(cwd, { ...opts, nocase })\n  }\n}\n\n/**\n * Default {@link PathBase} implementation for the current platform.\n *\n * {@link PathWin32} on Windows systems, {@link PathPosix} on all others.\n */\nexport const Path = process.platform === 'win32' ? PathWin32 : PathPosix\nexport type Path = PathBase | InstanceType\n\n/**\n * Default {@link PathScurryBase} implementation for the current platform.\n *\n * {@link PathScurryWin32} on Windows systems, {@link PathScurryDarwin} on\n * Darwin (macOS) systems, {@link PathScurryPosix} on all others.\n */\nexport const PathScurry:\n  | typeof PathScurryWin32\n  | typeof PathScurryDarwin\n  | typeof PathScurryPosix =\n  process.platform === 'win32' ? PathScurryWin32\n  : process.platform === 'darwin' ? PathScurryDarwin\n  : PathScurryPosix\nexport type PathScurry = PathScurryBase | InstanceType\n", "const proc =\n  typeof process === 'object' && process\n    ? 
process\n    : {\n        stdout: null,\n        stderr: null,\n      }\nimport { EventEmitter } from 'node:events'\nimport Stream from 'node:stream'\nimport { StringDecoder } from 'node:string_decoder'\n\n/**\n * Same as StringDecoder, but exposing the `lastNeed` flag on the type\n */\ntype SD = StringDecoder & { lastNeed: boolean }\n\nexport type { SD, Pipe, PipeProxyErrors }\n\n/**\n * Return true if the argument is a Minipass stream, Node stream, or something\n * else that Minipass can interact with.\n */\nexport const isStream = (\n  s: any\n): s is Minipass.Readable | Minipass.Writable =>\n  !!s &&\n  typeof s === 'object' &&\n  (s instanceof Minipass ||\n    s instanceof Stream ||\n    isReadable(s) ||\n    isWritable(s))\n\n/**\n * Return true if the argument is a valid {@link Minipass.Readable}\n */\nexport const isReadable = (s: any): s is Minipass.Readable =>\n  !!s &&\n  typeof s === 'object' &&\n  s instanceof EventEmitter &&\n  typeof (s as Minipass.Readable).pipe === 'function' &&\n  // node core Writable streams have a pipe() method, but it throws\n  (s as Minipass.Readable).pipe !== Stream.Writable.prototype.pipe\n\n/**\n * Return true if the argument is a valid {@link Minipass.Writable}\n */\nexport const isWritable = (s: any): s is Minipass.Readable =>\n  !!s &&\n  typeof s === 'object' &&\n  s instanceof EventEmitter &&\n  typeof (s as Minipass.Writable).write === 'function' &&\n  typeof (s as Minipass.Writable).end === 'function'\n\nconst EOF = Symbol('EOF')\nconst MAYBE_EMIT_END = Symbol('maybeEmitEnd')\nconst EMITTED_END = Symbol('emittedEnd')\nconst EMITTING_END = Symbol('emittingEnd')\nconst EMITTED_ERROR = Symbol('emittedError')\nconst CLOSED = Symbol('closed')\nconst READ = Symbol('read')\nconst FLUSH = Symbol('flush')\nconst FLUSHCHUNK = Symbol('flushChunk')\nconst ENCODING = Symbol('encoding')\nconst DECODER = Symbol('decoder')\nconst FLOWING = Symbol('flowing')\nconst PAUSED = Symbol('paused')\nconst RESUME = Symbol('resume')\nconst 
BUFFER = Symbol('buffer')\nconst PIPES = Symbol('pipes')\nconst BUFFERLENGTH = Symbol('bufferLength')\nconst BUFFERPUSH = Symbol('bufferPush')\nconst BUFFERSHIFT = Symbol('bufferShift')\nconst OBJECTMODE = Symbol('objectMode')\n// internal event when stream is destroyed\nconst DESTROYED = Symbol('destroyed')\n// internal event when stream has an error\nconst ERROR = Symbol('error')\nconst EMITDATA = Symbol('emitData')\nconst EMITEND = Symbol('emitEnd')\nconst EMITEND2 = Symbol('emitEnd2')\nconst ASYNC = Symbol('async')\nconst ABORT = Symbol('abort')\nconst ABORTED = Symbol('aborted')\nconst SIGNAL = Symbol('signal')\nconst DATALISTENERS = Symbol('dataListeners')\nconst DISCARDED = Symbol('discarded')\n\nconst defer = (fn: (...a: any[]) => any) => Promise.resolve().then(fn)\nconst nodefer = (fn: (...a: any[]) => any) => fn()\n\n// events that mean 'the stream is over'\n// these are treated specially, and re-emitted\n// if they are listened for after emitting.\ntype EndishEvent = 'end' | 'finish' | 'prefinish'\nconst isEndish = (ev: any): ev is EndishEvent =>\n  ev === 'end' || ev === 'finish' || ev === 'prefinish'\n\nconst isArrayBufferLike = (b: any): b is ArrayBufferLike =>\n  b instanceof ArrayBuffer ||\n  (!!b &&\n    typeof b === 'object' &&\n    b.constructor &&\n    b.constructor.name === 'ArrayBuffer' &&\n    b.byteLength >= 0)\n\nconst isArrayBufferView = (b: any): b is ArrayBufferView =>\n  !Buffer.isBuffer(b) && ArrayBuffer.isView(b)\n\n/**\n * Options that may be passed to stream.pipe()\n */\nexport interface PipeOptions {\n  /**\n   * end the destination stream when the source stream ends\n   */\n  end?: boolean\n  /**\n   * proxy errors from the source stream to the destination stream\n   */\n  proxyErrors?: boolean\n}\n\n/**\n * Internal class representing a pipe to a destination stream.\n *\n * @internal\n */\nclass Pipe {\n  src: Minipass\n  dest: Minipass\n  opts: PipeOptions\n  ondrain: () => any\n  constructor(\n    src: Minipass,\n    dest: 
Minipass.Writable,\n    opts: PipeOptions\n  ) {\n    this.src = src\n    this.dest = dest as Minipass\n    this.opts = opts\n    this.ondrain = () => src[RESUME]()\n    this.dest.on('drain', this.ondrain)\n  }\n  unpipe() {\n    this.dest.removeListener('drain', this.ondrain)\n  }\n  // only here for the prototype\n  /* c8 ignore start */\n  proxyErrors(_er: any) {}\n  /* c8 ignore stop */\n  end() {\n    this.unpipe()\n    if (this.opts.end) this.dest.end()\n  }\n}\n\n/**\n * Internal class representing a pipe to a destination stream where\n * errors are proxied.\n *\n * @internal\n */\nclass PipeProxyErrors extends Pipe {\n  unpipe() {\n    this.src.removeListener('error', this.proxyErrors)\n    super.unpipe()\n  }\n  constructor(\n    src: Minipass,\n    dest: Minipass.Writable,\n    opts: PipeOptions\n  ) {\n    super(src, dest, opts)\n    this.proxyErrors = (er: Error) => this.dest.emit('error', er)\n    src.on('error', this.proxyErrors)\n  }\n}\n\nexport namespace Minipass {\n  /**\n   * Encoding used to create a stream that outputs strings rather than\n   * Buffer objects.\n   */\n  export type Encoding = BufferEncoding | 'buffer' | null\n\n  /**\n   * Any stream that Minipass can pipe into\n   */\n  export type Writable =\n    | Minipass\n    | NodeJS.WriteStream\n    | (NodeJS.WriteStream & { fd: number })\n    | (EventEmitter & {\n        end(): any\n        write(chunk: any, ...args: any[]): any\n      })\n\n  /**\n   * Any stream that can be read from\n   */\n  export type Readable =\n    | Minipass\n    | NodeJS.ReadStream\n    | (NodeJS.ReadStream & { fd: number })\n    | (EventEmitter & {\n        pause(): any\n        resume(): any\n        pipe(...destArgs: any[]): any\n      })\n\n  /**\n   * Utility type that can be iterated sync or async\n   */\n  export type DualIterable = Iterable & AsyncIterable\n\n  type EventArguments = Record\n\n  /**\n   * The listing of events that a Minipass class can emit.\n   * Extend this when extending the Minipass 
class, and pass as\n   * the third template argument.  The key is the name of the event,\n   * and the value is the argument list.\n   *\n   * Any undeclared events will still be allowed, but the handler will get\n   * arguments as `unknown[]`.\n   */\n  export interface Events\n    extends EventArguments {\n    readable: []\n    data: [chunk: RType]\n    error: [er: unknown]\n    abort: [reason: unknown]\n    drain: []\n    resume: []\n    end: []\n    finish: []\n    prefinish: []\n    close: []\n    [DESTROYED]: [er?: unknown]\n    [ERROR]: [er: unknown]\n  }\n\n  /**\n   * String or buffer-like data that can be joined and sliced\n   */\n  export type ContiguousData =\n    | Buffer\n    | ArrayBufferLike\n    | ArrayBufferView\n    | string\n  export type BufferOrString = Buffer | string\n\n  /**\n   * Options passed to the Minipass constructor.\n   */\n  export type SharedOptions = {\n    /**\n     * Defer all data emission and other events until the end of the\n     * current tick, similar to Node core streams\n     */\n    async?: boolean\n    /**\n     * A signal which will abort the stream\n     */\n    signal?: AbortSignal\n    /**\n     * Output string encoding. 
Set to `null` or `'buffer'` (or omit) to\n     * emit Buffer objects rather than strings.\n     *\n     * Conflicts with `objectMode`\n     */\n    encoding?: BufferEncoding | null | 'buffer'\n    /**\n     * Output data exactly as it was written, supporting non-buffer/string\n     * data (such as arbitrary objects, falsey values, etc.)\n     *\n     * Conflicts with `encoding`\n     */\n    objectMode?: boolean\n  }\n\n  /**\n   * Options for a string encoded output\n   */\n  export type EncodingOptions = SharedOptions & {\n    encoding: BufferEncoding\n    objectMode?: false\n  }\n\n  /**\n   * Options for contiguous data buffer output\n   */\n  export type BufferOptions = SharedOptions & {\n    encoding?: null | 'buffer'\n    objectMode?: false\n  }\n\n  /**\n   * Options for objectMode arbitrary output\n   */\n  export type ObjectModeOptions = SharedOptions & {\n    objectMode: true\n    encoding?: null\n  }\n\n  /**\n   * Utility type to determine allowed options based on read type\n   */\n  export type Options =\n    | ObjectModeOptions\n    | (T extends string\n        ? EncodingOptions\n        : T extends Buffer\n        ? BufferOptions\n        : SharedOptions)\n}\n\nconst isObjectModeOptions = (\n  o: Minipass.SharedOptions\n): o is Minipass.ObjectModeOptions => !!o.objectMode\n\nconst isEncodingOptions = (\n  o: Minipass.SharedOptions\n): o is Minipass.EncodingOptions =>\n  !o.objectMode && !!o.encoding && o.encoding !== 'buffer'\n\n/**\n * Main export, the Minipass class\n *\n * `RType` is the type of data emitted, defaults to Buffer\n *\n * `WType` is the type of data to be written, if RType is buffer or string,\n * then any {@link Minipass.ContiguousData} is allowed.\n *\n * `Events` is the set of event handler signatures that this object\n * will emit, see {@link Minipass.Events}\n */\nexport class Minipass<\n    RType extends unknown = Buffer,\n    WType extends unknown = RType extends Minipass.BufferOrString\n      ? 
Minipass.ContiguousData\n      : RType,\n    Events extends Minipass.Events = Minipass.Events\n  >\n  extends EventEmitter\n  implements Minipass.DualIterable\n{\n  [FLOWING]: boolean = false;\n  [PAUSED]: boolean = false;\n  [PIPES]: Pipe[] = [];\n  [BUFFER]: RType[] = [];\n  [OBJECTMODE]: boolean;\n  [ENCODING]: BufferEncoding | null;\n  [ASYNC]: boolean;\n  [DECODER]: SD | null;\n  [EOF]: boolean = false;\n  [EMITTED_END]: boolean = false;\n  [EMITTING_END]: boolean = false;\n  [CLOSED]: boolean = false;\n  [EMITTED_ERROR]: unknown = null;\n  [BUFFERLENGTH]: number = 0;\n  [DESTROYED]: boolean = false;\n  [SIGNAL]?: AbortSignal;\n  [ABORTED]: boolean = false;\n  [DATALISTENERS]: number = 0;\n  [DISCARDED]: boolean = false\n\n  /**\n   * true if the stream can be written\n   */\n  writable: boolean = true\n  /**\n   * true if the stream can be read\n   */\n  readable: boolean = true\n\n  /**\n   * If `RType` is Buffer, then options do not need to be provided.\n   * Otherwise, an options object must be provided to specify either\n   * {@link Minipass.SharedOptions.objectMode} or\n   * {@link Minipass.SharedOptions.encoding}, as appropriate.\n   */\n  constructor(\n    ...args:\n      | [Minipass.ObjectModeOptions]\n      | (RType extends Buffer\n          ? [] | [Minipass.Options]\n          : [Minipass.Options])\n  ) {\n    const options: Minipass.Options = (args[0] ||\n      {}) as Minipass.Options\n    super()\n    if (options.objectMode && typeof options.encoding === 'string') {\n      throw new TypeError(\n        'Encoding and objectMode may not be used together'\n      )\n    }\n    if (isObjectModeOptions(options)) {\n      this[OBJECTMODE] = true\n      this[ENCODING] = null\n    } else if (isEncodingOptions(options)) {\n      this[ENCODING] = options.encoding\n      this[OBJECTMODE] = false\n    } else {\n      this[OBJECTMODE] = false\n      this[ENCODING] = null\n    }\n    this[ASYNC] = !!options.async\n    this[DECODER] = this[ENCODING]\n      ? 
(new StringDecoder(this[ENCODING]) as SD)\n      : null\n\n    //@ts-ignore - private option for debugging and testing\n    if (options && options.debugExposeBuffer === true) {\n      Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] })\n    }\n    //@ts-ignore - private option for debugging and testing\n    if (options && options.debugExposePipes === true) {\n      Object.defineProperty(this, 'pipes', { get: () => this[PIPES] })\n    }\n\n    const { signal } = options\n    if (signal) {\n      this[SIGNAL] = signal\n      if (signal.aborted) {\n        this[ABORT]()\n      } else {\n        signal.addEventListener('abort', () => this[ABORT]())\n      }\n    }\n  }\n\n  /**\n   * The amount of data stored in the buffer waiting to be read.\n   *\n   * For Buffer strings, this will be the total byte length.\n   * For string encoding streams, this will be the string character length,\n   * according to JavaScript's `string.length` logic.\n   * For objectMode streams, this is a count of the items waiting to be\n   * emitted.\n   */\n  get bufferLength() {\n    return this[BUFFERLENGTH]\n  }\n\n  /**\n   * The `BufferEncoding` currently in use, or `null`\n   */\n  get encoding() {\n    return this[ENCODING]\n  }\n\n  /**\n   * @deprecated - This is a read only property\n   */\n  set encoding(_enc) {\n    throw new Error('Encoding must be set at instantiation time')\n  }\n\n  /**\n   * @deprecated - Encoding may only be set at instantiation time\n   */\n  setEncoding(_enc: Minipass.Encoding) {\n    throw new Error('Encoding must be set at instantiation time')\n  }\n\n  /**\n   * True if this is an objectMode stream\n   */\n  get objectMode() {\n    return this[OBJECTMODE]\n  }\n\n  /**\n   * @deprecated - This is a read-only property\n   */\n  set objectMode(_om) {\n    throw new Error('objectMode must be set at instantiation time')\n  }\n\n  /**\n   * true if this is an async stream\n   */\n  get ['async'](): boolean {\n    return this[ASYNC]\n  }\n  
/**\n   * Set to true to make this stream async.\n   *\n   * Once set, it cannot be unset, as this would potentially cause incorrect\n   * behavior.  Ie, a sync stream can be made async, but an async stream\n   * cannot be safely made sync.\n   */\n  set ['async'](a: boolean) {\n    this[ASYNC] = this[ASYNC] || !!a\n  }\n\n  // drop everything and get out of the flow completely\n  [ABORT]() {\n    this[ABORTED] = true\n    this.emit('abort', this[SIGNAL]?.reason)\n    this.destroy(this[SIGNAL]?.reason)\n  }\n\n  /**\n   * True if the stream has been aborted.\n   */\n  get aborted() {\n    return this[ABORTED]\n  }\n  /**\n   * No-op setter. Stream aborted status is set via the AbortSignal provided\n   * in the constructor options.\n   */\n  set aborted(_) {}\n\n  /**\n   * Write data into the stream\n   *\n   * If the chunk written is a string, and encoding is not specified, then\n   * `utf8` will be assumed. If the stream encoding matches the encoding of\n   * a written string, and the state of the string decoder allows it, then\n   * the string will be passed through to either the output or the internal\n   * buffer without any processing. Otherwise, it will be turned into a\n   * Buffer object for processing into the desired encoding.\n   *\n   * If provided, `cb` function is called immediately before return for\n   * sync streams, or on next tick for async streams, because for this\n   * base class, a chunk is considered \"processed\" once it is accepted\n   * and either emitted or buffered. 
That is, the callback does not indicate\n   * that the chunk has been eventually emitted, though of course child\n   * classes can override this function to do whatever processing is required\n   * and call `super.write(...)` only once processing is completed.\n   */\n  write(chunk: WType, cb?: () => void): boolean\n  write(\n    chunk: WType,\n    encoding?: Minipass.Encoding,\n    cb?: () => void\n  ): boolean\n  write(\n    chunk: WType,\n    encoding?: Minipass.Encoding | (() => void),\n    cb?: () => void\n  ): boolean {\n    if (this[ABORTED]) return false\n    if (this[EOF]) throw new Error('write after end')\n\n    if (this[DESTROYED]) {\n      this.emit(\n        'error',\n        Object.assign(\n          new Error('Cannot call write after a stream was destroyed'),\n          { code: 'ERR_STREAM_DESTROYED' }\n        )\n      )\n      return true\n    }\n\n    if (typeof encoding === 'function') {\n      cb = encoding\n      encoding = 'utf8'\n    }\n\n    if (!encoding) encoding = 'utf8'\n\n    const fn = this[ASYNC] ? 
defer : nodefer\n\n    // convert array buffers and typed array views into buffers\n    // at some point in the future, we may want to do the opposite!\n    // leave strings and buffers as-is\n    // anything is only allowed if in object mode, so throw\n    if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {\n      if (isArrayBufferView(chunk)) {\n        //@ts-ignore - sinful unsafe type changing\n        chunk = Buffer.from(\n          chunk.buffer,\n          chunk.byteOffset,\n          chunk.byteLength\n        )\n      } else if (isArrayBufferLike(chunk)) {\n        //@ts-ignore - sinful unsafe type changing\n        chunk = Buffer.from(chunk)\n      } else if (typeof chunk !== 'string') {\n        throw new Error(\n          'Non-contiguous data written to non-objectMode stream'\n        )\n      }\n    }\n\n    // handle object mode up front, since it's simpler\n    // this yields better performance, fewer checks later.\n    if (this[OBJECTMODE]) {\n      // maybe impossible?\n      /* c8 ignore start */\n      if (this[FLOWING] && this[BUFFERLENGTH] !== 0) this[FLUSH](true)\n      /* c8 ignore stop */\n\n      if (this[FLOWING]) this.emit('data', chunk as unknown as RType)\n      else this[BUFFERPUSH](chunk as unknown as RType)\n\n      if (this[BUFFERLENGTH] !== 0) this.emit('readable')\n\n      if (cb) fn(cb)\n\n      return this[FLOWING]\n    }\n\n    // at this point the chunk is a buffer or string\n    // don't buffer it up or send it to the decoder\n    if (!(chunk as Minipass.BufferOrString).length) {\n      if (this[BUFFERLENGTH] !== 0) this.emit('readable')\n      if (cb) fn(cb)\n      return this[FLOWING]\n    }\n\n    // fast-path writing strings of same encoding to a stream with\n    // an empty buffer, skipping the buffer/decoder dance\n    if (\n      typeof chunk === 'string' &&\n      // unless it is a string already ready for us to use\n      !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)\n    ) {\n      //@ts-ignore - sinful 
unsafe type change\n      chunk = Buffer.from(chunk, encoding)\n    }\n\n    if (Buffer.isBuffer(chunk) && this[ENCODING]) {\n      //@ts-ignore - sinful unsafe type change\n      chunk = this[DECODER].write(chunk)\n    }\n\n    // Note: flushing CAN potentially switch us into not-flowing mode\n    if (this[FLOWING] && this[BUFFERLENGTH] !== 0) this[FLUSH](true)\n\n    if (this[FLOWING]) this.emit('data', chunk as unknown as RType)\n    else this[BUFFERPUSH](chunk as unknown as RType)\n\n    if (this[BUFFERLENGTH] !== 0) this.emit('readable')\n\n    if (cb) fn(cb)\n\n    return this[FLOWING]\n  }\n\n  /**\n   * Low-level explicit read method.\n   *\n   * In objectMode, the argument is ignored, and one item is returned if\n   * available.\n   *\n   * `n` is the number of bytes (or in the case of encoding streams,\n   * characters) to consume. If `n` is not provided, then the entire buffer\n   * is returned, or `null` is returned if no data is available.\n   *\n   * If `n` is greater that the amount of data in the internal buffer,\n   * then `null` is returned.\n   */\n  read(n?: number | null): RType | null {\n    if (this[DESTROYED]) return null\n    this[DISCARDED] = false\n\n    if (\n      this[BUFFERLENGTH] === 0 ||\n      n === 0 ||\n      (n && n > this[BUFFERLENGTH])\n    ) {\n      this[MAYBE_EMIT_END]()\n      return null\n    }\n\n    if (this[OBJECTMODE]) n = null\n\n    if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {\n      // not object mode, so if we have an encoding, then RType is string\n      // otherwise, must be Buffer\n      this[BUFFER] = [\n        (this[ENCODING]\n          ? 
this[BUFFER].join('')\n          : Buffer.concat(\n              this[BUFFER] as Buffer[],\n              this[BUFFERLENGTH]\n            )) as RType,\n      ]\n    }\n\n    const ret = this[READ](n || null, this[BUFFER][0] as RType)\n    this[MAYBE_EMIT_END]()\n    return ret\n  }\n\n  [READ](n: number | null, chunk: RType) {\n    if (this[OBJECTMODE]) this[BUFFERSHIFT]()\n    else {\n      const c = chunk as Minipass.BufferOrString\n      if (n === c.length || n === null) this[BUFFERSHIFT]()\n      else if (typeof c === 'string') {\n        this[BUFFER][0] = c.slice(n) as RType\n        chunk = c.slice(0, n) as RType\n        this[BUFFERLENGTH] -= n\n      } else {\n        this[BUFFER][0] = c.subarray(n) as RType\n        chunk = c.subarray(0, n) as RType\n        this[BUFFERLENGTH] -= n\n      }\n    }\n\n    this.emit('data', chunk)\n\n    if (!this[BUFFER].length && !this[EOF]) this.emit('drain')\n\n    return chunk\n  }\n\n  /**\n   * End the stream, optionally providing a final write.\n   *\n   * See {@link Minipass#write} for argument descriptions\n   */\n  end(cb?: () => void): this\n  end(chunk: WType, cb?: () => void): this\n  end(chunk: WType, encoding?: Minipass.Encoding, cb?: () => void): this\n  end(\n    chunk?: WType | (() => void),\n    encoding?: Minipass.Encoding | (() => void),\n    cb?: () => void\n  ): this {\n    if (typeof chunk === 'function') {\n      cb = chunk as () => void\n      chunk = undefined\n    }\n    if (typeof encoding === 'function') {\n      cb = encoding\n      encoding = 'utf8'\n    }\n    if (chunk !== undefined) this.write(chunk, encoding)\n    if (cb) this.once('end', cb)\n    this[EOF] = true\n    this.writable = false\n\n    // if we haven't written anything, then go ahead and emit,\n    // even if we're not reading.\n    // we'll re-emit if a new 'end' listener is added anyway.\n    // This makes MP more suitable to write-only use cases.\n    if (this[FLOWING] || !this[PAUSED]) this[MAYBE_EMIT_END]()\n    return 
this\n  }\n\n  // don't let the internal resume be overwritten\n  [RESUME]() {\n    if (this[DESTROYED]) return\n\n    if (!this[DATALISTENERS] && !this[PIPES].length) {\n      this[DISCARDED] = true\n    }\n    this[PAUSED] = false\n    this[FLOWING] = true\n    this.emit('resume')\n    if (this[BUFFER].length) this[FLUSH]()\n    else if (this[EOF]) this[MAYBE_EMIT_END]()\n    else this.emit('drain')\n  }\n\n  /**\n   * Resume the stream if it is currently in a paused state\n   *\n   * If called when there are no pipe destinations or `data` event listeners,\n   * this will place the stream in a \"discarded\" state, where all data will\n   * be thrown away. The discarded state is removed if a pipe destination or\n   * data handler is added, if pause() is called, or if any synchronous or\n   * asynchronous iteration is started.\n   */\n  resume() {\n    return this[RESUME]()\n  }\n\n  /**\n   * Pause the stream\n   */\n  pause() {\n    this[FLOWING] = false\n    this[PAUSED] = true\n    this[DISCARDED] = false\n  }\n\n  /**\n   * true if the stream has been forcibly destroyed\n   */\n  get destroyed() {\n    return this[DESTROYED]\n  }\n\n  /**\n   * true if the stream is currently in a flowing state, meaning that\n   * any writes will be immediately emitted.\n   */\n  get flowing() {\n    return this[FLOWING]\n  }\n\n  /**\n   * true if the stream is currently in a paused state\n   */\n  get paused() {\n    return this[PAUSED]\n  }\n\n  [BUFFERPUSH](chunk: RType) {\n    if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1\n    else this[BUFFERLENGTH] += (chunk as Minipass.BufferOrString).length\n    this[BUFFER].push(chunk)\n  }\n\n  [BUFFERSHIFT](): RType {\n    if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1\n    else\n      this[BUFFERLENGTH] -= (\n        this[BUFFER][0] as Minipass.BufferOrString\n      ).length\n    return this[BUFFER].shift() as RType\n  }\n\n  [FLUSH](noDrain: boolean = false) {\n    do {} while (\n      this[FLUSHCHUNK](this[BUFFERSHIFT]()) &&\n    
  this[BUFFER].length\n    )\n\n    if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain')\n  }\n\n  [FLUSHCHUNK](chunk: RType) {\n    this.emit('data', chunk)\n    return this[FLOWING]\n  }\n\n  /**\n   * Pipe all data emitted by this stream into the destination provided.\n   *\n   * Triggers the flow of data.\n   */\n  pipe(dest: W, opts?: PipeOptions): W {\n    if (this[DESTROYED]) return dest\n    this[DISCARDED] = false\n\n    const ended = this[EMITTED_END]\n    opts = opts || {}\n    if (dest === proc.stdout || dest === proc.stderr) opts.end = false\n    else opts.end = opts.end !== false\n    opts.proxyErrors = !!opts.proxyErrors\n\n    // piping an ended stream ends immediately\n    if (ended) {\n      if (opts.end) dest.end()\n    } else {\n      // \"as\" here just ignores the WType, which pipes don't care about,\n      // since they're only consuming from us, and writing to the dest\n      this[PIPES].push(\n        !opts.proxyErrors\n          ? new Pipe(this as Minipass, dest, opts)\n          : new PipeProxyErrors(this as Minipass, dest, opts)\n      )\n      if (this[ASYNC]) defer(() => this[RESUME]())\n      else this[RESUME]()\n    }\n\n    return dest\n  }\n\n  /**\n   * Fully unhook a piped destination stream.\n   *\n   * If the destination stream was the only consumer of this stream (ie,\n   * there are no other piped destinations or `'data'` event listeners)\n   * then the flow of data will stop until there is another consumer or\n   * {@link Minipass#resume} is explicitly called.\n   */\n  unpipe(dest: W) {\n    const p = this[PIPES].find(p => p.dest === dest)\n    if (p) {\n      if (this[PIPES].length === 1) {\n        if (this[FLOWING] && this[DATALISTENERS] === 0) {\n          this[FLOWING] = false\n        }\n        this[PIPES] = []\n      } else this[PIPES].splice(this[PIPES].indexOf(p), 1)\n      p.unpipe()\n    }\n  }\n\n  /**\n   * Alias for {@link Minipass#on}\n   */\n  addListener(\n    ev: Event,\n    handler: 
(...args: Events[Event]) => any\n  ): this {\n    return this.on(ev, handler)\n  }\n\n  /**\n   * Mostly identical to `EventEmitter.on`, with the following\n   * behavior differences to prevent data loss and unnecessary hangs:\n   *\n   * - Adding a 'data' event handler will trigger the flow of data\n   *\n   * - Adding a 'readable' event handler when there is data waiting to be read\n   *   will cause 'readable' to be emitted immediately.\n   *\n   * - Adding an 'endish' event handler ('end', 'finish', etc.) which has\n   *   already passed will cause the event to be emitted immediately and all\n   *   handlers removed.\n   *\n   * - Adding an 'error' event handler after an error has been emitted will\n   *   cause the event to be re-emitted immediately with the error previously\n   *   raised.\n   */\n  on(\n    ev: Event,\n    handler: (...args: Events[Event]) => any\n  ): this {\n    const ret = super.on(\n      ev as string | symbol,\n      handler as (...a: any[]) => any\n    )\n    if (ev === 'data') {\n      this[DISCARDED] = false\n      this[DATALISTENERS]++\n      if (!this[PIPES].length && !this[FLOWING]) {\n        this[RESUME]()\n      }\n    } else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) {\n      super.emit('readable')\n    } else if (isEndish(ev) && this[EMITTED_END]) {\n      super.emit(ev)\n      this.removeAllListeners(ev)\n    } else if (ev === 'error' && this[EMITTED_ERROR]) {\n      const h = handler as (...a: Events['error']) => any\n      if (this[ASYNC]) defer(() => h.call(this, this[EMITTED_ERROR]))\n      else h.call(this, this[EMITTED_ERROR])\n    }\n    return ret\n  }\n\n  /**\n   * Alias for {@link Minipass#off}\n   */\n  removeListener(\n    ev: Event,\n    handler: (...args: Events[Event]) => any\n  ) {\n    return this.off(ev, handler)\n  }\n\n  /**\n   * Mostly identical to `EventEmitter.off`\n   *\n   * If a 'data' event handler is removed, and it was the last consumer\n   * (ie, there are no pipe destinations or other 
'data' event listeners),\n   * then the flow of data will stop until there is another consumer or\n   * {@link Minipass#resume} is explicitly called.\n   */\n  off(\n    ev: Event,\n    handler: (...args: Events[Event]) => any\n  ) {\n    const ret = super.off(\n      ev as string | symbol,\n      handler as (...a: any[]) => any\n    )\n    // if we previously had listeners, and now we don't, and we don't\n    // have any pipes, then stop the flow, unless it's been explicitly\n    // put in a discarded flowing state via stream.resume().\n    if (ev === 'data') {\n      this[DATALISTENERS] = this.listeners('data').length\n      if (\n        this[DATALISTENERS] === 0 &&\n        !this[DISCARDED] &&\n        !this[PIPES].length\n      ) {\n        this[FLOWING] = false\n      }\n    }\n    return ret\n  }\n\n  /**\n   * Mostly identical to `EventEmitter.removeAllListeners`\n   *\n   * If all 'data' event handlers are removed, and they were the last consumer\n   * (ie, there are no pipe destinations), then the flow of data will stop\n   * until there is another consumer or {@link Minipass#resume} is explicitly\n   * called.\n   */\n  removeAllListeners(ev?: Event) {\n    const ret = super.removeAllListeners(ev as string | symbol | undefined)\n    if (ev === 'data' || ev === undefined) {\n      this[DATALISTENERS] = 0\n      if (!this[DISCARDED] && !this[PIPES].length) {\n        this[FLOWING] = false\n      }\n    }\n    return ret\n  }\n\n  /**\n   * true if the 'end' event has been emitted\n   */\n  get emittedEnd() {\n    return this[EMITTED_END]\n  }\n\n  [MAYBE_EMIT_END]() {\n    if (\n      !this[EMITTING_END] &&\n      !this[EMITTED_END] &&\n      !this[DESTROYED] &&\n      this[BUFFER].length === 0 &&\n      this[EOF]\n    ) {\n      this[EMITTING_END] = true\n      this.emit('end')\n      this.emit('prefinish')\n      this.emit('finish')\n      if (this[CLOSED]) this.emit('close')\n      this[EMITTING_END] = false\n    }\n  }\n\n  /**\n   * Mostly identical 
to `EventEmitter.emit`, with the following\n   * behavior differences to prevent data loss and unnecessary hangs:\n   *\n   * If the stream has been destroyed, and the event is something other\n   * than 'close' or 'error', then `false` is returned and no handlers\n   * are called.\n   *\n   * If the event is 'end', and has already been emitted, then the event\n   * is ignored. If the stream is in a paused or non-flowing state, then\n   * the event will be deferred until data flow resumes. If the stream is\n   * async, then handlers will be called on the next tick rather than\n   * immediately.\n   *\n   * If the event is 'close', and 'end' has not yet been emitted, then\n   * the event will be deferred until after 'end' is emitted.\n   *\n   * If the event is 'error', and an AbortSignal was provided for the stream,\n   * and there are no listeners, then the event is ignored, matching the\n   * behavior of node core streams in the presense of an AbortSignal.\n   *\n   * If the event is 'finish' or 'prefinish', then all listeners will be\n   * removed after emitting the event, to prevent double-firing.\n   */\n  emit(\n    ev: Event,\n    ...args: Events[Event]\n  ): boolean {\n    const data = args[0]\n    // error and close are only events allowed after calling destroy()\n    if (\n      ev !== 'error' &&\n      ev !== 'close' &&\n      ev !== DESTROYED &&\n      this[DESTROYED]\n    ) {\n      return false\n    } else if (ev === 'data') {\n      return !this[OBJECTMODE] && !data\n        ? false\n        : this[ASYNC]\n        ? 
(defer(() => this[EMITDATA](data as RType)), true)\n        : this[EMITDATA](data as RType)\n    } else if (ev === 'end') {\n      return this[EMITEND]()\n    } else if (ev === 'close') {\n      this[CLOSED] = true\n      // don't emit close before 'end' and 'finish'\n      if (!this[EMITTED_END] && !this[DESTROYED]) return false\n      const ret = super.emit('close')\n      this.removeAllListeners('close')\n      return ret\n    } else if (ev === 'error') {\n      this[EMITTED_ERROR] = data\n      super.emit(ERROR, data)\n      const ret =\n        !this[SIGNAL] || this.listeners('error').length\n          ? super.emit('error', data)\n          : false\n      this[MAYBE_EMIT_END]()\n      return ret\n    } else if (ev === 'resume') {\n      const ret = super.emit('resume')\n      this[MAYBE_EMIT_END]()\n      return ret\n    } else if (ev === 'finish' || ev === 'prefinish') {\n      const ret = super.emit(ev)\n      this.removeAllListeners(ev)\n      return ret\n    }\n\n    // Some other unknown event\n    const ret = super.emit(ev as string, ...args)\n    this[MAYBE_EMIT_END]()\n    return ret\n  }\n\n  [EMITDATA](data: RType) {\n    for (const p of this[PIPES]) {\n      if (p.dest.write(data as RType) === false) this.pause()\n    }\n    const ret = this[DISCARDED] ? false : super.emit('data', data)\n    this[MAYBE_EMIT_END]()\n    return ret\n  }\n\n  [EMITEND]() {\n    if (this[EMITTED_END]) return false\n\n    this[EMITTED_END] = true\n    this.readable = false\n    return this[ASYNC]\n      ? 
(defer(() => this[EMITEND2]()), true)\n      : this[EMITEND2]()\n  }\n\n  [EMITEND2]() {\n    if (this[DECODER]) {\n      const data = this[DECODER].end()\n      if (data) {\n        for (const p of this[PIPES]) {\n          p.dest.write(data as RType)\n        }\n        if (!this[DISCARDED]) super.emit('data', data)\n      }\n    }\n\n    for (const p of this[PIPES]) {\n      p.end()\n    }\n    const ret = super.emit('end')\n    this.removeAllListeners('end')\n    return ret\n  }\n\n  /**\n   * Return a Promise that resolves to an array of all emitted data once\n   * the stream ends.\n   */\n  async collect(): Promise {\n    const buf: RType[] & { dataLength: number } = Object.assign([], {\n      dataLength: 0,\n    })\n    if (!this[OBJECTMODE]) buf.dataLength = 0\n    // set the promise first, in case an error is raised\n    // by triggering the flow here.\n    const p = this.promise()\n    this.on('data', c => {\n      buf.push(c)\n      if (!this[OBJECTMODE])\n        buf.dataLength += (c as Minipass.BufferOrString).length\n    })\n    await p\n    return buf\n  }\n\n  /**\n   * Return a Promise that resolves to the concatenation of all emitted data\n   * once the stream ends.\n   *\n   * Not allowed on objectMode streams.\n   */\n  async concat(): Promise {\n    if (this[OBJECTMODE]) {\n      throw new Error('cannot concat in objectMode')\n    }\n    const buf = await this.collect()\n    return (\n      this[ENCODING]\n        ? 
buf.join('')\n        : Buffer.concat(buf as Buffer[], buf.dataLength)\n    ) as RType\n  }\n\n  /**\n   * Return a void Promise that resolves once the stream ends.\n   */\n  async promise(): Promise {\n    return new Promise((resolve, reject) => {\n      this.on(DESTROYED, () => reject(new Error('stream destroyed')))\n      this.on('error', er => reject(er))\n      this.on('end', () => resolve())\n    })\n  }\n\n  /**\n   * Asynchronous `for await of` iteration.\n   *\n   * This will continue emitting all chunks until the stream terminates.\n   */\n  [Symbol.asyncIterator](): AsyncGenerator {\n    // set this up front, in case the consumer doesn't call next()\n    // right away.\n    this[DISCARDED] = false\n    let stopped = false\n    const stop = async (): Promise> => {\n      this.pause()\n      stopped = true\n      return { value: undefined, done: true }\n    }\n    const next = (): Promise> => {\n      if (stopped) return stop()\n      const res = this.read()\n      if (res !== null) return Promise.resolve({ done: false, value: res })\n\n      if (this[EOF]) return stop()\n\n      let resolve!: (res: IteratorResult) => void\n      let reject!: (er: unknown) => void\n      const onerr = (er: unknown) => {\n        this.off('data', ondata)\n        this.off('end', onend)\n        this.off(DESTROYED, ondestroy)\n        stop()\n        reject(er)\n      }\n      const ondata = (value: RType) => {\n        this.off('error', onerr)\n        this.off('end', onend)\n        this.off(DESTROYED, ondestroy)\n        this.pause()\n        resolve({ value, done: !!this[EOF] })\n      }\n      const onend = () => {\n        this.off('error', onerr)\n        this.off('data', ondata)\n        this.off(DESTROYED, ondestroy)\n        stop()\n        resolve({ done: true, value: undefined })\n      }\n      const ondestroy = () => onerr(new Error('stream destroyed'))\n      return new Promise>((res, rej) => {\n        reject = rej\n        resolve = res\n        
this.once(DESTROYED, ondestroy)\n        this.once('error', onerr)\n        this.once('end', onend)\n        this.once('data', ondata)\n      })\n    }\n\n    return {\n      next,\n      throw: stop,\n      return: stop,\n      [Symbol.asyncIterator]() {\n        return this\n      },\n      [Symbol.asyncDispose]: async () => {},\n    }\n  }\n\n  /**\n   * Synchronous `for of` iteration.\n   *\n   * The iteration will terminate when the internal buffer runs out, even\n   * if the stream has not yet terminated.\n   */\n  [Symbol.iterator](): Generator {\n    // set this up front, in case the consumer doesn't call next()\n    // right away.\n    this[DISCARDED] = false\n    let stopped = false\n    const stop = (): IteratorReturnResult => {\n      this.pause()\n      this.off(ERROR, stop)\n      this.off(DESTROYED, stop)\n      this.off('end', stop)\n      stopped = true\n      return { done: true, value: undefined }\n    }\n\n    const next = (): IteratorResult => {\n      if (stopped) return stop()\n      const value = this.read()\n      return value === null ? 
stop() : { done: false, value }\n    }\n\n    this.once('end', stop)\n    this.once(ERROR, stop)\n    this.once(DESTROYED, stop)\n\n    return {\n      next,\n      throw: stop,\n      return: stop,\n      [Symbol.iterator]() {\n        return this\n      },\n      [Symbol.dispose]: () => {},\n    }\n  }\n\n  /**\n   * Destroy a stream, preventing it from being used for any further purpose.\n   *\n   * If the stream has a `close()` method, then it will be called on\n   * destruction.\n   *\n   * After destruction, any attempt to write data, read data, or emit most\n   * events will be ignored.\n   *\n   * If an error argument is provided, then it will be emitted in an\n   * 'error' event.\n   */\n  destroy(er?: unknown) {\n    if (this[DESTROYED]) {\n      if (er) this.emit('error', er)\n      else this.emit(DESTROYED)\n      return this\n    }\n\n    this[DESTROYED] = true\n    this[DISCARDED] = true\n\n    // throw away all buffered data, it's never coming out\n    this[BUFFER].length = 0\n    this[BUFFERLENGTH] = 0\n\n    const wc = this as Minipass & {\n      close?: () => void\n    }\n    if (typeof wc.close === 'function' && !this[CLOSED]) wc.close()\n\n    if (er) this.emit('error', er)\n    // if no error to emit, still reject pending promises\n    else this.emit(DESTROYED)\n\n    return this\n  }\n\n  /**\n   * Alias for {@link isStream}\n   *\n   * Former export location, maintained for backwards compatibility.\n   *\n   * @deprecated\n   */\n  static get isStream() {\n    return isStream\n  }\n}\n", "// this is just a very light wrapper around 2 arrays with an offset index\n\nimport { GLOBSTAR } from 'minimatch'\nexport type MMPattern = string | RegExp | typeof GLOBSTAR\n\n// an array of length >= 1\nexport type PatternList = [p: MMPattern, ...rest: MMPattern[]]\nexport type UNCPatternList = [\n  p0: '',\n  p1: '',\n  p2: string,\n  p3: string,\n  ...rest: MMPattern[],\n]\nexport type DrivePatternList = [p0: string, ...rest: MMPattern[]]\nexport type 
AbsolutePatternList = [p0: '', ...rest: MMPattern[]]\nexport type GlobList = [p: string, ...rest: string[]]\n\nconst isPatternList = (pl: MMPattern[]): pl is PatternList =>\n  pl.length >= 1\nconst isGlobList = (gl: string[]): gl is GlobList => gl.length >= 1\n\nconst customInspect = Symbol.for('nodejs.util.inspect.custom')\n\n/**\n * An immutable-ish view on an array of glob parts and their parsed\n * results\n */\nexport class Pattern {\n  readonly #patternList: PatternList\n  readonly #globList: GlobList\n  readonly #index: number\n  readonly length: number\n  readonly #platform: NodeJS.Platform\n  #rest?: Pattern | null\n  #globString?: string\n  #isDrive?: boolean\n  #isUNC?: boolean\n  #isAbsolute?: boolean\n  #followGlobstar: boolean = true\n\n  constructor(\n    patternList: MMPattern[],\n    globList: string[],\n    index: number,\n    platform: NodeJS.Platform,\n  ) {\n    if (!isPatternList(patternList)) {\n      throw new TypeError('empty pattern list')\n    }\n    if (!isGlobList(globList)) {\n      throw new TypeError('empty glob list')\n    }\n    if (globList.length !== patternList.length) {\n      throw new TypeError('mismatched pattern list and glob list lengths')\n    }\n    this.length = patternList.length\n    if (index < 0 || index >= this.length) {\n      throw new TypeError('index out of range')\n    }\n    this.#patternList = patternList\n    this.#globList = globList\n    this.#index = index\n    this.#platform = platform\n\n    // normalize root entries of absolute patterns on initial creation.\n    if (this.#index === 0) {\n      // c: => ['c:/']\n      // C:/ => ['C:/']\n      // C:/x => ['C:/', 'x']\n      // //host/share => ['//host/share/']\n      // //host/share/ => ['//host/share/']\n      // //host/share/x => ['//host/share/', 'x']\n      // /etc => ['/', 'etc']\n      // / => ['/']\n      if (this.isUNC()) {\n        // '' / '' / 'host' / 'share'\n        const [p0, p1, p2, p3, ...prest] = this.#patternList\n        const [g0, 
g1, g2, g3, ...grest] = this.#globList\n        if (prest[0] === '') {\n          // ends in /\n          prest.shift()\n          grest.shift()\n        }\n        const p = [p0, p1, p2, p3, ''].join('/')\n        const g = [g0, g1, g2, g3, ''].join('/')\n        this.#patternList = [p, ...prest]\n        this.#globList = [g, ...grest]\n        this.length = this.#patternList.length\n      } else if (this.isDrive() || this.isAbsolute()) {\n        const [p1, ...prest] = this.#patternList\n        const [g1, ...grest] = this.#globList\n        if (prest[0] === '') {\n          // ends in /\n          prest.shift()\n          grest.shift()\n        }\n        const p = (p1 as string) + '/'\n        const g = g1 + '/'\n        this.#patternList = [p, ...prest]\n        this.#globList = [g, ...grest]\n        this.length = this.#patternList.length\n      }\n    }\n  }\n\n  [customInspect]() {\n    return 'Pattern <' + this.#globList.slice(this.#index).join('/') + '>'\n  }\n\n  /**\n   * The first entry in the parsed list of patterns\n   */\n  pattern(): MMPattern {\n    return this.#patternList[this.#index] as MMPattern\n  }\n\n  /**\n   * true of if pattern() returns a string\n   */\n  isString(): boolean {\n    return typeof this.#patternList[this.#index] === 'string'\n  }\n  /**\n   * true of if pattern() returns GLOBSTAR\n   */\n  isGlobstar(): boolean {\n    return this.#patternList[this.#index] === GLOBSTAR\n  }\n  /**\n   * true if pattern() returns a regexp\n   */\n  isRegExp(): boolean {\n    return this.#patternList[this.#index] instanceof RegExp\n  }\n\n  /**\n   * The /-joined set of glob parts that make up this pattern\n   */\n  globString(): string {\n    return (this.#globString =\n      this.#globString ||\n      (this.#index === 0 ?\n        this.isAbsolute() ?\n          this.#globList[0] + this.#globList.slice(1).join('/')\n        : this.#globList.join('/')\n      : this.#globList.slice(this.#index).join('/')))\n  }\n\n  /**\n   * true if there are 
more pattern parts after this one\n   */\n  hasMore(): boolean {\n    return this.length > this.#index + 1\n  }\n\n  /**\n   * The rest of the pattern after this part, or null if this is the end\n   */\n  rest(): Pattern | null {\n    if (this.#rest !== undefined) return this.#rest\n    if (!this.hasMore()) return (this.#rest = null)\n    this.#rest = new Pattern(\n      this.#patternList,\n      this.#globList,\n      this.#index + 1,\n      this.#platform,\n    )\n    this.#rest.#isAbsolute = this.#isAbsolute\n    this.#rest.#isUNC = this.#isUNC\n    this.#rest.#isDrive = this.#isDrive\n    return this.#rest\n  }\n\n  /**\n   * true if the pattern represents a //unc/path/ on windows\n   */\n  isUNC(): boolean {\n    const pl = this.#patternList\n    return this.#isUNC !== undefined ?\n        this.#isUNC\n      : (this.#isUNC =\n          this.#platform === 'win32' &&\n          this.#index === 0 &&\n          pl[0] === '' &&\n          pl[1] === '' &&\n          typeof pl[2] === 'string' &&\n          !!pl[2] &&\n          typeof pl[3] === 'string' &&\n          !!pl[3])\n  }\n\n  // pattern like C:/...\n  // split = ['C:', ...]\n  // XXX: would be nice to handle patterns like `c:*` to test the cwd\n  // in c: for *, but I don't know of a way to even figure out what that\n  // cwd is without actually chdir'ing into it?\n  /**\n   * True if the pattern starts with a drive letter on Windows\n   */\n  isDrive(): boolean {\n    const pl = this.#patternList\n    return this.#isDrive !== undefined ?\n        this.#isDrive\n      : (this.#isDrive =\n          this.#platform === 'win32' &&\n          this.#index === 0 &&\n          this.length > 1 &&\n          typeof pl[0] === 'string' &&\n          /^[a-z]:$/i.test(pl[0]))\n  }\n\n  // pattern = '/' or '/...' or '/x/...'\n  // split = ['', ''] or ['', ...] 
or ['', 'x', ...]\n  // Drive and UNC both considered absolute on windows\n  /**\n   * True if the pattern is rooted on an absolute path\n   */\n  isAbsolute(): boolean {\n    const pl = this.#patternList\n    return this.#isAbsolute !== undefined ?\n        this.#isAbsolute\n      : (this.#isAbsolute =\n          (pl[0] === '' && pl.length > 1) ||\n          this.isDrive() ||\n          this.isUNC())\n  }\n\n  /**\n   * consume the root of the pattern, and return it\n   */\n  root(): string {\n    const p = this.#patternList[0]\n    return (\n        typeof p === 'string' && this.isAbsolute() && this.#index === 0\n      ) ?\n        p\n      : ''\n  }\n\n  /**\n   * Check to see if the current globstar pattern is allowed to follow\n   * a symbolic link.\n   */\n  checkFollowGlobstar(): boolean {\n    return !(\n      this.#index === 0 ||\n      !this.isGlobstar() ||\n      !this.#followGlobstar\n    )\n  }\n\n  /**\n   * Mark that the current globstar pattern is following a symbolic link\n   */\n  markFollowGlobstar(): boolean {\n    if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar)\n      return false\n    this.#followGlobstar = false\n    return true\n  }\n}\n", "// give it a pattern, and it'll be able to tell you if\n// a given path should be ignored.\n// Ignoring a path ignores its children if the pattern ends in /**\n// Ignores are always parsed in dot:true mode\n\nimport { Minimatch, MinimatchOptions } from 'minimatch'\nimport { Path } from 'path-scurry'\nimport { Pattern } from './pattern.js'\nimport { GlobWalkerOpts } from './walker.js'\n\nexport interface IgnoreLike {\n  ignored?: (p: Path) => boolean\n  childrenIgnored?: (p: Path) => boolean\n  add?: (ignore: string) => void\n}\n\nconst defaultPlatform: NodeJS.Platform =\n  (\n    typeof process === 'object' &&\n    process &&\n    typeof process.platform === 'string'\n  ) ?\n    process.platform\n  : 'linux'\n\n/**\n * Class used to process ignored patterns\n */\nexport class Ignore 
implements IgnoreLike {\n  relative: Minimatch[]\n  relativeChildren: Minimatch[]\n  absolute: Minimatch[]\n  absoluteChildren: Minimatch[]\n  platform: NodeJS.Platform\n  mmopts: MinimatchOptions\n\n  constructor(\n    ignored: string[],\n    {\n      nobrace,\n      nocase,\n      noext,\n      noglobstar,\n      platform = defaultPlatform,\n    }: GlobWalkerOpts,\n  ) {\n    this.relative = []\n    this.absolute = []\n    this.relativeChildren = []\n    this.absoluteChildren = []\n    this.platform = platform\n    this.mmopts = {\n      dot: true,\n      nobrace,\n      nocase,\n      noext,\n      noglobstar,\n      optimizationLevel: 2,\n      platform,\n      nocomment: true,\n      nonegate: true,\n    }\n    for (const ign of ignored) this.add(ign)\n  }\n\n  add(ign: string) {\n    // this is a little weird, but it gives us a clean set of optimized\n    // minimatch matchers, without getting tripped up if one of them\n    // ends in /** inside a brace section, and it's only inefficient at\n    // the start of the walk, not along it.\n    // It'd be nice if the Pattern class just had a .test() method, but\n    // handling globstars is a bit of a pita, and that code already lives\n    // in minimatch anyway.\n    // Another way would be if maybe Minimatch could take its set/globParts\n    // as an option, and then we could at least just use Pattern to test\n    // for absolute-ness.\n    // Yet another way, Minimatch could take an array of glob strings, and\n    // a cwd option, and do the right thing.\n    const mm = new Minimatch(ign, this.mmopts)\n    for (let i = 0; i < mm.set.length; i++) {\n      const parsed = mm.set[i]\n      const globParts = mm.globParts[i]\n      /* c8 ignore start */\n      if (!parsed || !globParts) {\n        throw new Error('invalid pattern object')\n      }\n      // strip off leading ./ portions\n      // https://github.com/isaacs/node-glob/issues/570\n      while (parsed[0] === '.' 
&& globParts[0] === '.') {\n        parsed.shift()\n        globParts.shift()\n      }\n      /* c8 ignore stop */\n      const p = new Pattern(parsed, globParts, 0, this.platform)\n      const m = new Minimatch(p.globString(), this.mmopts)\n      const children = globParts[globParts.length - 1] === '**'\n      const absolute = p.isAbsolute()\n      if (absolute) this.absolute.push(m)\n      else this.relative.push(m)\n      if (children) {\n        if (absolute) this.absoluteChildren.push(m)\n        else this.relativeChildren.push(m)\n      }\n    }\n  }\n\n  ignored(p: Path): boolean {\n    const fullpath = p.fullpath()\n    const fullpaths = `${fullpath}/`\n    const relative = p.relative() || '.'\n    const relatives = `${relative}/`\n    for (const m of this.relative) {\n      if (m.match(relative) || m.match(relatives)) return true\n    }\n    for (const m of this.absolute) {\n      if (m.match(fullpath) || m.match(fullpaths)) return true\n    }\n    return false\n  }\n\n  childrenIgnored(p: Path): boolean {\n    const fullpath = p.fullpath() + '/'\n    const relative = (p.relative() || '.') + '/'\n    for (const m of this.relativeChildren) {\n      if (m.match(relative)) return true\n    }\n    for (const m of this.absoluteChildren) {\n      if (m.match(fullpath)) return true\n    }\n    return false\n  }\n}\n", "// synchronous utility for filtering entries and calculating subwalks\n\nimport { GLOBSTAR, MMRegExp } from 'minimatch'\nimport { Path } from 'path-scurry'\nimport { MMPattern, Pattern } from './pattern.js'\nimport { GlobWalkerOpts } from './walker.js'\n\n/**\n * A cache of which patterns have been processed for a given Path\n */\nexport class HasWalkedCache {\n  store: Map>\n  constructor(store: Map> = new Map()) {\n    this.store = store\n  }\n  copy() {\n    return new HasWalkedCache(new Map(this.store))\n  }\n  hasWalked(target: Path, pattern: Pattern) {\n    return this.store.get(target.fullpath())?.has(pattern.globString())\n  }\n  
storeWalked(target: Path, pattern: Pattern) {\n    const fullpath = target.fullpath()\n    const cached = this.store.get(fullpath)\n    if (cached) cached.add(pattern.globString())\n    else this.store.set(fullpath, new Set([pattern.globString()]))\n  }\n}\n\n/**\n * A record of which paths have been matched in a given walk step,\n * and whether they only are considered a match if they are a directory,\n * and whether their absolute or relative path should be returned.\n */\nexport class MatchRecord {\n  store: Map = new Map()\n  add(target: Path, absolute: boolean, ifDir: boolean) {\n    const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0)\n    const current = this.store.get(target)\n    this.store.set(target, current === undefined ? n : n & current)\n  }\n  // match, absolute, ifdir\n  entries(): [Path, boolean, boolean][] {\n    return [...this.store.entries()].map(([path, n]) => [\n      path,\n      !!(n & 2),\n      !!(n & 1),\n    ])\n  }\n}\n\n/**\n * A collection of patterns that must be processed in a subsequent step\n * for a given path.\n */\nexport class SubWalks {\n  store: Map = new Map()\n  add(target: Path, pattern: Pattern) {\n    if (!target.canReaddir()) {\n      return\n    }\n    const subs = this.store.get(target)\n    if (subs) {\n      if (!subs.find(p => p.globString() === pattern.globString())) {\n        subs.push(pattern)\n      }\n    } else this.store.set(target, [pattern])\n  }\n  get(target: Path): Pattern[] {\n    const subs = this.store.get(target)\n    /* c8 ignore start */\n    if (!subs) {\n      throw new Error('attempting to walk unknown path')\n    }\n    /* c8 ignore stop */\n    return subs\n  }\n  entries(): [Path, Pattern[]][] {\n    return this.keys().map(k => [k, this.store.get(k) as Pattern[]])\n  }\n  keys(): Path[] {\n    return [...this.store.keys()].filter(t => t.canReaddir())\n  }\n}\n\n/**\n * The class that processes patterns for a given path.\n *\n * Handles child entry filtering, and determining whether a path's\n 
* directory contents must be read.\n */\nexport class Processor {\n  hasWalkedCache: HasWalkedCache\n  matches = new MatchRecord()\n  subwalks = new SubWalks()\n  patterns?: Pattern[]\n  follow: boolean\n  dot: boolean\n  opts: GlobWalkerOpts\n\n  constructor(opts: GlobWalkerOpts, hasWalkedCache?: HasWalkedCache) {\n    this.opts = opts\n    this.follow = !!opts.follow\n    this.dot = !!opts.dot\n    this.hasWalkedCache =\n      hasWalkedCache ? hasWalkedCache.copy() : new HasWalkedCache()\n  }\n\n  processPatterns(target: Path, patterns: Pattern[]) {\n    this.patterns = patterns\n    const processingSet: [Path, Pattern][] = patterns.map(p => [target, p])\n\n    // map of paths to the magic-starting subwalks they need to walk\n    // first item in patterns is the filter\n\n    for (let [t, pattern] of processingSet) {\n      this.hasWalkedCache.storeWalked(t, pattern)\n\n      const root = pattern.root()\n      const absolute = pattern.isAbsolute() && this.opts.absolute !== false\n\n      // start absolute patterns at root\n      if (root) {\n        t = t.resolve(\n          root === '/' && this.opts.root !== undefined ?\n            this.opts.root\n          : root,\n        )\n        const rest = pattern.rest()\n        if (!rest) {\n          this.matches.add(t, true, false)\n          continue\n        } else {\n          pattern = rest\n        }\n      }\n\n      if (t.isENOENT()) continue\n\n      let p: MMPattern\n      let rest: Pattern | null\n      let changed = false\n      while (\n        typeof (p = pattern.pattern()) === 'string' &&\n        (rest = pattern.rest())\n      ) {\n        const c = t.resolve(p)\n        t = c\n        pattern = rest\n        changed = true\n      }\n      p = pattern.pattern()\n      rest = pattern.rest()\n      if (changed) {\n        if (this.hasWalkedCache.hasWalked(t, pattern)) continue\n        this.hasWalkedCache.storeWalked(t, pattern)\n      }\n\n      // now we have either a final string for a known entry,\n 
     // more strings for an unknown entry,\n      // or a pattern starting with magic, mounted on t.\n      if (typeof p === 'string') {\n        // must not be final entry, otherwise we would have\n        // concatenated it earlier.\n        const ifDir = p === '..' || p === '' || p === '.'\n        this.matches.add(t.resolve(p), absolute, ifDir)\n        continue\n      } else if (p === GLOBSTAR) {\n        // if no rest, match and subwalk pattern\n        // if rest, process rest and subwalk pattern\n        // if it's a symlink, but we didn't get here by way of a\n        // globstar match (meaning it's the first time THIS globstar\n        // has traversed a symlink), then we follow it. Otherwise, stop.\n        if (\n          !t.isSymbolicLink() ||\n          this.follow ||\n          pattern.checkFollowGlobstar()\n        ) {\n          this.subwalks.add(t, pattern)\n        }\n        const rp = rest?.pattern()\n        const rrest = rest?.rest()\n        if (!rest || ((rp === '' || rp === '.') && !rrest)) {\n          // only HAS to be a dir if it ends in **/ or **/.\n          // but ending in ** will match files as well.\n          this.matches.add(t, absolute, rp === '' || rp === '.')\n        } else {\n          if (rp === '..') {\n            // this would mean you're matching **/.. 
at the fs root,\n            // and no thanks, I'm not gonna test that specific case.\n            /* c8 ignore start */\n            const tp = t.parent || t\n            /* c8 ignore stop */\n            if (!rrest) this.matches.add(tp, absolute, true)\n            else if (!this.hasWalkedCache.hasWalked(tp, rrest)) {\n              this.subwalks.add(tp, rrest)\n            }\n          }\n        }\n      } else if (p instanceof RegExp) {\n        this.subwalks.add(t, pattern)\n      }\n    }\n\n    return this\n  }\n\n  subwalkTargets(): Path[] {\n    return this.subwalks.keys()\n  }\n\n  child() {\n    return new Processor(this.opts, this.hasWalkedCache)\n  }\n\n  // return a new Processor containing the subwalks for each\n  // child entry, and a set of matches, and\n  // a hasWalkedCache that's a copy of this one\n  // then we're going to call\n  filterEntries(parent: Path, entries: Path[]): Processor {\n    const patterns = this.subwalks.get(parent)\n    // put matches and entry walks into the results processor\n    const results = this.child()\n    for (const e of entries) {\n      for (const pattern of patterns) {\n        const absolute = pattern.isAbsolute()\n        const p = pattern.pattern()\n        const rest = pattern.rest()\n        if (p === GLOBSTAR) {\n          results.testGlobstar(e, pattern, rest, absolute)\n        } else if (p instanceof RegExp) {\n          results.testRegExp(e, p, rest, absolute)\n        } else {\n          results.testString(e, p, rest, absolute)\n        }\n      }\n    }\n    return results\n  }\n\n  testGlobstar(\n    e: Path,\n    pattern: Pattern,\n    rest: Pattern | null,\n    absolute: boolean,\n  ) {\n    if (this.dot || !e.name.startsWith('.')) {\n      if (!pattern.hasMore()) {\n        this.matches.add(e, absolute, false)\n      }\n      if (e.canReaddir()) {\n        // if we're in follow mode or it's not a symlink, just keep\n        // testing the same pattern. 
If there's more after the globstar,\n        // then this symlink consumes the globstar. If not, then we can\n        // follow at most ONE symlink along the way, so we mark it, which\n        // also checks to ensure that it wasn't already marked.\n        if (this.follow || !e.isSymbolicLink()) {\n          this.subwalks.add(e, pattern)\n        } else if (e.isSymbolicLink()) {\n          if (rest && pattern.checkFollowGlobstar()) {\n            this.subwalks.add(e, rest)\n          } else if (pattern.markFollowGlobstar()) {\n            this.subwalks.add(e, pattern)\n          }\n        }\n      }\n    }\n    // if the NEXT thing matches this entry, then also add\n    // the rest.\n    if (rest) {\n      const rp = rest.pattern()\n      if (\n        typeof rp === 'string' &&\n        // dots and empty were handled already\n        rp !== '..' &&\n        rp !== '' &&\n        rp !== '.'\n      ) {\n        this.testString(e, rp, rest.rest(), absolute)\n      } else if (rp === '..') {\n        /* c8 ignore start */\n        const ep = e.parent || e\n        /* c8 ignore stop */\n        this.subwalks.add(ep, rest)\n      } else if (rp instanceof RegExp) {\n        this.testRegExp(e, rp, rest.rest(), absolute)\n      }\n    }\n  }\n\n  testRegExp(\n    e: Path,\n    p: MMRegExp,\n    rest: Pattern | null,\n    absolute: boolean,\n  ) {\n    if (!p.test(e.name)) return\n    if (!rest) {\n      this.matches.add(e, absolute, false)\n    } else {\n      this.subwalks.add(e, rest)\n    }\n  }\n\n  testString(e: Path, p: string, rest: Pattern | null, absolute: boolean) {\n    // should never happen?\n    if (!e.isNamed(p)) return\n    if (!rest) {\n      this.matches.add(e, absolute, false)\n    } else {\n      this.subwalks.add(e, rest)\n    }\n  }\n}\n", "/**\n * Single-use utility classes to provide functionality to the {@link Glob}\n * methods.\n *\n * @module\n */\nimport { Minipass } from 'minipass'\nimport { Path } from 'path-scurry'\nimport { Ignore, 
IgnoreLike } from './ignore.js'\n\n// XXX can we somehow make it so that it NEVER processes a given path more than\n// once, enough that the match set tracking is no longer needed?  that'd speed\n// things up a lot.  Or maybe bring back nounique, and skip it in that case?\n\n// a single minimatch set entry with 1 or more parts\nimport { Pattern } from './pattern.js'\nimport { Processor } from './processor.js'\n\nexport interface GlobWalkerOpts {\n  absolute?: boolean\n  allowWindowsEscape?: boolean\n  cwd?: string | URL\n  dot?: boolean\n  dotRelative?: boolean\n  follow?: boolean\n  ignore?: string | string[] | IgnoreLike\n  mark?: boolean\n  matchBase?: boolean\n  // Note: maxDepth here means \"maximum actual Path.depth()\",\n  // not \"maximum depth beyond cwd\"\n  maxDepth?: number\n  nobrace?: boolean\n  nocase?: boolean\n  nodir?: boolean\n  noext?: boolean\n  noglobstar?: boolean\n  platform?: NodeJS.Platform\n  posix?: boolean\n  realpath?: boolean\n  root?: string\n  stat?: boolean\n  signal?: AbortSignal\n  windowsPathsNoEscape?: boolean\n  withFileTypes?: boolean\n  includeChildMatches?: boolean\n}\n\nexport type GWOFileTypesTrue = GlobWalkerOpts & {\n  withFileTypes: true\n}\nexport type GWOFileTypesFalse = GlobWalkerOpts & {\n  withFileTypes: false\n}\nexport type GWOFileTypesUnset = GlobWalkerOpts & {\n  withFileTypes?: undefined\n}\n\nexport type Result =\n  O extends GWOFileTypesTrue ? Path\n  : O extends GWOFileTypesFalse ? string\n  : O extends GWOFileTypesUnset ? string\n  : Path | string\n\nexport type Matches =\n  O extends GWOFileTypesTrue ? Set\n  : O extends GWOFileTypesFalse ? Set\n  : O extends GWOFileTypesUnset ? Set\n  : Set\n\nexport type MatchStream = Minipass<\n  Result,\n  Result\n>\n\nconst makeIgnore = (\n  ignore: string | string[] | IgnoreLike,\n  opts: GlobWalkerOpts,\n): IgnoreLike =>\n  typeof ignore === 'string' ? new Ignore([ignore], opts)\n  : Array.isArray(ignore) ? 
new Ignore(ignore, opts)\n  : ignore\n\n/**\n * basic walking utilities that all the glob walker types use\n */\nexport abstract class GlobUtil {\n  path: Path\n  patterns: Pattern[]\n  opts: O\n  seen: Set = new Set()\n  paused: boolean = false\n  aborted: boolean = false\n  #onResume: (() => any)[] = []\n  #ignore?: IgnoreLike\n  #sep: '\\\\' | '/'\n  signal?: AbortSignal\n  maxDepth: number\n  includeChildMatches: boolean\n\n  constructor(patterns: Pattern[], path: Path, opts: O)\n  constructor(patterns: Pattern[], path: Path, opts: O) {\n    this.patterns = patterns\n    this.path = path\n    this.opts = opts\n    this.#sep = !opts.posix && opts.platform === 'win32' ? '\\\\' : '/'\n    this.includeChildMatches = opts.includeChildMatches !== false\n    if (opts.ignore || !this.includeChildMatches) {\n      this.#ignore = makeIgnore(opts.ignore ?? [], opts)\n      if (\n        !this.includeChildMatches &&\n        typeof this.#ignore.add !== 'function'\n      ) {\n        const m = 'cannot ignore child matches, ignore lacks add() method.'\n        throw new Error(m)\n      }\n    }\n    // ignore, always set with maxDepth, but it's optional on the\n    // GlobOptions type\n    /* c8 ignore start */\n    this.maxDepth = opts.maxDepth || Infinity\n    /* c8 ignore stop */\n    if (opts.signal) {\n      this.signal = opts.signal\n      this.signal.addEventListener('abort', () => {\n        this.#onResume.length = 0\n      })\n    }\n  }\n\n  #ignored(path: Path): boolean {\n    return this.seen.has(path) || !!this.#ignore?.ignored?.(path)\n  }\n  #childrenIgnored(path: Path): boolean {\n    return !!this.#ignore?.childrenIgnored?.(path)\n  }\n\n  // backpressure mechanism\n  pause() {\n    this.paused = true\n  }\n  resume() {\n    /* c8 ignore start */\n    if (this.signal?.aborted) return\n    /* c8 ignore stop */\n    this.paused = false\n    let fn: (() => any) | undefined = undefined\n    while (!this.paused && (fn = this.#onResume.shift())) {\n      fn()\n    
}\n  }\n  onResume(fn: () => any) {\n    if (this.signal?.aborted) return\n    /* c8 ignore start */\n    if (!this.paused) {\n      fn()\n    } else {\n      /* c8 ignore stop */\n      this.#onResume.push(fn)\n    }\n  }\n\n  // do the requisite realpath/stat checking, and return the path\n  // to add or undefined to filter it out.\n  async matchCheck(e: Path, ifDir: boolean): Promise {\n    if (ifDir && this.opts.nodir) return undefined\n    let rpc: Path | undefined\n    if (this.opts.realpath) {\n      rpc = e.realpathCached() || (await e.realpath())\n      if (!rpc) return undefined\n      e = rpc\n    }\n    const needStat = e.isUnknown() || this.opts.stat\n    const s = needStat ? await e.lstat() : e\n    if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) {\n      const target = await s.realpath()\n      /* c8 ignore start */\n      if (target && (target.isUnknown() || this.opts.stat)) {\n        await target.lstat()\n      }\n      /* c8 ignore stop */\n    }\n    return this.matchCheckTest(s, ifDir)\n  }\n\n  matchCheckTest(e: Path | undefined, ifDir: boolean): Path | undefined {\n    return (\n        e &&\n          (this.maxDepth === Infinity || e.depth() <= this.maxDepth) &&\n          (!ifDir || e.canReaddir()) &&\n          (!this.opts.nodir || !e.isDirectory()) &&\n          (!this.opts.nodir ||\n            !this.opts.follow ||\n            !e.isSymbolicLink() ||\n            !e.realpathCached()?.isDirectory()) &&\n          !this.#ignored(e)\n      ) ?\n        e\n      : undefined\n  }\n\n  matchCheckSync(e: Path, ifDir: boolean): Path | undefined {\n    if (ifDir && this.opts.nodir) return undefined\n    let rpc: Path | undefined\n    if (this.opts.realpath) {\n      rpc = e.realpathCached() || e.realpathSync()\n      if (!rpc) return undefined\n      e = rpc\n    }\n    const needStat = e.isUnknown() || this.opts.stat\n    const s = needStat ? 
e.lstatSync() : e\n    if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) {\n      const target = s.realpathSync()\n      if (target && (target?.isUnknown() || this.opts.stat)) {\n        target.lstatSync()\n      }\n    }\n    return this.matchCheckTest(s, ifDir)\n  }\n\n  abstract matchEmit(p: Result): void\n  abstract matchEmit(p: string | Path): void\n\n  matchFinish(e: Path, absolute: boolean) {\n    if (this.#ignored(e)) return\n    // we know we have an ignore if this is false, but TS doesn't\n    if (!this.includeChildMatches && this.#ignore?.add) {\n      const ign = `${e.relativePosix()}/**`\n      this.#ignore.add(ign)\n    }\n    const abs =\n      this.opts.absolute === undefined ? absolute : this.opts.absolute\n    this.seen.add(e)\n    const mark = this.opts.mark && e.isDirectory() ? this.#sep : ''\n    // ok, we have what we need!\n    if (this.opts.withFileTypes) {\n      this.matchEmit(e)\n    } else if (abs) {\n      const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath()\n      this.matchEmit(abs + mark)\n    } else {\n      const rel = this.opts.posix ? e.relativePosix() : e.relative()\n      const pre =\n        this.opts.dotRelative && !rel.startsWith('..' + this.#sep) ?\n          '.' + this.#sep\n        : ''\n      this.matchEmit(!rel ? '.' 
+ mark : pre + rel + mark)\n    }\n  }\n\n  async match(e: Path, absolute: boolean, ifDir: boolean): Promise {\n    const p = await this.matchCheck(e, ifDir)\n    if (p) this.matchFinish(p, absolute)\n  }\n\n  matchSync(e: Path, absolute: boolean, ifDir: boolean): void {\n    const p = this.matchCheckSync(e, ifDir)\n    if (p) this.matchFinish(p, absolute)\n  }\n\n  walkCB(target: Path, patterns: Pattern[], cb: () => any) {\n    /* c8 ignore start */\n    if (this.signal?.aborted) cb()\n    /* c8 ignore stop */\n    this.walkCB2(target, patterns, new Processor(this.opts), cb)\n  }\n\n  walkCB2(\n    target: Path,\n    patterns: Pattern[],\n    processor: Processor,\n    cb: () => any,\n  ) {\n    if (this.#childrenIgnored(target)) return cb()\n    if (this.signal?.aborted) cb()\n    if (this.paused) {\n      this.onResume(() => this.walkCB2(target, patterns, processor, cb))\n      return\n    }\n    processor.processPatterns(target, patterns)\n\n    // done processing.  all of the above is sync, can be abstracted out.\n    // subwalks is a map of paths to the entry filters they need\n    // matches is a map of paths to [absolute, ifDir] tuples.\n    let tasks = 1\n    const next = () => {\n      if (--tasks === 0) cb()\n    }\n\n    for (const [m, absolute, ifDir] of processor.matches.entries()) {\n      if (this.#ignored(m)) continue\n      tasks++\n      this.match(m, absolute, ifDir).then(() => next())\n    }\n\n    for (const t of processor.subwalkTargets()) {\n      if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {\n        continue\n      }\n      tasks++\n      const childrenCached = t.readdirCached()\n      if (t.calledReaddir())\n        this.walkCB3(t, childrenCached, processor, next)\n      else {\n        t.readdirCB(\n          (_, entries) => this.walkCB3(t, entries, processor, next),\n          true,\n        )\n      }\n    }\n\n    next()\n  }\n\n  walkCB3(\n    target: Path,\n    entries: Path[],\n    processor: Processor,\n    cb: 
() => any,\n  ) {\n    processor = processor.filterEntries(target, entries)\n\n    let tasks = 1\n    const next = () => {\n      if (--tasks === 0) cb()\n    }\n\n    for (const [m, absolute, ifDir] of processor.matches.entries()) {\n      if (this.#ignored(m)) continue\n      tasks++\n      this.match(m, absolute, ifDir).then(() => next())\n    }\n    for (const [target, patterns] of processor.subwalks.entries()) {\n      tasks++\n      this.walkCB2(target, patterns, processor.child(), next)\n    }\n\n    next()\n  }\n\n  walkCBSync(target: Path, patterns: Pattern[], cb: () => any) {\n    /* c8 ignore start */\n    if (this.signal?.aborted) cb()\n    /* c8 ignore stop */\n    this.walkCB2Sync(target, patterns, new Processor(this.opts), cb)\n  }\n\n  walkCB2Sync(\n    target: Path,\n    patterns: Pattern[],\n    processor: Processor,\n    cb: () => any,\n  ) {\n    if (this.#childrenIgnored(target)) return cb()\n    if (this.signal?.aborted) cb()\n    if (this.paused) {\n      this.onResume(() =>\n        this.walkCB2Sync(target, patterns, processor, cb),\n      )\n      return\n    }\n    processor.processPatterns(target, patterns)\n\n    // done processing.  
all of the above is sync, can be abstracted out.\n    // subwalks is a map of paths to the entry filters they need\n    // matches is a map of paths to [absolute, ifDir] tuples.\n    let tasks = 1\n    const next = () => {\n      if (--tasks === 0) cb()\n    }\n\n    for (const [m, absolute, ifDir] of processor.matches.entries()) {\n      if (this.#ignored(m)) continue\n      this.matchSync(m, absolute, ifDir)\n    }\n\n    for (const t of processor.subwalkTargets()) {\n      if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {\n        continue\n      }\n      tasks++\n      const children = t.readdirSync()\n      this.walkCB3Sync(t, children, processor, next)\n    }\n\n    next()\n  }\n\n  walkCB3Sync(\n    target: Path,\n    entries: Path[],\n    processor: Processor,\n    cb: () => any,\n  ) {\n    processor = processor.filterEntries(target, entries)\n\n    let tasks = 1\n    const next = () => {\n      if (--tasks === 0) cb()\n    }\n\n    for (const [m, absolute, ifDir] of processor.matches.entries()) {\n      if (this.#ignored(m)) continue\n      this.matchSync(m, absolute, ifDir)\n    }\n    for (const [target, patterns] of processor.subwalks.entries()) {\n      tasks++\n      this.walkCB2Sync(target, patterns, processor.child(), next)\n    }\n\n    next()\n  }\n}\n\nexport class GlobWalker<\n  O extends GlobWalkerOpts = GlobWalkerOpts,\n> extends GlobUtil {\n  matches = new Set>()\n\n  constructor(patterns: Pattern[], path: Path, opts: O) {\n    super(patterns, path, opts)\n  }\n\n  matchEmit(e: Result): void {\n    this.matches.add(e)\n  }\n\n  async walk(): Promise>> {\n    if (this.signal?.aborted) throw this.signal.reason\n    if (this.path.isUnknown()) {\n      await this.path.lstat()\n    }\n    await new Promise((res, rej) => {\n      this.walkCB(this.path, this.patterns, () => {\n        if (this.signal?.aborted) {\n          rej(this.signal.reason)\n        } else {\n          res(this.matches)\n        }\n      })\n    })\n    return 
this.matches\n  }\n\n  walkSync(): Set> {\n    if (this.signal?.aborted) throw this.signal.reason\n    if (this.path.isUnknown()) {\n      this.path.lstatSync()\n    }\n    // nothing for the callback to do, because this never pauses\n    this.walkCBSync(this.path, this.patterns, () => {\n      if (this.signal?.aborted) throw this.signal.reason\n    })\n    return this.matches\n  }\n}\n\nexport class GlobStream<\n  O extends GlobWalkerOpts = GlobWalkerOpts,\n> extends GlobUtil {\n  results: Minipass, Result>\n\n  constructor(patterns: Pattern[], path: Path, opts: O) {\n    super(patterns, path, opts)\n    this.results = new Minipass, Result>({\n      signal: this.signal,\n      objectMode: true,\n    })\n    this.results.on('drain', () => this.resume())\n    this.results.on('resume', () => this.resume())\n  }\n\n  matchEmit(e: Result): void {\n    this.results.write(e)\n    if (!this.results.flowing) this.pause()\n  }\n\n  stream(): MatchStream {\n    const target = this.path\n    if (target.isUnknown()) {\n      target.lstat().then(() => {\n        this.walkCB(target, this.patterns, () => this.results.end())\n      })\n    } else {\n      this.walkCB(target, this.patterns, () => this.results.end())\n    }\n    return this.results\n  }\n\n  streamSync(): MatchStream {\n    if (this.path.isUnknown()) {\n      this.path.lstatSync()\n    }\n    this.walkCBSync(this.path, this.patterns, () => this.results.end())\n    return this.results\n  }\n}\n", "import { Minimatch } from 'minimatch'\nimport { GlobOptions } from './glob.js'\n\n/**\n * Return true if the patterns provided contain any magic glob characters,\n * given the options provided.\n *\n * Brace expansion is not considered \"magic\" unless the `magicalBraces` option\n * is set, as brace expansion just turns one string into an array of strings.\n * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and\n * `'xby'` both do not contain any magic glob characters, and it's treated the\n * same as if 
you had called it on `['xay', 'xby']`. When `magicalBraces:true`\n * is in the options, brace expansion _is_ treated as a pattern having magic.\n */\nexport const hasMagic = (\n  pattern: string | string[],\n  options: GlobOptions = {},\n): boolean => {\n  if (!Array.isArray(pattern)) {\n    pattern = [pattern]\n  }\n  for (const p of pattern) {\n    if (new Minimatch(p, options).hasMagic()) return true\n  }\n  return false\n}\n", "import { escape, unescape } from 'minimatch'\nimport { Minipass } from 'minipass'\nimport { Path } from 'path-scurry'\nimport type {\n  GlobOptions,\n  GlobOptionsWithFileTypesFalse,\n  GlobOptionsWithFileTypesTrue,\n  GlobOptionsWithFileTypesUnset,\n} from './glob.js'\nimport { Glob } from './glob.js'\nimport { hasMagic } from './has-magic.js'\n\nexport { escape, unescape } from 'minimatch'\nexport type {\n  FSOption,\n  Path,\n  WalkOptions,\n  WalkOptionsWithFileTypesTrue,\n  WalkOptionsWithFileTypesUnset,\n} from 'path-scurry'\nexport { Glob } from './glob.js'\nexport type {\n  GlobOptions,\n  GlobOptionsWithFileTypesFalse,\n  GlobOptionsWithFileTypesTrue,\n  GlobOptionsWithFileTypesUnset,\n} from './glob.js'\nexport { hasMagic } from './has-magic.js'\nexport { Ignore } from './ignore.js'\nexport type { IgnoreLike } from './ignore.js'\nexport type { MatchStream } from './walker.js'\n\n/**\n * Syncronous form of {@link globStream}. 
Will read all the matches as fast as\n * you consume them, even all in a single tick if you consume them immediately,\n * but will still respond to backpressure if they're not consumed immediately.\n */\nexport function globStreamSync(\n  pattern: string | string[],\n  options: GlobOptionsWithFileTypesTrue,\n): Minipass\nexport function globStreamSync(\n  pattern: string | string[],\n  options: GlobOptionsWithFileTypesFalse,\n): Minipass\nexport function globStreamSync(\n  pattern: string | string[],\n  options: GlobOptionsWithFileTypesUnset,\n): Minipass\nexport function globStreamSync(\n  pattern: string | string[],\n  options: GlobOptions,\n): Minipass | Minipass\nexport function globStreamSync(\n  pattern: string | string[],\n  options: GlobOptions = {},\n) {\n  return new Glob(pattern, options).streamSync()\n}\n\n/**\n * Return a stream that emits all the strings or `Path` objects and\n * then emits `end` when completed.\n */\nexport function globStream(\n  pattern: string | string[],\n  options: GlobOptionsWithFileTypesFalse,\n): Minipass\nexport function globStream(\n  pattern: string | string[],\n  options: GlobOptionsWithFileTypesTrue,\n): Minipass\nexport function globStream(\n  pattern: string | string[],\n  options?: GlobOptionsWithFileTypesUnset | undefined,\n): Minipass\nexport function globStream(\n  pattern: string | string[],\n  options: GlobOptions,\n): Minipass | Minipass\nexport function globStream(\n  pattern: string | string[],\n  options: GlobOptions = {},\n) {\n  return new Glob(pattern, options).stream()\n}\n\n/**\n * Synchronous form of {@link glob}\n */\nexport function globSync(\n  pattern: string | string[],\n  options: GlobOptionsWithFileTypesFalse,\n): string[]\nexport function globSync(\n  pattern: string | string[],\n  options: GlobOptionsWithFileTypesTrue,\n): Path[]\nexport function globSync(\n  pattern: string | string[],\n  options?: GlobOptionsWithFileTypesUnset | undefined,\n): string[]\nexport function globSync(\n  pattern: 
string | string[],\n  options: GlobOptions,\n): Path[] | string[]\nexport function globSync(\n  pattern: string | string[],\n  options: GlobOptions = {},\n) {\n  return new Glob(pattern, options).walkSync()\n}\n\n/**\n * Perform an asynchronous glob search for the pattern(s) specified. Returns\n * [Path](https://isaacs.github.io/path-scurry/classes/PathBase) objects if the\n * {@link withFileTypes} option is set to `true`. See {@link GlobOptions} for\n * full option descriptions.\n */\nasync function glob_(\n  pattern: string | string[],\n  options?: GlobOptionsWithFileTypesUnset | undefined,\n): Promise\nasync function glob_(\n  pattern: string | string[],\n  options: GlobOptionsWithFileTypesTrue,\n): Promise\nasync function glob_(\n  pattern: string | string[],\n  options: GlobOptionsWithFileTypesFalse,\n): Promise\nasync function glob_(\n  pattern: string | string[],\n  options: GlobOptions,\n): Promise\nasync function glob_(\n  pattern: string | string[],\n  options: GlobOptions = {},\n) {\n  return new Glob(pattern, options).walk()\n}\n\n/**\n * Return a sync iterator for walking glob pattern matches.\n */\nexport function globIterateSync(\n  pattern: string | string[],\n  options?: GlobOptionsWithFileTypesUnset | undefined,\n): Generator\nexport function globIterateSync(\n  pattern: string | string[],\n  options: GlobOptionsWithFileTypesTrue,\n): Generator\nexport function globIterateSync(\n  pattern: string | string[],\n  options: GlobOptionsWithFileTypesFalse,\n): Generator\nexport function globIterateSync(\n  pattern: string | string[],\n  options: GlobOptions,\n): Generator | Generator\nexport function globIterateSync(\n  pattern: string | string[],\n  options: GlobOptions = {},\n) {\n  return new Glob(pattern, options).iterateSync()\n}\n\n/**\n * Return an async iterator for walking glob pattern matches.\n */\nexport function globIterate(\n  pattern: string | string[],\n  options?: GlobOptionsWithFileTypesUnset | undefined,\n): AsyncGenerator\nexport 
function globIterate(\n  pattern: string | string[],\n  options: GlobOptionsWithFileTypesTrue,\n): AsyncGenerator\nexport function globIterate(\n  pattern: string | string[],\n  options: GlobOptionsWithFileTypesFalse,\n): AsyncGenerator\nexport function globIterate(\n  pattern: string | string[],\n  options: GlobOptions,\n): AsyncGenerator | AsyncGenerator\nexport function globIterate(\n  pattern: string | string[],\n  options: GlobOptions = {},\n) {\n  return new Glob(pattern, options).iterate()\n}\n\n// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc\nexport const streamSync = globStreamSync\nexport const stream = Object.assign(globStream, { sync: globStreamSync })\nexport const iterateSync = globIterateSync\nexport const iterate = Object.assign(globIterate, {\n  sync: globIterateSync,\n})\nexport const sync = Object.assign(globSync, {\n  stream: globStreamSync,\n  iterate: globIterateSync,\n})\n\nexport const glob = Object.assign(glob_, {\n  glob: glob_,\n  globSync,\n  sync,\n  globStream,\n  stream,\n  globStreamSync,\n  streamSync,\n  globIterate,\n  iterate,\n  globIterateSync,\n  iterateSync,\n  Glob,\n  hasMagic,\n  escape,\n  unescape,\n})\nglob.glob = glob\n", "import { execFileSync } from 'child_process';\nimport { existsSync, readdirSync } from 'fs';\nimport { join, resolve } from 'path';\n\nimport type { ValidatedCdsCommand } from './types';\nimport { fileExists } from '../../filesystem';\nimport { cdsExtractorLog } from '../../logging';\nimport type { CdsDependencyGraph } from '../parser/types';\n\n/** Default timeout for command execution in milliseconds. 
**/\nexport const DEFAULT_COMMAND_TIMEOUT_MS = 10000;\n\n/**\n * Cache for CDS command test results to avoid running the same CLI commands repeatedly.\n */\ninterface CdsCommandCache {\n  /** Map of command strings to their test results */\n  commandResults: Map;\n  /** Available cache directories discovered during testing */\n  availableCacheDirs: string[];\n  /** Global command test results */\n  globalCommand?: string;\n  /** Whether cache has been initialized */\n  initialized: boolean;\n}\n\n// Global cache instance to share results across all calls\nconst cdsCommandCache: CdsCommandCache = {\n  commandResults: new Map(),\n  availableCacheDirs: [],\n  initialized: false,\n};\n\n/**\n * Information about CDS version dependencies from a project's package.json\n */\ninterface CdsVersionInfo {\n  /** Semver range for @sap/cds */\n  cdsVersion?: string;\n  /** Semver range for @sap/cds-dk */\n  cdsDkVersion?: string;\n  /** Computed compatible @sap/cds-dk version */\n  preferredDkVersion?: string;\n}\n\n/**\n * Factory functions to create {@link ValidatedCdsCommand} instances.\n */\nconst createCdsCommands = {\n  // Global CDS command\n  cds: (): ValidatedCdsCommand => ({\n    executable: 'cds',\n    args: [],\n    originalCommand: 'cds',\n  }),\n  // NPX with @sap/cds package\n  npxCds: (): ValidatedCdsCommand => ({\n    executable: 'npx',\n    args: ['--yes', '--package', '@sap/cds', 'cds'],\n    originalCommand: 'npx --yes --package @sap/cds cds',\n  }),\n  // NPX with @sap/cds-dk package\n  npxCdsDk: (): ValidatedCdsCommand => ({\n    executable: 'npx',\n    args: ['--yes', '--package', '@sap/cds-dk', 'cds'],\n    originalCommand: 'npx --yes --package @sap/cds-dk cds',\n  }),\n  // NPX with @sap/cds-dk package (alternative flag)\n  npxCdsDkAlt: (): ValidatedCdsCommand => ({\n    executable: 'npx',\n    args: ['--yes', '@sap/cds-dk', 'cds'],\n    originalCommand: 'npx --yes @sap/cds-dk cds',\n  }),\n  // NPX with versioned @sap/cds-dk package\n  
npxCdsDkWithVersion: (version: string): ValidatedCdsCommand => ({\n    executable: 'npx',\n    args: ['--yes', '--package', `@sap/cds-dk@${version}`, 'cds'],\n    originalCommand: `npx --yes --package @sap/cds-dk@${version} cds`,\n  }),\n  // NPX with versioned @sap/cds package\n  npxCdsWithVersion: (version: string): ValidatedCdsCommand => ({\n    executable: 'npx',\n    args: ['--yes', '--package', `@sap/cds@${version}`, 'cds'],\n    originalCommand: `npx --yes --package @sap/cds@${version} cds`,\n  }),\n};\n\n/**\n * Converts a command string to a ValidatedCdsCommand object\n * @param commandString The command string to convert\n * @returns A ValidatedCdsCommand object\n */\nfunction parseCommandString(commandString: string): ValidatedCdsCommand {\n  const parts = commandString.trim().split(/\\s+/);\n  if (parts.length === 0) {\n    throw new Error('Empty command string');\n  }\n\n  const executable = parts[0];\n  const args = parts.slice(1);\n\n  return {\n    executable,\n    args,\n    originalCommand: commandString,\n  };\n}\n\n/**\n * Determines version-aware CDS commands for both primary and retry scenarios\n * @param cacheDir Optional cache directory\n * @param sourceRoot Source root directory\n * @param projectPath Project path for version resolution\n * @param dependencyGraph Dependency graph for version information\n * @returns Object containing both primary and retry commands\n */\nexport function determineVersionAwareCdsCommands(\n  cacheDir: string | undefined,\n  sourceRoot: string,\n  projectPath?: string,\n  dependencyGraph?: CdsDependencyGraph,\n): { primaryCommand: ValidatedCdsCommand; retryCommand: ValidatedCdsCommand } {\n  try {\n    // Get the best command string using existing logic\n    const commandString = getBestCdsCommand(cacheDir, sourceRoot, projectPath, dependencyGraph);\n\n    // Convert to ValidatedCdsCommand for primary use\n    const primaryCommand = parseCommandString(commandString);\n\n    // For retry command, always try to 
use a version-aware npx command if project context is available\n    let retryCommand: ValidatedCdsCommand;\n\n    if (projectPath && dependencyGraph) {\n      try {\n        const versionInfo = resolveCdsVersions(projectPath, dependencyGraph);\n        if (versionInfo?.preferredDkVersion) {\n          // Use version-specific command for retry\n          retryCommand = createCdsCommands.npxCdsDkWithVersion(versionInfo.preferredDkVersion);\n        } else if (versionInfo?.cdsDkVersion) {\n          // Use explicit cds-dk version\n          retryCommand = createCdsCommands.npxCdsDkWithVersion(versionInfo.cdsDkVersion);\n        } else {\n          // Fall back to generic npx cds-dk\n          retryCommand = createCdsCommands.npxCdsDk();\n        }\n      } catch (error) {\n        // If version resolution fails, fall back to generic npx\n        cdsExtractorLog(\n          'warn',\n          `Failed to resolve version info for ${projectPath}: ${String(error)}`,\n        );\n        retryCommand = createCdsCommands.npxCdsDk();\n      }\n    } else {\n      // No project context - use generic npx as fallback\n      retryCommand = createCdsCommands.npxCdsDk();\n    }\n\n    return { primaryCommand, retryCommand };\n  } catch (error) {\n    // If anything fails, fall back to simple commands\n    cdsExtractorLog('error', `Failed to determine version-aware commands: ${String(error)}`);\n    const fallbackCommand = parseCommandString('cds');\n    return {\n      primaryCommand: fallbackCommand,\n      retryCommand: createCdsCommands.npxCdsDk(),\n    };\n  }\n} /**\n * Creates a validated CDS command for an absolute path to a CDS executable.\n * @param absolutePath The absolute path to the CDS executable\n * @returns A {@link ValidatedCdsCommand} if the path exists and is valid, null otherwise\n */\nfunction createCdsCommandForPath(absolutePath: string): ValidatedCdsCommand | null {\n  try {\n    const resolvedPath = resolve(absolutePath);\n    if (resolvedPath && 
fileExists(resolvedPath)) {\n      return {\n        executable: resolvedPath,\n        args: [],\n        originalCommand: absolutePath,\n      };\n    }\n  } catch {\n    // Ignore path resolution errors\n  }\n  return null;\n}\n\n/**\n * Resolve CDS version information from a project's package.json via dependency graph\n * @param projectPath The path to the project\n * @param dependencyGraph The CDS dependency graph containing project information\n * @returns CDS version information or undefined if not available\n */\nfunction resolveCdsVersions(\n  projectPath: string,\n  dependencyGraph: CdsDependencyGraph,\n): CdsVersionInfo | undefined {\n  const project = dependencyGraph.projects.get(projectPath);\n  if (!project?.packageJson) {\n    return undefined;\n  }\n\n  const { dependencies = {}, devDependencies = {} } = project.packageJson;\n  const allDependencies = { ...dependencies, ...devDependencies };\n\n  const cdsVersion = allDependencies['@sap/cds'];\n  const cdsDkVersion = allDependencies['@sap/cds-dk'];\n\n  if (!cdsVersion && !cdsDkVersion) {\n    return undefined;\n  }\n\n  let preferredDkVersion: string | undefined;\n  if (cdsDkVersion) {\n    // Use explicit @sap/cds-dk version if available, but enforce minimum\n    preferredDkVersion = enforceMinimumCdsDkVersion(cdsDkVersion);\n  } else if (cdsVersion) {\n    // Derive compatible @sap/cds-dk version from @sap/cds version\n    preferredDkVersion = deriveCompatibleCdsDkVersion(cdsVersion);\n  }\n\n  return {\n    cdsVersion,\n    cdsDkVersion,\n    preferredDkVersion,\n  };\n}\n\n/**\n * Enforce minimum @sap/cds-dk version requirement\n * @param version The version string to check\n * @returns The version string with minimum version enforcement applied\n */\nfunction enforceMinimumCdsDkVersion(version: string): string {\n  const minimumVersion = 8;\n  const majorVersionMatch = version.match(/\\^?(\\d+)/);\n\n  if (majorVersionMatch) {\n    const majorVersion = parseInt(majorVersionMatch[1], 10);\n    
if (majorVersion < minimumVersion) {\n      // Use the minimum version if derived version is too low\n      return `^${minimumVersion}`;\n    }\n  }\n\n  // Return original version if it meets minimum requirement or can't be parsed\n  return version;\n}\n\n/**\n * Derive a compatible @sap/cds-dk version from an @sap/cds version\n * @param cdsVersion The @sap/cds version semver range\n * @returns A compatible @sap/cds-dk version range with minimum version enforcement\n */\nfunction deriveCompatibleCdsDkVersion(cdsVersion: string): string {\n  // For simplicity, we'll use the same major version range\n  // This can be enhanced with more sophisticated logic as needed\n  const majorVersionMatch = cdsVersion.match(/\\^?(\\d+)/);\n  let derivedVersion: string;\n\n  if (majorVersionMatch) {\n    const majorVersion = majorVersionMatch[1];\n    derivedVersion = `^${majorVersion}`;\n  } else {\n    // Fallback to the original version if we can't parse it\n    derivedVersion = cdsVersion;\n  }\n\n  // Apply minimum version enforcement\n  return enforceMinimumCdsDkVersion(derivedVersion);\n}\n\n/**\n * Create a version-aware CDS command based on project information\n * @param projectPath The path to the project\n * @param dependencyGraph The CDS dependency graph containing project information\n * @returns A ValidatedCdsCommand if version information is available, null otherwise\n */\nfunction createVersionAwareCdsCommand(\n  projectPath: string,\n  dependencyGraph: CdsDependencyGraph,\n): ValidatedCdsCommand | null {\n  const versionInfo = resolveCdsVersions(projectPath, dependencyGraph);\n\n  if (!versionInfo?.preferredDkVersion) {\n    return null;\n  }\n\n  return createCdsCommands.npxCdsDkWithVersion(versionInfo.preferredDkVersion);\n}\n\n/**\n * Determine the `cds` command to use based on the environment and cache directory.\n *\n * This function uses a caching strategy to minimize repeated CLI command testing:\n * - Initializes a global cache on first call\n * - Tests 
global commands once and caches results\n * - Discovers all available cache directories upfront\n * - Reuses test results across multiple calls\n * - Supports project-specific version-aware command generation\n */\nexport function determineCdsCommand(\n  cacheDir: string | undefined,\n  sourceRoot: string,\n  projectPath?: string,\n  dependencyGraph?: CdsDependencyGraph,\n): string {\n  try {\n    // Always use the efficient path - debug information is collected separately\n    return getBestCdsCommand(cacheDir, sourceRoot, projectPath, dependencyGraph);\n  } catch (error) {\n    const errorMessage = `Failed to determine CDS command: ${String(error)}`;\n    cdsExtractorLog('error', errorMessage);\n    throw new Error(errorMessage);\n  }\n}\n\n/**\n * Discover all available cache directories in the source tree\n * @param sourceRoot The source root directory\n * @returns Array of cache directory paths\n */\nfunction discoverAvailableCacheDirs(sourceRoot: string): string[] {\n  if (cdsCommandCache.availableCacheDirs.length > 0) {\n    return cdsCommandCache.availableCacheDirs;\n  }\n\n  const cacheRootDir = join(sourceRoot, '.cds-extractor-cache');\n  const availableDirs: string[] = [];\n\n  try {\n    if (existsSync(cacheRootDir)) {\n      const entries = readdirSync(cacheRootDir, { withFileTypes: true });\n      for (const entry of entries) {\n        if (entry.isDirectory() && entry.name.startsWith('cds-')) {\n          const cacheDir = join(cacheRootDir, entry.name);\n          const cdsBin = join(cacheDir, 'node_modules', '.bin', 'cds');\n          if (fileExists(cdsBin)) {\n            availableDirs.push(cacheDir);\n          }\n        }\n      }\n    }\n  } catch (error) {\n    cdsExtractorLog('debug', `Failed to discover cache directories: ${String(error)}`);\n  }\n\n  cdsCommandCache.availableCacheDirs = availableDirs;\n  return availableDirs;\n}\n\n/**\n * Get the best CDS command for a specific cache directory\n * @param cacheDir Optional specific cache 
directory\n * @param sourceRoot The source root directory\n * @param projectPath Optional project path for version-aware commands\n * @param dependencyGraph Optional dependency graph for version information\n * @returns The best CDS command to use\n */\nfunction getBestCdsCommand(\n  cacheDir: string | undefined,\n  sourceRoot: string,\n  projectPath?: string,\n  dependencyGraph?: CdsDependencyGraph,\n): string {\n  // Initialize cache if needed\n  initializeCdsCommandCache(sourceRoot);\n\n  // If a specific cache directory is provided and valid, prefer it\n  if (cacheDir) {\n    const localCdsBin = join(cacheDir, 'node_modules', '.bin', 'cds');\n    const command = createCdsCommandForPath(localCdsBin);\n    if (command) {\n      const result = testCdsCommand(command, sourceRoot, true);\n      if (result.works) {\n        return localCdsBin;\n      }\n    }\n  }\n\n  // Try any available cache directories\n  for (const availableCacheDir of cdsCommandCache.availableCacheDirs) {\n    const localCdsBin = join(availableCacheDir, 'node_modules', '.bin', 'cds');\n    const command = createCdsCommandForPath(localCdsBin);\n    if (command) {\n      const result = testCdsCommand(command, sourceRoot, true);\n      if (result.works) {\n        return localCdsBin;\n      }\n    }\n  }\n\n  // Try project-specific version-aware commands if information is available\n  if (projectPath && dependencyGraph) {\n    const versionAwareCommand = createVersionAwareCdsCommand(projectPath, dependencyGraph);\n    if (versionAwareCommand) {\n      const result = testCdsCommand(versionAwareCommand, sourceRoot, true);\n      if (result.works) {\n        return versionAwareCommand.originalCommand;\n      }\n    }\n  }\n\n  // Fall back to global command\n  if (cdsCommandCache.globalCommand) {\n    return cdsCommandCache.globalCommand;\n  }\n\n  // Final fallback: test remaining npx options\n  const fallbackCommands = [createCdsCommands.npxCds(), createCdsCommands.npxCdsDk()];\n\n  for (const 
command of fallbackCommands) {\n    const result = testCdsCommand(command, sourceRoot, true);\n    if (result.works) {\n      return command.originalCommand;\n    }\n  }\n\n  // Return the default fallback even if it doesn't work, as tests expect this behavior\n  return createCdsCommands.npxCdsDk().originalCommand;\n}\n\n/**\n * Initialize the CDS command cache by testing global commands\n * @param sourceRoot The source root directory\n */\nfunction initializeCdsCommandCache(sourceRoot: string): void {\n  if (cdsCommandCache.initialized) {\n    return;\n  }\n\n  cdsExtractorLog('info', 'Initializing CDS command cache...');\n\n  // Test global commands first (most commonly used)\n  const globalCommands = [createCdsCommands.cds(), createCdsCommands.npxCdsDk()];\n\n  for (const command of globalCommands) {\n    const result = testCdsCommand(command, sourceRoot, true); // Silent testing\n    if (result.works) {\n      cdsCommandCache.globalCommand = command.originalCommand;\n      cdsExtractorLog(\n        'info',\n        `Found working global CDS command: ${command.originalCommand} (v${result.version ?? 'unknown'})`,\n      );\n      break;\n    }\n  }\n\n  // Discover available cache directories\n  const cacheDirs = discoverAvailableCacheDirs(sourceRoot);\n  if (cacheDirs.length > 0) {\n    cdsExtractorLog(\n      'info',\n      `Discovered ${cacheDirs.length} CDS cache director${cacheDirs.length === 1 ? 
'y' : 'ies'}`,\n    );\n  }\n\n  cdsCommandCache.initialized = true;\n}\n\n/**\n * Reset the command cache - primarily for testing\n */\nexport function resetCdsCommandCache(): void {\n  cdsCommandCache.commandResults.clear();\n  cdsCommandCache.availableCacheDirs = [];\n  cdsCommandCache.globalCommand = undefined;\n  cdsCommandCache.initialized = false;\n}\n\n/**\n * Check if a CDS command is available and working.\n * @param validatedCommand The {@link ValidatedCdsCommand} instance for the command to test\n * @param sourceRoot The source root directory to use as cwd when testing the command\n * @param silent Whether to suppress logging of test failures\n * @returns Object with test result and version information\n */\nfunction testCdsCommand(\n  validatedCommand: ValidatedCdsCommand,\n  sourceRoot: string,\n  silent: boolean = false,\n): { works: boolean; version?: string; error?: string } {\n  const cacheKey = validatedCommand.originalCommand;\n\n  // Check cache first\n  const cachedResult = cdsCommandCache.commandResults.get(cacheKey);\n  if (cachedResult) {\n    return cachedResult;\n  }\n\n  try {\n    // Run the validated `cds` command with `--version` to test if it works.\n    const cleanEnv = {\n      ...process.env,\n      // Remove any CodeQL-specific environment variables that might interfere.\n      CODEQL_EXTRACTOR_CDS_WIP_DATABASE: undefined,\n      CODEQL_RUNNER: undefined,\n    };\n\n    const result = execFileSync(\n      validatedCommand.executable,\n      [...validatedCommand.args, '--version'],\n      {\n        encoding: 'utf8',\n        stdio: 'pipe',\n        timeout: DEFAULT_COMMAND_TIMEOUT_MS, // timeout after 10 seconds\n        cwd: sourceRoot,\n        env: cleanEnv,\n      },\n    ).toString();\n\n    // Extract version from output (typically in format \"@sap/cds-dk: 6.1.3\" or just \"6.1.3\")\n    const versionMatch = result.match(/(\\d+\\.\\d+\\.\\d+)/);\n    const version = versionMatch ? 
versionMatch[1] : undefined;\n\n    const testResult = { works: true, version };\n    cdsCommandCache.commandResults.set(cacheKey, testResult);\n    return testResult;\n  } catch (error) {\n    const errorMessage = String(error);\n    if (!silent) {\n      cdsExtractorLog('debug', `CDS command test failed for '${cacheKey}': ${errorMessage}`);\n    }\n\n    const testResult = { works: false, error: errorMessage };\n    cdsCommandCache.commandResults.set(cacheKey, testResult);\n    return testResult;\n  }\n}\n", "import {\n  existsSync,\n  readFileSync,\n  readdirSync,\n  renameSync,\n  statSync,\n  unlinkSync,\n  writeFileSync,\n} from 'fs';\nimport { format, join, parse } from 'path';\n\nimport { cdsExtractorMarkerFileContent, cdsExtractorMarkerFileName } from './constants';\nimport { cdsExtractorLog } from './logging';\n\n/**\n * Check if a directory exists\n * @param dirPath Path to the directory to check\n * @returns True if the directory exists, false otherwise\n */\nexport function dirExists(dirPath: string): boolean {\n  return existsSync(dirPath) && statSync(dirPath).isDirectory();\n}\n\n/**\n * Check if a file exists and can be read\n * @param filePath Path to the file to check\n * @returns True if the file exists and can be read, false otherwise\n */\nexport function fileExists(filePath: string): boolean {\n  return existsSync(filePath) && statSync(filePath).isFile();\n}\n\n/**\n * Recursively renames all .json files to .cds.json in the given directory and\n * its subdirectories, except for those that already have .cds.json extension.\n *\n * @param {string} dirPath - The directory path to start recursion from\n */\nexport function recursivelyRenameJsonFiles(dirPath: string): void {\n  // Make sure the directory exists\n  if (!dirExists(dirPath)) {\n    cdsExtractorLog('info', `Directory not found: ${dirPath}`);\n    return;\n  }\n  cdsExtractorLog('info', `Processing JSON files in directory: ${dirPath}`);\n\n  // Get all entries in the directory\n  const 
entries = readdirSync(dirPath, { withFileTypes: true });\n\n  for (const entry of entries) {\n    const fullPath = join(dirPath, entry.name);\n\n    if (entry.isDirectory()) {\n      // Recursively process subdirectories\n      recursivelyRenameJsonFiles(fullPath);\n    } else if (\n      entry.isFile() &&\n      entry.name.endsWith('.json') &&\n      !entry.name.endsWith('.cds.json')\n    ) {\n      // Rename .json files to .cds.json\n      const newPath = format({ ...parse(fullPath), base: '', ext: '.cds.json' });\n      renameSync(fullPath, newPath);\n      cdsExtractorLog('info', `Renamed CDS output file from ${fullPath} to ${newPath}`);\n    }\n  }\n}\n\n/**\n * Create the marker file with dummy content.\n * This file is required by the JavaScript extractor starting with CodeQL CLI v2.23.5.\n * @param sourceRoot The source root directory where the marker file should be created\n * @returns The path to the created marker file\n */\nexport function createMarkerFile(sourceRoot: string): string {\n  const markerFilePath = join(sourceRoot, cdsExtractorMarkerFileName);\n  try {\n    writeFileSync(markerFilePath, cdsExtractorMarkerFileContent, 'utf8');\n    cdsExtractorLog('info', `Created marker file: ${markerFilePath}`);\n  } catch (error) {\n    cdsExtractorLog('warn', `Failed to create marker file: ${String(error)}`);\n  }\n  return markerFilePath;\n}\n\n/**\n * Remove the cdsExtractorMarkerFileName file if it exists.\n * This cleanup prevents the marker file from being accidentally committed.\n * @param markerFilePath The path to the marker file to remove\n */\nexport function removeMarkerFile(markerFilePath: string): void {\n  if (existsSync(markerFilePath)) {\n    try {\n      unlinkSync(markerFilePath);\n      cdsExtractorLog('info', `Removed marker file: ${markerFilePath}`);\n    } catch (error) {\n      cdsExtractorLog('warn', `Failed to remove marker file: ${String(error)}`);\n    }\n  }\n}\n\n/**\n * Normalize all `$location.file` values in a parsed CDS 
JSON object to use\n * POSIX forward slashes. The CDS compiler on Windows produces backslash paths\n * (e.g. `\"srv\\\\service1.cds\"`), but the CodeQL libraries expect forward slashes\n * (e.g. `\"srv/service1.cds\"`).\n *\n * Mutates the object in place and returns it for convenience.\n */\nexport function normalizeCdsJsonLocations(data: Record): Record {\n  if (typeof data !== 'object' || data === null) return data;\n\n  // Normalize top-level $location.file\n  const topLoc = data['$location'] as Record | undefined;\n  if (topLoc?.file && typeof topLoc.file === 'string') {\n    topLoc.file = topLoc.file.split('\\\\').join('/');\n  }\n\n  // Normalize $location.file in all definitions (and their nested elements/params)\n  const definitions = data['definitions'] as Record> | undefined;\n  if (definitions) {\n    for (const defn of Object.values(definitions)) {\n      normalizeLocationsRecursive(defn);\n    }\n  }\n\n  return data;\n}\n\n/**\n * Recursively normalize `$location.file` values in a CDS JSON object node.\n */\nfunction normalizeLocationsRecursive(obj: unknown): void {\n  if (typeof obj !== 'object' || obj === null) return;\n\n  const record = obj as Record;\n  const loc = record['$location'] as Record | undefined;\n  if (loc?.file && typeof loc.file === 'string') {\n    loc.file = loc.file.split('\\\\').join('/');\n  }\n\n  // Recurse into known nested structures: elements, params, actions, functions\n  for (const key of ['elements', 'params', 'actions', 'functions', 'items', 'returns']) {\n    const nested = record[key];\n    if (typeof nested === 'object' && nested !== null && !Array.isArray(nested)) {\n      for (const child of Object.values(nested as Record)) {\n        normalizeLocationsRecursive(child);\n      }\n    }\n  }\n}\n\n/**\n * Read a `.cds.json` file, normalize all `$location.file` values to POSIX\n * forward slashes, and write it back. 
No-op if the file doesn't change.\n *\n * @param filePath Absolute path to the `.cds.json` file\n */\nexport function normalizeLocationPathsInFile(filePath: string): void {\n  if (!fileExists(filePath)) return;\n\n  const raw = readFileSync(filePath, 'utf8');\n  const data = JSON.parse(raw) as Record;\n  normalizeCdsJsonLocations(data);\n  const normalized = JSON.stringify(data, null, 2) + '\\n';\n\n  if (normalized !== raw) {\n    writeFileSync(filePath, normalized, 'utf8');\n    cdsExtractorLog('info', `Normalized $location paths in: ${filePath}`);\n  }\n}\n", "/** Common constants used throughout the CDS extractor. */\n\n/**\n * Common, expected name of the JSON file created by CDS compilation\n * tasks performed by, or on behalf of, the CDS extractor.\n */\nexport const modelCdsJsonFile = 'model.cds.json';\n\n/**\n * Common, expected name of the marker file created to meet the JavaScript\n * extractor's requirement for at least one .js file to be present under\n * the source root. This file is auto-created in the source root directory\n * prior to invoking the JavaScript extractor, and removed afterwards, to\n * enable extraction of the .cds.json files by the JavaScript extractor.\n */\nexport const cdsExtractorMarkerFileName = 'cds-extractor-marker.js';\n\n/**\n * Expected content of the {@link cdsExtractorMarkerFileName} file.\n */\nexport const cdsExtractorMarkerFileContent =\n  '\"Placeholder content created by the CDS extractor. 
This file can be safely deleted.\";';\n", "import type { LogLevel } from './types';\n\n/**\n * Source root directory for logging context.\n */\nlet sourceRootDirectory: string | undefined;\n\n/**\n * Unique session ID for this CDS extractor run to help distinguish\n * between multiple concurrent or sequential runs in logs.\n * Uses the extractor start timestamp for uniqueness.\n */\nconst sessionId = Date.now().toString();\n\n/**\n * Start time of the CDS extractor session for performance tracking.\n */\nconst extractorStartTime = Date.now();\n\n/**\n * Performance tracking state for timing critical operations.\n */\nconst performanceTracking = new Map();\n\n/**\n * Unified logging function for the CDS extractor. Provides consistent\n * log formatting with level prefixes, elapsed time, and session IDs.\n *\n * @param level - The log level ('debug', 'info', 'warn', 'error')\n * @param message - The primary message or data to log\n * @param optionalParams - Additional parameters to log (same as console.log)\n */\nexport function cdsExtractorLog(\n  level: LogLevel,\n  message: unknown,\n  ...optionalParams: unknown[]\n): void {\n  if (!sourceRootDirectory) {\n    throw new Error('Source root directory is not set. 
Call setSourceRootDirectory() first.');\n  }\n\n  const currentTime = Date.now();\n  const elapsedMs = currentTime - extractorStartTime;\n  const levelPrefix = `[CDS-${sessionId} ${elapsedMs}] ${level.toUpperCase()}: `;\n\n  // Select the appropriate console function based on log level\n  switch (level) {\n    case 'debug':\n    case 'info':\n      if (typeof message === 'string') {\n        console.log(levelPrefix + message, ...optionalParams);\n      } else {\n        console.log(levelPrefix, message, ...optionalParams);\n      }\n      break;\n    case 'warn':\n      if (typeof message === 'string') {\n        console.warn(levelPrefix + message, ...optionalParams);\n      } else {\n        console.warn(levelPrefix, message, ...optionalParams);\n      }\n      break;\n    case 'error':\n      if (typeof message === 'string') {\n        console.error(levelPrefix + message, ...optionalParams);\n      } else {\n        console.error(levelPrefix, message, ...optionalParams);\n      }\n      break;\n    default:\n      // This should never happen due to TypeScript typing\n      throw new Error(`Invalid log level: ${String(level)}`);\n  }\n}\n/**\n * Calculates elapsed time from start and formats it with appropriate units.\n *\n * @param startTime - The start timestamp in milliseconds\n * @param endTime - The end timestamp in milliseconds (defaults to current time)\n * @returns Formatted duration string\n */\nfunction formatDuration(startTime: number, endTime: number = Date.now()): string {\n  const durationMs = endTime - startTime;\n\n  if (durationMs < 1000) {\n    return `${durationMs}ms`;\n  } else if (durationMs < 60000) {\n    return `${(durationMs / 1000).toFixed(2)}s`;\n  } else {\n    const minutes = Math.floor(durationMs / 60000);\n    const seconds = ((durationMs % 60000) / 1000).toFixed(2);\n    return `${minutes}m ${seconds}s`;\n  }\n}\n\n/**\n * Logs the start of the CDS extractor session with session information.\n *\n * @param sourceRoot - The source 
root directory being processed\n */\nexport function logExtractorStart(sourceRoot: string): void {\n  cdsExtractorLog('info', `=== CDS EXTRACTOR START [${sessionId}] ===`);\n  cdsExtractorLog('info', `Source Root: ${sourceRoot}`);\n}\n\n/**\n * Logs the end of the CDS extractor session with final performance summary.\n *\n * @param success - Whether the extraction completed successfully\n * @param additionalSummary - Optional additional summary information\n */\nexport function logExtractorStop(success: boolean = true, additionalSummary?: string): void {\n  const endTime = Date.now();\n  const totalDuration = formatDuration(extractorStartTime, endTime);\n  const status = success ? 'SUCCESS' : 'FAILURE';\n\n  if (additionalSummary) {\n    cdsExtractorLog('info', additionalSummary);\n  }\n\n  cdsExtractorLog('info', `=== CDS EXTRACTOR END [${sessionId}] - ${status} ===`);\n  cdsExtractorLog('info', `Total Duration: ${totalDuration}`);\n}\n\n/**\n * Logs a performance milestone with timing information.\n *\n * @param milestone - Description of the milestone reached\n * @param additionalInfo - Optional additional information to include\n */\nexport function logPerformanceMilestone(milestone: string, additionalInfo?: string): void {\n  const currentTime = Date.now();\n  const overallDuration = formatDuration(extractorStartTime, currentTime);\n  const info = additionalInfo ? 
` - ${additionalInfo}` : '';\n  cdsExtractorLog('info', `MILESTONE: ${milestone} (after ${overallDuration})${info}`);\n}\n\n/**\n * Starts tracking performance for a named operation.\n *\n * @param operationName - Name of the operation to track\n */\nexport function logPerformanceTrackingStart(operationName: string): void {\n  performanceTracking.set(operationName, Date.now());\n  cdsExtractorLog('debug', `Started: ${operationName}`);\n}\n\n/**\n * Ends tracking performance for a named operation and logs the duration.\n *\n * @param operationName - Name of the operation to stop tracking\n */\nexport function logPerformanceTrackingStop(operationName: string): void {\n  const startTime = performanceTracking.get(operationName);\n  if (startTime) {\n    const duration = formatDuration(startTime);\n    performanceTracking.delete(operationName);\n    cdsExtractorLog('info', `Completed: ${operationName} (took ${duration})`);\n  } else {\n    cdsExtractorLog('warn', `No start time found for operation: ${operationName}`);\n  }\n}\n\n/**\n * Sets the source root directory for logging context.\n * This should typically be called once at the start of the CDS extractor.\n *\n * @param sourceRoot - The absolute path to the source root directory\n */\nexport function setSourceRootDirectory(sourceRoot: string): void {\n  sourceRootDirectory = sourceRoot;\n}\n", "import type { CdsDependencyGraph } from '../cds/parser';\n\n/**\n * Generate a comprehensive status report for the dependency graph\n * Supports both normal execution and debug modes\n */\nexport function generateStatusReport(dependencyGraph: CdsDependencyGraph): string {\n  const summary = dependencyGraph.statusSummary;\n  const lines: string[] = [];\n\n  lines.push('='.repeat(80));\n  lines.push(`CDS EXTRACTOR STATUS REPORT`);\n  lines.push('='.repeat(80));\n  lines.push('');\n\n  // OVERALL SUMMARY\n  lines.push('OVERALL SUMMARY:');\n  lines.push(`  Status: ${summary.overallSuccess ? 
'SUCCESS' : 'FAILED'}`);\n  lines.push(`  Current Phase: ${dependencyGraph.currentPhase.toUpperCase()}`);\n  lines.push(`  Projects: ${summary.totalProjects}`);\n  lines.push(`  CDS Files: ${summary.totalCdsFiles}`);\n  lines.push(`  JSON Files Generated: ${summary.jsonFilesGenerated}`);\n  lines.push('');\n\n  // COMPILATION SUMMARY\n  lines.push('COMPILATION SUMMARY:');\n  lines.push(`  Total Tasks: ${summary.totalCompilationTasks}`);\n  lines.push(`  Successful: ${summary.successfulCompilations}`);\n  lines.push(`  Retried: ${dependencyGraph.retryStatus.totalRetryAttempts}`);\n  lines.push(`  Failed: ${summary.failedCompilations}`);\n  lines.push(`  Skipped: ${summary.skippedCompilations}`);\n  lines.push('');\n\n  // RETRY SUMMARY (if retry attempts were made)\n  if (dependencyGraph.retryStatus.totalRetryAttempts > 0) {\n    lines.push('RETRY SUMMARY:');\n    lines.push(`  Tasks Requiring Retry: ${dependencyGraph.retryStatus.totalTasksRequiringRetry}`);\n    lines.push(\n      `  Tasks Successfully Retried: ${dependencyGraph.retryStatus.totalTasksSuccessfullyRetried}`,\n    );\n    lines.push(`  Total Retry Attempts: ${dependencyGraph.retryStatus.totalRetryAttempts}`);\n    lines.push(\n      `  Projects Requiring Full Dependencies: ${dependencyGraph.retryStatus.projectsRequiringFullDependencies.size}`,\n    );\n    lines.push(\n      `  Projects with Full Dependencies: ${dependencyGraph.retryStatus.projectsWithFullDependencies.size}`,\n    );\n    lines.push('');\n  }\n\n  // PERFORMANCE metrics\n  lines.push('PERFORMANCE:');\n  lines.push(`  Total Duration: ${summary.performance.totalDurationMs}ms`);\n  lines.push(`  Parsing: ${summary.performance.parsingDurationMs}ms`);\n  lines.push(`  Compilation: ${summary.performance.compilationDurationMs}ms`);\n  lines.push(`  Extraction: ${summary.performance.extractionDurationMs}ms`);\n\n  // Add percentage breakdown if total duration > 0\n  if (summary.performance.totalDurationMs > 0) {\n    const parsingPct = 
Math.round(\n      (summary.performance.parsingDurationMs / summary.performance.totalDurationMs) * 100,\n    );\n    const compilationPct = Math.round(\n      (summary.performance.compilationDurationMs / summary.performance.totalDurationMs) * 100,\n    );\n    const extractionPct = Math.round(\n      (summary.performance.extractionDurationMs / summary.performance.totalDurationMs) * 100,\n    );\n\n    lines.push('  Breakdown:');\n    lines.push(`    Parsing: ${parsingPct}%`);\n    lines.push(`    Compilation: ${compilationPct}%`);\n    lines.push(`    Extraction: ${extractionPct}%`);\n  }\n  lines.push('');\n\n  // Errors and warnings\n  if (summary.criticalErrors.length > 0) {\n    lines.push('CRITICAL ERRORS:');\n    for (const error of summary.criticalErrors) {\n      lines.push(`  - ${error}`);\n    }\n    lines.push('');\n  }\n\n  if (summary.warnings.length > 0) {\n    lines.push('WARNINGS:');\n    for (const warning of summary.warnings) {\n      lines.push(`  - ${warning}`);\n    }\n    lines.push('');\n  }\n\n  lines.push('='.repeat(80));\n\n  return lines.join('\\n');\n}\n", "import { spawnSync, SpawnSyncOptions } from 'child_process';\nimport { basename, delimiter, dirname, join, relative, resolve, sep } from 'path';\n\nimport { CdsCompilationResult } from './types';\nimport { getCdsVersion } from './version';\nimport { modelCdsJsonFile } from '../../constants';\nimport {\n  fileExists,\n  dirExists,\n  recursivelyRenameJsonFiles,\n  normalizeLocationPathsInFile,\n} from '../../filesystem';\nimport { cdsExtractorLog } from '../../logging';\nimport { BasicCdsProject } from '../parser/types';\n\n/**\n * Parses a command string for use with spawnSync, handling multi-word commands like 'npx cds'.\n * @param commandString The command string to parse (e.g., 'npx cds' or 'cds')\n * @returns Object with executable and args arrays for spawnSync\n */\nfunction parseCommandForSpawn(commandString: string): { executable: string; baseArgs: string[] } {\n  const parts = 
commandString.trim().split(/\\s+/);\n  const executable = parts[0];\n  const baseArgs = parts.slice(1);\n  return { executable, baseArgs };\n}\n\n/**\n * Determines compilation targets for a CDS project according to the new project-only compilation approach.\n * @param project The CDS project\n * @param sourceRoot The source root directory\n * @returns Array of compilation targets (directories or files relative to project base)\n */\nfunction determineCompilationTargets(project: BasicCdsProject, sourceRoot: string): string[] {\n  const projectAbsolutePath = join(sourceRoot, project.projectDir);\n\n  // Check for index.cds in the project root first, which takes precedence over CAP directories.\n  const rootCdsFiles = project.cdsFiles\n    .filter(file => dirname(join(sourceRoot, file)) === projectAbsolutePath)\n    .map(file => basename(file));\n\n  if (rootCdsFiles.includes('index.cds')) {\n    // Use only index.cds when it exists in the project root\n    return ['index.cds'];\n  }\n\n  // Check for standard CAP directories\n  const capDirectories = ['db', 'srv', 'app'];\n  const existingCapDirs = capDirectories.filter(dir => dirExists(join(projectAbsolutePath, dir)));\n\n  if (existingCapDirs.length > 0) {\n    // Use standard CAP directories\n    return existingCapDirs;\n  }\n\n  if (rootCdsFiles.length > 0) {\n    // Use other root-level files\n    return rootCdsFiles;\n  }\n\n  // Use all CDS files with their relative paths\n  return project.cdsFiles.map(file => relative(projectAbsolutePath, join(sourceRoot, file)));\n}\n\n/**\n * Compiles a CDS project to JSON using project-level compilation only.\n * This function has been simplified to only use project-level compilation,\n * eliminating all individual file compilation logic and standardizing output\n * to a single {@link modelCdsJsonFile} file per project.\n *\n *\n * @param cdsFilePath The path to the CDS file to compile, relative to the `sourceRoot`.\n * @param sourceRoot The source root directory scanned 
by the CDS extractor.\n * CRITICAL: All spawned processes will use the project base directory as their `cwd` to\n * ensure that paths in generated JSON are relative to the project base directory.\n *\n * @param cdsCommand The actual shell command to use for `cds compile`.\n * @param cacheDir Full path to the cache directory where dependencies are stored.\n * @param projectMap Map of project directories to {@link BasicCdsProject} instances.\n * @param projectDir The project directory to which `cdsFilePath` belongs.\n *\n * @returns The {@link CdsCompilationResult} of the compilation attempt.\n */\nexport function compileCdsToJson(\n  cdsFilePath: string,\n  sourceRoot: string,\n  cdsCommand: string,\n  cacheDir: string | undefined,\n  projectMap: Map,\n  projectDir: string,\n): CdsCompilationResult {\n  try {\n    const resolvedCdsFilePath = resolve(cdsFilePath);\n    if (!fileExists(resolvedCdsFilePath)) {\n      throw new Error(`Expected CDS file '${resolvedCdsFilePath}' does not exist.`);\n    }\n\n    // Get and log the CDS version\n    const cdsVersion = getCdsVersion(cdsCommand, cacheDir);\n    const versionInfo = cdsVersion ? `with CDS v${cdsVersion}` : '';\n\n    // Calculate project base directory for consistent working directory\n    const projectBaseDir = join(sourceRoot, projectDir);\n\n    // Create spawn options with project base directory as cwd.\n    const spawnOptions = createSpawnOptions(projectBaseDir, cdsCommand, cacheDir);\n\n    // Throw an error if projectDir cannot be found in the projectMap.\n    if (!projectMap || !projectDir || !projectMap.has(projectDir)) {\n      throw new Error(\n        `Project directory '${projectDir}' not found in projectMap. 
Ensure the project is properly initialized.`,\n      );\n    }\n\n    const project = projectMap.get(projectDir);\n\n    // Always use project-level compilation\n    return compileProject(sourceRoot, projectDir, cdsCommand, spawnOptions, versionInfo, project!);\n  } catch (error) {\n    return { success: false, message: String(error) };\n  }\n}\n\n/**\n * Handles project-level compilation for CAP projects.\n * CRITICAL: Uses the project base directory as cwd and calculates paths relative to project base directory.\n *\n * @param sourceRoot The source root directory\n * @param projectDir The project directory (relative to sourceRoot)\n * @param cdsCommand The CDS command to use\n * @param spawnOptions Pre-configured spawn options with project base directory as cwd\n * @param versionInfo Version information for logging\n * @param project The CDS project instance\n * @returns Compilation result\n */\nfunction compileProject(\n  sourceRoot: string,\n  projectDir: string,\n  cdsCommand: string,\n  spawnOptions: SpawnSyncOptions,\n  versionInfo: string,\n  project: BasicCdsProject,\n): CdsCompilationResult {\n  cdsExtractorLog('info', `Compiling CDS project '${projectDir}' using ${versionInfo}...`);\n\n  // Determine compilation targets using the new centralized logic\n  const compilationTargets = determineCompilationTargets(project, sourceRoot);\n\n  if (compilationTargets.length === 0) {\n    throw new Error(\n      `Project directory '${projectDir}' does not contain any CDS files and cannot be compiled`,\n    );\n  }\n\n  const projectJsonOutPath = join(sourceRoot, projectDir, modelCdsJsonFile);\n\n  const compileArgs = [\n    'compile',\n    ...compilationTargets,\n    '--to',\n    'json',\n    '--dest',\n    modelCdsJsonFile,\n    '--locations',\n    '--log-level',\n    'warn',\n  ];\n\n  cdsExtractorLog('info', `Compiling CDS project targets: ${compilationTargets.join(', ')}`);\n  cdsExtractorLog(\n    'info',\n    `Running compilation task for CDS project 
'${projectDir}': command='${cdsCommand}' args='${JSON.stringify(compileArgs)}'`,\n  );\n\n  // Parse command for proper spawnSync execution\n  const { executable, baseArgs } = parseCommandForSpawn(cdsCommand);\n  const allArgs = [...baseArgs, ...compileArgs];\n\n  const result = spawnSync(executable, allArgs, spawnOptions);\n\n  if (result.error) {\n    cdsExtractorLog('error', `SpawnSync error: ${result.error.message}`);\n    throw new Error(`Error executing CDS compiler: ${result.error.message}`);\n  }\n\n  // Log stderr for debugging even on success (CDS often writes warnings to stderr).\n  if (result.stderr && result.stderr.length > 0) {\n    cdsExtractorLog('warn', `CDS stderr output: ${result.stderr.toString()}`);\n  }\n\n  if (result.status !== 0) {\n    cdsExtractorLog('error', `CDS command failed with status ${result.status}`);\n    cdsExtractorLog(\n      'error',\n      `Command: ${cdsCommand} ${compileArgs.map(arg => (arg.includes(' ') ? `\"${arg}\"` : arg)).join(' ')}`,\n    );\n    cdsExtractorLog('error', `Stdout: ${result.stdout?.toString() || 'No stdout'}`);\n    cdsExtractorLog('error', `Stderr: ${result.stderr?.toString() || 'No stderr'}`);\n    throw new Error(\n      `Could not compile the CAP project ${projectDir}.\\nReported error(s):\\n\\`\\`\\`\\n${\n        result.stderr?.toString() || 'Unknown error'\n      }\\n\\`\\`\\``,\n    );\n  }\n\n  if (!fileExists(projectJsonOutPath) && !dirExists(projectJsonOutPath)) {\n    throw new Error(\n      `CAP project '${projectDir}' was not compiled to JSON. 
This is likely because the project structure is invalid.`,\n    );\n  }\n\n  // Handle directory output if the CDS compiler generated a directory.\n  if (dirExists(projectJsonOutPath)) {\n    cdsExtractorLog(\n      'info',\n      `CDS compiler generated JSON to output directory: ${projectJsonOutPath}`,\n    );\n    // Recursively rename generated .json files to have a .cds.json extension\n    recursivelyRenameJsonFiles(projectJsonOutPath);\n  } else {\n    cdsExtractorLog('info', `CDS compiler generated JSON to file: ${projectJsonOutPath}`);\n  }\n\n  // Normalize $location.file paths to POSIX forward slashes.\n  // The CDS compiler on Windows produces backslash paths (e.g. \"srv\\\\service1.cds\")\n  // but CodeQL libraries expect forward slashes (e.g. \"srv/service1.cds\").\n  normalizeLocationPathsInFile(projectJsonOutPath);\n\n  return {\n    success: true,\n    outputPath: projectJsonOutPath,\n    compiledAsProject: true,\n    message: 'Project was compiled using project-aware compilation',\n  };\n}\n\n/**\n * Creates spawn options for CDS compilation processes.\n * CRITICAL: Always sets cwd to project base directory to ensure generated JSON paths are relative to project base directory.\n *\n * @param projectBaseDir The project base directory (where package.json is located) - used as cwd for all spawned processes\n * @param cdsCommand The CDS command to determine if we need Node.js environment setup\n * @param cacheDir Optional cache directory for dependencies\n * @returns Spawn options configured for CDS compilation\n */\nfunction createSpawnOptions(\n  projectBaseDir: string,\n  cdsCommand: string,\n  cacheDir?: string,\n): SpawnSyncOptions {\n  const spawnOptions: SpawnSyncOptions = {\n    cwd: projectBaseDir, // CRITICAL: Always use project base directory as cwd to ensure correct path generation\n    shell: false, // Use shell=false to ensure proper argument handling for paths with spaces\n    stdio: 'pipe',\n    env: { ...process.env },\n  };\n\n  // 
Check if we're using a direct binary path (contains node_modules/.bin/ or node_modules\\.bin\\) or npx-style command\n  // Check both platform-native separator and forward slash for cross-platform compatibility\n  const binPathNative = `node_modules${sep}.bin${sep}`;\n  const binPathPosix = 'node_modules/.bin/';\n  const isDirectBinary = cdsCommand.includes(binPathNative) || cdsCommand.includes(binPathPosix);\n\n  // Only set up Node.js environment for npx-style commands, not for direct binary execution\n  if (cacheDir && !isDirectBinary) {\n    const nodePath = join(cacheDir, 'node_modules');\n\n    // Set up environment to use the cached dependencies\n    spawnOptions.env = {\n      ...process.env,\n      NODE_PATH: `${nodePath}${delimiter}${process.env.NODE_PATH ?? ''}`,\n      PATH: `${join(nodePath, '.bin')}${delimiter}${process.env.PATH}`,\n      // Add NPM configuration to ensure dependencies are resolved from the cache directory\n      npm_config_prefix: cacheDir,\n      // Ensure we don't pick up global CDS installations that might conflict\n      npm_config_global: 'false',\n      // Clear any existing CDS environment variables that might interfere\n      CDS_HOME: cacheDir,\n    };\n  } else if (isDirectBinary) {\n    // For direct binary execution, use minimal environment to avoid conflicts\n    // Remove Node.js-specific environment variables that might interfere\n    const cleanEnv = { ...process.env };\n    delete cleanEnv.NODE_PATH;\n    delete cleanEnv.npm_config_prefix;\n    delete cleanEnv.npm_config_global;\n    delete cleanEnv.CDS_HOME;\n\n    spawnOptions.env = cleanEnv;\n  }\n\n  return spawnOptions;\n}\n", "import { spawnSync, SpawnSyncOptions } from 'child_process';\nimport { join, delimiter } from 'path';\n\n/**\n * Get the CDS compiler version from a specific command or cache directory.\n * @param cdsCommand The CDS command to use.\n * @param cacheDir Optional path to a directory containing installed dependencies.\n * @returns The CDS 
compiler version string, or undefined if it couldn't be determined.\n */\nexport function getCdsVersion(cdsCommand: string, cacheDir?: string): string | undefined {\n  try {\n    // Set up environment vars if using a cache directory\n    const spawnOptions: SpawnSyncOptions = {\n      shell: true,\n      stdio: 'pipe',\n      env: { ...process.env },\n    };\n\n    // If a cache directory is provided, set NODE_PATH to use that cache\n    if (cacheDir) {\n      const nodePath = join(cacheDir, 'node_modules');\n\n      // Set up environment to use the cached dependencies\n      spawnOptions.env = {\n        ...process.env,\n        NODE_PATH: `${nodePath}${delimiter}${process.env.NODE_PATH ?? ''}`,\n        PATH: `${join(nodePath, '.bin')}${delimiter}${process.env.PATH}`,\n        npm_config_prefix: cacheDir,\n      };\n    }\n\n    // Execute the CDS command with the --version flag\n    // When shell: true is used, concatenate command and args to avoid DEP0190 deprecation warning\n    const result = spawnSync(`${cdsCommand} --version`, spawnOptions);\n    if (result.status === 0 && result.stdout) {\n      const versionOutput = result.stdout.toString().trim();\n      // Extract version number, which is typically in formats like \"@sap/cds: 6.1.3\" or similar\n      const match = versionOutput.match(/@sap\\/cds[^0-9]*([0-9]+\\.[0-9]+\\.[0-9]+)/);\n      if (match?.[1]) {\n        return match[1]; // Return just the version number\n      }\n      return versionOutput; // Return full output if we couldn't parse it\n    }\n    return undefined;\n  } catch {\n    return undefined;\n  }\n}\n", "/** Validation utilities for CDS compilation output files. 
*/\n\nimport { readFileSync } from 'fs';\nimport { isAbsolute, join } from 'path';\n\nimport type {\n  CompilationTask,\n  ResultDependencyStatusUpdate,\n  ResultOutputFileValidation,\n  ResultTaskValidation,\n} from './types';\nimport { fileExists } from '../../filesystem';\nimport { cdsExtractorLog } from '../../logging';\nimport type { CdsDependencyGraph } from '../parser/types';\n\n/**\n * Identifies tasks requiring retry based on output validation\n * @param dependencyGraph The dependency graph containing tasks to validate\n * @returns Map of project directory to failed tasks that need retry\n */\nexport function identifyTasksRequiringRetry(\n  dependencyGraph: CdsDependencyGraph,\n): Map {\n  const tasksRequiringRetry = new Map();\n\n  for (const [projectDir, project] of dependencyGraph.projects.entries()) {\n    const failedTasks: CompilationTask[] = [];\n\n    for (const task of project.compilationTasks) {\n      // Skip tasks that have already been retried.\n      if (task.retryInfo?.hasBeenRetried) {\n        continue;\n      }\n\n      // Always validate output files exist, regardless of task status.\n      const validationResult = validateTaskOutputs(task, dependencyGraph.sourceRootDir);\n\n      if (!validationResult.isValid) {\n        failedTasks.push(task);\n        cdsExtractorLog(\n          'info',\n          `Task ${task.id} requires retry: ${validationResult.validFileCount}/${validationResult.expectedFileCount} output files valid (status: ${task.status})`,\n        );\n\n        // Update task status to reflect actual file state.\n        if (task.status === 'success') {\n          cdsExtractorLog(\n            'warn',\n            `Task ${task.id} was marked as successful but output files are missing or invalid - updating status to failed`,\n          );\n          task.status = 'failed';\n        }\n      }\n    }\n\n    if (failedTasks.length > 0) {\n      tasksRequiringRetry.set(projectDir, failedTasks);\n    }\n  }\n\n  if 
(tasksRequiringRetry.size > 0) {\n    const totalFailedTasks = Array.from(tasksRequiringRetry.values()).reduce(\n      (sum, tasks) => sum + tasks.length,\n      0,\n    );\n    cdsExtractorLog(\n      'info',\n      `Identified ${totalFailedTasks} task(s) requiring retry across ${tasksRequiringRetry.size} project(s)`,\n    );\n  }\n\n  return tasksRequiringRetry;\n}\n\n/**\n * Updates the dependency graph with current task status based on filesystem validation.\n * This is the single source of truth for compilation task status across all phases.\n */\nexport function updateCdsDependencyGraphStatus(\n  dependencyGraph: CdsDependencyGraph,\n  sourceRootDir: string,\n): ResultDependencyStatusUpdate {\n  let successfulTasks = 0;\n  let failedTasks = 0;\n  let tasksSuccessfullyRetried = 0;\n\n  // Validate all tasks using filesystem checks\n  for (const project of dependencyGraph.projects.values()) {\n    for (const task of project.compilationTasks) {\n      const validationResult = validateTaskOutputs(task, sourceRootDir);\n      const isValid = validationResult.isValid;\n\n      if (isValid) {\n        task.status = 'success';\n        successfulTasks++;\n\n        // If task has retry info and is now successful, count as successfully retried\n        if (task.retryInfo?.hasBeenRetried) {\n          tasksSuccessfullyRetried++;\n        }\n      } else {\n        task.status = 'failed';\n        failedTasks++;\n      }\n    }\n  }\n\n  // Update dependency graph counters\n  dependencyGraph.statusSummary.successfulCompilations = successfulTasks;\n  dependencyGraph.statusSummary.failedCompilations = failedTasks;\n\n  // Update retry status tracking\n  dependencyGraph.retryStatus.totalTasksSuccessfullyRetried = tasksSuccessfullyRetried;\n  dependencyGraph.retryStatus.totalTasksRequiringRetry = failedTasks;\n\n  return {\n    tasksValidated: successfulTasks + failedTasks,\n    successfulTasks,\n    failedTasks,\n    tasksSuccessfullyRetried,\n  };\n}\n/**\n * Validates a 
single expected output file.\n * @param filePath Path to the output file to validate\n * @returns Validation result with details\n */\nexport function validateOutputFile(filePath: string): ResultOutputFileValidation {\n  const result: ResultOutputFileValidation = {\n    isValid: false,\n    filePath,\n    exists: false,\n  };\n\n  // Check if file exists\n  if (!fileExists(filePath)) {\n    result.error = 'File does not exist';\n    return result;\n  }\n\n  result.exists = true;\n\n  // For .cds.json files, validate JSON content\n  if (filePath.endsWith('.cds.json') || filePath.endsWith('.json')) {\n    try {\n      const content = readFileSync(filePath, 'utf8');\n\n      // Check if content is empty\n      if (!content.trim()) {\n        result.error = 'File is empty';\n        return result;\n      }\n\n      // Try to parse as JSON\n      const parsed: unknown = JSON.parse(content);\n\n      // Basic structure validation for CDS JSON files\n      if (typeof parsed !== 'object' || parsed === null) {\n        result.error = 'File does not contain a valid JSON object';\n        return result;\n      }\n\n      result.hasValidJson = true;\n      result.isValid = true;\n    } catch (error) {\n      result.error = `Invalid JSON content: ${String(error)}`;\n      return result;\n    }\n  } else {\n    // For non-JSON files, existence is sufficient\n    result.isValid = true;\n  }\n\n  return result;\n}\n\n/**\n * Validates that all expected output files exist for a compilation task.\n * @param task The compilation task to validate\n * @param sourceRoot Source root directory for resolving relative paths\n * @returns Task-level validation result\n */\nexport function validateTaskOutputs(\n  task: CompilationTask,\n  sourceRoot: string,\n): ResultTaskValidation {\n  const fileResults: ResultOutputFileValidation[] = [];\n\n  // Resolve the output file path relative to source root\n  const expectedOutput = task.expectedOutputFile;\n  const absolutePath = 
isAbsolute(expectedOutput)\n    ? expectedOutput\n    : join(sourceRoot, expectedOutput);\n\n  const fileResult = validateOutputFile(absolutePath);\n  fileResults.push(fileResult);\n\n  const validFileCount = fileResults.filter(r => r.isValid).length;\n  const expectedFileCount = 1;\n  const isValid = validFileCount === expectedFileCount && expectedFileCount > 0;\n\n  return {\n    isValid,\n    task,\n    fileResults,\n    validFileCount,\n    expectedFileCount,\n  };\n}\n", "import { execFileSync } from 'child_process';\nimport { isAbsolute, relative, resolve } from 'path';\n\nimport { cdsExtractorLog } from './logging';\n\n/**\n * Severity levels for diagnostics\n */\nexport enum DiagnosticSeverity {\n  Error = 'error',\n  Warning = 'warning',\n  Note = 'note',\n  Recommendation = 'recommendation',\n}\n\n/**\n * Converts a file path to be relative to the source root if possible\n * @param filePath The file path to convert\n * @param sourceRoot The source root directory to make the path relative to\n * @returns The relative path if the file is under source root, otherwise '.' (source root)\n */\nexport function convertToRelativePath(filePath: string, sourceRoot: string): string {\n  // Handle invalid inputs\n  if (!filePath || typeof filePath !== 'string' || !sourceRoot || typeof sourceRoot !== 'string') {\n    return '.';\n  }\n\n  try {\n    const resolvedSourceRoot = resolve(sourceRoot);\n\n    // If filePath is absolute, resolve it directly; otherwise resolve relative to sourceRoot\n    // Use path.isAbsolute() for cross-platform compatibility (Unix and Windows paths)\n    const resolvedFilePath = isAbsolute(filePath)\n      ? resolve(filePath)\n      : resolve(resolvedSourceRoot, filePath);\n\n    // If the file path is the same as source root, return '.'\n    if (resolvedFilePath === resolvedSourceRoot) {\n      return '.';\n    }\n\n    const relativePath = relative(resolvedSourceRoot, resolvedFilePath);\n\n    // If the relative path starts with '..' 
it means the file is outside the source root\n    // Per CodeQL requirements, we should point to the source root '.' instead\n    if (relativePath.startsWith('..')) {\n      return '.';\n    }\n\n    return relativePath;\n  } catch {\n    // If path resolution fails for any reason, fallback to source root\n    return '.';\n  }\n}\n\n/**\n * Base function to add a diagnostic to the CodeQL database\n * @param filePath Path to the file related to the diagnostic\n * @param message The diagnostic message\n * @param codeqlExePath Path to the CodeQL executable\n * @param sourceId The source ID for the diagnostic\n * @param sourceName The source name for the diagnostic\n * @param severity The severity level of the diagnostic\n * @param logPrefix Prefix for the log message\n * @param sourceRoot Optional source root directory to make file paths relative to\n * @returns True if the diagnostic was added, false otherwise\n */\nfunction addDiagnostic(\n  filePath: string,\n  message: string,\n  codeqlExePath: string,\n  sourceId: string,\n  sourceName: string,\n  severity: DiagnosticSeverity,\n  logPrefix: string,\n  sourceRoot?: string,\n): boolean {\n  const finalFilePath = sourceRoot\n    ? convertToRelativePath(filePath, sourceRoot)\n    : resolve(filePath);\n\n  // If the file was remapped to source root due to being outside the repository,\n  // append an explanatory note to the message\n  let finalMessage = message;\n  if (sourceRoot && finalFilePath === '.' && filePath !== sourceRoot) {\n    const resolvedSourceRoot = resolve(sourceRoot);\n    // Use path.isAbsolute() for cross-platform compatibility (Unix and Windows paths)\n    const resolvedFilePath = isAbsolute(filePath)\n      ? 
resolve(filePath)\n      : resolve(resolvedSourceRoot, filePath);\n\n    // Only add the note if the file was actually outside the source root\n    if (resolvedFilePath !== resolvedSourceRoot) {\n      finalMessage = `${message}\\n\\n**Note**: The file \\`${filePath}\\` is located outside the scanned source directory and cannot be linked directly in this diagnostic. This diagnostic is associated with the repository root instead.`;\n    }\n  }\n\n  try {\n    execFileSync(codeqlExePath, [\n      'database',\n      'add-diagnostic',\n      '--extractor-name=cds',\n      '--ready-for-status-page',\n      `--source-id=${sourceId}`,\n      `--source-name=${sourceName}`,\n      `--severity=${severity}`,\n      `--markdown-message=${finalMessage}`,\n      `--file-path=${finalFilePath}`,\n      '--',\n      `${process.env.CODEQL_EXTRACTOR_CDS_WIP_DATABASE ?? ''}`,\n    ]);\n    cdsExtractorLog('info', `Added ${severity} diagnostic for ${logPrefix}: ${filePath}`);\n    return true;\n  } catch (err) {\n    cdsExtractorLog(\n      'error',\n      `Failed to add ${severity} diagnostic for ${logPrefix}=${filePath} : ${String(err)}`,\n    );\n    return false;\n  }\n}\n\n/**\n * Add a diagnostic warning to the CodeQL database for a CDS indexer failure\n * @param projectDir The project directory where the cds-indexer failed\n * @param errorMessage The error message from the cds-indexer execution\n * @param codeqlExePath Path to the CodeQL executable\n * @param sourceRoot Optional source root directory to make file paths relative to\n * @returns True if the diagnostic was added, false otherwise\n */\nexport function addCdsIndexerDiagnostic(\n  projectDir: string,\n  errorMessage: string,\n  codeqlExePath: string,\n  sourceRoot?: string,\n): boolean {\n  return addDiagnostic(\n    projectDir,\n    errorMessage,\n    codeqlExePath,\n    'cds/indexer-failure',\n    'Failure running @sap/cds-indexer for a SAP CAP CDS project',\n    DiagnosticSeverity.Warning,\n    'project 
directory',\n    sourceRoot,\n  );\n}\n\n/**\n * Add a diagnostic error to the CodeQL database for a failed CDS compilation\n * @param cdsFilePath Path to the CDS file that failed to compile\n * @param errorMessage The error message from the compilation\n * @param codeqlExePath Path to the CodeQL executable\n * @param sourceRoot Optional source root directory to make file paths relative to\n * @returns True if the diagnostic was added, false otherwise\n */\nexport function addCompilationDiagnostic(\n  cdsFilePath: string,\n  errorMessage: string,\n  codeqlExePath: string,\n  sourceRoot?: string,\n): boolean {\n  return addDiagnostic(\n    cdsFilePath,\n    errorMessage,\n    codeqlExePath,\n    'cds/compilation-failure',\n    'Failure to compile one or more SAP CAP CDS files',\n    DiagnosticSeverity.Error,\n    'source file',\n    sourceRoot,\n  );\n}\n\n/**\n * Add a diagnostic error to the CodeQL database for dependency graph build failure\n * @param sourceRoot Source root directory to use as file context\n * @param errorMessage The error message from dependency graph build\n * @param codeqlExePath Path to the CodeQL executable\n * @returns True if the diagnostic was added, false otherwise\n */\nexport function addDependencyGraphDiagnostic(\n  sourceRoot: string,\n  errorMessage: string,\n  codeqlExePath: string,\n): boolean {\n  return addDiagnostic(\n    sourceRoot,\n    errorMessage,\n    codeqlExePath,\n    'cds/dependency-graph-failure',\n    'CDS project dependency graph build failure',\n    DiagnosticSeverity.Error,\n    'source root',\n    sourceRoot,\n  );\n}\n\n/**\n * Add a diagnostic error to the CodeQL database for dependency installation failure\n * @param sourceRoot Source root directory to use as file context\n * @param errorMessage The error message from dependency installation\n * @param codeqlExePath Path to the CodeQL executable\n * @returns True if the diagnostic was added, false otherwise\n */\nexport function 
addDependencyInstallationDiagnostic(\n  sourceRoot: string,\n  errorMessage: string,\n  codeqlExePath: string,\n): boolean {\n  return addDiagnostic(\n    sourceRoot,\n    errorMessage,\n    codeqlExePath,\n    'cds/dependency-installation-failure',\n    'CDS dependency installation failure',\n    DiagnosticSeverity.Error,\n    'source root',\n    sourceRoot,\n  );\n}\n\n/**\n * Add a diagnostic error to the CodeQL database for environment setup failure\n * @param sourceRoot Source root directory to use as file context\n * @param errorMessage The error message from environment setup\n * @param codeqlExePath Path to the CodeQL executable\n * @returns True if the diagnostic was added, false otherwise\n */\nexport function addEnvironmentSetupDiagnostic(\n  sourceRoot: string,\n  errorMessage: string,\n  codeqlExePath: string,\n): boolean {\n  // Use a representative file from source root or the directory itself\n  const contextFile = sourceRoot;\n  return addDiagnostic(\n    contextFile,\n    errorMessage,\n    codeqlExePath,\n    'cds/environment-setup-failure',\n    'CDS extractor environment setup failure',\n    DiagnosticSeverity.Error,\n    'source root',\n    sourceRoot,\n  );\n}\n\n/**\n * Add a diagnostic error to the CodeQL database for a JavaScript extractor failure\n * @param filePath Path to a relevant file for the error context\n * @param errorMessage The error message from the JavaScript extractor\n * @param codeqlExePath Path to the CodeQL executable\n * @param sourceRoot Source root directory to make file paths relative to\n * @returns True if the diagnostic was added, false otherwise\n */\nexport function addJavaScriptExtractorDiagnostic(\n  filePath: string,\n  errorMessage: string,\n  codeqlExePath: string,\n  sourceRoot?: string,\n): boolean {\n  return addDiagnostic(\n    filePath,\n    errorMessage,\n    codeqlExePath,\n    'cds/js-extractor-failure',\n    'Failure in JavaScript extractor for SAP CAP CDS files',\n    DiagnosticSeverity.Error,\n   
 'extraction file',\n    sourceRoot,\n  );\n}\n\n/**\n * Add a diagnostic warning when no CDS projects are detected\n * @param sourceRoot Source root directory to use as file context\n * @param message The warning message about no CDS projects\n * @param codeqlExePath Path to the CodeQL executable\n * @returns True if the diagnostic was added, false otherwise\n */\nexport function addNoCdsProjectsDiagnostic(\n  sourceRoot: string,\n  message: string,\n  codeqlExePath: string,\n): boolean {\n  return addDiagnostic(\n    sourceRoot,\n    message,\n    codeqlExePath,\n    'cds/no-cds-projects',\n    'No CDS projects detected in source',\n    DiagnosticSeverity.Warning,\n    'source root',\n    sourceRoot,\n  );\n}\n", "import { execFileSync } from 'child_process';\nimport { createHash } from 'crypto';\nimport { copyFileSync, existsSync, mkdirSync, writeFileSync } from 'fs';\nimport { dirname, join, resolve } from 'path';\n\nimport type { CdsDependencyCombination } from './types';\nimport { CdsDependencyGraph, CdsProject } from '../cds/parser/types';\nimport { DiagnosticSeverity } from '../diagnostics';\nimport { cdsExtractorLog } from '../logging';\nimport { resolveCdsVersions } from './versionResolver';\n\nconst cacheSubDirName = '.cds-extractor-cache';\n\n/**\n * Add a warning diagnostic for dependency version fallback\n * @param packageJsonPath Path to the package.json file\n * @param warningMessage The warning message\n * @param codeqlExePath Path to the CodeQL executable\n * @returns True if the diagnostic was added, false otherwise\n */\nfunction addDependencyVersionWarning(\n  packageJsonPath: string,\n  warningMessage: string,\n  codeqlExePath: string,\n): boolean {\n  try {\n    execFileSync(codeqlExePath, [\n      'database',\n      'add-diagnostic',\n      '--extractor-name=cds',\n      '--ready-for-status-page',\n      '--source-id=cds/dependency-version-fallback',\n      '--source-name=Using fallback versions for SAP CAP CDS dependencies',\n      
`--severity=${DiagnosticSeverity.Warning}`,\n      `--markdown-message=${warningMessage}`,\n      `--file-path=${resolve(packageJsonPath)}`,\n      '--',\n      `${process.env.CODEQL_EXTRACTOR_CDS_WIP_DATABASE ?? ''}`,\n    ]);\n    cdsExtractorLog('info', `Added warning diagnostic for dependency fallback: ${packageJsonPath}`);\n    return true;\n  } catch (err) {\n    cdsExtractorLog(\n      'error',\n      `Failed to add warning diagnostic for ${packageJsonPath}: ${String(err)}`,\n    );\n    return false;\n  }\n}\n\n/**\n * Find the nearest `.npmrc` file by searching the given directory and its\n * ancestors up to (and including) the filesystem root. npm itself walks the\n * directory tree when looking for project-level `.npmrc` files, so we mirror\n * that behaviour here.\n *\n * @param startDir The directory from which to start the upward search.\n * @returns The absolute path to the nearest `.npmrc`, or `undefined` if none is found.\n */\nexport function findNearestNpmrc(startDir: string): string | undefined {\n  let current = resolve(startDir);\n\n  // Walk up the directory tree until we find an .npmrc or reach the root\n\n  while (true) {\n    const candidate = join(current, '.npmrc');\n    if (existsSync(candidate)) {\n      return candidate;\n    }\n    const parent = dirname(current);\n    if (parent === current) {\n      // Reached filesystem root without finding .npmrc\n      return undefined;\n    }\n    current = parent;\n  }\n}\n\n/**\n * Copy the project's `.npmrc` file (if any) into the cache directory so that\n * `npm install` inside the cache respects custom registry configuration such\n * as scoped registries (`@sap:registry=...`), authentication tokens, and\n * `strict-ssl` settings.\n *\n * @param cacheDir  The cache directory where dependencies will be installed.\n * @param projectDir Absolute path to the project directory whose `.npmrc` should be used.\n */\nexport function copyNpmrcToCache(cacheDir: string, projectDir: string): void {\n  
const npmrcPath = findNearestNpmrc(projectDir);\n  if (!npmrcPath) {\n    return;\n  }\n\n  const dest = join(cacheDir, '.npmrc');\n  try {\n    copyFileSync(npmrcPath, dest);\n    cdsExtractorLog('info', `Copied .npmrc from '${npmrcPath}' to cache directory '${cacheDir}'`);\n  } catch (err) {\n    cdsExtractorLog(\n      'warn',\n      `Failed to copy .npmrc to cache directory: ${err instanceof Error ? err.message : String(err)}`,\n    );\n  }\n}\n\n/**\n * Install dependencies for CDS projects using a robust cache strategy with fallback logic\n * @param dependencyGraph The dependency graph of the project\n * @param sourceRoot Source root directory\n * @param codeqlExePath Path to the CodeQL executable (optional)\n * @returns Map of project directories to their corresponding cache directories\n */\nexport function cacheInstallDependencies(\n  dependencyGraph: CdsDependencyGraph,\n  sourceRoot: string,\n  codeqlExePath?: string,\n): Map {\n  // Sanity check that we found at least one project\n  if (dependencyGraph.projects.size === 0) {\n    cdsExtractorLog('info', 'No CDS projects found for dependency installation.');\n    cdsExtractorLog(\n      'info',\n      'This is expected if the source contains no CAP/CDS projects and should be handled by the caller.',\n    );\n    return new Map();\n  }\n\n  // Extract unique dependency combinations from all projects with version resolution\n  const dependencyCombinations = extractUniqueDependencyCombinations(dependencyGraph.projects);\n\n  if (dependencyCombinations.length === 0) {\n    cdsExtractorLog(\n      'error',\n      'No CDS dependencies found in any project. 
This means projects were detected but lack proper @sap/cds dependencies.',\n    );\n    cdsExtractorLog(\n      'info',\n      'Will attempt to use system-installed CDS tools if available, but compilation may fail.',\n    );\n    return new Map();\n  }\n\n  cdsExtractorLog(\n    'info',\n    `Found ${dependencyCombinations.length} unique CDS dependency combination(s).`,\n  );\n\n  // Log each dependency combination for transparency\n  for (const combination of dependencyCombinations) {\n    const { cdsVersion, cdsDkVersion, hash, resolvedCdsVersion, resolvedCdsDkVersion, isFallback } =\n      combination;\n    const actualCdsVersion = resolvedCdsVersion ?? cdsVersion;\n    const actualCdsDkVersion = resolvedCdsDkVersion ?? cdsDkVersion;\n    const fallbackNote = isFallback ? ' (using fallback versions)' : '';\n    const indexerNote = combination.cdsIndexerVersion\n      ? `, @sap/cds-indexer@${combination.cdsIndexerVersion}`\n      : '';\n\n    cdsExtractorLog(\n      'info',\n      `Dependency combination ${hash.substring(0, 8)}: @sap/cds@${actualCdsVersion}, @sap/cds-dk@${actualCdsDkVersion}${indexerNote}${fallbackNote}`,\n    );\n  }\n\n  // Create a cache directory under the source root directory.\n  const cacheRootDir = join(sourceRoot, cacheSubDirName);\n  cdsExtractorLog(\n    'info',\n    `Using cache directory '${cacheSubDirName}' within source root directory '${cacheRootDir}'`,\n  );\n\n  if (!existsSync(cacheRootDir)) {\n    try {\n      mkdirSync(cacheRootDir, { recursive: true });\n      cdsExtractorLog('info', `Created cache directory: ${cacheRootDir}`);\n    } catch (err) {\n      cdsExtractorLog(\n        'warn',\n        `Failed to create cache directory: ${err instanceof Error ? 
err.message : String(err)}`,\n      );\n      cdsExtractorLog('info', 'Skipping dependency installation due to cache directory failure.');\n      return new Map();\n    }\n  } else {\n    cdsExtractorLog('info', `Cache directory already exists: ${cacheRootDir}`);\n  }\n\n  // Map to track which cache directory to use for each project\n  const projectCacheDirMap = new Map();\n  let successfulInstallations = 0;\n\n  // Install each unique dependency combination in its own cache directory\n  for (const combination of dependencyCombinations) {\n    const { cdsVersion, cdsDkVersion, hash } = combination;\n    const { resolvedCdsVersion, resolvedCdsDkVersion } = combination;\n    const cacheDirName = `cds-${hash}`;\n    const cacheDir = join(cacheRootDir, cacheDirName);\n\n    cdsExtractorLog(\n      'info',\n      `Processing dependency combination ${hash.substring(0, 8)} in cache directory: ${cacheDirName}`,\n    );\n\n    // Create the cache directory if it doesn't exist\n    if (!existsSync(cacheDir)) {\n      try {\n        mkdirSync(cacheDir, { recursive: true });\n        cdsExtractorLog('info', `Created cache subdirectory: ${cacheDirName}`);\n      } catch (err) {\n        cdsExtractorLog(\n          'error',\n          `Failed to create cache directory for combination ${hash.substring(0, 8)} (${cacheDirName}): ${\n            err instanceof Error ? err.message : String(err)\n          }`,\n        );\n        continue;\n      }\n\n      // Create a package.json for this dependency combination using resolved versions\n      const actualCdsVersion = resolvedCdsVersion ?? cdsVersion;\n      const actualCdsDkVersion = resolvedCdsDkVersion ?? 
cdsDkVersion;\n\n      const cacheDeps: Record = {\n        '@sap/cds': actualCdsVersion,\n        '@sap/cds-dk': actualCdsDkVersion,\n      };\n\n      // Include @sap/cds-indexer in the cache when a project depends on it.\n      // This is a best-effort optimization: on systems with access to the\n      // private npm registry that hosts @sap/cds-indexer, it will be cached\n      // alongside @sap/cds and @sap/cds-dk. On systems without access, the\n      // npm install will still succeed for the other packages (it won't fail\n      // the overall installation \u2014 npm install is run with --no-optional\n      // semantics handled below).\n      if (combination.cdsIndexerVersion) {\n        cacheDeps['@sap/cds-indexer'] = combination.cdsIndexerVersion;\n        cdsExtractorLog(\n          'info',\n          `Including @sap/cds-indexer@${combination.cdsIndexerVersion} in cache for combination ${hash.substring(0, 8)}`,\n        );\n      }\n\n      const packageJson = {\n        name: `cds-extractor-cache-${hash}`,\n        version: '1.0.0',\n        private: true,\n        dependencies: cacheDeps,\n      };\n\n      try {\n        writeFileSync(join(cacheDir, 'package.json'), JSON.stringify(packageJson, null, 2));\n        cdsExtractorLog('info', `Created package.json in cache subdirectory: ${cacheDirName}`);\n      } catch (err) {\n        cdsExtractorLog(\n          'error',\n          `Failed to create package.json in cache directory ${cacheDirName}: ${\n            err instanceof Error ? 
err.message : String(err)\n          }`,\n        );\n        continue;\n      }\n    }\n\n    // Ensure the cache directory has an .npmrc that reflects the projects' registry configuration\n    const npmrcProjectDir = Array.from(dependencyGraph.projects.values())\n      .map(project => project.projectDir)\n      .find(projectDir => projectDir && existsSync(join(sourceRoot, projectDir, '.npmrc')));\n    if (npmrcProjectDir) {\n      copyNpmrcToCache(cacheDir, join(sourceRoot, npmrcProjectDir));\n    }\n\n    // Try to install dependencies in the cache directory\n    // Get the first project package.json path for diagnostic purposes\n    const samplePackageJsonPath = Array.from(dependencyGraph.projects.values()).find(\n      project => project.packageJson,\n    )?.projectDir;\n    const packageJsonPath = samplePackageJsonPath\n      ? join(sourceRoot, samplePackageJsonPath, 'package.json')\n      : undefined;\n\n    const installSuccess = installDependenciesInCache(\n      cacheDir,\n      combination,\n      cacheDirName,\n      packageJsonPath,\n      codeqlExePath,\n    );\n\n    if (!installSuccess) {\n      cdsExtractorLog(\n        'warn',\n        `Skipping failed dependency combination ${hash.substring(0, 8)} (cache directory: ${cacheDirName})`,\n      );\n      continue;\n    }\n\n    successfulInstallations++;\n\n    // Associate projects with this dependency combination\n    for (const [projectDir, project] of Array.from(dependencyGraph.projects.entries())) {\n      if (!project.packageJson) {\n        continue;\n      }\n      const p_cdsVersion = project.packageJson.dependencies?.['@sap/cds'] ?? 'latest';\n      const p_cdsDkVersion = project.packageJson.devDependencies?.['@sap/cds-dk'] ?? 
p_cdsVersion;\n      const p_cdsIndexerVersion =\n        project.packageJson.dependencies?.['@sap/cds-indexer'] ??\n        project.packageJson.devDependencies?.['@sap/cds-indexer'] ??\n        undefined;\n\n      // Resolve the project's versions to match against the combination's resolved versions\n      const projectResolvedVersions = resolveCdsVersions(p_cdsVersion, p_cdsDkVersion);\n      const projectActualCdsVersion = projectResolvedVersions.resolvedCdsVersion ?? p_cdsVersion;\n      const projectActualCdsDkVersion =\n        projectResolvedVersions.resolvedCdsDkVersion ?? p_cdsDkVersion;\n\n      // Match based on resolved versions since that's what the hash is based on\n      const combinationActualCdsVersion = combination.resolvedCdsVersion ?? combination.cdsVersion;\n      const combinationActualCdsDkVersion =\n        combination.resolvedCdsDkVersion ?? combination.cdsDkVersion;\n\n      if (\n        projectActualCdsVersion === combinationActualCdsVersion &&\n        projectActualCdsDkVersion === combinationActualCdsDkVersion &&\n        p_cdsIndexerVersion === combination.cdsIndexerVersion\n      ) {\n        projectCacheDirMap.set(projectDir, cacheDir);\n      }\n    }\n  }\n\n  // Log final status\n  if (successfulInstallations === 0) {\n    cdsExtractorLog('error', 'Failed to install any dependency combinations.');\n    if (dependencyCombinations.length > 0) {\n      cdsExtractorLog(\n        'error',\n        `All ${dependencyCombinations.length} dependency combination(s) failed to install. 
This will likely cause compilation failures.`,\n      );\n    }\n  } else if (successfulInstallations < dependencyCombinations.length) {\n    cdsExtractorLog(\n      'warn',\n      `Successfully installed ${successfulInstallations} out of ${dependencyCombinations.length} dependency combinations.`,\n    );\n  } else {\n    cdsExtractorLog('info', 'All dependency combinations installed successfully.');\n  }\n\n  // Log project-to-cache-directory mappings for transparency.\n  if (projectCacheDirMap.size > 0) {\n    cdsExtractorLog('info', `Project to cache directory mappings:`);\n    for (const [projectDir, cacheDir] of Array.from(projectCacheDirMap.entries())) {\n      const cacheDirName = join(cacheDir).split('/').pop() ?? 'unknown';\n      cdsExtractorLog('info', `  ${projectDir} \u2192 ${cacheDirName}`);\n    }\n  } else {\n    cdsExtractorLog(\n      'warn',\n      'No project to cache directory mappings created. Projects may not have compatible dependencies installed.',\n    );\n  }\n\n  return projectCacheDirMap;\n}\n\n/**\n * Extracts unique dependency combinations from the dependency graph.\n * @param projects A map of projects from the dependency graph.\n * @returns An array of unique dependency combinations.\n */\nfunction extractUniqueDependencyCombinations(\n  projects: Map,\n): CdsDependencyCombination[] {\n  const combinations = new Map();\n\n  for (const project of Array.from(projects.values())) {\n    if (!project.packageJson) {\n      continue;\n    }\n\n    const cdsVersion = project.packageJson.dependencies?.['@sap/cds'] ?? 'latest';\n    const cdsDkVersion = project.packageJson.devDependencies?.['@sap/cds-dk'] ?? 
cdsVersion;\n\n    // Detect optional @sap/cds-indexer dependency\n    const cdsIndexerVersion =\n      project.packageJson.dependencies?.['@sap/cds-indexer'] ??\n      project.packageJson.devDependencies?.['@sap/cds-indexer'] ??\n      undefined;\n\n    // Resolve versions first to ensure we cache based on actual resolved versions\n    cdsExtractorLog(\n      'info',\n      `Resolving available dependency versions for project '${project.projectDir}' with dependencies: [@sap/cds@${cdsVersion}, @sap/cds-dk@${cdsDkVersion}]`,\n    );\n    const resolvedVersions = resolveCdsVersions(cdsVersion, cdsDkVersion);\n    const { resolvedCdsVersion, resolvedCdsDkVersion, ...rest } = resolvedVersions;\n\n    // Log the resolved CDS dependency versions for the project\n    if (resolvedCdsVersion && resolvedCdsDkVersion) {\n      let statusMsg: string;\n      if (resolvedVersions.cdsExactMatch && resolvedVersions.cdsDkExactMatch) {\n        statusMsg = ' (exact match)';\n      } else if (!resolvedVersions.isFallback) {\n        statusMsg = ' (compatible versions)';\n      } else {\n        statusMsg = ' (using fallback versions)';\n      }\n      cdsExtractorLog(\n        'info',\n        `Resolved to: @sap/cds@${resolvedCdsVersion}, @sap/cds-dk@${resolvedCdsDkVersion}${statusMsg}`,\n      );\n    } else {\n      cdsExtractorLog(\n        'error',\n        `Failed to resolve CDS dependencies: @sap/cds@${cdsVersion}, @sap/cds-dk@${cdsDkVersion}`,\n      );\n    }\n\n    // Calculate hash based on resolved versions to ensure proper cache reuse\n    const actualCdsVersion = resolvedCdsVersion ?? cdsVersion;\n    const actualCdsDkVersion = resolvedCdsDkVersion ?? cdsDkVersion;\n    const hashInput = cdsIndexerVersion\n      ? 
`${actualCdsVersion}|${actualCdsDkVersion}|${cdsIndexerVersion}`\n      : `${actualCdsVersion}|${actualCdsDkVersion}`;\n    const hash = createHash('sha256').update(hashInput).digest('hex');\n\n    if (!combinations.has(hash)) {\n      combinations.set(hash, {\n        cdsVersion,\n        cdsDkVersion,\n        cdsIndexerVersion,\n        hash,\n        resolvedCdsVersion: resolvedCdsVersion ?? undefined,\n        resolvedCdsDkVersion: resolvedCdsDkVersion ?? undefined,\n        ...rest,\n      });\n    }\n  }\n\n  return Array.from(combinations.values());\n}\n\n/**\n * Attempt to install dependencies in a cache directory with fallback logic\n * @param cacheDir Cache directory path\n * @param combination Dependency combination to install\n * @param cacheDirName Name of the cache directory for logging\n * @param packageJsonPath Optional package.json path for diagnostics\n * @param codeqlExePath Optional CodeQL executable path for diagnostics\n * @returns True if installation succeeded, false otherwise\n */\nfunction installDependenciesInCache(\n  cacheDir: string,\n  combination: CdsDependencyCombination,\n  cacheDirName: string,\n  packageJsonPath?: string,\n  codeqlExePath?: string,\n): boolean {\n  const { resolvedCdsVersion, resolvedCdsDkVersion, isFallback, warning } = combination;\n\n  // Check if node_modules directory already exists in the cache dir\n  const nodeModulesExists =\n    existsSync(join(cacheDir, 'node_modules', '@sap', 'cds')) &&\n    existsSync(join(cacheDir, 'node_modules', '@sap', 'cds-dk'));\n\n  if (nodeModulesExists) {\n    cdsExtractorLog(\n      'info',\n      `Using cached dependencies for @sap/cds@${resolvedCdsVersion} and @sap/cds-dk@${resolvedCdsDkVersion} from ${cacheDirName}`,\n    );\n\n    // Add warning diagnostic if using fallback versions\n    if (isFallback && warning && packageJsonPath && codeqlExePath) {\n      addDependencyVersionWarning(packageJsonPath, warning, codeqlExePath);\n    }\n\n    return true;\n  }\n\n  if 
(!resolvedCdsVersion || !resolvedCdsDkVersion) {\n    cdsExtractorLog('error', 'Cannot install dependencies: no compatible versions found');\n    return false;\n  }\n\n  // Install dependencies in the cache directory\n  cdsExtractorLog(\n    'info',\n    `Installing @sap/cds@${resolvedCdsVersion} and @sap/cds-dk@${resolvedCdsDkVersion} in cache directory: ${cacheDirName}`,\n  );\n\n  if (isFallback && warning) {\n    cdsExtractorLog('warn', warning);\n  }\n\n  try {\n    execFileSync('npm', ['install', '--quiet', '--no-audit', '--no-fund'], {\n      cwd: cacheDir,\n      stdio: 'inherit',\n    });\n\n    // Add warning diagnostic if using fallback versions\n    if (isFallback && warning && packageJsonPath && codeqlExePath) {\n      addDependencyVersionWarning(packageJsonPath, warning, codeqlExePath);\n    }\n\n    return true;\n  } catch (err) {\n    const errorMessage = `Failed to install resolved dependencies in cache directory ${cacheDir}: ${err instanceof Error ? err.message : String(err)}`;\n    cdsExtractorLog('error', errorMessage);\n    return false;\n  }\n}\n", "import { execSync } from 'child_process';\n\nimport type { SemanticVersion } from './types';\nimport { cdsExtractorLog } from '../logging';\n\n/**\n * Cache for storing available versions for npm packages to avoid duplicate\n * `npm view` calls.\n */\nconst availableVersionsCache = new Map();\n\n// Define the set of allowed npm packages for which we cache versions.\ntype CachedPackageName = '@sap/cds' | '@sap/cds-dk';\n\n/**\n * Cache statistics for debugging purposes\n */\nconst cacheStats = {\n  hits: 0,\n  misses: 0,\n  get hitRate() {\n    const total = this.hits + this.misses;\n    return total > 0 ? 
((this.hits / total) * 100).toFixed(1) : '0.0';\n  },\n};\n\n/**\n * Check if @sap/cds and @sap/cds-dk versions are likely compatible.\n * @param cdsVersion The @sap/cds version\n * @param cdsDkVersion The @sap/cds-dk version\n * @returns Object with compatibility information and warnings\n */\nexport function checkVersionCompatibility(\n  cdsVersion: string,\n  cdsDkVersion: string,\n): {\n  isCompatible: boolean;\n  warning?: string;\n} {\n  // If either version is 'latest', assume they are compatible\n  if (cdsVersion === 'latest' || cdsDkVersion === 'latest') {\n    return { isCompatible: true };\n  }\n\n  const parsedCds = parseSemanticVersion(cdsVersion);\n  const parsedCdsDk = parseSemanticVersion(cdsDkVersion);\n\n  if (!parsedCds || !parsedCdsDk) {\n    return {\n      isCompatible: false,\n      warning: 'Unable to parse version numbers for compatibility check',\n    };\n  }\n\n  // Generally, @sap/cds and @sap/cds-dk should have the same major version\n  // and ideally the same minor version for best compatibility\n  const majorVersionsMatch = parsedCds.major === parsedCdsDk.major;\n  const minorVersionsMatch = parsedCds.minor === parsedCdsDk.minor;\n\n  if (!majorVersionsMatch) {\n    return {\n      isCompatible: false,\n      warning: `Major version mismatch: @sap/cds ${cdsVersion} and @sap/cds-dk ${cdsDkVersion} may not be compatible`,\n    };\n  }\n\n  if (!minorVersionsMatch) {\n    return {\n      isCompatible: true,\n      warning: `Minor version difference: @sap/cds ${cdsVersion} and @sap/cds-dk ${cdsDkVersion} - consider aligning versions for best compatibility`,\n    };\n  }\n\n  return { isCompatible: true };\n}\n\n/**\n * Compare two semantic versions\n * @param a First version\n * @param b Second version\n * @returns Negative if a < b, 0 if equal, positive if a > b\n */\nexport function compareVersions(a: SemanticVersion, b: SemanticVersion): number {\n  if (a.major !== b.major) return a.major - b.major;\n  if (a.minor !== b.minor) return 
a.minor - b.minor;\n  if (a.patch !== b.patch) return a.patch - b.patch;\n\n  // Handle prerelease versions (prerelease < release)\n  if (a.prerelease && !b.prerelease) return -1;\n  if (!a.prerelease && b.prerelease) return 1;\n  if (a.prerelease && b.prerelease) {\n    return a.prerelease.localeCompare(b.prerelease);\n  }\n\n  return 0;\n}\n\n/**\n * Find the best available version from a list of versions for a given requirement\n * @param availableVersions List of available version strings\n * @param requiredVersion Required version string\n * @returns Best matching version or null if no compatible version found\n */\nexport function findBestAvailableVersion(\n  availableVersions: string[],\n  requiredVersion: string,\n): string | null {\n  const parsedVersions = availableVersions\n    .map(v => parseSemanticVersion(v))\n    .filter((v): v is SemanticVersion => v !== null);\n\n  if (parsedVersions.length === 0) {\n    return null;\n  }\n\n  // First, try to find versions that satisfy the range\n  const satisfyingVersions = parsedVersions.filter(v => satisfiesRange(v, requiredVersion));\n\n  if (satisfyingVersions.length > 0) {\n    // Sort in descending order (newest first) and return the best match\n    satisfyingVersions.sort((a, b) => compareVersions(b, a));\n    return satisfyingVersions[0].original;\n  }\n\n  // If no exact match, prefer newer versions over older ones\n  // Sort all versions in descending order and return the newest\n  parsedVersions.sort((a, b) => compareVersions(b, a));\n  return parsedVersions[0].original;\n}\n\n/**\n * Get available versions for an npm package with caching to avoid duplicate\n * calls of the `npm view` command.\n * @param packageName The {@link CachedPackageName} for which to get versions\n * @returns Array of available version strings\n */\nexport function getAvailableVersions(packageName: CachedPackageName): string[] {\n  // Check cache first\n  if (availableVersionsCache.has(packageName)) {\n    cacheStats.hits++;\n  
  return availableVersionsCache.get(packageName)!;\n  }\n\n  // Cache miss - fetch from npm\n  cacheStats.misses++;\n  try {\n    const output = execSync(`npm view ${packageName} versions --json`, {\n      encoding: 'utf8',\n      timeout: 30000, // 30 second timeout\n    });\n\n    const versions: unknown = JSON.parse(output);\n    let versionArray: string[] = [];\n\n    if (Array.isArray(versions)) {\n      versionArray = versions.filter((v): v is string => typeof v === 'string');\n    } else if (typeof versions === 'string') {\n      versionArray = [versions];\n    }\n\n    // Cache the result\n    availableVersionsCache.set(packageName, versionArray);\n\n    return versionArray;\n  } catch (error) {\n    cdsExtractorLog('warn', `Failed to fetch versions for ${packageName}: ${String(error)}`);\n    // Cache empty array to avoid repeated failures\n    availableVersionsCache.set(packageName, []);\n    return [];\n  }\n}\n\n/**\n * Get cache statistics for debugging purposes\n * @returns Object with cache hit/miss statistics\n */\nexport function getCacheStatistics(): {\n  hits: number;\n  misses: number;\n  hitRate: string;\n  cachedPackages: string[];\n} {\n  return {\n    hits: cacheStats.hits,\n    misses: cacheStats.misses,\n    hitRate: cacheStats.hitRate,\n    cachedPackages: Array.from(availableVersionsCache.keys()),\n  };\n}\n\n/**\n * Parse a semantic version string\n * @param version Version string to parse (e.g., \"6.1.3\", \"^6.0.0\", \"~6.1.0\", \"latest\")\n * @returns Parsed semantic version or null if invalid\n */\nexport function parseSemanticVersion(version: string): SemanticVersion | null {\n  if (version === 'latest') {\n    // Return a very high version number for 'latest' to ensure it's preferred\n    return {\n      major: 999,\n      minor: 999,\n      patch: 999,\n      original: version,\n    };\n  }\n\n  // Remove common version prefixes\n  const cleanVersion = version.replace(/^[\\^~>=<]+/, '');\n\n  // Basic semver regex\n  const 
semverRegex = /^(\\d+)\\.(\\d+)\\.(\\d+)(?:-([a-zA-Z0-9.-]+))?(?:\\+([a-zA-Z0-9.-]+))?$/;\n  const match = cleanVersion.match(semverRegex);\n\n  if (!match) {\n    return null;\n  }\n\n  return {\n    major: parseInt(match[1], 10),\n    minor: parseInt(match[2], 10),\n    patch: parseInt(match[3], 10),\n    prerelease: match[4],\n    build: match[5],\n    original: version,\n  };\n}\n\n/**\n * Check if a resolved version satisfies the originally requested version.\n * @param resolvedVersion The version that was resolved\n * @param requestedVersion The originally requested version\n * @returns true if the resolved version satisfies the requested version range\n */\nfunction isSatisfyingVersion(resolvedVersion: string, requestedVersion: string): boolean {\n  // Exact string match or 'latest' case\n  if (resolvedVersion === requestedVersion || requestedVersion === 'latest') {\n    return true;\n  }\n\n  const parsedResolved = parseSemanticVersion(resolvedVersion);\n  if (!parsedResolved) {\n    return false;\n  }\n\n  return satisfiesRange(parsedResolved, requestedVersion);\n}\n\n/**\n * Resolve the best available version for CDS dependencies\n * @param cdsVersion Required @sap/cds version\n * @param cdsDkVersion Required @sap/cds-dk version\n * @returns Object with resolved versions and compatibility info\n */\nexport function resolveCdsVersions(\n  cdsVersion: string,\n  cdsDkVersion: string,\n): {\n  resolvedCdsVersion: string | null;\n  resolvedCdsDkVersion: string | null;\n  cdsExactMatch: boolean;\n  cdsDkExactMatch: boolean;\n  warning?: string;\n  isFallback?: boolean;\n} {\n  const cdsVersions = getAvailableVersions('@sap/cds');\n  const cdsDkVersions = getAvailableVersions('@sap/cds-dk');\n\n  const resolvedCdsVersion = findBestAvailableVersion(cdsVersions, cdsVersion);\n  const resolvedCdsDkVersion = findBestAvailableVersion(cdsDkVersions, cdsDkVersion);\n\n  // Check if resolved versions are exact matches (string equality or 'latest' case).\n  const 
cdsExactMatch =\n    resolvedCdsVersion === cdsVersion || (cdsVersion === 'latest' && resolvedCdsVersion !== null);\n  const cdsDkExactMatch =\n    resolvedCdsDkVersion === cdsDkVersion ||\n    (cdsDkVersion === 'latest' && resolvedCdsDkVersion !== null);\n\n  // Check if resolved versions satisfy the requested ranges (including exact matches).\n  const cdsSatisfiesRange = resolvedCdsVersion\n    ? isSatisfyingVersion(resolvedCdsVersion, cdsVersion)\n    : false;\n  const cdsDkSatisfiesRange = resolvedCdsDkVersion\n    ? isSatisfyingVersion(resolvedCdsDkVersion, cdsDkVersion)\n    : false;\n\n  // Only consider it a fallback if we couldn't find a satisfying version.\n  const isFallback = !cdsSatisfiesRange || !cdsDkSatisfiesRange;\n\n  let warning: string | undefined;\n\n  // Check compatibility between resolved versions (only if both were resolved).\n  // Show warnings when:\n  // 1. We're using fallback versions (couldn't find compatible versions), OR\n  // 2. At least one version isn't an exact match (version range was used), OR\n  // 3. 
Resolved versions have actual compatibility issues (e.g., major version mismatch).\n  if (resolvedCdsVersion && resolvedCdsDkVersion) {\n    const compatibility = checkVersionCompatibility(resolvedCdsVersion, resolvedCdsDkVersion);\n\n    const shouldShowWarning =\n      isFallback ||\n      !cdsExactMatch ||\n      !cdsDkExactMatch ||\n      (compatibility.warning && !compatibility.isCompatible);\n\n    if (compatibility.warning && shouldShowWarning) {\n      warning = compatibility.warning;\n    }\n  }\n\n  return {\n    resolvedCdsVersion,\n    resolvedCdsDkVersion,\n    cdsExactMatch,\n    cdsDkExactMatch,\n    warning,\n    isFallback,\n  };\n}\n\n/**\n * Check if version satisfies a version range.\n * @param version Version to check\n * @param range Version range (e.g., \"^6.0.0\", \"~6.1.0\", \">=6.0.0\")\n * @returns true if version satisfies the range\n */\nexport function satisfiesRange(version: SemanticVersion, range: string): boolean {\n  if (range === 'latest') {\n    return true;\n  }\n\n  const rangeVersion = parseSemanticVersion(range);\n  if (!rangeVersion) {\n    return false;\n  }\n\n  if (range.startsWith('^')) {\n    // Caret range: compatible within same major version\n    return version.major === rangeVersion.major && compareVersions(version, rangeVersion) >= 0;\n  } else if (range.startsWith('~')) {\n    // Tilde range: compatible within same minor version\n    return (\n      version.major === rangeVersion.major &&\n      version.minor === rangeVersion.minor &&\n      compareVersions(version, rangeVersion) >= 0\n    );\n  } else if (range.startsWith('>=')) {\n    // Greater than or equal\n    return compareVersions(version, rangeVersion) >= 0;\n  } else if (range.startsWith('>')) {\n    // Greater than\n    return compareVersions(version, rangeVersion) > 0;\n  } else if (range.startsWith('<=')) {\n    // Less than or equal\n    return compareVersions(version, rangeVersion) <= 0;\n  } else if (range.startsWith('<')) {\n    // Less than\n    
return compareVersions(version, rangeVersion) < 0;\n  } else {\n    // Exact match\n    return compareVersions(version, rangeVersion) === 0;\n  }\n}\n\n/**\n * Test-only exports - DO NOT USE IN PRODUCTION CODE\n * These are exported only for testing purposes\n */\nexport const __testOnly__ = {\n  availableVersionsCache,\n  cacheStats,\n};\n", "/** Full dependency installation utilities for retry scenarios. */\n\nimport { execFileSync } from 'child_process';\nimport { join } from 'path';\n\nimport type { FullDependencyInstallationResult } from './types';\nimport type { CdsProject } from '../cds/parser';\nimport { cdsExtractorLog } from '../logging';\n\n/**\n * Determines if a {@link CdsProject} requires \"full\" dependency installation.\n *\n * @param project The {@link CdsProject} to check\n * @returns `true` if the project has at least one compilation task that is\n * currently marked as `failed` AND has not yet been retried. Otherwise, `false`.\n */\nexport function needsFullDependencyInstallation(project: CdsProject): boolean {\n  // Check if already installed\n  if (project.retryStatus?.fullDependenciesInstalled) {\n    return false;\n  }\n\n  // Check if project has failed tasks that could benefit from full dependencies.\n  //\n  // Currently, we only allow for one retry, because the only significant change we\n  // can make (to justify a retry) is to install and use the full set of declared\n  // dependencies instead of the minimal set of cached (`@sap/cds` and `@sap/cds-dk`)\n  // dependencies.\n  const hasFailedTasks = project.compilationTasks.some(\n    task => task.status === 'failed' && !task.retryInfo?.hasBeenRetried,\n  );\n\n  return hasFailedTasks && project.packageJson !== undefined;\n}\n\n/**\n * Installs full dependencies for a {@link CdsProject} in support of retry behavior\n * for compilation tasks that fail unless the `cds` CLI/compiler has access to the\n * full set of dependencies declared for the project.\n *\n * @param project The CDS 
project to install dependencies for\n * @param sourceRoot Source root directory\n * @returns Installation result with details\n */\nexport function projectInstallDependencies(\n  project: CdsProject,\n  sourceRoot: string,\n): FullDependencyInstallationResult {\n  const startTime = Date.now();\n  const projectPath = join(sourceRoot, project.projectDir);\n\n  const result: FullDependencyInstallationResult = {\n    success: false,\n    projectDir: projectPath,\n    warnings: [],\n    durationMs: 0,\n    timedOut: false,\n  };\n\n  try {\n    // Check if project has package.json\n    if (!project.packageJson) {\n      result.error = 'No package.json found for project';\n      return result;\n    }\n\n    // Install dependencies using npm in the project's directory\n    cdsExtractorLog(\n      'info',\n      `Installing full dependencies for project ${project.projectDir} in project's node_modules`,\n    );\n\n    try {\n      execFileSync('npm', ['install', '--quiet', '--no-audit', '--no-fund'], {\n        cwd: projectPath,\n        stdio: 'inherit',\n        timeout: 120000, // 2-minute timeout\n      });\n\n      result.success = true;\n      cdsExtractorLog(\n        'info',\n        `Successfully installed full dependencies for project ${project.projectDir}`,\n      );\n    } catch (execError) {\n      if (execError instanceof Error && 'signal' in execError && execError.signal === 'SIGTERM') {\n        result.timedOut = true;\n        result.error = 'Dependency installation timed out';\n      } else {\n        result.error = `npm install failed: ${String(execError)}`;\n      }\n\n      // Still attempt retry compilation even if dependency installation fails (optimistic approach)\n      result.warnings.push(\n        `Dependency installation failed but will still attempt retry compilation: ${result.error}`,\n      );\n      cdsExtractorLog('warn', result.warnings[0]);\n    }\n  } catch (error) {\n    result.error = `Failed to install full dependencies: 
${String(error)}`;\n    cdsExtractorLog('error', result.error);\n  } finally {\n    result.durationMs = Date.now() - startTime;\n  }\n\n  return result;\n}\n", "/** Main retry orchestration logic for CDS compilation failures. */\n\nimport { compileCdsToJson } from './compile';\nimport type {\n  CompilationAttempt,\n  CompilationTask,\n  ResultRetryCompilationTask,\n  ResultRetryCompilationOrchestration,\n  ValidatedCdsCommand,\n} from './types';\nimport { identifyTasksRequiringRetry, updateCdsDependencyGraphStatus } from './validator';\nimport { addCompilationDiagnostic } from '../../diagnostics';\nimport { cdsExtractorLog } from '../../logging';\nimport { needsFullDependencyInstallation, projectInstallDependencies } from '../../packageManager';\nimport type { CdsDependencyGraph, CdsProject } from '../parser';\n\n/**\n * Add diagnostics only for tasks with `status: failed` in the {@link CdsDependencyGraph}.\n * @param dependencyGraph The dependency graph to use as the source of truth for task status\n * @param codeqlExePath Path to CodeQL executable used to add a diagnostic notification\n * @param sourceRoot Source root directory to use for making file paths relative\n */\nfunction addCompilationDiagnosticsForFailedTasks(\n  dependencyGraph: CdsDependencyGraph,\n  codeqlExePath: string,\n  sourceRoot: string,\n): void {\n  for (const project of dependencyGraph.projects.values()) {\n    for (const task of project.compilationTasks) {\n      // Add diagnostics for tasks that currently have a `status` of 'failed'.\n      if (task.status === 'failed') {\n        // Add a diagnostic if the task:\n        //  - failed initially and was never retried, or...\n        //  - failed initially and was retried without success (or without updating status).\n        const shouldAddDiagnostic = task.retryInfo?.hasBeenRetried ?? 
!task.retryInfo;\n\n        if (shouldAddDiagnostic) {\n          for (const sourceFile of task.sourceFiles) {\n            addCompilationDiagnostic(\n              sourceFile,\n              task.errorSummary ?? 'Compilation failed',\n              codeqlExePath,\n              sourceRoot,\n            );\n          }\n        }\n      }\n    }\n  }\n}\n\n/**\n * Main orchestration function for retrying failed tasks in the {@link CdsDependencyGraph}.\n * @param dependencyGraph The dependency graph containing compilation tasks\n * @param codeqlExePath Path to `codeql` executable to use for adding diagnostic notifications\n * @returns The {@link ResultRetryCompilationOrchestration}\n */\nexport function orchestrateRetryAttempts(\n  dependencyGraph: CdsDependencyGraph,\n  codeqlExePath: string,\n): ResultRetryCompilationOrchestration {\n  const startTime = Date.now();\n  let dependencyInstallationStartTime = 0;\n  let dependencyInstallationEndTime = 0;\n  let retryCompilationStartTime = 0;\n  let retryCompilationEndTime = 0;\n\n  const result: ResultRetryCompilationOrchestration = {\n    success: true,\n    projectsWithRetries: [],\n    totalTasksRequiringRetry: 0,\n    totalSuccessfulRetries: 0,\n    totalFailedRetries: 0,\n    projectsWithSuccessfulDependencyInstallation: [],\n    projectsWithFailedDependencyInstallation: [],\n    retryDurationMs: 0,\n    dependencyInstallationDurationMs: 0,\n    retryCompilationDurationMs: 0,\n  };\n\n  try {\n    // Phase 1: Validate current outputs and identify failed tasks.\n    cdsExtractorLog('info', 'Identifying tasks requiring retry...');\n    const tasksRequiringRetry = identifyTasksRequiringRetry(dependencyGraph);\n\n    if (tasksRequiringRetry.size === 0) {\n      cdsExtractorLog('info', 'No tasks require retry - all compilations successful');\n      return result;\n    }\n\n    // Update retry status tracking.\n    result.totalTasksRequiringRetry = Array.from(tasksRequiringRetry.values()).reduce(\n      (sum, tasks) => 
sum + tasks.length,\n      0,\n    );\n    dependencyGraph.retryStatus.totalTasksRequiringRetry = result.totalTasksRequiringRetry;\n\n    // Phase 2: Install full dependencies for projects with failed tasks.\n    dependencyInstallationStartTime = Date.now();\n    for (const [projectDir, failedTasks] of tasksRequiringRetry) {\n      const project = dependencyGraph.projects.get(projectDir);\n      if (!project) {\n        continue;\n      }\n\n      if (needsFullDependencyInstallation(project)) {\n        try {\n          const installResult = projectInstallDependencies(project, dependencyGraph.sourceRootDir);\n\n          // Update project retry status.\n          project.retryStatus ??= {\n            fullDependenciesInstalled: false,\n            tasksRequiringRetry: failedTasks.length,\n            tasksRetried: 0,\n            installationErrors: [],\n          };\n\n          if (installResult.success) {\n            project.retryStatus.fullDependenciesInstalled = true;\n            result.projectsWithSuccessfulDependencyInstallation.push(projectDir);\n            dependencyGraph.retryStatus.projectsWithFullDependencies.add(projectDir);\n          } else {\n            project.retryStatus.installationErrors = [\n              ...(project.retryStatus.installationErrors ?? []),\n              installResult.error ?? 
'Unknown installation error',\n            ];\n            result.projectsWithFailedDependencyInstallation.push(projectDir);\n          }\n\n          if (installResult.warnings.length > 0) {\n            for (const warning of installResult.warnings) {\n              dependencyGraph.errors.warnings.push({\n                phase: 'retry_dependency_installation',\n                message: warning,\n                timestamp: new Date(),\n                context: projectDir,\n              });\n            }\n          }\n        } catch (error) {\n          const errorMessage = `Failed to install full dependencies for project ${projectDir}: ${String(error)}`;\n          cdsExtractorLog('error', errorMessage);\n\n          dependencyGraph.errors.critical.push({\n            phase: 'retry_dependency_installation',\n            message: errorMessage,\n            timestamp: new Date(),\n          });\n\n          result.projectsWithFailedDependencyInstallation.push(projectDir);\n        }\n      }\n\n      dependencyGraph.retryStatus.projectsRequiringFullDependencies.add(projectDir);\n    }\n\n    dependencyInstallationEndTime = Date.now();\n    result.dependencyInstallationDurationMs =\n      dependencyInstallationEndTime - dependencyInstallationStartTime;\n\n    // Phase 3: Execute retry compilation attempts.\n    cdsExtractorLog('info', 'Executing retry compilation attempts...');\n    retryCompilationStartTime = Date.now();\n\n    for (const [projectDir, failedTasks] of tasksRequiringRetry) {\n      const project = dependencyGraph.projects.get(projectDir);\n      if (!project) {\n        continue;\n      }\n\n      const retryExecutionResult = retryCompilationTasksForProject(\n        failedTasks,\n        project,\n        dependencyGraph,\n      );\n\n      result.projectsWithRetries.push(projectDir);\n      result.totalSuccessfulRetries += retryExecutionResult.successfulRetries;\n      result.totalFailedRetries += retryExecutionResult.failedRetries;\n\n      // 
Update project retry status.\n      if (project.retryStatus) {\n        project.retryStatus.tasksRetried = retryExecutionResult.retriedTasks.length;\n      }\n    }\n\n    retryCompilationEndTime = Date.now();\n    result.retryCompilationDurationMs = retryCompilationEndTime - retryCompilationStartTime;\n\n    // After retry compilation attempts complete, update status.\n    updateCdsDependencyGraphStatus(dependencyGraph, dependencyGraph.sourceRootDir);\n\n    // Phase 4: Update dependency graph with retry results.\n    updateDependencyGraphWithRetryResults(dependencyGraph, result);\n\n    // Phase 5: Add diagnostics for definitively failed tasks.\n    addCompilationDiagnosticsForFailedTasks(\n      dependencyGraph,\n      codeqlExePath,\n      dependencyGraph.sourceRootDir,\n    );\n\n    result.success = result.totalSuccessfulRetries > 0 || result.totalTasksRequiringRetry === 0;\n  } catch (error) {\n    const errorMessage = `Retry orchestration failed: ${String(error)}`;\n    cdsExtractorLog('error', errorMessage);\n\n    dependencyGraph.errors.critical.push({\n      phase: 'retry_orchestration',\n      message: errorMessage,\n      timestamp: new Date(),\n    });\n\n    result.success = false;\n  } finally {\n    result.retryDurationMs = Date.now() - startTime;\n  }\n\n  return result;\n}\n\n/**\n * Retry the provided {@link CompilationTask} using the task's configured retry command.\n * @param task The {@link CompilationTask} to be retried\n * @param retryCommand Validated CDS command to use for retry\n * @param projectDir Project directory to use as working directory\n * @param dependencyGraph The {@link CdsDependencyGraph} to be processed and updated\n * if retry succeeds.\n * @returns The result of the {@link CompilationAttempt}.\n */\nfunction retryCompilationTask(\n  task: CompilationTask,\n  retryCommand: ValidatedCdsCommand,\n  projectDir: string,\n  dependencyGraph: CdsDependencyGraph,\n): CompilationAttempt {\n  const startTime = new Date();\n  const 
attemptId = `${task.id}_retry_${startTime.getTime()}`;\n\n  // Use the original command string for consistency with existing compilation logic.\n  const cdsCommandString = retryCommand.originalCommand;\n\n  const attempt: CompilationAttempt = {\n    id: attemptId,\n    cdsCommand: cdsCommandString,\n    cacheDir: projectDir,\n    timestamp: startTime,\n    result: {\n      success: false,\n      timestamp: startTime,\n    },\n  };\n\n  try {\n    // Use the same compilation logic as the original attempt.\n    const primarySourceFile = task.sourceFiles[0];\n\n    const compilationResult = compileCdsToJson(\n      primarySourceFile,\n      dependencyGraph.sourceRootDir,\n      cdsCommandString,\n      projectDir,\n      // Convert CDS projects to BasicCdsProject format expected by compileCdsToJson\n      new Map(\n        Array.from(dependencyGraph.projects.entries()).map(([key, value]) => [\n          key,\n          {\n            cdsFiles: value.cdsFiles,\n            compilationTargets: value.compilationTargets,\n            expectedOutputFile: value.expectedOutputFile,\n            projectDir: value.projectDir,\n            dependencies: value.dependencies,\n            imports: value.imports,\n            packageJson: value.packageJson,\n          },\n        ]),\n      ),\n      task.projectDir,\n    );\n\n    attempt.result = {\n      ...compilationResult,\n      timestamp: startTime,\n    };\n  } catch (error) {\n    attempt.error = {\n      message: String(error),\n      stack: error instanceof Error ? 
error.stack : undefined,\n    };\n  }\n\n  return attempt;\n}\n\n/**\n * Executes retries for the provided array of {@link CompilationTask} instances.\n * @param tasksToRetry Tasks that need to be retried\n * @param project The {@link CdsProject} associated with the compilation tasks to retry\n * @param dependencyGraph The {@link CdsDependencyGraph} to update as tasks are retried\n * @returns The {@link ResultRetryCompilationTask}\n */\nfunction retryCompilationTasksForProject(\n  tasksToRetry: CompilationTask[],\n  project: CdsProject,\n  dependencyGraph: CdsDependencyGraph,\n): ResultRetryCompilationTask {\n  const startTime = Date.now();\n\n  const result: ResultRetryCompilationTask = {\n    projectDir: project.projectDir,\n    retriedTasks: [],\n    successfulRetries: 0,\n    failedRetries: 0,\n    fullDependenciesAvailable: Boolean(project.retryStatus?.fullDependenciesInstalled),\n    executionDurationMs: 0,\n    retryErrors: [],\n  };\n\n  cdsExtractorLog(\n    'info',\n    `Retrying ${tasksToRetry.length} task(s) for project ${project.projectDir} using ${result.fullDependenciesAvailable ? 
'full' : 'minimal'} dependencies`,\n  );\n\n  for (const task of tasksToRetry) {\n    try {\n      // Mark task as being retried\n      task.retryInfo = {\n        hasBeenRetried: true,\n        retryReason: 'Output validation failed',\n        fullDependenciesInstalled: result.fullDependenciesAvailable,\n        retryTimestamp: new Date(),\n      };\n\n      // Use the retry command configured for this task\n      const retryAttempt = retryCompilationTask(\n        task,\n        task.retryCommand,\n        project.projectDir,\n        dependencyGraph,\n      );\n\n      task.retryInfo.retryAttempt = retryAttempt;\n      task.attempts.push(retryAttempt);\n      result.retriedTasks.push(task);\n\n      if (retryAttempt.result.success) {\n        task.status = 'success';\n        result.successfulRetries++;\n        cdsExtractorLog('info', `Retry successful for task ${task.id}`);\n      } else {\n        task.status = 'failed';\n        task.errorSummary = retryAttempt.error?.message ?? 'Retry compilation failed';\n        result.failedRetries++;\n        result.retryErrors.push(task.errorSummary);\n        cdsExtractorLog('warn', `Retry failed for task ${task.id}: ${task.errorSummary}`);\n      }\n    } catch (error) {\n      const errorMessage = `Failed to retry task ${task.id}: ${String(error)}`;\n      result.retryErrors.push(errorMessage);\n      result.failedRetries++;\n      task.status = 'failed';\n      task.errorSummary = errorMessage;\n      cdsExtractorLog('error', errorMessage);\n    }\n  }\n\n  result.executionDurationMs = Date.now() - startTime;\n\n  cdsExtractorLog(\n    'info',\n    `Retry execution completed for project ${project.projectDir}: ${result.successfulRetries} successful, ${result.failedRetries} failed`,\n  );\n\n  return result;\n}\n\n/**\n * Updates dependency graph with retry results\n * @param dependencyGraph The dependency graph to update\n * @param retryResults The retry orchestration results\n */\nfunction 
updateDependencyGraphWithRetryResults(\n  dependencyGraph: CdsDependencyGraph,\n  retryResults: ResultRetryCompilationOrchestration,\n): void {\n  // Remove manual counter updates - let updateCdsDependencyGraphStatus handle this\n  // Keep only non-status updates like timing and project tracking\n  dependencyGraph.retryStatus.totalRetryAttempts =\n    retryResults.totalSuccessfulRetries + retryResults.totalFailedRetries;\n}\n", "import { determineCdsCommand, determineVersionAwareCdsCommands } from './command';\nimport { compileCdsToJson } from './compile';\nimport { orchestrateRetryAttempts } from './retry';\nimport {\n  CompilationAttempt,\n  CompilationTask,\n  CompilationConfig,\n  ValidatedCdsCommand,\n} from './types';\nimport { updateCdsDependencyGraphStatus } from './validator';\nimport { cdsExtractorLog, generateStatusReport } from '../../logging';\nimport { CdsDependencyGraph, CdsProject } from '../parser/types';\n\n/** Attempt compilation with a specific command and configuration. 
*/\nfunction attemptCompilation(\n  task: CompilationTask,\n  cdsCommand: string,\n  cacheDir: string | undefined,\n  dependencyGraph: CdsDependencyGraph,\n): CompilationAttempt {\n  const startTime = new Date();\n  const attemptId = `${task.id}_${startTime.getTime()}`;\n\n  const attempt: CompilationAttempt = {\n    id: attemptId,\n    cdsCommand,\n    cacheDir,\n    timestamp: startTime,\n    result: {\n      success: false,\n      timestamp: startTime,\n    },\n  };\n\n  try {\n    // For now, we'll use the first source file for compilation\n    // In a more sophisticated implementation, we might handle project-level compilation differently\n    const primarySourceFile = task.sourceFiles[0];\n\n    const compilationResult = compileCdsToJson(\n      primarySourceFile,\n      dependencyGraph.sourceRootDir,\n      cdsCommand,\n      cacheDir,\n      // Convert CDS projects to BasicCdsProject format expected by compileCdsToJson\n      new Map(\n        Array.from(dependencyGraph.projects.entries()).map(([key, value]) => [\n          key,\n          {\n            cdsFiles: value.cdsFiles,\n            compilationTargets: value.compilationTargets,\n            expectedOutputFile: value.expectedOutputFile,\n            projectDir: value.projectDir,\n            dependencies: value.dependencies,\n            imports: value.imports,\n            packageJson: value.packageJson,\n            compilationConfig: value.compilationConfig,\n          },\n        ]),\n      ),\n      task.projectDir,\n    );\n\n    const endTime = new Date();\n    attempt.result = {\n      ...compilationResult,\n      timestamp: endTime,\n      durationMs: endTime.getTime() - startTime.getTime(),\n      commandUsed: cdsCommand,\n      cacheDir,\n    };\n\n    if (compilationResult.success && compilationResult.outputPath) {\n      dependencyGraph.statusSummary.jsonFilesGenerated++;\n    }\n  } catch (error) {\n    const endTime = new Date();\n    attempt.error = {\n      message: 
String(error),\n      stack: error instanceof Error ? error.stack : undefined,\n    };\n    attempt.result.timestamp = endTime;\n    attempt.result.durationMs = endTime.getTime() - startTime.getTime();\n  }\n\n  task.attempts.push(attempt);\n  return attempt;\n}\n\n/**\n * Create a compilation task for a project or individual file\n */\nfunction createCompilationTask(\n  type: 'file' | 'project',\n  sourceFiles: string[],\n  expectedOutputFile: string,\n  projectDir: string,\n): CompilationTask {\n  // Create default commands for tasks - these should be updated later with proper commands\n  const defaultPrimaryCommand: ValidatedCdsCommand = {\n    executable: 'cds',\n    args: [],\n    originalCommand: 'cds',\n  };\n\n  const defaultRetryCommand: ValidatedCdsCommand = {\n    executable: 'npx',\n    args: ['cds'],\n    originalCommand: 'npx cds',\n  };\n\n  return {\n    id: `${type}_${projectDir}_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`,\n    type,\n    status: 'pending',\n    sourceFiles,\n    expectedOutputFile,\n    projectDir,\n    attempts: [],\n    dependencies: [],\n    primaryCommand: defaultPrimaryCommand,\n    retryCommand: defaultRetryCommand,\n  };\n}\n\nfunction createCompilationConfig(\n  cdsCommand: string,\n  cacheDir: string | undefined,\n): CompilationConfig {\n  return {\n    cdsCommand: cdsCommand,\n    cacheDir: cacheDir,\n    versionCompatibility: {\n      isCompatible: true, // Will be validated during planning\n    },\n    maxRetryAttempts: 3,\n  };\n}\n\n/**\n * Execute a single compilation task\n */\nfunction executeCompilationTask(\n  task: CompilationTask,\n  project: CdsProject,\n  dependencyGraph: CdsDependencyGraph,\n  _codeqlExePath: string,\n): void {\n  task.status = 'in_progress';\n\n  const config = project.enhancedCompilationConfig;\n  if (!config) {\n    throw new Error(`No compilation configuration found for project ${project.projectDir}`);\n  }\n\n  const compilationAttempt = attemptCompilation(\n    task,\n  
  config.cdsCommand,\n    config.cacheDir,\n    dependencyGraph,\n  );\n\n  if (compilationAttempt.result.success) {\n    task.status = 'success';\n    return;\n  }\n\n  // Compilation failed - mark task as failed\n  const lastError = compilationAttempt.error\n    ? new Error(compilationAttempt.error.message)\n    : new Error('Compilation failed');\n\n  task.status = 'failed';\n  task.errorSummary = lastError?.message || 'Compilation failed';\n\n  // Note: Diagnostics are deferred until after retry phase completes\n  // to implement \"Silent Success\" - only add diagnostics for definitively failed tasks\n\n  cdsExtractorLog('error', `Compilation failed for task ${task.id}: ${task.errorSummary}`);\n}\n\n/**\n * Executes all compilation tasks for the provided {@link CdsDependencyGraph}.\n * Uses the provided `codeqlExePath` to run the CodeQL CLI, as needed, for\n * generating diagnositic warnings and/or errors for problems encountered while\n * running the CodeQL CDS extractor.\n */\nfunction executeCompilationTasks(dependencyGraph: CdsDependencyGraph, codeqlExePath: string): void {\n  cdsExtractorLog('info', 'Starting compilation execution for all projects...');\n\n  dependencyGraph.currentPhase = 'compiling';\n  const compilationStartTime = new Date();\n\n  // Collect all compilation tasks from all projects.\n  const allTasks: Array<{ task: CompilationTask; project: CdsProject }> = [];\n\n  for (const project of dependencyGraph.projects.values()) {\n    for (const task of project.compilationTasks) {\n      allTasks.push({ task, project });\n    }\n  }\n\n  // Execute compilation tasks sequentially. 
There is room for optimization in the future.\n  // For now, we keep it simple to ensure consistent debug information collection.\n  cdsExtractorLog('info', `Executing ${allTasks.length} compilation task(s)...`);\n  for (const { task, project } of allTasks) {\n    try {\n      executeCompilationTask(task, project, dependencyGraph, codeqlExePath);\n    } catch (error) {\n      const errorMessage = `Failed to execute compilation task ${task.id}: ${String(error)}`;\n      cdsExtractorLog('error', errorMessage);\n\n      dependencyGraph.errors.critical.push({\n        phase: 'compiling',\n        message: errorMessage,\n        timestamp: new Date(),\n        stack: error instanceof Error ? error.stack : undefined,\n      });\n\n      task.status = 'failed';\n      task.errorSummary = errorMessage;\n      dependencyGraph.statusSummary.failedCompilations++;\n    }\n  }\n\n  // Update project statuses\n  for (const project of dependencyGraph.projects.values()) {\n    const allTasksCompleted = project.compilationTasks.every(\n      task => task.status === 'success' || task.status === 'failed',\n    );\n\n    if (allTasksCompleted) {\n      const hasFailedTasks = project.compilationTasks.some(task => task.status === 'failed');\n      project.status = hasFailedTasks ? 'failed' : 'completed';\n      project.timestamps.compilationCompleted = new Date();\n    }\n  }\n\n  const compilationEndTime = new Date();\n  dependencyGraph.statusSummary.performance.compilationDurationMs =\n    compilationEndTime.getTime() - compilationStartTime.getTime();\n\n  cdsExtractorLog(\n    'info',\n    `Compilation execution completed. 
Success: ${dependencyGraph.statusSummary.successfulCompilations}, Failed: ${dependencyGraph.statusSummary.failedCompilations}`,\n  );\n}\n\n/**\n * Orchestrates the compilation process for CDS files based on a dependency graph.\n *\n * This function coordinates the planning and execution of compilation tasks,\n * tracks the compilation status, and generates a post-compilation report.\n *\n * @param dependencyGraph - The {@link CdsDependencyGraph} representing the CDS projects,\n * project dependencies, expected compilation tasks, and their statuses.\n * @param projectCacheDirMap - A map from project identifiers to their cache directory paths.\n * @param codeqlExePath - The path to the CodeQL executable. Used for generating diagnostic\n * messages as part of the broader CodeQL (JavaScript) extraction process.\n * @throws Will rethrow any errors encountered during compilation, after logging them.\n */\nexport function orchestrateCompilation(\n  dependencyGraph: CdsDependencyGraph,\n  projectCacheDirMap: Map,\n  codeqlExePath: string,\n): void {\n  try {\n    // Phase 1: Initial compilation\n    planCompilationTasks(dependencyGraph, projectCacheDirMap);\n    executeCompilationTasks(dependencyGraph, codeqlExePath);\n\n    // CENTRALIZED STATUS UPDATE: Establish post-initial-compilation state\n    updateCdsDependencyGraphStatus(dependencyGraph, dependencyGraph.sourceRootDir);\n\n    // Phase 2: Retry orchestration\n    cdsExtractorLog('info', 'Starting retry orchestration phase...');\n    const retryResults = orchestrateRetryAttempts(dependencyGraph, codeqlExePath);\n\n    // CENTRALIZED STATUS UPDATE: Final validation and status synchronization\n    updateCdsDependencyGraphStatus(dependencyGraph, dependencyGraph.sourceRootDir);\n\n    // Log retry results\n    if (retryResults.totalTasksRequiringRetry > 0) {\n      cdsExtractorLog(\n        'info',\n        `Retry phase completed: ${retryResults.totalTasksRequiringRetry} tasks retried, 
${retryResults.totalSuccessfulRetries} successful, ${retryResults.totalFailedRetries} failed`,\n      );\n    } else {\n      cdsExtractorLog('info', 'Retry phase completed: no tasks required retry');\n    }\n\n    // Phase 3: Final status update\n    const hasFailures =\n      dependencyGraph.statusSummary.failedCompilations > 0 ||\n      dependencyGraph.errors.critical.length > 0;\n\n    dependencyGraph.statusSummary.overallSuccess = !hasFailures;\n    dependencyGraph.currentPhase = hasFailures ? 'failed' : 'completed';\n\n    // Phase 3: Status reporting (now guaranteed to be accurate)\n    const statusReport = generateStatusReport(dependencyGraph);\n    cdsExtractorLog('info', 'CDS Extractor Status Report : Post-Compilation...\\n' + statusReport);\n  } catch (error) {\n    const errorMessage = `Compilation orchestration failed: ${String(error)}`;\n    cdsExtractorLog('error', errorMessage);\n\n    dependencyGraph.errors.critical.push({\n      phase: 'compiling',\n      message: errorMessage,\n      timestamp: new Date(),\n      stack: error instanceof Error ? error.stack : undefined,\n    });\n\n    dependencyGraph.currentPhase = 'failed';\n    dependencyGraph.statusSummary.overallSuccess = false;\n\n    throw error;\n  }\n}\n\n/** Plan compilation tasks for all projects in the dependency graph. 
*/\nfunction planCompilationTasks(\n  dependencyGraph: CdsDependencyGraph,\n  projectCacheDirMap: Map,\n): void {\n  cdsExtractorLog('info', 'Planning compilation tasks for all projects...');\n\n  dependencyGraph.currentPhase = 'compilation_planning';\n\n  for (const [projectDir, project] of dependencyGraph.projects.entries()) {\n    try {\n      const cacheDir = projectCacheDirMap.get(projectDir);\n\n      // Determine version-aware CDS commands for both primary and retry scenarios\n      const commands = determineVersionAwareCdsCommands(\n        cacheDir,\n        dependencyGraph.sourceRootDir,\n        projectDir,\n        dependencyGraph,\n      );\n\n      // Keep backward compatibility - determine command string for compilation config\n      const cdsCommand = determineCdsCommand(cacheDir, dependencyGraph.sourceRootDir);\n\n      // Create compilation configuration (always project-level now)\n      const compilationConfig = createCompilationConfig(cdsCommand, cacheDir);\n\n      project.enhancedCompilationConfig = compilationConfig;\n\n      // Create compilation task (always project-level now)\n      const task = createCompilationTask(\n        'project',\n        project.cdsFiles,\n        project.expectedOutputFile,\n        projectDir,\n      );\n\n      // Update task with version-aware commands\n      task.primaryCommand = commands.primaryCommand;\n      task.retryCommand = commands.retryCommand;\n\n      project.compilationTasks = [task];\n\n      project.status = 'compilation_planned';\n      project.timestamps.compilationStarted = new Date();\n\n      cdsExtractorLog(\n        'info',\n        `Planned ${project.compilationTasks.length} compilation task(s) for project ${projectDir}`,\n      );\n    } catch (error) {\n      const errorMessage = `Failed to plan compilation for project ${projectDir}: ${String(error)}`;\n      cdsExtractorLog('error', errorMessage);\n\n      dependencyGraph.errors.critical.push({\n        phase: 
'compilation_planning',\n        message: errorMessage,\n        timestamp: new Date(),\n        stack: error instanceof Error ? error.stack : undefined,\n      });\n\n      project.status = 'failed';\n    }\n  }\n\n  const totalTasks = Array.from(dependencyGraph.projects.values()).reduce(\n    (sum, project) => sum + project.compilationTasks.length,\n    0,\n  );\n\n  dependencyGraph.statusSummary.totalCompilationTasks = totalTasks;\n\n  cdsExtractorLog('info', `Compilation planning completed. Total tasks: ${totalTasks}`);\n}\n", "import { relative } from 'path';\n\n/**\n * Helper functions for mapping CDS files to their projects and cache directories\n */\n\n/**\n * Find the project directory for a CDS file\n * @param cdsFilePath Path to the CDS file\n * @param sourceRoot Source root directory\n * @param projectMap Map of project directories to project objects\n * @returns The project directory the file belongs to, or undefined if not found\n */\nexport function findProjectForCdsFile(\n  cdsFilePath: string,\n  sourceRoot: string,\n  projectMap: Map,\n): string | undefined {\n  // Get the relative path to the project directory for this CDS file\n  const relativeCdsFilePath = relative(sourceRoot, cdsFilePath);\n\n  // If the file is outside the source root, path.relative() will start with '../'\n  // In this case, we should also check against the absolute path\n  const isOutsideSourceRoot = relativeCdsFilePath.startsWith('../');\n\n  // Find the project this file belongs to\n  for (const [projectDir, project] of projectMap.entries()) {\n    if (\n      project.cdsFiles.some(\n        cdsFile =>\n          cdsFile === relativeCdsFilePath ||\n          relativeCdsFilePath.startsWith(projectDir) ||\n          (isOutsideSourceRoot && cdsFile === cdsFilePath),\n      )\n    ) {\n      return projectDir;\n    }\n  }\n\n  return undefined;\n}\n", "/** CDS indexer support for SAP CAP projects that use @sap/cds-indexer. 
*/\n\nimport { spawnSync } from 'child_process';\nimport { delimiter, join } from 'path';\n\nimport { addCdsIndexerDiagnostic } from '../diagnostics';\nimport { cdsExtractorLog } from '../logging';\nimport type { CdsDependencyGraph, CdsProject } from './parser/types';\n\n/** Maximum time (ms) allowed for a single cds-indexer invocation. */\nconst CDS_INDEXER_TIMEOUT_MS = 600_000;\n\n/** The npm package name for cds-indexer. */\nconst CDS_INDEXER_PACKAGE = '@sap/cds-indexer';\n\n/**\n * Result of running @sap/cds-indexer for a single project.\n */\nexport interface CdsIndexerResult {\n  /** Whether the cds-indexer ran successfully. */\n  success: boolean;\n  /** The project directory (relative to source root). */\n  projectDir: string;\n  /** Error message if the run failed. */\n  error?: string;\n  /** Duration of the run in milliseconds. */\n  durationMs: number;\n  /** Whether the run timed out. */\n  timedOut: boolean;\n}\n\n/**\n * Summary of running cds-indexer across all applicable projects.\n */\nexport interface CdsIndexerSummary {\n  /** Total number of projects in the dependency graph. */\n  totalProjects: number;\n  /** Number of projects that required cds-indexer. */\n  projectsRequiringIndexer: number;\n  /** Number of successful cds-indexer runs. */\n  successfulRuns: number;\n  /** Number of failed cds-indexer runs. */\n  failedRuns: number;\n  /** Per-project results (only for projects that required cds-indexer). 
*/\n  results: CdsIndexerResult[];\n}\n\n/**\n * Determines whether a {@link CdsProject} uses `@sap/cds-indexer` by checking\n * the project's `package.json` for the package in `dependencies` or `devDependencies`.\n *\n * @param project The CDS project to check.\n * @returns `true` if the project declares `@sap/cds-indexer` as a dependency.\n */\nexport function projectUsesCdsIndexer(project: CdsProject): boolean {\n  if (!project.packageJson) {\n    return false;\n  }\n\n  const inDeps = project.packageJson.dependencies?.[CDS_INDEXER_PACKAGE] !== undefined;\n  const inDevDeps = project.packageJson.devDependencies?.[CDS_INDEXER_PACKAGE] !== undefined;\n\n  return inDeps || inDevDeps;\n}\n\n/**\n * Runs `npx @sap/cds-indexer` for a given CDS project directory.\n *\n * The function spawns `npx @sap/cds-indexer` with appropriate environment\n * variables to ensure the indexer can locate dependencies from the cache\n * directory (if provided) or the project's own `node_modules`.\n *\n * @param project The CDS project to run the indexer for.\n * @param sourceRoot The source root directory.\n * @param cacheDir Optional cache directory containing installed dependencies.\n * @returns A {@link CdsIndexerResult} with the outcome of the run.\n */\nexport function runCdsIndexer(\n  project: CdsProject,\n  sourceRoot: string,\n  cacheDir?: string,\n): CdsIndexerResult {\n  const projectAbsPath = join(sourceRoot, project.projectDir);\n  const startTime = Date.now();\n\n  const result: CdsIndexerResult = {\n    success: false,\n    projectDir: project.projectDir,\n    durationMs: 0,\n    timedOut: false,\n  };\n\n  try {\n    // Build NODE_PATH to include cache and project node_modules so npx can\n    // resolve the package even when it was installed in the cache directory.\n    const nodePaths: string[] = [];\n    if (cacheDir) {\n      nodePaths.push(join(cacheDir, 'node_modules'));\n    }\n    nodePaths.push(join(projectAbsPath, 'node_modules'));\n\n    const env: Record = {\n 
     ...process.env,\n      NODE_PATH: nodePaths.join(delimiter),\n    };\n\n    cdsExtractorLog(\n      'info',\n      `Running ${CDS_INDEXER_PACKAGE} for project '${project.projectDir}'...`,\n    );\n\n    const spawnResult = spawnSync('npx', ['--yes', CDS_INDEXER_PACKAGE], {\n      cwd: projectAbsPath,\n      env,\n      stdio: 'pipe',\n      timeout: CDS_INDEXER_TIMEOUT_MS,\n    });\n\n    result.durationMs = Date.now() - startTime;\n\n    // Check for timeout (signal-based termination)\n    if (spawnResult.signal === 'SIGTERM' || spawnResult.signal === 'SIGKILL') {\n      result.timedOut = true;\n      result.error = `${CDS_INDEXER_PACKAGE} timed out after ${CDS_INDEXER_TIMEOUT_MS}ms for project '${project.projectDir}'`;\n      cdsExtractorLog('warn', result.error);\n      return result;\n    }\n\n    // Check for spawn error (e.g. npx not found)\n    if (spawnResult.error) {\n      result.error = `${CDS_INDEXER_PACKAGE} failed to start for project '${project.projectDir}': ${String(spawnResult.error)}`;\n      cdsExtractorLog('warn', result.error);\n      return result;\n    }\n\n    // Check exit code\n    if (spawnResult.status !== 0) {\n      const stderr = spawnResult.stderr?.toString().trim() ?? '';\n      const stdout = spawnResult.stdout?.toString().trim() ?? 
'';\n      const output = stderr || stdout || 'unknown error';\n      result.error = `${CDS_INDEXER_PACKAGE} failed for project '${project.projectDir}' (exit code ${spawnResult.status}): ${output}`;\n      cdsExtractorLog('warn', result.error);\n      return result;\n    }\n\n    // Success\n    result.success = true;\n    cdsExtractorLog(\n      'info',\n      `Successfully ran ${CDS_INDEXER_PACKAGE} for project '${project.projectDir}' (${result.durationMs}ms)`,\n    );\n  } catch (error) {\n    result.durationMs = Date.now() - startTime;\n    result.error = `${CDS_INDEXER_PACKAGE} threw an unexpected error for project '${project.projectDir}': ${String(error)}`;\n    cdsExtractorLog('error', result.error);\n  }\n\n  return result;\n}\n\n/**\n * Orchestrates running `@sap/cds-indexer` for all applicable projects in the\n * dependency graph. This should be called after dependency installation and\n * before CDS compilation.\n *\n * Projects that do not declare `@sap/cds-indexer` in their `package.json` are\n * skipped. 
Failures are handled gracefully: a diagnostic warning is added to\n * the CodeQL database and processing continues with the remaining projects.\n *\n * @param dependencyGraph The CDS dependency graph.\n * @param sourceRoot The source root directory.\n * @param projectCacheDirMap Map of project directories to their cache directories.\n * @param codeqlExePath Optional path to the CodeQL executable for diagnostics.\n * @returns A {@link CdsIndexerSummary} with the outcome of all runs.\n */\nexport function orchestrateCdsIndexer(\n  dependencyGraph: CdsDependencyGraph,\n  sourceRoot: string,\n  projectCacheDirMap: Map,\n  codeqlExePath?: string,\n): CdsIndexerSummary {\n  const summary: CdsIndexerSummary = {\n    totalProjects: dependencyGraph.projects.size,\n    projectsRequiringIndexer: 0,\n    successfulRuns: 0,\n    failedRuns: 0,\n    results: [],\n  };\n\n  for (const [projectDir, project] of dependencyGraph.projects.entries()) {\n    if (!projectUsesCdsIndexer(project)) {\n      continue;\n    }\n\n    summary.projectsRequiringIndexer++;\n    const cacheDir = projectCacheDirMap.get(projectDir);\n    const result = runCdsIndexer(project, sourceRoot, cacheDir);\n    summary.results.push(result);\n\n    if (result.success) {\n      summary.successfulRuns++;\n    } else {\n      summary.failedRuns++;\n\n      // Add a diagnostic warning \u2014 the compilation may still succeed without\n      // the indexer, so we use a warning rather than an error.\n      if (codeqlExePath) {\n        addCdsIndexerDiagnostic(\n          projectDir,\n          result.error ?? 
`${CDS_INDEXER_PACKAGE} failed for project '${projectDir}'`,\n          codeqlExePath,\n          sourceRoot,\n        );\n      }\n    }\n  }\n\n  // Log summary\n  if (summary.projectsRequiringIndexer > 0) {\n    cdsExtractorLog(\n      'info',\n      `CDS indexer summary: ${summary.projectsRequiringIndexer} project(s) required indexer, ${summary.successfulRuns} succeeded, ${summary.failedRuns} failed`,\n    );\n  } else {\n    cdsExtractorLog('info', 'No projects require @sap/cds-indexer.');\n  }\n\n  return summary;\n}\n", "import { dirname, join, resolve, sep, basename } from 'path';\n\nimport {\n  determineCdsFilesForProjectDir,\n  determineCdsFilesToCompile,\n  determineCdsProjectsUnderSourceDir,\n  extractCdsImports,\n  readPackageJsonFile,\n} from './functions';\nimport { CdsDependencyGraph, CdsImport, CdsProject, BasicCdsProject } from './types';\nimport { modelCdsJsonFile } from '../../constants';\nimport { cdsExtractorLog } from '../../logging';\nimport { getPathsIgnorePatterns, shouldIgnorePath } from '../../paths-ignore';\n\n/**\n * Builds a basic dependency graph of CDS projects and performs the initial parsing stage of the CDS extractor.\n * This is the internal function that creates basic project structures.\n *\n * @param sourceRootDir - Source root directory\n * @returns Map of project directories to their BasicCdsProject objects with dependency information\n */\nfunction buildBasicCdsProjectDependencyGraph(sourceRootDir: string): Map {\n  // Find all CDS projects under the source directory\n  cdsExtractorLog('info', 'Detecting CDS projects...');\n  const projectDirs = determineCdsProjectsUnderSourceDir(sourceRootDir);\n\n  if (projectDirs.length === 0) {\n    cdsExtractorLog('info', 'No CDS projects found.');\n    return new Map();\n  }\n\n  cdsExtractorLog('info', `Found ${projectDirs.length} CDS project(s) under source directory.`);\n\n  // Load paths-ignore patterns once for the entire source root\n  const pathsIgnorePatterns = 
getPathsIgnorePatterns(sourceRootDir);\n\n  const projectMap = new Map();\n\n  // First pass: create CdsProject objects for each project directory\n  for (const projectDir of projectDirs) {\n    // Skip projects whose directory matches a paths-ignore pattern\n    if (pathsIgnorePatterns.length > 0 && shouldIgnorePath(projectDir, pathsIgnorePatterns)) {\n      cdsExtractorLog('info', `Skipping project '${projectDir}' \u2014 matches paths-ignore pattern`);\n      continue;\n    }\n\n    const absoluteProjectDir = join(sourceRootDir, projectDir);\n    const cdsFiles = determineCdsFilesForProjectDir(sourceRootDir, absoluteProjectDir);\n\n    // Skip projects with no CDS files remaining after paths-ignore filtering\n    if (cdsFiles.length === 0) {\n      cdsExtractorLog(\n        'info',\n        `Skipping project '${projectDir}' \u2014 no CDS files remain after paths-ignore filtering`,\n      );\n      continue;\n    }\n\n    // Try to load package.json if it exists\n    const packageJsonPath = join(absoluteProjectDir, 'package.json');\n    const packageJson = readPackageJsonFile(packageJsonPath);\n\n    projectMap.set(projectDir, {\n      projectDir,\n      cdsFiles,\n      compilationTargets: [], // Will be populated in the third pass\n      expectedOutputFile: join(projectDir, modelCdsJsonFile),\n      packageJson,\n      dependencies: [],\n      imports: new Map(),\n    });\n  }\n\n  // Second pass: analyze dependencies between projects\n  cdsExtractorLog('info', 'Analyzing dependencies between CDS projects...');\n  for (const [projectDir, project] of projectMap.entries()) {\n    // Check each CDS file for imports\n    for (const relativeFilePath of project.cdsFiles) {\n      const absoluteFilePath = join(sourceRootDir, relativeFilePath);\n\n      try {\n        const imports = extractCdsImports(absoluteFilePath);\n        const enrichedImports: CdsImport[] = [];\n\n        // Process each import\n        for (const importInfo of imports) {\n          const 
enrichedImport: CdsImport = { ...importInfo };\n\n          if (importInfo.isRelative) {\n            // Resolve the relative import path\n            const importedFilePath = resolve(dirname(absoluteFilePath), importInfo.path);\n            const normalizedImportedPath = importedFilePath.endsWith('.cds')\n              ? importedFilePath\n              : `${importedFilePath}.cds`;\n\n            // Store the resolved path relative to source root\n            try {\n              const relativeToDirPath = dirname(relativeFilePath);\n              const resolvedPath = resolve(join(sourceRootDir, relativeToDirPath), importInfo.path);\n              const normalizedResolvedPath = resolvedPath.endsWith('.cds')\n                ? resolvedPath\n                : `${resolvedPath}.cds`;\n\n              // Convert to relative path from source root\n              if (normalizedResolvedPath.startsWith(sourceRootDir)) {\n                enrichedImport.resolvedPath = normalizedResolvedPath\n                  .substring(sourceRootDir.length)\n                  .replace(/^[/\\\\]/, '');\n              }\n            } catch (error) {\n              cdsExtractorLog(\n                'warn',\n                `Could not resolve import path for ${importInfo.path} in ${relativeFilePath}: ${String(error)}`,\n              );\n            }\n\n            // Find which project contains this imported file\n            for (const [otherProjectDir, otherProject] of projectMap.entries()) {\n              if (otherProjectDir === projectDir) continue; // Skip self\n\n              const otherProjectAbsoluteDir = join(sourceRootDir, otherProjectDir);\n\n              // Check if the imported file is in the other project\n              const isInOtherProject = otherProject.cdsFiles.some(otherFile => {\n                const otherAbsolutePath = join(sourceRootDir, otherFile);\n                return (\n                  otherAbsolutePath === normalizedImportedPath ||\n                  
normalizedImportedPath.startsWith(otherProjectAbsoluteDir + sep)\n                );\n              });\n\n              if (isInOtherProject) {\n                // Add dependency if not already present\n                project.dependencies ??= [];\n\n                if (!project.dependencies.includes(otherProject)) {\n                  project.dependencies.push(otherProject);\n                }\n              }\n            }\n          }\n          // For module imports, check package.json dependencies\n          else if (importInfo.isModule && project.packageJson) {\n            const dependencies = {\n              ...(project.packageJson.dependencies ?? {}),\n              ...(project.packageJson.devDependencies ?? {}),\n            };\n\n            // Extract module name from import path (e.g., '@sap/cds/common' -> '@sap/cds')\n            const moduleName = importInfo.path.split('/')[0].startsWith('@')\n              ? importInfo.path.split('/').slice(0, 2).join('/')\n              : importInfo.path.split('/')[0];\n\n            if (dependencies[moduleName]) {\n              // This is a valid module dependency, nothing more to do here\n              // In the future, we could track module dependencies separately\n            }\n          }\n\n          enrichedImports.push(enrichedImport);\n        }\n\n        // Store the enriched imports in the project\n        project.imports?.set(relativeFilePath, enrichedImports);\n      } catch (error: unknown) {\n        cdsExtractorLog(\n          'warn',\n          `Error processing imports in ${absoluteFilePath}: ${String(error)}`,\n        );\n      }\n    }\n  }\n\n  // Third pass: determine CDS files to compile and expected output files for each project\n  cdsExtractorLog(\n    'info',\n    'Determining CDS files to compile and expected output files for each project...',\n  );\n  for (const [, project] of projectMap.entries()) {\n    try {\n      const projectPlan = determineCdsFilesToCompile(sourceRootDir, 
project);\n\n      // Assign the calculated values back to the project\n      project.compilationTargets = projectPlan.compilationTargets;\n      project.expectedOutputFile = projectPlan.expectedOutputFile;\n    } catch (error) {\n      cdsExtractorLog(\n        'warn',\n        `Error determining files to compile for project ${project.projectDir}: ${String(error)}`,\n      );\n      // Fall back to default project compilation on error\n      project.compilationTargets = project.cdsFiles.map(file => basename(file));\n      project.expectedOutputFile = join(project.projectDir, modelCdsJsonFile);\n    }\n  }\n\n  return projectMap;\n}\n\n/**\n * Builds a CDS dependency graph with comprehensive tracking and debug information.\n * This is the main function that returns a CdsDependencyGraph instead of a simple Map.\n * The extractor now runs in autobuild mode by default.\n *\n * @param sourceRootDir - Source root directory\n * @returns CDS dependency graph with comprehensive tracking\n */\nexport function buildCdsProjectDependencyGraph(sourceRootDir: string): CdsDependencyGraph {\n  const startTime = new Date();\n\n  // Create the initial dependency graph structure\n  const dependencyGraph: CdsDependencyGraph = {\n    id: `cds_graph_${Date.now()}`,\n    sourceRootDir,\n    projects: new Map(),\n    debugInfo: {\n      extractor: {\n        runMode: 'autobuild',\n        sourceRootDir,\n        startTime,\n        environment: {\n          nodeVersion: process.version,\n          platform: process.platform,\n          cwd: process.cwd(),\n          argv: process.argv,\n        },\n      },\n      parser: {\n        projectsDetected: 0,\n        cdsFilesFound: 0,\n        dependencyResolutionSuccess: true,\n        parsingErrors: [],\n        parsingWarnings: [],\n      },\n      compiler: {\n        availableCommands: [],\n        selectedCommand: '',\n        cacheDirectories: [],\n        cacheInitialized: false,\n      },\n    },\n    currentPhase: 'parsing',\n    
statusSummary: {\n      overallSuccess: false,\n      totalProjects: 0,\n      totalCdsFiles: 0,\n      totalCompilationTasks: 0,\n      successfulCompilations: 0,\n      failedCompilations: 0,\n      skippedCompilations: 0,\n      jsonFilesGenerated: 0,\n      criticalErrors: [],\n      warnings: [],\n      performance: {\n        totalDurationMs: 0,\n        parsingDurationMs: 0,\n        compilationDurationMs: 0,\n        extractionDurationMs: 0,\n      },\n    },\n    config: {\n      maxRetryAttempts: 3,\n      enableDetailedLogging: false, // Debug modes removed\n      generateDebugOutput: false, // Debug modes removed\n      compilationTimeoutMs: 30000, // 30 seconds\n    },\n    errors: {\n      critical: [],\n      warnings: [],\n    },\n    retryStatus: {\n      totalTasksRequiringRetry: 0,\n      totalTasksSuccessfullyRetried: 0,\n      totalRetryAttempts: 0,\n      projectsRequiringFullDependencies: new Set(),\n      projectsWithFullDependencies: new Set(),\n    },\n  };\n\n  try {\n    // Use the existing function to build the basic project map\n    const basicProjectMap = buildBasicCdsProjectDependencyGraph(sourceRootDir);\n\n    // Convert basic projects to CDS projects\n    for (const [projectDir, basicProject] of basicProjectMap.entries()) {\n      const cdsProject: CdsProject = {\n        ...basicProject,\n        id: `project_${projectDir.replace(/[^a-zA-Z0-9]/g, '_')}_${Date.now()}`,\n        enhancedCompilationConfig: undefined, // Will be set during compilation planning\n        compilationTasks: [],\n        parserDebugInfo: {\n          dependenciesResolved: [],\n          importErrors: [],\n          parseErrors: new Map(),\n        },\n        status: 'discovered',\n        timestamps: {\n          discovered: new Date(),\n        },\n      };\n\n      dependencyGraph.projects.set(projectDir, cdsProject);\n    }\n\n    // Update summary statistics\n    dependencyGraph.statusSummary.totalProjects = dependencyGraph.projects.size;\n    
dependencyGraph.statusSummary.totalCdsFiles = Array.from(\n      dependencyGraph.projects.values(),\n    ).reduce((sum, project) => sum + project.cdsFiles.length, 0);\n\n    dependencyGraph.debugInfo.parser.projectsDetected = dependencyGraph.projects.size;\n    dependencyGraph.debugInfo.parser.cdsFilesFound = dependencyGraph.statusSummary.totalCdsFiles;\n\n    // Mark dependency resolution phase as completed\n    dependencyGraph.currentPhase = 'dependency_resolution';\n\n    const endTime = new Date();\n    dependencyGraph.debugInfo.extractor.endTime = endTime;\n    dependencyGraph.debugInfo.extractor.durationMs = endTime.getTime() - startTime.getTime();\n    dependencyGraph.statusSummary.performance.parsingDurationMs =\n      dependencyGraph.debugInfo.extractor.durationMs;\n\n    cdsExtractorLog(\n      'info',\n      `CDS dependency graph created with ${dependencyGraph.projects.size} projects and ${dependencyGraph.statusSummary.totalCdsFiles} CDS files`,\n    );\n\n    return dependencyGraph;\n  } catch (error) {\n    const errorMessage = `Failed to build CDS dependency graph: ${String(error)}`;\n    cdsExtractorLog('error', errorMessage);\n\n    dependencyGraph.errors.critical.push({\n      phase: 'parsing',\n      message: errorMessage,\n      timestamp: new Date(),\n      stack: error instanceof Error ? 
error.stack : undefined,\n    });\n\n    dependencyGraph.currentPhase = 'failed';\n    return dependencyGraph;\n  }\n}\n", "import { existsSync, readFileSync, statSync } from 'fs';\nimport { basename, dirname, join, relative, sep } from 'path';\n\nimport { sync } from 'glob';\n\nimport { CdsFilesToCompile, CdsImport, PackageJson } from './types';\nimport { modelCdsJsonFile } from '../../constants';\nimport { cdsExtractorLog } from '../../logging';\nimport { filterIgnoredPaths, getPathsIgnorePatterns } from '../../paths-ignore';\n\n/**\n * Determines the list of CDS files to be parsed for the specified project directory.\n *\n * @param sourceRootDir - The source root directory to search for CDS files. This is\n * used to resolve relative paths in relation to a common (source root) directory for\n * multiple projects.\n * @param projectDir - The full, local filesystem path of the directory that contains\n * the individual `.cds` definition files for some `CAP` project.\n * @returns An array of strings representing the paths, relative to the source root\n * directory, of the `.cds` files to be parsed for a given project.\n */\nexport function determineCdsFilesForProjectDir(\n  sourceRootDir: string,\n  projectDir: string,\n): string[] {\n  if (!sourceRootDir || !projectDir) {\n    throw new Error(\n      `Unable to determine CDS files for project dir '${projectDir}'; both sourceRootDir and projectDir must be provided.`,\n    );\n  }\n\n  // Normalize paths by removing trailing slashes for comparison\n  const normalizedSourceRoot = sourceRootDir.replace(/[/\\\\]+$/, '');\n  const normalizedProjectDir = projectDir.replace(/[/\\\\]+$/, '');\n\n  if (\n    !normalizedProjectDir.startsWith(normalizedSourceRoot) &&\n    normalizedProjectDir !== normalizedSourceRoot\n  ) {\n    throw new Error(\n      'projectDir must be a subdirectory of sourceRootDir or equal to sourceRootDir.',\n    );\n  }\n\n  try {\n    // Use glob to find all .cds files under the project directory, 
excluding node_modules\n    const cdsFiles = sync(join(projectDir, '**/*.cds'), {\n      nodir: true,\n      ignore: ['**/node_modules/**', '**/*.testproj/**'],\n    });\n\n    // Convert absolute paths to paths relative to sourceRootDir\n    const relativePaths = cdsFiles.map(file => relative(sourceRootDir, file));\n\n    // Apply paths-ignore filtering from CodeQL config\n    const pathsIgnorePatterns = getPathsIgnorePatterns(sourceRootDir);\n    if (pathsIgnorePatterns.length > 0) {\n      const filtered = filterIgnoredPaths(relativePaths, pathsIgnorePatterns);\n      const ignoredCount = relativePaths.length - filtered.length;\n      if (ignoredCount > 0) {\n        cdsExtractorLog(\n          'info',\n          `Filtered ${ignoredCount} CDS file(s) matching paths-ignore patterns in project ${relative(sourceRootDir, projectDir) || '.'}`,\n        );\n      }\n      return filtered;\n    }\n\n    return relativePaths;\n  } catch (error: unknown) {\n    cdsExtractorLog('error', `Error finding CDS files in ${projectDir}: ${String(error)}`);\n    return [];\n  }\n}\n\n/**\n * Determines the list of distinct CDS projects under the specified source\n * directory.\n * @param sourceRootDir - The source root directory to search for CDS projects.\n * @returns An array of strings representing the paths, relative to the source\n * root directory, of the detected CDS projects.\n */\nexport function determineCdsProjectsUnderSourceDir(sourceRootDir: string): string[] {\n  if (!sourceRootDir || !existsSync(sourceRootDir)) {\n    throw new Error(`Source root directory '${sourceRootDir}' does not exist.`);\n  }\n\n  const foundProjects = new Set();\n\n  // Find all potential project directories by looking for package.json files and CDS files\n  const packageJsonFiles = sync(join(sourceRootDir, '**/package.json'), {\n    nodir: true,\n    ignore: ['**/node_modules/**', '**/*.testproj/**'],\n  });\n\n  const cdsFiles = sync(join(sourceRootDir, '**/*.cds'), {\n    nodir: true,\n    
ignore: ['**/node_modules/**', '**/*.testproj/**'],\n  });\n\n  // Collect all potential project directories\n  const candidateDirectories = new Set();\n\n  // Add directories with package.json files\n  for (const packageJsonFile of packageJsonFiles) {\n    candidateDirectories.add(dirname(packageJsonFile));\n  }\n\n  // Add directories with CDS files and try to find their project roots\n  for (const cdsFile of cdsFiles) {\n    const cdsDir = dirname(cdsFile);\n    const projectRoot = findProjectRootFromCdsFile(cdsDir, sourceRootDir);\n    if (projectRoot) {\n      candidateDirectories.add(projectRoot);\n    } else {\n      candidateDirectories.add(cdsDir);\n    }\n  }\n\n  // Filter candidates to only include likely CDS projects\n  for (const dir of candidateDirectories) {\n    if (isLikelyCdsProject(dir)) {\n      const relativePath = relative(sourceRootDir, dir);\n      const projectDir = relativePath || '.';\n\n      // Check if this project is already included as a parent or child of an existing project\n      let shouldAdd = true;\n      const existingProjects = Array.from(foundProjects);\n\n      for (const existingProject of existingProjects) {\n        const existingAbsPath = join(sourceRootDir, existingProject);\n\n        // Skip if this directory is a subdirectory of an existing project,\n        // but only if the parent is not a monorepo with its own CDS content\n        if (dir.startsWith(existingAbsPath + sep)) {\n          // Check if parent is a monorepo root with its own CDS content\n          const parentPackageJsonPath = join(existingAbsPath, 'package.json');\n          const parentPackageJson = readPackageJsonFile(parentPackageJsonPath);\n          const isParentMonorepo =\n            parentPackageJson?.workspaces &&\n            Array.isArray(parentPackageJson.workspaces) &&\n            parentPackageJson.workspaces.length > 0;\n\n          // If parent is a monorepo with CDS content, allow both parent and child\n          if (\n            
isParentMonorepo &&\n            (hasStandardCdsContent(existingAbsPath) || hasDirectCdsContent(existingAbsPath))\n          ) {\n            // Both parent and child can coexist as separate CDS projects\n            shouldAdd = true;\n          } else {\n            // Traditional case: exclude subdirectory\n            shouldAdd = false;\n          }\n          break;\n        }\n\n        // Remove existing project if it's a subdirectory of the current directory,\n        // unless the current directory is a monorepo root and the existing project has its own CDS content\n        if (existingAbsPath.startsWith(dir + sep)) {\n          const currentPackageJsonPath = join(dir, 'package.json');\n          const currentPackageJson = readPackageJsonFile(currentPackageJsonPath);\n          const isCurrentMonorepo =\n            currentPackageJson?.workspaces &&\n            Array.isArray(currentPackageJson.workspaces) &&\n            currentPackageJson.workspaces.length > 0;\n\n          // If current is a monorepo and the existing project is a legitimate CDS project, keep both\n          if (!(isCurrentMonorepo && isLikelyCdsProject(existingAbsPath))) {\n            foundProjects.delete(existingProject);\n          }\n        }\n      }\n\n      if (shouldAdd) {\n        foundProjects.add(projectDir);\n      }\n    }\n  }\n\n  return Array.from(foundProjects).sort();\n}\n\n/**\n * Parses a CDS file to extract import statements\n *\n * @param filePath - Path to the CDS file\n * @returns Array of import statements found in the file\n */\nexport function extractCdsImports(filePath: string): CdsImport[] {\n  if (!existsSync(filePath)) {\n    throw new Error(`File does not exist: ${filePath}`);\n  }\n\n  const content = readFileSync(filePath, 'utf8');\n  const imports: CdsImport[] = [];\n\n  // Regular expression to match using statements\n  // This handles: using X from 'path'; and using { X, Y } from 'path';\n  // and also using X as Y from 'path';\n  const usingRegex 
=\n    /using\\s+(?:{[^}]+}|[\\w.]+(?:\\s+as\\s+[\\w.]+)?)\\s+from\\s+['\"`]([^'\"`]+)['\"`]\\s*;/g;\n\n  let match;\n  while ((match = usingRegex.exec(content)) !== null) {\n    const path = match[1];\n    imports.push({\n      statement: match[0],\n      path,\n      isRelative: path.startsWith('./') || path.startsWith('../'),\n      isModule: !path.startsWith('./') && !path.startsWith('../') && !path.startsWith('/'),\n    });\n  }\n\n  return imports;\n}\n\n/**\n * Attempts to find the project root directory starting from a directory containing a CDS file\n *\n * @param cdsFileDir - Directory containing a CDS file\n * @param sourceRootDir - Source root directory to limit the search\n * @returns The project root directory or null if not found\n */\nfunction findProjectRootFromCdsFile(cdsFileDir: string, sourceRootDir: string): string | null {\n  // Skip node_modules and testproj directories entirely\n  if (cdsFileDir.includes('node_modules') || cdsFileDir.includes('.testproj')) {\n    return null;\n  }\n\n  let currentDir = cdsFileDir;\n\n  // Limit the upward search to the sourceRootDir\n  while (currentDir.startsWith(sourceRootDir)) {\n    // Check if this directory looks like a project root\n    if (isLikelyCdsProject(currentDir)) {\n      // If this is a standard CAP subdirectory (srv, db, app), check if the parent\n      // directory should be the real project root\n      const currentDirName = basename(currentDir);\n      const isStandardSubdir = ['srv', 'db', 'app'].includes(currentDirName);\n\n      if (isStandardSubdir) {\n        const parentDir = dirname(currentDir);\n\n        if (\n          parentDir !== currentDir &&\n          parentDir.startsWith(sourceRootDir) &&\n          !parentDir.includes('node_modules') &&\n          !parentDir.includes('.testproj') &&\n          isLikelyCdsProject(parentDir)\n        ) {\n          // The parent is also a CDS project, so it's likely the real project root\n          return parentDir;\n        }\n      
}\n\n      // For non-standard subdirectories, also check if the parent might be a better project root\n      const parentDir = dirname(currentDir);\n\n      if (\n        parentDir !== currentDir &&\n        parentDir.startsWith(sourceRootDir) &&\n        !parentDir.includes('node_modules') &&\n        !parentDir.includes('.testproj')\n      ) {\n        const hasDbDir =\n          existsSync(join(parentDir, 'db')) && statSync(join(parentDir, 'db')).isDirectory();\n        const hasSrvDir =\n          existsSync(join(parentDir, 'srv')) && statSync(join(parentDir, 'srv')).isDirectory();\n        const hasAppDir =\n          existsSync(join(parentDir, 'app')) && statSync(join(parentDir, 'app')).isDirectory();\n\n        // Use the same CAP project structure logic as below\n        if ((hasDbDir && hasSrvDir) || (hasSrvDir && hasAppDir)) {\n          return parentDir;\n        }\n      }\n\n      return currentDir;\n    }\n\n    // Check for typical CAP project structure indicators\n    const hasDbDir =\n      existsSync(join(currentDir, 'db')) && statSync(join(currentDir, 'db')).isDirectory();\n    const hasSrvDir =\n      existsSync(join(currentDir, 'srv')) && statSync(join(currentDir, 'srv')).isDirectory();\n    const hasAppDir =\n      existsSync(join(currentDir, 'app')) && statSync(join(currentDir, 'app')).isDirectory();\n\n    if ((hasDbDir && hasSrvDir) || (hasSrvDir && hasAppDir)) {\n      return currentDir;\n    }\n\n    // Move up one directory\n    const parentDir = dirname(currentDir);\n    if (parentDir === currentDir) {\n      // We've reached the root of the filesystem\n      break;\n    }\n    currentDir = parentDir;\n  }\n\n  // If we couldn't determine a proper project root, return the original directory\n  return cdsFileDir;\n}\n\n/**\n * Determines if a directory likely contains a CAP project by checking for key\n * indicators like package.json with CAP dependencies or .cds files in standard\n * locations.\n *\n * @param dir - Directory to check\n 
* @returns true if the directory likely contains a CAP project\n */\nexport function isLikelyCdsProject(dir: string): boolean {\n  try {\n    // Skip node_modules and testproj directories entirely\n    if (dir.includes('node_modules') || dir.includes('.testproj')) {\n      return false;\n    }\n\n    // Check for CDS files in standard locations (checking both direct and nested files)\n    const hasStandardCdsDirectories = hasStandardCdsContent(dir);\n    const hasDirectCdsFiles = hasDirectCdsContent(dir);\n    const hasCdsFiles = hasStandardCdsDirectories || hasDirectCdsFiles;\n\n    // Check if package.json exists and has CAP dependencies\n    const hasCapDependencies = hasPackageJsonWithCapDeps(dir);\n\n    if (hasCapDependencies) {\n      // If there are CAP dependencies but no CDS files, there's nothing for us to do\n      if (!hasCdsFiles) {\n        return false;\n      }\n\n      // Check if this is a monorepo root\n      const packageJsonPath = join(dir, 'package.json');\n      const packageJson = readPackageJsonFile(packageJsonPath);\n\n      if (\n        packageJson?.workspaces &&\n        Array.isArray(packageJson.workspaces) &&\n        packageJson.workspaces.length > 0\n      ) {\n        // This is likely a monorepo - only treat as CDS project if it has actual CDS content\n        if (!hasCdsFiles) {\n          // This is a monorepo root without its own CDS content\n          return false;\n        }\n      }\n\n      return true;\n    }\n\n    // If no CAP dependencies, only consider it a CDS project if it has CDS files\n    return hasCdsFiles;\n  } catch (error: unknown) {\n    cdsExtractorLog('error', `Error checking directory ${dir}: ${String(error)}`);\n    return false;\n  }\n}\n\n/**\n * Check if a directory has CDS content in standard CAP directories.\n */\nfunction hasStandardCdsContent(dir: string): boolean {\n  const standardLocations = [join(dir, 'db'), join(dir, 'srv'), join(dir, 'app')];\n\n  for (const location of standardLocations) 
{\n    if (existsSync(location) && statSync(location).isDirectory()) {\n      // Check for any .cds files at any level under these directories.\n      const cdsFiles = sync(join(location, '**/*.cds'), { nodir: true });\n      if (cdsFiles.length > 0) {\n        return true;\n      }\n    }\n  }\n\n  return false;\n}\n\n/**\n * Check if a directory has direct CDS files.\n */\nfunction hasDirectCdsContent(dir: string): boolean {\n  const directCdsFiles = sync(join(dir, '*.cds'));\n  return directCdsFiles.length > 0;\n}\n\n/**\n * Safely parses a package.json file, using the cache if available\n * @param filePath - Path to the package.json file\n * @returns The parsed package.json content or undefined if the file doesn't exist or can't be parsed\n */\nexport function readPackageJsonFile(filePath: string): PackageJson | undefined {\n  if (!existsSync(filePath)) {\n    return undefined;\n  }\n\n  try {\n    const content = readFileSync(filePath, 'utf8');\n    const packageJson = JSON.parse(content) as PackageJson;\n    return packageJson;\n  } catch (error) {\n    cdsExtractorLog('warn', `Error parsing package.json at ${filePath}: ${String(error)}`);\n    return undefined;\n  }\n}\n\n/**\n * Determines which CDS files should be compiled for a given project and what output files to expect.\n * This function analyzes the project structure and dependencies to decide\n * whether to use project-level compilation or individual file compilation.\n *\n * For CAP projects (identified by either having @sap/cds dependencies or\n * typical CAP directory structure), it returns a special marker indicating\n * project-level compilation should be used. 
For other projects, it attempts\n * to identify root files (files that are not imported by others) and returns\n * those for individual compilation.\n *\n * @param sourceRootDir - The source root directory\n * @param project - The project to analyze, containing cdsFiles, imports, and projectDir\n * @returns Object containing files to compile and expected output files\n */\nexport function determineCdsFilesToCompile(\n  sourceRootDir: string,\n  project: {\n    cdsFiles: string[];\n    imports?: Map;\n    projectDir: string;\n  },\n): CdsFilesToCompile {\n  if (!project.cdsFiles || project.cdsFiles.length === 0) {\n    return {\n      compilationTargets: [],\n      expectedOutputFile: join(project.projectDir, modelCdsJsonFile),\n    };\n  }\n\n  const absoluteProjectDir = join(sourceRootDir, project.projectDir);\n\n  // Check for standard CAP directories\n  const capDirectories = ['db', 'srv', 'app'];\n  const existingCapDirs = capDirectories.filter(dir => existsSync(join(absoluteProjectDir, dir)));\n\n  if (existingCapDirs.length > 0) {\n    // Use standard CAP directories\n    return {\n      compilationTargets: existingCapDirs,\n      expectedOutputFile: join(project.projectDir, modelCdsJsonFile),\n    };\n  }\n\n  // Check for root-level CDS files\n  const rootCdsFiles = project.cdsFiles\n    .filter(file => dirname(join(sourceRootDir, file)) === absoluteProjectDir)\n    .map(file => basename(file));\n\n  if (rootCdsFiles.length > 0) {\n    // Use root-level files\n    return {\n      compilationTargets: rootCdsFiles,\n      expectedOutputFile: join(project.projectDir, modelCdsJsonFile),\n    };\n  }\n\n  // Use all CDS files with their relative paths\n  const compilationTargets = project.cdsFiles.map(file =>\n    relative(absoluteProjectDir, join(sourceRootDir, file)),\n  );\n\n  return {\n    compilationTargets,\n    expectedOutputFile: join(project.projectDir, modelCdsJsonFile),\n  };\n}\n\n/**\n * Checks if a directory has a package.json with CAP 
dependencies.\n * This function is used to determine if a directory has the necessary CAP packages installed,\n * which is one indicator that it might be a CAP project.\n *\n * @param dir - Directory to check for package.json with CAP dependencies\n * @returns true if the directory has a package.json with CAP dependencies\n */\nexport function hasPackageJsonWithCapDeps(dir: string): boolean {\n  try {\n    const packageJsonPath = join(dir, 'package.json');\n    const packageJson = readPackageJsonFile(packageJsonPath);\n\n    if (packageJson) {\n      const dependencies = {\n        ...(packageJson.dependencies ?? {}),\n        ...(packageJson.devDependencies ?? {}),\n      };\n\n      // Check for common CAP dependencies\n      return !!(dependencies['@sap/cds'] || dependencies['@sap/cds-dk']);\n    }\n\n    return false;\n  } catch {\n    return false;\n  }\n}\n", "import { existsSync, readFileSync } from 'fs';\nimport { join, relative, resolve } from 'path';\n\nimport { load as yamlLoad } from 'js-yaml';\nimport { minimatch } from 'minimatch';\n\nimport { cdsExtractorLog } from './logging';\n\n/**\n * Well-known paths where a CodeQL configuration file may be located,\n * relative to the source root directory. 
Checked in order of priority.\n */\nconst DEFAULT_CONFIG_RELATIVE_PATHS = [\n  '.github/codeql/codeql-config.yml',\n  '.github/codeql/codeql-config.yaml',\n];\n\n/**\n * Cache for parsed paths-ignore patterns, keyed by source root.\n * Avoids re-reading and re-parsing the config file on every call.\n */\nconst patternsCache = new Map();\n\n/**\n * Shape of the subset of a CodeQL configuration file that we care about.\n */\ninterface CodeqlConfig {\n  'paths-ignore'?: string[];\n}\n\n/**\n * Finds the CodeQL configuration file in the source root directory.\n *\n * When the `CODEQL_CONFIG_PATH` environment variable is set, its value\n * is treated as a path (relative to `sourceRoot`) to the config file.\n * The resolved path must reside under `sourceRoot`; otherwise it is\n * rejected to prevent path-traversal issues.\n *\n * When the environment variable is not set, the well-known default\n * paths are checked in order.\n *\n * @param sourceRoot - The source root directory\n * @returns The absolute path to the config file, or undefined if not found\n */\nexport function findCodeqlConfigFile(sourceRoot: string): string | undefined {\n  const envConfigPath = process.env.CODEQL_CONFIG_PATH;\n  if (envConfigPath) {\n    const resolvedRoot = resolve(sourceRoot);\n    const fullPath = resolve(resolvedRoot, envConfigPath);\n    const rel = relative(resolvedRoot, fullPath);\n    if (rel.startsWith('..') || resolve(resolvedRoot, rel) !== fullPath) {\n      cdsExtractorLog(\n        'warn',\n        `CODEQL_CONFIG_PATH '${envConfigPath}' resolves outside the source root. 
Ignoring.`,\n      );\n      return undefined;\n    }\n    if (existsSync(fullPath)) {\n      cdsExtractorLog('info', `Using CodeQL config file from CODEQL_CONFIG_PATH: ${fullPath}`);\n      return fullPath;\n    }\n    cdsExtractorLog(\n      'warn',\n      `CODEQL_CONFIG_PATH is set to '${envConfigPath}', but no file exists at '${fullPath}'.`,\n    );\n    return undefined;\n  }\n\n  for (const configPath of DEFAULT_CONFIG_RELATIVE_PATHS) {\n    const fullPath = join(sourceRoot, configPath);\n    if (existsSync(fullPath)) {\n      return fullPath;\n    }\n  }\n  return undefined;\n}\n\n/**\n * Reads the CodeQL configuration file and extracts the `paths-ignore`\n * patterns list.\n *\n * @param sourceRoot - The source root directory\n * @returns Array of paths-ignore glob patterns, or empty array if none\n */\nexport function getPathsIgnorePatterns(sourceRoot: string): string[] {\n  const cached = patternsCache.get(sourceRoot);\n  if (cached !== undefined) {\n    return cached;\n  }\n\n  const configPath = findCodeqlConfigFile(sourceRoot);\n  if (!configPath) {\n    patternsCache.set(sourceRoot, []);\n    return [];\n  }\n\n  try {\n    const content = readFileSync(configPath, 'utf8');\n    const config = yamlLoad(content) as CodeqlConfig | null;\n\n    if (!config || !Array.isArray(config['paths-ignore'])) {\n      patternsCache.set(sourceRoot, []);\n      return [];\n    }\n\n    const patterns = config['paths-ignore'].filter(\n      (p): p is string => typeof p === 'string' && p.length > 0,\n    );\n\n    if (patterns.length > 0) {\n      cdsExtractorLog(\n        'info',\n        `Found ${patterns.length} paths-ignore pattern(s) in ${configPath}: ${patterns.join(', ')}`,\n      );\n    }\n\n    patternsCache.set(sourceRoot, patterns);\n    return patterns;\n  } catch (error) {\n    cdsExtractorLog('warn', `Failed to read CodeQL config file at ${configPath}: ${String(error)}`);\n    patternsCache.set(sourceRoot, []);\n    return [];\n  }\n}\n\n/**\n * Tests 
whether a single relative file path matches any of the given\n * paths-ignore patterns.\n *\n * Pattern matching follows the CodeQL `paths-ignore` semantics:\n *  - A bare directory name `vendor` matches anything under `vendor/`\n *  - `**` matches across directory boundaries\n *  - `*` matches within a single path segment\n *\n * @param relativePath - File path relative to the source root\n * @param patterns     - Array of paths-ignore glob patterns\n * @returns true if the path should be ignored\n */\nexport function shouldIgnorePath(relativePath: string, patterns: string[]): boolean {\n  const matchOptions = { dot: true, windowsPathsNoEscape: true };\n\n  for (const raw of patterns) {\n    // Strip trailing slashes so `vendor/` is treated the same as `vendor`\n    const pattern = raw.replace(/\\/+$/, '');\n\n    // Direct minimatch check\n    if (minimatch(relativePath, pattern, matchOptions)) {\n      return true;\n    }\n\n    // Also match as a directory prefix: pattern `vendor` should\n    // match `vendor/lib/foo.cds` (i.e. anything nested underneath).\n    if (minimatch(relativePath, `${pattern}/**`, matchOptions)) {\n      return true;\n    }\n  }\n  return false;\n}\n\n/**\n * Filters a list of relative file paths, removing any that match the\n * given paths-ignore patterns.\n *\n * @param relativePaths - File paths relative to the source root\n * @param patterns      - Array of paths-ignore glob patterns\n * @returns Filtered list of paths that do NOT match any ignore pattern\n */\nexport function filterIgnoredPaths(relativePaths: string[], patterns: string[]): string[] {\n  if (patterns.length === 0) {\n    return relativePaths;\n  }\n  return relativePaths.filter(p => !shouldIgnorePath(p, patterns));\n}\n\n/**\n * Clears the internal patterns cache. Intended for testing only.\n */\nexport function clearPathsIgnoreCache(): void {\n  patternsCache.clear();\n}\n", "\n/*! 
js-yaml 4.1.1 https://github.com/nodeca/js-yaml @license MIT */\nfunction isNothing(subject) {\n  return (typeof subject === 'undefined') || (subject === null);\n}\n\n\nfunction isObject(subject) {\n  return (typeof subject === 'object') && (subject !== null);\n}\n\n\nfunction toArray(sequence) {\n  if (Array.isArray(sequence)) return sequence;\n  else if (isNothing(sequence)) return [];\n\n  return [ sequence ];\n}\n\n\nfunction extend(target, source) {\n  var index, length, key, sourceKeys;\n\n  if (source) {\n    sourceKeys = Object.keys(source);\n\n    for (index = 0, length = sourceKeys.length; index < length; index += 1) {\n      key = sourceKeys[index];\n      target[key] = source[key];\n    }\n  }\n\n  return target;\n}\n\n\nfunction repeat(string, count) {\n  var result = '', cycle;\n\n  for (cycle = 0; cycle < count; cycle += 1) {\n    result += string;\n  }\n\n  return result;\n}\n\n\nfunction isNegativeZero(number) {\n  return (number === 0) && (Number.NEGATIVE_INFINITY === 1 / number);\n}\n\n\nvar isNothing_1      = isNothing;\nvar isObject_1       = isObject;\nvar toArray_1        = toArray;\nvar repeat_1         = repeat;\nvar isNegativeZero_1 = isNegativeZero;\nvar extend_1         = extend;\n\nvar common = {\n\tisNothing: isNothing_1,\n\tisObject: isObject_1,\n\ttoArray: toArray_1,\n\trepeat: repeat_1,\n\tisNegativeZero: isNegativeZero_1,\n\textend: extend_1\n};\n\n// YAML error class. 
http://stackoverflow.com/questions/8458984\n\n\nfunction formatError(exception, compact) {\n  var where = '', message = exception.reason || '(unknown reason)';\n\n  if (!exception.mark) return message;\n\n  if (exception.mark.name) {\n    where += 'in \"' + exception.mark.name + '\" ';\n  }\n\n  where += '(' + (exception.mark.line + 1) + ':' + (exception.mark.column + 1) + ')';\n\n  if (!compact && exception.mark.snippet) {\n    where += '\\n\\n' + exception.mark.snippet;\n  }\n\n  return message + ' ' + where;\n}\n\n\nfunction YAMLException$1(reason, mark) {\n  // Super constructor\n  Error.call(this);\n\n  this.name = 'YAMLException';\n  this.reason = reason;\n  this.mark = mark;\n  this.message = formatError(this, false);\n\n  // Include stack trace in error object\n  if (Error.captureStackTrace) {\n    // Chrome and NodeJS\n    Error.captureStackTrace(this, this.constructor);\n  } else {\n    // FF, IE 10+ and Safari 6+. Fallback for others\n    this.stack = (new Error()).stack || '';\n  }\n}\n\n\n// Inherit from Error\nYAMLException$1.prototype = Object.create(Error.prototype);\nYAMLException$1.prototype.constructor = YAMLException$1;\n\n\nYAMLException$1.prototype.toString = function toString(compact) {\n  return this.name + ': ' + formatError(this, compact);\n};\n\n\nvar exception = YAMLException$1;\n\n// get snippet for a single line, respecting maxLength\nfunction getLine(buffer, lineStart, lineEnd, position, maxLineLength) {\n  var head = '';\n  var tail = '';\n  var maxHalfLength = Math.floor(maxLineLength / 2) - 1;\n\n  if (position - lineStart > maxHalfLength) {\n    head = ' ... 
';\n    lineStart = position - maxHalfLength + head.length;\n  }\n\n  if (lineEnd - position > maxHalfLength) {\n    tail = ' ...';\n    lineEnd = position + maxHalfLength - tail.length;\n  }\n\n  return {\n    str: head + buffer.slice(lineStart, lineEnd).replace(/\\t/g, '\u2192') + tail,\n    pos: position - lineStart + head.length // relative position\n  };\n}\n\n\nfunction padStart(string, max) {\n  return common.repeat(' ', max - string.length) + string;\n}\n\n\nfunction makeSnippet(mark, options) {\n  options = Object.create(options || null);\n\n  if (!mark.buffer) return null;\n\n  if (!options.maxLength) options.maxLength = 79;\n  if (typeof options.indent      !== 'number') options.indent      = 1;\n  if (typeof options.linesBefore !== 'number') options.linesBefore = 3;\n  if (typeof options.linesAfter  !== 'number') options.linesAfter  = 2;\n\n  var re = /\\r?\\n|\\r|\\0/g;\n  var lineStarts = [ 0 ];\n  var lineEnds = [];\n  var match;\n  var foundLineNo = -1;\n\n  while ((match = re.exec(mark.buffer))) {\n    lineEnds.push(match.index);\n    lineStarts.push(match.index + match[0].length);\n\n    if (mark.position <= match.index && foundLineNo < 0) {\n      foundLineNo = lineStarts.length - 2;\n    }\n  }\n\n  if (foundLineNo < 0) foundLineNo = lineStarts.length - 1;\n\n  var result = '', i, line;\n  var lineNoLength = Math.min(mark.line + options.linesAfter, lineEnds.length).toString().length;\n  var maxLineLength = options.maxLength - (options.indent + lineNoLength + 3);\n\n  for (i = 1; i <= options.linesBefore; i++) {\n    if (foundLineNo - i < 0) break;\n    line = getLine(\n      mark.buffer,\n      lineStarts[foundLineNo - i],\n      lineEnds[foundLineNo - i],\n      mark.position - (lineStarts[foundLineNo] - lineStarts[foundLineNo - i]),\n      maxLineLength\n    );\n    result = common.repeat(' ', options.indent) + padStart((mark.line - i + 1).toString(), lineNoLength) +\n      ' | ' + line.str + '\\n' + result;\n  }\n\n  line = 
getLine(mark.buffer, lineStarts[foundLineNo], lineEnds[foundLineNo], mark.position, maxLineLength);\n  result += common.repeat(' ', options.indent) + padStart((mark.line + 1).toString(), lineNoLength) +\n    ' | ' + line.str + '\\n';\n  result += common.repeat('-', options.indent + lineNoLength + 3 + line.pos) + '^' + '\\n';\n\n  for (i = 1; i <= options.linesAfter; i++) {\n    if (foundLineNo + i >= lineEnds.length) break;\n    line = getLine(\n      mark.buffer,\n      lineStarts[foundLineNo + i],\n      lineEnds[foundLineNo + i],\n      mark.position - (lineStarts[foundLineNo] - lineStarts[foundLineNo + i]),\n      maxLineLength\n    );\n    result += common.repeat(' ', options.indent) + padStart((mark.line + i + 1).toString(), lineNoLength) +\n      ' | ' + line.str + '\\n';\n  }\n\n  return result.replace(/\\n$/, '');\n}\n\n\nvar snippet = makeSnippet;\n\nvar TYPE_CONSTRUCTOR_OPTIONS = [\n  'kind',\n  'multi',\n  'resolve',\n  'construct',\n  'instanceOf',\n  'predicate',\n  'represent',\n  'representName',\n  'defaultStyle',\n  'styleAliases'\n];\n\nvar YAML_NODE_KINDS = [\n  'scalar',\n  'sequence',\n  'mapping'\n];\n\nfunction compileStyleAliases(map) {\n  var result = {};\n\n  if (map !== null) {\n    Object.keys(map).forEach(function (style) {\n      map[style].forEach(function (alias) {\n        result[String(alias)] = style;\n      });\n    });\n  }\n\n  return result;\n}\n\nfunction Type$1(tag, options) {\n  options = options || {};\n\n  Object.keys(options).forEach(function (name) {\n    if (TYPE_CONSTRUCTOR_OPTIONS.indexOf(name) === -1) {\n      throw new exception('Unknown option \"' + name + '\" is met in definition of \"' + tag + '\" YAML type.');\n    }\n  });\n\n  // TODO: Add tag format check.\n  this.options       = options; // keep original options in case user wants to extend this type later\n  this.tag           = tag;\n  this.kind          = options['kind']          || null;\n  this.resolve       = options['resolve']       || function () { 
return true; };\n  this.construct     = options['construct']     || function (data) { return data; };\n  this.instanceOf    = options['instanceOf']    || null;\n  this.predicate     = options['predicate']     || null;\n  this.represent     = options['represent']     || null;\n  this.representName = options['representName'] || null;\n  this.defaultStyle  = options['defaultStyle']  || null;\n  this.multi         = options['multi']         || false;\n  this.styleAliases  = compileStyleAliases(options['styleAliases'] || null);\n\n  if (YAML_NODE_KINDS.indexOf(this.kind) === -1) {\n    throw new exception('Unknown kind \"' + this.kind + '\" is specified for \"' + tag + '\" YAML type.');\n  }\n}\n\nvar type = Type$1;\n\n/*eslint-disable max-len*/\n\n\n\n\n\nfunction compileList(schema, name) {\n  var result = [];\n\n  schema[name].forEach(function (currentType) {\n    var newIndex = result.length;\n\n    result.forEach(function (previousType, previousIndex) {\n      if (previousType.tag === currentType.tag &&\n          previousType.kind === currentType.kind &&\n          previousType.multi === currentType.multi) {\n\n        newIndex = previousIndex;\n      }\n    });\n\n    result[newIndex] = currentType;\n  });\n\n  return result;\n}\n\n\nfunction compileMap(/* lists... 
*/) {\n  var result = {\n        scalar: {},\n        sequence: {},\n        mapping: {},\n        fallback: {},\n        multi: {\n          scalar: [],\n          sequence: [],\n          mapping: [],\n          fallback: []\n        }\n      }, index, length;\n\n  function collectType(type) {\n    if (type.multi) {\n      result.multi[type.kind].push(type);\n      result.multi['fallback'].push(type);\n    } else {\n      result[type.kind][type.tag] = result['fallback'][type.tag] = type;\n    }\n  }\n\n  for (index = 0, length = arguments.length; index < length; index += 1) {\n    arguments[index].forEach(collectType);\n  }\n  return result;\n}\n\n\nfunction Schema$1(definition) {\n  return this.extend(definition);\n}\n\n\nSchema$1.prototype.extend = function extend(definition) {\n  var implicit = [];\n  var explicit = [];\n\n  if (definition instanceof type) {\n    // Schema.extend(type)\n    explicit.push(definition);\n\n  } else if (Array.isArray(definition)) {\n    // Schema.extend([ type1, type2, ... ])\n    explicit = explicit.concat(definition);\n\n  } else if (definition && (Array.isArray(definition.implicit) || Array.isArray(definition.explicit))) {\n    // Schema.extend({ explicit: [ type1, type2, ... ], implicit: [ type1, type2, ... ] })\n    if (definition.implicit) implicit = implicit.concat(definition.implicit);\n    if (definition.explicit) explicit = explicit.concat(definition.explicit);\n\n  } else {\n    throw new exception('Schema.extend argument should be a Type, [ Type ], ' +\n      'or a schema definition ({ implicit: [...], explicit: [...] })');\n  }\n\n  implicit.forEach(function (type$1) {\n    if (!(type$1 instanceof type)) {\n      throw new exception('Specified list of YAML types (or a single Type object) contains a non-Type object.');\n    }\n\n    if (type$1.loadKind && type$1.loadKind !== 'scalar') {\n      throw new exception('There is a non-scalar type in the implicit list of a schema. 
Implicit resolving of such types is not supported.');\n    }\n\n    if (type$1.multi) {\n      throw new exception('There is a multi type in the implicit list of a schema. Multi tags can only be listed as explicit.');\n    }\n  });\n\n  explicit.forEach(function (type$1) {\n    if (!(type$1 instanceof type)) {\n      throw new exception('Specified list of YAML types (or a single Type object) contains a non-Type object.');\n    }\n  });\n\n  var result = Object.create(Schema$1.prototype);\n\n  result.implicit = (this.implicit || []).concat(implicit);\n  result.explicit = (this.explicit || []).concat(explicit);\n\n  result.compiledImplicit = compileList(result, 'implicit');\n  result.compiledExplicit = compileList(result, 'explicit');\n  result.compiledTypeMap  = compileMap(result.compiledImplicit, result.compiledExplicit);\n\n  return result;\n};\n\n\nvar schema = Schema$1;\n\nvar str = new type('tag:yaml.org,2002:str', {\n  kind: 'scalar',\n  construct: function (data) { return data !== null ? data : ''; }\n});\n\nvar seq = new type('tag:yaml.org,2002:seq', {\n  kind: 'sequence',\n  construct: function (data) { return data !== null ? data : []; }\n});\n\nvar map = new type('tag:yaml.org,2002:map', {\n  kind: 'mapping',\n  construct: function (data) { return data !== null ? 
data : {}; }\n});\n\nvar failsafe = new schema({\n  explicit: [\n    str,\n    seq,\n    map\n  ]\n});\n\nfunction resolveYamlNull(data) {\n  if (data === null) return true;\n\n  var max = data.length;\n\n  return (max === 1 && data === '~') ||\n         (max === 4 && (data === 'null' || data === 'Null' || data === 'NULL'));\n}\n\nfunction constructYamlNull() {\n  return null;\n}\n\nfunction isNull(object) {\n  return object === null;\n}\n\nvar _null = new type('tag:yaml.org,2002:null', {\n  kind: 'scalar',\n  resolve: resolveYamlNull,\n  construct: constructYamlNull,\n  predicate: isNull,\n  represent: {\n    canonical: function () { return '~';    },\n    lowercase: function () { return 'null'; },\n    uppercase: function () { return 'NULL'; },\n    camelcase: function () { return 'Null'; },\n    empty:     function () { return '';     }\n  },\n  defaultStyle: 'lowercase'\n});\n\nfunction resolveYamlBoolean(data) {\n  if (data === null) return false;\n\n  var max = data.length;\n\n  return (max === 4 && (data === 'true' || data === 'True' || data === 'TRUE')) ||\n         (max === 5 && (data === 'false' || data === 'False' || data === 'FALSE'));\n}\n\nfunction constructYamlBoolean(data) {\n  return data === 'true' ||\n         data === 'True' ||\n         data === 'TRUE';\n}\n\nfunction isBoolean(object) {\n  return Object.prototype.toString.call(object) === '[object Boolean]';\n}\n\nvar bool = new type('tag:yaml.org,2002:bool', {\n  kind: 'scalar',\n  resolve: resolveYamlBoolean,\n  construct: constructYamlBoolean,\n  predicate: isBoolean,\n  represent: {\n    lowercase: function (object) { return object ? 'true' : 'false'; },\n    uppercase: function (object) { return object ? 'TRUE' : 'FALSE'; },\n    camelcase: function (object) { return object ? 
'True' : 'False'; }\n  },\n  defaultStyle: 'lowercase'\n});\n\nfunction isHexCode(c) {\n  return ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) ||\n         ((0x41/* A */ <= c) && (c <= 0x46/* F */)) ||\n         ((0x61/* a */ <= c) && (c <= 0x66/* f */));\n}\n\nfunction isOctCode(c) {\n  return ((0x30/* 0 */ <= c) && (c <= 0x37/* 7 */));\n}\n\nfunction isDecCode(c) {\n  return ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */));\n}\n\nfunction resolveYamlInteger(data) {\n  if (data === null) return false;\n\n  var max = data.length,\n      index = 0,\n      hasDigits = false,\n      ch;\n\n  if (!max) return false;\n\n  ch = data[index];\n\n  // sign\n  if (ch === '-' || ch === '+') {\n    ch = data[++index];\n  }\n\n  if (ch === '0') {\n    // 0\n    if (index + 1 === max) return true;\n    ch = data[++index];\n\n    // base 2, base 8, base 16\n\n    if (ch === 'b') {\n      // base 2\n      index++;\n\n      for (; index < max; index++) {\n        ch = data[index];\n        if (ch === '_') continue;\n        if (ch !== '0' && ch !== '1') return false;\n        hasDigits = true;\n      }\n      return hasDigits && ch !== '_';\n    }\n\n\n    if (ch === 'x') {\n      // base 16\n      index++;\n\n      for (; index < max; index++) {\n        ch = data[index];\n        if (ch === '_') continue;\n        if (!isHexCode(data.charCodeAt(index))) return false;\n        hasDigits = true;\n      }\n      return hasDigits && ch !== '_';\n    }\n\n\n    if (ch === 'o') {\n      // base 8\n      index++;\n\n      for (; index < max; index++) {\n        ch = data[index];\n        if (ch === '_') continue;\n        if (!isOctCode(data.charCodeAt(index))) return false;\n        hasDigits = true;\n      }\n      return hasDigits && ch !== '_';\n    }\n  }\n\n  // base 10 (except 0)\n\n  // value should not start with `_`;\n  if (ch === '_') return false;\n\n  for (; index < max; index++) {\n    ch = data[index];\n    if (ch === '_') continue;\n    if (!isDecCode(data.charCodeAt(index))) {\n 
     return false;\n    }\n    hasDigits = true;\n  }\n\n  // Should have digits and should not end with `_`\n  if (!hasDigits || ch === '_') return false;\n\n  return true;\n}\n\nfunction constructYamlInteger(data) {\n  var value = data, sign = 1, ch;\n\n  if (value.indexOf('_') !== -1) {\n    value = value.replace(/_/g, '');\n  }\n\n  ch = value[0];\n\n  if (ch === '-' || ch === '+') {\n    if (ch === '-') sign = -1;\n    value = value.slice(1);\n    ch = value[0];\n  }\n\n  if (value === '0') return 0;\n\n  if (ch === '0') {\n    if (value[1] === 'b') return sign * parseInt(value.slice(2), 2);\n    if (value[1] === 'x') return sign * parseInt(value.slice(2), 16);\n    if (value[1] === 'o') return sign * parseInt(value.slice(2), 8);\n  }\n\n  return sign * parseInt(value, 10);\n}\n\nfunction isInteger(object) {\n  return (Object.prototype.toString.call(object)) === '[object Number]' &&\n         (object % 1 === 0 && !common.isNegativeZero(object));\n}\n\nvar int = new type('tag:yaml.org,2002:int', {\n  kind: 'scalar',\n  resolve: resolveYamlInteger,\n  construct: constructYamlInteger,\n  predicate: isInteger,\n  represent: {\n    binary:      function (obj) { return obj >= 0 ? '0b' + obj.toString(2) : '-0b' + obj.toString(2).slice(1); },\n    octal:       function (obj) { return obj >= 0 ? '0o'  + obj.toString(8) : '-0o'  + obj.toString(8).slice(1); },\n    decimal:     function (obj) { return obj.toString(10); },\n    /* eslint-disable max-len */\n    hexadecimal: function (obj) { return obj >= 0 ? '0x' + obj.toString(16).toUpperCase() :  '-0x' + obj.toString(16).toUpperCase().slice(1); }\n  },\n  defaultStyle: 'decimal',\n  styleAliases: {\n    binary:      [ 2,  'bin' ],\n    octal:       [ 8,  'oct' ],\n    decimal:     [ 10, 'dec' ],\n    hexadecimal: [ 16, 'hex' ]\n  }\n});\n\nvar YAML_FLOAT_PATTERN = new RegExp(\n  // 2.5e4, 2.5 and integers\n  '^(?:[-+]?(?:[0-9][0-9_]*)(?:\\\\.[0-9_]*)?(?:[eE][-+]?[0-9]+)?' 
+\n  // .2e4, .2\n  // special case, seems not from spec\n  '|\\\\.[0-9_]+(?:[eE][-+]?[0-9]+)?' +\n  // .inf\n  '|[-+]?\\\\.(?:inf|Inf|INF)' +\n  // .nan\n  '|\\\\.(?:nan|NaN|NAN))$');\n\nfunction resolveYamlFloat(data) {\n  if (data === null) return false;\n\n  if (!YAML_FLOAT_PATTERN.test(data) ||\n      // Quick hack to not allow integers end with `_`\n      // Probably should update regexp & check speed\n      data[data.length - 1] === '_') {\n    return false;\n  }\n\n  return true;\n}\n\nfunction constructYamlFloat(data) {\n  var value, sign;\n\n  value  = data.replace(/_/g, '').toLowerCase();\n  sign   = value[0] === '-' ? -1 : 1;\n\n  if ('+-'.indexOf(value[0]) >= 0) {\n    value = value.slice(1);\n  }\n\n  if (value === '.inf') {\n    return (sign === 1) ? Number.POSITIVE_INFINITY : Number.NEGATIVE_INFINITY;\n\n  } else if (value === '.nan') {\n    return NaN;\n  }\n  return sign * parseFloat(value, 10);\n}\n\n\nvar SCIENTIFIC_WITHOUT_DOT = /^[-+]?[0-9]+e/;\n\nfunction representYamlFloat(object, style) {\n  var res;\n\n  if (isNaN(object)) {\n    switch (style) {\n      case 'lowercase': return '.nan';\n      case 'uppercase': return '.NAN';\n      case 'camelcase': return '.NaN';\n    }\n  } else if (Number.POSITIVE_INFINITY === object) {\n    switch (style) {\n      case 'lowercase': return '.inf';\n      case 'uppercase': return '.INF';\n      case 'camelcase': return '.Inf';\n    }\n  } else if (Number.NEGATIVE_INFINITY === object) {\n    switch (style) {\n      case 'lowercase': return '-.inf';\n      case 'uppercase': return '-.INF';\n      case 'camelcase': return '-.Inf';\n    }\n  } else if (common.isNegativeZero(object)) {\n    return '-0.0';\n  }\n\n  res = object.toString(10);\n\n  // JS stringifier can build scientific format without dots: 5e-100,\n  // while YAML requres dot: 5.e-100. Fix it with simple hack\n\n  return SCIENTIFIC_WITHOUT_DOT.test(res) ? 
res.replace('e', '.e') : res;\n}\n\nfunction isFloat(object) {\n  return (Object.prototype.toString.call(object) === '[object Number]') &&\n         (object % 1 !== 0 || common.isNegativeZero(object));\n}\n\nvar float = new type('tag:yaml.org,2002:float', {\n  kind: 'scalar',\n  resolve: resolveYamlFloat,\n  construct: constructYamlFloat,\n  predicate: isFloat,\n  represent: representYamlFloat,\n  defaultStyle: 'lowercase'\n});\n\nvar json = failsafe.extend({\n  implicit: [\n    _null,\n    bool,\n    int,\n    float\n  ]\n});\n\nvar core = json;\n\nvar YAML_DATE_REGEXP = new RegExp(\n  '^([0-9][0-9][0-9][0-9])'          + // [1] year\n  '-([0-9][0-9])'                    + // [2] month\n  '-([0-9][0-9])$');                   // [3] day\n\nvar YAML_TIMESTAMP_REGEXP = new RegExp(\n  '^([0-9][0-9][0-9][0-9])'          + // [1] year\n  '-([0-9][0-9]?)'                   + // [2] month\n  '-([0-9][0-9]?)'                   + // [3] day\n  '(?:[Tt]|[ \\\\t]+)'                 + // ...\n  '([0-9][0-9]?)'                    + // [4] hour\n  ':([0-9][0-9])'                    + // [5] minute\n  ':([0-9][0-9])'                    + // [6] second\n  '(?:\\\\.([0-9]*))?'                 
+ // [7] fraction\n  '(?:[ \\\\t]*(Z|([-+])([0-9][0-9]?)' + // [8] tz [9] tz_sign [10] tz_hour\n  '(?::([0-9][0-9]))?))?$');           // [11] tz_minute\n\nfunction resolveYamlTimestamp(data) {\n  if (data === null) return false;\n  if (YAML_DATE_REGEXP.exec(data) !== null) return true;\n  if (YAML_TIMESTAMP_REGEXP.exec(data) !== null) return true;\n  return false;\n}\n\nfunction constructYamlTimestamp(data) {\n  var match, year, month, day, hour, minute, second, fraction = 0,\n      delta = null, tz_hour, tz_minute, date;\n\n  match = YAML_DATE_REGEXP.exec(data);\n  if (match === null) match = YAML_TIMESTAMP_REGEXP.exec(data);\n\n  if (match === null) throw new Error('Date resolve error');\n\n  // match: [1] year [2] month [3] day\n\n  year = +(match[1]);\n  month = +(match[2]) - 1; // JS month starts with 0\n  day = +(match[3]);\n\n  if (!match[4]) { // no hour\n    return new Date(Date.UTC(year, month, day));\n  }\n\n  // match: [4] hour [5] minute [6] second [7] fraction\n\n  hour = +(match[4]);\n  minute = +(match[5]);\n  second = +(match[6]);\n\n  if (match[7]) {\n    fraction = match[7].slice(0, 3);\n    while (fraction.length < 3) { // milli-seconds\n      fraction += '0';\n    }\n    fraction = +fraction;\n  }\n\n  // match: [8] tz [9] tz_sign [10] tz_hour [11] tz_minute\n\n  if (match[9]) {\n    tz_hour = +(match[10]);\n    tz_minute = +(match[11] || 0);\n    delta = (tz_hour * 60 + tz_minute) * 60000; // delta in mili-seconds\n    if (match[9] === '-') delta = -delta;\n  }\n\n  date = new Date(Date.UTC(year, month, day, hour, minute, second, fraction));\n\n  if (delta) date.setTime(date.getTime() - delta);\n\n  return date;\n}\n\nfunction representYamlTimestamp(object /*, style*/) {\n  return object.toISOString();\n}\n\nvar timestamp = new type('tag:yaml.org,2002:timestamp', {\n  kind: 'scalar',\n  resolve: resolveYamlTimestamp,\n  construct: constructYamlTimestamp,\n  instanceOf: Date,\n  represent: representYamlTimestamp\n});\n\nfunction 
resolveYamlMerge(data) {\n  return data === '<<' || data === null;\n}\n\nvar merge = new type('tag:yaml.org,2002:merge', {\n  kind: 'scalar',\n  resolve: resolveYamlMerge\n});\n\n/*eslint-disable no-bitwise*/\n\n\n\n\n\n// [ 64, 65, 66 ] -> [ padding, CR, LF ]\nvar BASE64_MAP = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\\n\\r';\n\n\nfunction resolveYamlBinary(data) {\n  if (data === null) return false;\n\n  var code, idx, bitlen = 0, max = data.length, map = BASE64_MAP;\n\n  // Convert one by one.\n  for (idx = 0; idx < max; idx++) {\n    code = map.indexOf(data.charAt(idx));\n\n    // Skip CR/LF\n    if (code > 64) continue;\n\n    // Fail on illegal characters\n    if (code < 0) return false;\n\n    bitlen += 6;\n  }\n\n  // If there are any bits left, source was corrupted\n  return (bitlen % 8) === 0;\n}\n\nfunction constructYamlBinary(data) {\n  var idx, tailbits,\n      input = data.replace(/[\\r\\n=]/g, ''), // remove CR/LF & padding to simplify scan\n      max = input.length,\n      map = BASE64_MAP,\n      bits = 0,\n      result = [];\n\n  // Collect by 6*4 bits (3 bytes)\n\n  for (idx = 0; idx < max; idx++) {\n    if ((idx % 4 === 0) && idx) {\n      result.push((bits >> 16) & 0xFF);\n      result.push((bits >> 8) & 0xFF);\n      result.push(bits & 0xFF);\n    }\n\n    bits = (bits << 6) | map.indexOf(input.charAt(idx));\n  }\n\n  // Dump tail\n\n  tailbits = (max % 4) * 6;\n\n  if (tailbits === 0) {\n    result.push((bits >> 16) & 0xFF);\n    result.push((bits >> 8) & 0xFF);\n    result.push(bits & 0xFF);\n  } else if (tailbits === 18) {\n    result.push((bits >> 10) & 0xFF);\n    result.push((bits >> 2) & 0xFF);\n  } else if (tailbits === 12) {\n    result.push((bits >> 4) & 0xFF);\n  }\n\n  return new Uint8Array(result);\n}\n\nfunction representYamlBinary(object /*, style*/) {\n  var result = '', bits = 0, idx, tail,\n      max = object.length,\n      map = BASE64_MAP;\n\n  // Convert every three bytes to 4 ASCII characters.\n\n 
 for (idx = 0; idx < max; idx++) {\n    if ((idx % 3 === 0) && idx) {\n      result += map[(bits >> 18) & 0x3F];\n      result += map[(bits >> 12) & 0x3F];\n      result += map[(bits >> 6) & 0x3F];\n      result += map[bits & 0x3F];\n    }\n\n    bits = (bits << 8) + object[idx];\n  }\n\n  // Dump tail\n\n  tail = max % 3;\n\n  if (tail === 0) {\n    result += map[(bits >> 18) & 0x3F];\n    result += map[(bits >> 12) & 0x3F];\n    result += map[(bits >> 6) & 0x3F];\n    result += map[bits & 0x3F];\n  } else if (tail === 2) {\n    result += map[(bits >> 10) & 0x3F];\n    result += map[(bits >> 4) & 0x3F];\n    result += map[(bits << 2) & 0x3F];\n    result += map[64];\n  } else if (tail === 1) {\n    result += map[(bits >> 2) & 0x3F];\n    result += map[(bits << 4) & 0x3F];\n    result += map[64];\n    result += map[64];\n  }\n\n  return result;\n}\n\nfunction isBinary(obj) {\n  return Object.prototype.toString.call(obj) ===  '[object Uint8Array]';\n}\n\nvar binary = new type('tag:yaml.org,2002:binary', {\n  kind: 'scalar',\n  resolve: resolveYamlBinary,\n  construct: constructYamlBinary,\n  predicate: isBinary,\n  represent: representYamlBinary\n});\n\nvar _hasOwnProperty$3 = Object.prototype.hasOwnProperty;\nvar _toString$2       = Object.prototype.toString;\n\nfunction resolveYamlOmap(data) {\n  if (data === null) return true;\n\n  var objectKeys = [], index, length, pair, pairKey, pairHasKey,\n      object = data;\n\n  for (index = 0, length = object.length; index < length; index += 1) {\n    pair = object[index];\n    pairHasKey = false;\n\n    if (_toString$2.call(pair) !== '[object Object]') return false;\n\n    for (pairKey in pair) {\n      if (_hasOwnProperty$3.call(pair, pairKey)) {\n        if (!pairHasKey) pairHasKey = true;\n        else return false;\n      }\n    }\n\n    if (!pairHasKey) return false;\n\n    if (objectKeys.indexOf(pairKey) === -1) objectKeys.push(pairKey);\n    else return false;\n  }\n\n  return true;\n}\n\nfunction 
constructYamlOmap(data) {\n  return data !== null ? data : [];\n}\n\nvar omap = new type('tag:yaml.org,2002:omap', {\n  kind: 'sequence',\n  resolve: resolveYamlOmap,\n  construct: constructYamlOmap\n});\n\nvar _toString$1 = Object.prototype.toString;\n\nfunction resolveYamlPairs(data) {\n  if (data === null) return true;\n\n  var index, length, pair, keys, result,\n      object = data;\n\n  result = new Array(object.length);\n\n  for (index = 0, length = object.length; index < length; index += 1) {\n    pair = object[index];\n\n    if (_toString$1.call(pair) !== '[object Object]') return false;\n\n    keys = Object.keys(pair);\n\n    if (keys.length !== 1) return false;\n\n    result[index] = [ keys[0], pair[keys[0]] ];\n  }\n\n  return true;\n}\n\nfunction constructYamlPairs(data) {\n  if (data === null) return [];\n\n  var index, length, pair, keys, result,\n      object = data;\n\n  result = new Array(object.length);\n\n  for (index = 0, length = object.length; index < length; index += 1) {\n    pair = object[index];\n\n    keys = Object.keys(pair);\n\n    result[index] = [ keys[0], pair[keys[0]] ];\n  }\n\n  return result;\n}\n\nvar pairs = new type('tag:yaml.org,2002:pairs', {\n  kind: 'sequence',\n  resolve: resolveYamlPairs,\n  construct: constructYamlPairs\n});\n\nvar _hasOwnProperty$2 = Object.prototype.hasOwnProperty;\n\nfunction resolveYamlSet(data) {\n  if (data === null) return true;\n\n  var key, object = data;\n\n  for (key in object) {\n    if (_hasOwnProperty$2.call(object, key)) {\n      if (object[key] !== null) return false;\n    }\n  }\n\n  return true;\n}\n\nfunction constructYamlSet(data) {\n  return data !== null ? 
data : {};\n}\n\nvar set = new type('tag:yaml.org,2002:set', {\n  kind: 'mapping',\n  resolve: resolveYamlSet,\n  construct: constructYamlSet\n});\n\nvar _default = core.extend({\n  implicit: [\n    timestamp,\n    merge\n  ],\n  explicit: [\n    binary,\n    omap,\n    pairs,\n    set\n  ]\n});\n\n/*eslint-disable max-len,no-use-before-define*/\n\n\n\n\n\n\n\nvar _hasOwnProperty$1 = Object.prototype.hasOwnProperty;\n\n\nvar CONTEXT_FLOW_IN   = 1;\nvar CONTEXT_FLOW_OUT  = 2;\nvar CONTEXT_BLOCK_IN  = 3;\nvar CONTEXT_BLOCK_OUT = 4;\n\n\nvar CHOMPING_CLIP  = 1;\nvar CHOMPING_STRIP = 2;\nvar CHOMPING_KEEP  = 3;\n\n\nvar PATTERN_NON_PRINTABLE         = /[\\x00-\\x08\\x0B\\x0C\\x0E-\\x1F\\x7F-\\x84\\x86-\\x9F\\uFFFE\\uFFFF]|[\\uD800-\\uDBFF](?![\\uDC00-\\uDFFF])|(?:[^\\uD800-\\uDBFF]|^)[\\uDC00-\\uDFFF]/;\nvar PATTERN_NON_ASCII_LINE_BREAKS = /[\\x85\\u2028\\u2029]/;\nvar PATTERN_FLOW_INDICATORS       = /[,\\[\\]\\{\\}]/;\nvar PATTERN_TAG_HANDLE            = /^(?:!|!!|![a-z\\-]+!)$/i;\nvar PATTERN_TAG_URI               = /^(?:!|[^,\\[\\]\\{\\}])(?:%[0-9a-f]{2}|[0-9a-z\\-#;\\/\\?:@&=\\+\\$,_\\.!~\\*'\\(\\)\\[\\]])*$/i;\n\n\nfunction _class(obj) { return Object.prototype.toString.call(obj); }\n\nfunction is_EOL(c) {\n  return (c === 0x0A/* LF */) || (c === 0x0D/* CR */);\n}\n\nfunction is_WHITE_SPACE(c) {\n  return (c === 0x09/* Tab */) || (c === 0x20/* Space */);\n}\n\nfunction is_WS_OR_EOL(c) {\n  return (c === 0x09/* Tab */) ||\n         (c === 0x20/* Space */) ||\n         (c === 0x0A/* LF */) ||\n         (c === 0x0D/* CR */);\n}\n\nfunction is_FLOW_INDICATOR(c) {\n  return c === 0x2C/* , */ ||\n         c === 0x5B/* [ */ ||\n         c === 0x5D/* ] */ ||\n         c === 0x7B/* { */ ||\n         c === 0x7D/* } */;\n}\n\nfunction fromHexCode(c) {\n  var lc;\n\n  if ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) {\n    return c - 0x30;\n  }\n\n  /*eslint-disable no-bitwise*/\n  lc = c | 0x20;\n\n  if ((0x61/* a */ <= lc) && (lc <= 0x66/* f */)) {\n    return lc - 0x61 + 
10;\n  }\n\n  return -1;\n}\n\nfunction escapedHexLen(c) {\n  if (c === 0x78/* x */) { return 2; }\n  if (c === 0x75/* u */) { return 4; }\n  if (c === 0x55/* U */) { return 8; }\n  return 0;\n}\n\nfunction fromDecimalCode(c) {\n  if ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) {\n    return c - 0x30;\n  }\n\n  return -1;\n}\n\nfunction simpleEscapeSequence(c) {\n  /* eslint-disable indent */\n  return (c === 0x30/* 0 */) ? '\\x00' :\n        (c === 0x61/* a */) ? '\\x07' :\n        (c === 0x62/* b */) ? '\\x08' :\n        (c === 0x74/* t */) ? '\\x09' :\n        (c === 0x09/* Tab */) ? '\\x09' :\n        (c === 0x6E/* n */) ? '\\x0A' :\n        (c === 0x76/* v */) ? '\\x0B' :\n        (c === 0x66/* f */) ? '\\x0C' :\n        (c === 0x72/* r */) ? '\\x0D' :\n        (c === 0x65/* e */) ? '\\x1B' :\n        (c === 0x20/* Space */) ? ' ' :\n        (c === 0x22/* \" */) ? '\\x22' :\n        (c === 0x2F/* / */) ? '/' :\n        (c === 0x5C/* \\ */) ? '\\x5C' :\n        (c === 0x4E/* N */) ? '\\x85' :\n        (c === 0x5F/* _ */) ? '\\xA0' :\n        (c === 0x4C/* L */) ? '\\u2028' :\n        (c === 0x50/* P */) ? 
'\\u2029' : '';\n}\n\nfunction charFromCodepoint(c) {\n  if (c <= 0xFFFF) {\n    return String.fromCharCode(c);\n  }\n  // Encode UTF-16 surrogate pair\n  // https://en.wikipedia.org/wiki/UTF-16#Code_points_U.2B010000_to_U.2B10FFFF\n  return String.fromCharCode(\n    ((c - 0x010000) >> 10) + 0xD800,\n    ((c - 0x010000) & 0x03FF) + 0xDC00\n  );\n}\n\n// set a property of a literal object, while protecting against prototype pollution,\n// see https://github.com/nodeca/js-yaml/issues/164 for more details\nfunction setProperty(object, key, value) {\n  // used for this specific key only because Object.defineProperty is slow\n  if (key === '__proto__') {\n    Object.defineProperty(object, key, {\n      configurable: true,\n      enumerable: true,\n      writable: true,\n      value: value\n    });\n  } else {\n    object[key] = value;\n  }\n}\n\nvar simpleEscapeCheck = new Array(256); // integer, for fast access\nvar simpleEscapeMap = new Array(256);\nfor (var i = 0; i < 256; i++) {\n  simpleEscapeCheck[i] = simpleEscapeSequence(i) ? 1 : 0;\n  simpleEscapeMap[i] = simpleEscapeSequence(i);\n}\n\n\nfunction State$1(input, options) {\n  this.input = input;\n\n  this.filename  = options['filename']  || null;\n  this.schema    = options['schema']    || _default;\n  this.onWarning = options['onWarning'] || null;\n  // (Hidden) Remove? 
makes the loader to expect YAML 1.1 documents\n  // if such documents have no explicit %YAML directive\n  this.legacy    = options['legacy']    || false;\n\n  this.json      = options['json']      || false;\n  this.listener  = options['listener']  || null;\n\n  this.implicitTypes = this.schema.compiledImplicit;\n  this.typeMap       = this.schema.compiledTypeMap;\n\n  this.length     = input.length;\n  this.position   = 0;\n  this.line       = 0;\n  this.lineStart  = 0;\n  this.lineIndent = 0;\n\n  // position of first leading tab in the current line,\n  // used to make sure there are no tabs in the indentation\n  this.firstTabInLine = -1;\n\n  this.documents = [];\n\n  /*\n  this.version;\n  this.checkLineBreaks;\n  this.tagMap;\n  this.anchorMap;\n  this.tag;\n  this.anchor;\n  this.kind;\n  this.result;*/\n\n}\n\n\nfunction generateError(state, message) {\n  var mark = {\n    name:     state.filename,\n    buffer:   state.input.slice(0, -1), // omit trailing \\0\n    position: state.position,\n    line:     state.line,\n    column:   state.position - state.lineStart\n  };\n\n  mark.snippet = snippet(mark);\n\n  return new exception(message, mark);\n}\n\nfunction throwError(state, message) {\n  throw generateError(state, message);\n}\n\nfunction throwWarning(state, message) {\n  if (state.onWarning) {\n    state.onWarning.call(null, generateError(state, message));\n  }\n}\n\n\nvar directiveHandlers = {\n\n  YAML: function handleYamlDirective(state, name, args) {\n\n    var match, major, minor;\n\n    if (state.version !== null) {\n      throwError(state, 'duplication of %YAML directive');\n    }\n\n    if (args.length !== 1) {\n      throwError(state, 'YAML directive accepts exactly one argument');\n    }\n\n    match = /^([0-9]+)\\.([0-9]+)$/.exec(args[0]);\n\n    if (match === null) {\n      throwError(state, 'ill-formed argument of the YAML directive');\n    }\n\n    major = parseInt(match[1], 10);\n    minor = parseInt(match[2], 10);\n\n    if (major !== 1) 
{\n      throwError(state, 'unacceptable YAML version of the document');\n    }\n\n    state.version = args[0];\n    state.checkLineBreaks = (minor < 2);\n\n    if (minor !== 1 && minor !== 2) {\n      throwWarning(state, 'unsupported YAML version of the document');\n    }\n  },\n\n  TAG: function handleTagDirective(state, name, args) {\n\n    var handle, prefix;\n\n    if (args.length !== 2) {\n      throwError(state, 'TAG directive accepts exactly two arguments');\n    }\n\n    handle = args[0];\n    prefix = args[1];\n\n    if (!PATTERN_TAG_HANDLE.test(handle)) {\n      throwError(state, 'ill-formed tag handle (first argument) of the TAG directive');\n    }\n\n    if (_hasOwnProperty$1.call(state.tagMap, handle)) {\n      throwError(state, 'there is a previously declared suffix for \"' + handle + '\" tag handle');\n    }\n\n    if (!PATTERN_TAG_URI.test(prefix)) {\n      throwError(state, 'ill-formed tag prefix (second argument) of the TAG directive');\n    }\n\n    try {\n      prefix = decodeURIComponent(prefix);\n    } catch (err) {\n      throwError(state, 'tag prefix is malformed: ' + prefix);\n    }\n\n    state.tagMap[handle] = prefix;\n  }\n};\n\n\nfunction captureSegment(state, start, end, checkJson) {\n  var _position, _length, _character, _result;\n\n  if (start < end) {\n    _result = state.input.slice(start, end);\n\n    if (checkJson) {\n      for (_position = 0, _length = _result.length; _position < _length; _position += 1) {\n        _character = _result.charCodeAt(_position);\n        if (!(_character === 0x09 ||\n              (0x20 <= _character && _character <= 0x10FFFF))) {\n          throwError(state, 'expected valid JSON character');\n        }\n      }\n    } else if (PATTERN_NON_PRINTABLE.test(_result)) {\n      throwError(state, 'the stream contains non-printable characters');\n    }\n\n    state.result += _result;\n  }\n}\n\nfunction mergeMappings(state, destination, source, overridableKeys) {\n  var sourceKeys, key, index, 
quantity;\n\n  if (!common.isObject(source)) {\n    throwError(state, 'cannot merge mappings; the provided source object is unacceptable');\n  }\n\n  sourceKeys = Object.keys(source);\n\n  for (index = 0, quantity = sourceKeys.length; index < quantity; index += 1) {\n    key = sourceKeys[index];\n\n    if (!_hasOwnProperty$1.call(destination, key)) {\n      setProperty(destination, key, source[key]);\n      overridableKeys[key] = true;\n    }\n  }\n}\n\nfunction storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode,\n  startLine, startLineStart, startPos) {\n\n  var index, quantity;\n\n  // The output is a plain object here, so keys can only be strings.\n  // We need to convert keyNode to a string, but doing so can hang the process\n  // (deeply nested arrays that explode exponentially using aliases).\n  if (Array.isArray(keyNode)) {\n    keyNode = Array.prototype.slice.call(keyNode);\n\n    for (index = 0, quantity = keyNode.length; index < quantity; index += 1) {\n      if (Array.isArray(keyNode[index])) {\n        throwError(state, 'nested arrays are not supported inside keys');\n      }\n\n      if (typeof keyNode === 'object' && _class(keyNode[index]) === '[object Object]') {\n        keyNode[index] = '[object Object]';\n      }\n    }\n  }\n\n  // Avoid code execution in load() via toString property\n  // (still use its own toString for arrays, timestamps,\n  // and whatever user schema extensions happen to have @@toStringTag)\n  if (typeof keyNode === 'object' && _class(keyNode) === '[object Object]') {\n    keyNode = '[object Object]';\n  }\n\n\n  keyNode = String(keyNode);\n\n  if (_result === null) {\n    _result = {};\n  }\n\n  if (keyTag === 'tag:yaml.org,2002:merge') {\n    if (Array.isArray(valueNode)) {\n      for (index = 0, quantity = valueNode.length; index < quantity; index += 1) {\n        mergeMappings(state, _result, valueNode[index], overridableKeys);\n      }\n    } else {\n      mergeMappings(state, _result, 
valueNode, overridableKeys);\n    }\n  } else {\n    if (!state.json &&\n        !_hasOwnProperty$1.call(overridableKeys, keyNode) &&\n        _hasOwnProperty$1.call(_result, keyNode)) {\n      state.line = startLine || state.line;\n      state.lineStart = startLineStart || state.lineStart;\n      state.position = startPos || state.position;\n      throwError(state, 'duplicated mapping key');\n    }\n\n    setProperty(_result, keyNode, valueNode);\n    delete overridableKeys[keyNode];\n  }\n\n  return _result;\n}\n\nfunction readLineBreak(state) {\n  var ch;\n\n  ch = state.input.charCodeAt(state.position);\n\n  if (ch === 0x0A/* LF */) {\n    state.position++;\n  } else if (ch === 0x0D/* CR */) {\n    state.position++;\n    if (state.input.charCodeAt(state.position) === 0x0A/* LF */) {\n      state.position++;\n    }\n  } else {\n    throwError(state, 'a line break is expected');\n  }\n\n  state.line += 1;\n  state.lineStart = state.position;\n  state.firstTabInLine = -1;\n}\n\nfunction skipSeparationSpace(state, allowComments, checkIndent) {\n  var lineBreaks = 0,\n      ch = state.input.charCodeAt(state.position);\n\n  while (ch !== 0) {\n    while (is_WHITE_SPACE(ch)) {\n      if (ch === 0x09/* Tab */ && state.firstTabInLine === -1) {\n        state.firstTabInLine = state.position;\n      }\n      ch = state.input.charCodeAt(++state.position);\n    }\n\n    if (allowComments && ch === 0x23/* # */) {\n      do {\n        ch = state.input.charCodeAt(++state.position);\n      } while (ch !== 0x0A/* LF */ && ch !== 0x0D/* CR */ && ch !== 0);\n    }\n\n    if (is_EOL(ch)) {\n      readLineBreak(state);\n\n      ch = state.input.charCodeAt(state.position);\n      lineBreaks++;\n      state.lineIndent = 0;\n\n      while (ch === 0x20/* Space */) {\n        state.lineIndent++;\n        ch = state.input.charCodeAt(++state.position);\n      }\n    } else {\n      break;\n    }\n  }\n\n  if (checkIndent !== -1 && lineBreaks !== 0 && state.lineIndent < checkIndent) {\n    
throwWarning(state, 'deficient indentation');\n  }\n\n  return lineBreaks;\n}\n\nfunction testDocumentSeparator(state) {\n  var _position = state.position,\n      ch;\n\n  ch = state.input.charCodeAt(_position);\n\n  // Condition state.position === state.lineStart is tested\n  // in parent on each call, for efficiency. No needs to test here again.\n  if ((ch === 0x2D/* - */ || ch === 0x2E/* . */) &&\n      ch === state.input.charCodeAt(_position + 1) &&\n      ch === state.input.charCodeAt(_position + 2)) {\n\n    _position += 3;\n\n    ch = state.input.charCodeAt(_position);\n\n    if (ch === 0 || is_WS_OR_EOL(ch)) {\n      return true;\n    }\n  }\n\n  return false;\n}\n\nfunction writeFoldedLines(state, count) {\n  if (count === 1) {\n    state.result += ' ';\n  } else if (count > 1) {\n    state.result += common.repeat('\\n', count - 1);\n  }\n}\n\n\nfunction readPlainScalar(state, nodeIndent, withinFlowCollection) {\n  var preceding,\n      following,\n      captureStart,\n      captureEnd,\n      hasPendingContent,\n      _line,\n      _lineStart,\n      _lineIndent,\n      _kind = state.kind,\n      _result = state.result,\n      ch;\n\n  ch = state.input.charCodeAt(state.position);\n\n  if (is_WS_OR_EOL(ch)      ||\n      is_FLOW_INDICATOR(ch) ||\n      ch === 0x23/* # */    ||\n      ch === 0x26/* & */    ||\n      ch === 0x2A/* * */    ||\n      ch === 0x21/* ! */    ||\n      ch === 0x7C/* | */    ||\n      ch === 0x3E/* > */    ||\n      ch === 0x27/* ' */    ||\n      ch === 0x22/* \" */    ||\n      ch === 0x25/* % */    ||\n      ch === 0x40/* @ */    ||\n      ch === 0x60/* ` */) {\n    return false;\n  }\n\n  if (ch === 0x3F/* ? 
*/ || ch === 0x2D/* - */) {\n    following = state.input.charCodeAt(state.position + 1);\n\n    if (is_WS_OR_EOL(following) ||\n        withinFlowCollection && is_FLOW_INDICATOR(following)) {\n      return false;\n    }\n  }\n\n  state.kind = 'scalar';\n  state.result = '';\n  captureStart = captureEnd = state.position;\n  hasPendingContent = false;\n\n  while (ch !== 0) {\n    if (ch === 0x3A/* : */) {\n      following = state.input.charCodeAt(state.position + 1);\n\n      if (is_WS_OR_EOL(following) ||\n          withinFlowCollection && is_FLOW_INDICATOR(following)) {\n        break;\n      }\n\n    } else if (ch === 0x23/* # */) {\n      preceding = state.input.charCodeAt(state.position - 1);\n\n      if (is_WS_OR_EOL(preceding)) {\n        break;\n      }\n\n    } else if ((state.position === state.lineStart && testDocumentSeparator(state)) ||\n               withinFlowCollection && is_FLOW_INDICATOR(ch)) {\n      break;\n\n    } else if (is_EOL(ch)) {\n      _line = state.line;\n      _lineStart = state.lineStart;\n      _lineIndent = state.lineIndent;\n      skipSeparationSpace(state, false, -1);\n\n      if (state.lineIndent >= nodeIndent) {\n        hasPendingContent = true;\n        ch = state.input.charCodeAt(state.position);\n        continue;\n      } else {\n        state.position = captureEnd;\n        state.line = _line;\n        state.lineStart = _lineStart;\n        state.lineIndent = _lineIndent;\n        break;\n      }\n    }\n\n    if (hasPendingContent) {\n      captureSegment(state, captureStart, captureEnd, false);\n      writeFoldedLines(state, state.line - _line);\n      captureStart = captureEnd = state.position;\n      hasPendingContent = false;\n    }\n\n    if (!is_WHITE_SPACE(ch)) {\n      captureEnd = state.position + 1;\n    }\n\n    ch = state.input.charCodeAt(++state.position);\n  }\n\n  captureSegment(state, captureStart, captureEnd, false);\n\n  if (state.result) {\n    return true;\n  }\n\n  state.kind = _kind;\n  state.result 
= _result;\n  return false;\n}\n\nfunction readSingleQuotedScalar(state, nodeIndent) {\n  var ch,\n      captureStart, captureEnd;\n\n  ch = state.input.charCodeAt(state.position);\n\n  if (ch !== 0x27/* ' */) {\n    return false;\n  }\n\n  state.kind = 'scalar';\n  state.result = '';\n  state.position++;\n  captureStart = captureEnd = state.position;\n\n  while ((ch = state.input.charCodeAt(state.position)) !== 0) {\n    if (ch === 0x27/* ' */) {\n      captureSegment(state, captureStart, state.position, true);\n      ch = state.input.charCodeAt(++state.position);\n\n      if (ch === 0x27/* ' */) {\n        captureStart = state.position;\n        state.position++;\n        captureEnd = state.position;\n      } else {\n        return true;\n      }\n\n    } else if (is_EOL(ch)) {\n      captureSegment(state, captureStart, captureEnd, true);\n      writeFoldedLines(state, skipSeparationSpace(state, false, nodeIndent));\n      captureStart = captureEnd = state.position;\n\n    } else if (state.position === state.lineStart && testDocumentSeparator(state)) {\n      throwError(state, 'unexpected end of the document within a single quoted scalar');\n\n    } else {\n      state.position++;\n      captureEnd = state.position;\n    }\n  }\n\n  throwError(state, 'unexpected end of the stream within a single quoted scalar');\n}\n\nfunction readDoubleQuotedScalar(state, nodeIndent) {\n  var captureStart,\n      captureEnd,\n      hexLength,\n      hexResult,\n      tmp,\n      ch;\n\n  ch = state.input.charCodeAt(state.position);\n\n  if (ch !== 0x22/* \" */) {\n    return false;\n  }\n\n  state.kind = 'scalar';\n  state.result = '';\n  state.position++;\n  captureStart = captureEnd = state.position;\n\n  while ((ch = state.input.charCodeAt(state.position)) !== 0) {\n    if (ch === 0x22/* \" */) {\n      captureSegment(state, captureStart, state.position, true);\n      state.position++;\n      return true;\n\n    } else if (ch === 0x5C/* \\ */) {\n      captureSegment(state, 
captureStart, state.position, true);\n      ch = state.input.charCodeAt(++state.position);\n\n      if (is_EOL(ch)) {\n        skipSeparationSpace(state, false, nodeIndent);\n\n        // TODO: rework to inline fn with no type cast?\n      } else if (ch < 256 && simpleEscapeCheck[ch]) {\n        state.result += simpleEscapeMap[ch];\n        state.position++;\n\n      } else if ((tmp = escapedHexLen(ch)) > 0) {\n        hexLength = tmp;\n        hexResult = 0;\n\n        for (; hexLength > 0; hexLength--) {\n          ch = state.input.charCodeAt(++state.position);\n\n          if ((tmp = fromHexCode(ch)) >= 0) {\n            hexResult = (hexResult << 4) + tmp;\n\n          } else {\n            throwError(state, 'expected hexadecimal character');\n          }\n        }\n\n        state.result += charFromCodepoint(hexResult);\n\n        state.position++;\n\n      } else {\n        throwError(state, 'unknown escape sequence');\n      }\n\n      captureStart = captureEnd = state.position;\n\n    } else if (is_EOL(ch)) {\n      captureSegment(state, captureStart, captureEnd, true);\n      writeFoldedLines(state, skipSeparationSpace(state, false, nodeIndent));\n      captureStart = captureEnd = state.position;\n\n    } else if (state.position === state.lineStart && testDocumentSeparator(state)) {\n      throwError(state, 'unexpected end of the document within a double quoted scalar');\n\n    } else {\n      state.position++;\n      captureEnd = state.position;\n    }\n  }\n\n  throwError(state, 'unexpected end of the stream within a double quoted scalar');\n}\n\nfunction readFlowCollection(state, nodeIndent) {\n  var readNext = true,\n      _line,\n      _lineStart,\n      _pos,\n      _tag     = state.tag,\n      _result,\n      _anchor  = state.anchor,\n      following,\n      terminator,\n      isPair,\n      isExplicitPair,\n      isMapping,\n      overridableKeys = Object.create(null),\n      keyNode,\n      keyTag,\n      valueNode,\n      ch;\n\n  ch = 
state.input.charCodeAt(state.position);\n\n  if (ch === 0x5B/* [ */) {\n    terminator = 0x5D;/* ] */\n    isMapping = false;\n    _result = [];\n  } else if (ch === 0x7B/* { */) {\n    terminator = 0x7D;/* } */\n    isMapping = true;\n    _result = {};\n  } else {\n    return false;\n  }\n\n  if (state.anchor !== null) {\n    state.anchorMap[state.anchor] = _result;\n  }\n\n  ch = state.input.charCodeAt(++state.position);\n\n  while (ch !== 0) {\n    skipSeparationSpace(state, true, nodeIndent);\n\n    ch = state.input.charCodeAt(state.position);\n\n    if (ch === terminator) {\n      state.position++;\n      state.tag = _tag;\n      state.anchor = _anchor;\n      state.kind = isMapping ? 'mapping' : 'sequence';\n      state.result = _result;\n      return true;\n    } else if (!readNext) {\n      throwError(state, 'missed comma between flow collection entries');\n    } else if (ch === 0x2C/* , */) {\n      // \"flow collection entries can never be completely empty\", as per YAML 1.2, section 7.4\n      throwError(state, \"expected the node content, but found ','\");\n    }\n\n    keyTag = keyNode = valueNode = null;\n    isPair = isExplicitPair = false;\n\n    if (ch === 0x3F/* ? 
*/) {\n      following = state.input.charCodeAt(state.position + 1);\n\n      if (is_WS_OR_EOL(following)) {\n        isPair = isExplicitPair = true;\n        state.position++;\n        skipSeparationSpace(state, true, nodeIndent);\n      }\n    }\n\n    _line = state.line; // Save the current line.\n    _lineStart = state.lineStart;\n    _pos = state.position;\n    composeNode(state, nodeIndent, CONTEXT_FLOW_IN, false, true);\n    keyTag = state.tag;\n    keyNode = state.result;\n    skipSeparationSpace(state, true, nodeIndent);\n\n    ch = state.input.charCodeAt(state.position);\n\n    if ((isExplicitPair || state.line === _line) && ch === 0x3A/* : */) {\n      isPair = true;\n      ch = state.input.charCodeAt(++state.position);\n      skipSeparationSpace(state, true, nodeIndent);\n      composeNode(state, nodeIndent, CONTEXT_FLOW_IN, false, true);\n      valueNode = state.result;\n    }\n\n    if (isMapping) {\n      storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, _line, _lineStart, _pos);\n    } else if (isPair) {\n      _result.push(storeMappingPair(state, null, overridableKeys, keyTag, keyNode, valueNode, _line, _lineStart, _pos));\n    } else {\n      _result.push(keyNode);\n    }\n\n    skipSeparationSpace(state, true, nodeIndent);\n\n    ch = state.input.charCodeAt(state.position);\n\n    if (ch === 0x2C/* , */) {\n      readNext = true;\n      ch = state.input.charCodeAt(++state.position);\n    } else {\n      readNext = false;\n    }\n  }\n\n  throwError(state, 'unexpected end of the stream within a flow collection');\n}\n\nfunction readBlockScalar(state, nodeIndent) {\n  var captureStart,\n      folding,\n      chomping       = CHOMPING_CLIP,\n      didReadContent = false,\n      detectedIndent = false,\n      textIndent     = nodeIndent,\n      emptyLines     = 0,\n      atMoreIndented = false,\n      tmp,\n      ch;\n\n  ch = state.input.charCodeAt(state.position);\n\n  if (ch === 0x7C/* | */) {\n    folding = false;\n  } 
else if (ch === 0x3E/* > */) {\n    folding = true;\n  } else {\n    return false;\n  }\n\n  state.kind = 'scalar';\n  state.result = '';\n\n  while (ch !== 0) {\n    ch = state.input.charCodeAt(++state.position);\n\n    if (ch === 0x2B/* + */ || ch === 0x2D/* - */) {\n      if (CHOMPING_CLIP === chomping) {\n        chomping = (ch === 0x2B/* + */) ? CHOMPING_KEEP : CHOMPING_STRIP;\n      } else {\n        throwError(state, 'repeat of a chomping mode identifier');\n      }\n\n    } else if ((tmp = fromDecimalCode(ch)) >= 0) {\n      if (tmp === 0) {\n        throwError(state, 'bad explicit indentation width of a block scalar; it cannot be less than one');\n      } else if (!detectedIndent) {\n        textIndent = nodeIndent + tmp - 1;\n        detectedIndent = true;\n      } else {\n        throwError(state, 'repeat of an indentation width identifier');\n      }\n\n    } else {\n      break;\n    }\n  }\n\n  if (is_WHITE_SPACE(ch)) {\n    do { ch = state.input.charCodeAt(++state.position); }\n    while (is_WHITE_SPACE(ch));\n\n    if (ch === 0x23/* # */) {\n      do { ch = state.input.charCodeAt(++state.position); }\n      while (!is_EOL(ch) && (ch !== 0));\n    }\n  }\n\n  while (ch !== 0) {\n    readLineBreak(state);\n    state.lineIndent = 0;\n\n    ch = state.input.charCodeAt(state.position);\n\n    while ((!detectedIndent || state.lineIndent < textIndent) &&\n           (ch === 0x20/* Space */)) {\n      state.lineIndent++;\n      ch = state.input.charCodeAt(++state.position);\n    }\n\n    if (!detectedIndent && state.lineIndent > textIndent) {\n      textIndent = state.lineIndent;\n    }\n\n    if (is_EOL(ch)) {\n      emptyLines++;\n      continue;\n    }\n\n    // End of the scalar.\n    if (state.lineIndent < textIndent) {\n\n      // Perform the chomping.\n      if (chomping === CHOMPING_KEEP) {\n        state.result += common.repeat('\\n', didReadContent ? 
1 + emptyLines : emptyLines);\n      } else if (chomping === CHOMPING_CLIP) {\n        if (didReadContent) { // i.e. only if the scalar is not empty.\n          state.result += '\\n';\n        }\n      }\n\n      // Break this `while` cycle and go to the funciton's epilogue.\n      break;\n    }\n\n    // Folded style: use fancy rules to handle line breaks.\n    if (folding) {\n\n      // Lines starting with white space characters (more-indented lines) are not folded.\n      if (is_WHITE_SPACE(ch)) {\n        atMoreIndented = true;\n        // except for the first content line (cf. Example 8.1)\n        state.result += common.repeat('\\n', didReadContent ? 1 + emptyLines : emptyLines);\n\n      // End of more-indented block.\n      } else if (atMoreIndented) {\n        atMoreIndented = false;\n        state.result += common.repeat('\\n', emptyLines + 1);\n\n      // Just one line break - perceive as the same line.\n      } else if (emptyLines === 0) {\n        if (didReadContent) { // i.e. only if we have already read some scalar content.\n          state.result += ' ';\n        }\n\n      // Several line breaks - perceive as different lines.\n      } else {\n        state.result += common.repeat('\\n', emptyLines);\n      }\n\n    // Literal style: just add exact number of line breaks between content lines.\n    } else {\n      // Keep all line breaks except the header line break.\n      state.result += common.repeat('\\n', didReadContent ? 
1 + emptyLines : emptyLines);\n    }\n\n    didReadContent = true;\n    detectedIndent = true;\n    emptyLines = 0;\n    captureStart = state.position;\n\n    while (!is_EOL(ch) && (ch !== 0)) {\n      ch = state.input.charCodeAt(++state.position);\n    }\n\n    captureSegment(state, captureStart, state.position, false);\n  }\n\n  return true;\n}\n\nfunction readBlockSequence(state, nodeIndent) {\n  var _line,\n      _tag      = state.tag,\n      _anchor   = state.anchor,\n      _result   = [],\n      following,\n      detected  = false,\n      ch;\n\n  // there is a leading tab before this token, so it can't be a block sequence/mapping;\n  // it can still be flow sequence/mapping or a scalar\n  if (state.firstTabInLine !== -1) return false;\n\n  if (state.anchor !== null) {\n    state.anchorMap[state.anchor] = _result;\n  }\n\n  ch = state.input.charCodeAt(state.position);\n\n  while (ch !== 0) {\n    if (state.firstTabInLine !== -1) {\n      state.position = state.firstTabInLine;\n      throwError(state, 'tab characters must not be used in indentation');\n    }\n\n    if (ch !== 0x2D/* - */) {\n      break;\n    }\n\n    following = state.input.charCodeAt(state.position + 1);\n\n    if (!is_WS_OR_EOL(following)) {\n      break;\n    }\n\n    detected = true;\n    state.position++;\n\n    if (skipSeparationSpace(state, true, -1)) {\n      if (state.lineIndent <= nodeIndent) {\n        _result.push(null);\n        ch = state.input.charCodeAt(state.position);\n        continue;\n      }\n    }\n\n    _line = state.line;\n    composeNode(state, nodeIndent, CONTEXT_BLOCK_IN, false, true);\n    _result.push(state.result);\n    skipSeparationSpace(state, true, -1);\n\n    ch = state.input.charCodeAt(state.position);\n\n    if ((state.line === _line || state.lineIndent > nodeIndent) && (ch !== 0)) {\n      throwError(state, 'bad indentation of a sequence entry');\n    } else if (state.lineIndent < nodeIndent) {\n      break;\n    }\n  }\n\n  if (detected) {\n    
state.tag = _tag;\n    state.anchor = _anchor;\n    state.kind = 'sequence';\n    state.result = _result;\n    return true;\n  }\n  return false;\n}\n\nfunction readBlockMapping(state, nodeIndent, flowIndent) {\n  var following,\n      allowCompact,\n      _line,\n      _keyLine,\n      _keyLineStart,\n      _keyPos,\n      _tag          = state.tag,\n      _anchor       = state.anchor,\n      _result       = {},\n      overridableKeys = Object.create(null),\n      keyTag        = null,\n      keyNode       = null,\n      valueNode     = null,\n      atExplicitKey = false,\n      detected      = false,\n      ch;\n\n  // there is a leading tab before this token, so it can't be a block sequence/mapping;\n  // it can still be flow sequence/mapping or a scalar\n  if (state.firstTabInLine !== -1) return false;\n\n  if (state.anchor !== null) {\n    state.anchorMap[state.anchor] = _result;\n  }\n\n  ch = state.input.charCodeAt(state.position);\n\n  while (ch !== 0) {\n    if (!atExplicitKey && state.firstTabInLine !== -1) {\n      state.position = state.firstTabInLine;\n      throwError(state, 'tab characters must not be used in indentation');\n    }\n\n    following = state.input.charCodeAt(state.position + 1);\n    _line = state.line; // Save the current line.\n\n    //\n    // Explicit notation case. There are two separate blocks:\n    // first for the key (denoted by \"?\") and second for the value (denoted by \":\")\n    //\n    if ((ch === 0x3F/* ? */ || ch === 0x3A/* : */) && is_WS_OR_EOL(following)) {\n\n      if (ch === 0x3F/* ? */) {\n        if (atExplicitKey) {\n          storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos);\n          keyTag = keyNode = valueNode = null;\n        }\n\n        detected = true;\n        atExplicitKey = true;\n        allowCompact = true;\n\n      } else if (atExplicitKey) {\n        // i.e. 
0x3A/* : */ === character after the explicit key.\n        atExplicitKey = false;\n        allowCompact = true;\n\n      } else {\n        throwError(state, 'incomplete explicit mapping pair; a key node is missed; or followed by a non-tabulated empty line');\n      }\n\n      state.position += 1;\n      ch = following;\n\n    //\n    // Implicit notation case. Flow-style node as the key first, then \":\", and the value.\n    //\n    } else {\n      _keyLine = state.line;\n      _keyLineStart = state.lineStart;\n      _keyPos = state.position;\n\n      if (!composeNode(state, flowIndent, CONTEXT_FLOW_OUT, false, true)) {\n        // Neither implicit nor explicit notation.\n        // Reading is done. Go to the epilogue.\n        break;\n      }\n\n      if (state.line === _line) {\n        ch = state.input.charCodeAt(state.position);\n\n        while (is_WHITE_SPACE(ch)) {\n          ch = state.input.charCodeAt(++state.position);\n        }\n\n        if (ch === 0x3A/* : */) {\n          ch = state.input.charCodeAt(++state.position);\n\n          if (!is_WS_OR_EOL(ch)) {\n            throwError(state, 'a whitespace character is expected after the key-value separator within a block mapping');\n          }\n\n          if (atExplicitKey) {\n            storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos);\n            keyTag = keyNode = valueNode = null;\n          }\n\n          detected = true;\n          atExplicitKey = false;\n          allowCompact = false;\n          keyTag = state.tag;\n          keyNode = state.result;\n\n        } else if (detected) {\n          throwError(state, 'can not read an implicit mapping pair; a colon is missed');\n\n        } else {\n          state.tag = _tag;\n          state.anchor = _anchor;\n          return true; // Keep the result of `composeNode`.\n        }\n\n      } else if (detected) {\n        throwError(state, 'can not read a block mapping entry; a multiline key may 
not be an implicit key');\n\n      } else {\n        state.tag = _tag;\n        state.anchor = _anchor;\n        return true; // Keep the result of `composeNode`.\n      }\n    }\n\n    //\n    // Common reading code for both explicit and implicit notations.\n    //\n    if (state.line === _line || state.lineIndent > nodeIndent) {\n      if (atExplicitKey) {\n        _keyLine = state.line;\n        _keyLineStart = state.lineStart;\n        _keyPos = state.position;\n      }\n\n      if (composeNode(state, nodeIndent, CONTEXT_BLOCK_OUT, true, allowCompact)) {\n        if (atExplicitKey) {\n          keyNode = state.result;\n        } else {\n          valueNode = state.result;\n        }\n      }\n\n      if (!atExplicitKey) {\n        storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, _keyLine, _keyLineStart, _keyPos);\n        keyTag = keyNode = valueNode = null;\n      }\n\n      skipSeparationSpace(state, true, -1);\n      ch = state.input.charCodeAt(state.position);\n    }\n\n    if ((state.line === _line || state.lineIndent > nodeIndent) && (ch !== 0)) {\n      throwError(state, 'bad indentation of a mapping entry');\n    } else if (state.lineIndent < nodeIndent) {\n      break;\n    }\n  }\n\n  //\n  // Epilogue.\n  //\n\n  // Special case: last mapping's node contains only the key in explicit notation.\n  if (atExplicitKey) {\n    storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos);\n  }\n\n  // Expose the resulting mapping.\n  if (detected) {\n    state.tag = _tag;\n    state.anchor = _anchor;\n    state.kind = 'mapping';\n    state.result = _result;\n  }\n\n  return detected;\n}\n\nfunction readTagProperty(state) {\n  var _position,\n      isVerbatim = false,\n      isNamed    = false,\n      tagHandle,\n      tagName,\n      ch;\n\n  ch = state.input.charCodeAt(state.position);\n\n  if (ch !== 0x21/* ! 
*/) return false;\n\n  if (state.tag !== null) {\n    throwError(state, 'duplication of a tag property');\n  }\n\n  ch = state.input.charCodeAt(++state.position);\n\n  if (ch === 0x3C/* < */) {\n    isVerbatim = true;\n    ch = state.input.charCodeAt(++state.position);\n\n  } else if (ch === 0x21/* ! */) {\n    isNamed = true;\n    tagHandle = '!!';\n    ch = state.input.charCodeAt(++state.position);\n\n  } else {\n    tagHandle = '!';\n  }\n\n  _position = state.position;\n\n  if (isVerbatim) {\n    do { ch = state.input.charCodeAt(++state.position); }\n    while (ch !== 0 && ch !== 0x3E/* > */);\n\n    if (state.position < state.length) {\n      tagName = state.input.slice(_position, state.position);\n      ch = state.input.charCodeAt(++state.position);\n    } else {\n      throwError(state, 'unexpected end of the stream within a verbatim tag');\n    }\n  } else {\n    while (ch !== 0 && !is_WS_OR_EOL(ch)) {\n\n      if (ch === 0x21/* ! */) {\n        if (!isNamed) {\n          tagHandle = state.input.slice(_position - 1, state.position + 1);\n\n          if (!PATTERN_TAG_HANDLE.test(tagHandle)) {\n            throwError(state, 'named tag handle cannot contain such characters');\n          }\n\n          isNamed = true;\n          _position = state.position + 1;\n        } else {\n          throwError(state, 'tag suffix cannot contain exclamation marks');\n        }\n      }\n\n      ch = state.input.charCodeAt(++state.position);\n    }\n\n    tagName = state.input.slice(_position, state.position);\n\n    if (PATTERN_FLOW_INDICATORS.test(tagName)) {\n      throwError(state, 'tag suffix cannot contain flow indicator characters');\n    }\n  }\n\n  if (tagName && !PATTERN_TAG_URI.test(tagName)) {\n    throwError(state, 'tag name cannot contain such characters: ' + tagName);\n  }\n\n  try {\n    tagName = decodeURIComponent(tagName);\n  } catch (err) {\n    throwError(state, 'tag name is malformed: ' + tagName);\n  }\n\n  if (isVerbatim) {\n    state.tag = 
tagName;\n\n  } else if (_hasOwnProperty$1.call(state.tagMap, tagHandle)) {\n    state.tag = state.tagMap[tagHandle] + tagName;\n\n  } else if (tagHandle === '!') {\n    state.tag = '!' + tagName;\n\n  } else if (tagHandle === '!!') {\n    state.tag = 'tag:yaml.org,2002:' + tagName;\n\n  } else {\n    throwError(state, 'undeclared tag handle \"' + tagHandle + '\"');\n  }\n\n  return true;\n}\n\nfunction readAnchorProperty(state) {\n  var _position,\n      ch;\n\n  ch = state.input.charCodeAt(state.position);\n\n  if (ch !== 0x26/* & */) return false;\n\n  if (state.anchor !== null) {\n    throwError(state, 'duplication of an anchor property');\n  }\n\n  ch = state.input.charCodeAt(++state.position);\n  _position = state.position;\n\n  while (ch !== 0 && !is_WS_OR_EOL(ch) && !is_FLOW_INDICATOR(ch)) {\n    ch = state.input.charCodeAt(++state.position);\n  }\n\n  if (state.position === _position) {\n    throwError(state, 'name of an anchor node must contain at least one character');\n  }\n\n  state.anchor = state.input.slice(_position, state.position);\n  return true;\n}\n\nfunction readAlias(state) {\n  var _position, alias,\n      ch;\n\n  ch = state.input.charCodeAt(state.position);\n\n  if (ch !== 0x2A/* * */) return false;\n\n  ch = state.input.charCodeAt(++state.position);\n  _position = state.position;\n\n  while (ch !== 0 && !is_WS_OR_EOL(ch) && !is_FLOW_INDICATOR(ch)) {\n    ch = state.input.charCodeAt(++state.position);\n  }\n\n  if (state.position === _position) {\n    throwError(state, 'name of an alias node must contain at least one character');\n  }\n\n  alias = state.input.slice(_position, state.position);\n\n  if (!_hasOwnProperty$1.call(state.anchorMap, alias)) {\n    throwError(state, 'unidentified alias \"' + alias + '\"');\n  }\n\n  state.result = state.anchorMap[alias];\n  skipSeparationSpace(state, true, -1);\n  return true;\n}\n\nfunction composeNode(state, parentIndent, nodeContext, allowToSeek, allowCompact) {\n  var allowBlockStyles,\n      
allowBlockScalars,\n      allowBlockCollections,\n      indentStatus = 1, // 1: this>parent, 0: this=parent, -1: this parentIndent) {\n        indentStatus = 1;\n      } else if (state.lineIndent === parentIndent) {\n        indentStatus = 0;\n      } else if (state.lineIndent < parentIndent) {\n        indentStatus = -1;\n      }\n    }\n  }\n\n  if (indentStatus === 1) {\n    while (readTagProperty(state) || readAnchorProperty(state)) {\n      if (skipSeparationSpace(state, true, -1)) {\n        atNewLine = true;\n        allowBlockCollections = allowBlockStyles;\n\n        if (state.lineIndent > parentIndent) {\n          indentStatus = 1;\n        } else if (state.lineIndent === parentIndent) {\n          indentStatus = 0;\n        } else if (state.lineIndent < parentIndent) {\n          indentStatus = -1;\n        }\n      } else {\n        allowBlockCollections = false;\n      }\n    }\n  }\n\n  if (allowBlockCollections) {\n    allowBlockCollections = atNewLine || allowCompact;\n  }\n\n  if (indentStatus === 1 || CONTEXT_BLOCK_OUT === nodeContext) {\n    if (CONTEXT_FLOW_IN === nodeContext || CONTEXT_FLOW_OUT === nodeContext) {\n      flowIndent = parentIndent;\n    } else {\n      flowIndent = parentIndent + 1;\n    }\n\n    blockIndent = state.position - state.lineStart;\n\n    if (indentStatus === 1) {\n      if (allowBlockCollections &&\n          (readBlockSequence(state, blockIndent) ||\n           readBlockMapping(state, blockIndent, flowIndent)) ||\n          readFlowCollection(state, flowIndent)) {\n        hasContent = true;\n      } else {\n        if ((allowBlockScalars && readBlockScalar(state, flowIndent)) ||\n            readSingleQuotedScalar(state, flowIndent) ||\n            readDoubleQuotedScalar(state, flowIndent)) {\n          hasContent = true;\n\n        } else if (readAlias(state)) {\n          hasContent = true;\n\n          if (state.tag !== null || state.anchor !== null) {\n            throwError(state, 'alias node should not have 
any properties');\n          }\n\n        } else if (readPlainScalar(state, flowIndent, CONTEXT_FLOW_IN === nodeContext)) {\n          hasContent = true;\n\n          if (state.tag === null) {\n            state.tag = '?';\n          }\n        }\n\n        if (state.anchor !== null) {\n          state.anchorMap[state.anchor] = state.result;\n        }\n      }\n    } else if (indentStatus === 0) {\n      // Special case: block sequences are allowed to have same indentation level as the parent.\n      // http://www.yaml.org/spec/1.2/spec.html#id2799784\n      hasContent = allowBlockCollections && readBlockSequence(state, blockIndent);\n    }\n  }\n\n  if (state.tag === null) {\n    if (state.anchor !== null) {\n      state.anchorMap[state.anchor] = state.result;\n    }\n\n  } else if (state.tag === '?') {\n    // Implicit resolving is not allowed for non-scalar types, and '?'\n    // non-specific tag is only automatically assigned to plain scalars.\n    //\n    // We only need to check kind conformity in case user explicitly assigns '?'\n    // tag, for example like this: \"! [0]\"\n    //\n    if (state.result !== null && state.kind !== 'scalar') {\n      throwError(state, 'unacceptable node kind for ! 
tag; it should be \"scalar\", not \"' + state.kind + '\"');\n    }\n\n    for (typeIndex = 0, typeQuantity = state.implicitTypes.length; typeIndex < typeQuantity; typeIndex += 1) {\n      type = state.implicitTypes[typeIndex];\n\n      if (type.resolve(state.result)) { // `state.result` updated in resolver if matched\n        state.result = type.construct(state.result);\n        state.tag = type.tag;\n        if (state.anchor !== null) {\n          state.anchorMap[state.anchor] = state.result;\n        }\n        break;\n      }\n    }\n  } else if (state.tag !== '!') {\n    if (_hasOwnProperty$1.call(state.typeMap[state.kind || 'fallback'], state.tag)) {\n      type = state.typeMap[state.kind || 'fallback'][state.tag];\n    } else {\n      // looking for multi type\n      type = null;\n      typeList = state.typeMap.multi[state.kind || 'fallback'];\n\n      for (typeIndex = 0, typeQuantity = typeList.length; typeIndex < typeQuantity; typeIndex += 1) {\n        if (state.tag.slice(0, typeList[typeIndex].tag.length) === typeList[typeIndex].tag) {\n          type = typeList[typeIndex];\n          break;\n        }\n      }\n    }\n\n    if (!type) {\n      throwError(state, 'unknown tag !<' + state.tag + '>');\n    }\n\n    if (state.result !== null && type.kind !== state.kind) {\n      throwError(state, 'unacceptable node kind for !<' + state.tag + '> tag; it should be \"' + type.kind + '\", not \"' + state.kind + '\"');\n    }\n\n    if (!type.resolve(state.result, state.tag)) { // `state.result` updated in resolver if matched\n      throwError(state, 'cannot resolve a node with !<' + state.tag + '> explicit tag');\n    } else {\n      state.result = type.construct(state.result, state.tag);\n      if (state.anchor !== null) {\n        state.anchorMap[state.anchor] = state.result;\n      }\n    }\n  }\n\n  if (state.listener !== null) {\n    state.listener('close', state);\n  }\n  return state.tag !== null ||  state.anchor !== null || hasContent;\n}\n\nfunction 
readDocument(state) {\n  var documentStart = state.position,\n      _position,\n      directiveName,\n      directiveArgs,\n      hasDirectives = false,\n      ch;\n\n  state.version = null;\n  state.checkLineBreaks = state.legacy;\n  state.tagMap = Object.create(null);\n  state.anchorMap = Object.create(null);\n\n  while ((ch = state.input.charCodeAt(state.position)) !== 0) {\n    skipSeparationSpace(state, true, -1);\n\n    ch = state.input.charCodeAt(state.position);\n\n    if (state.lineIndent > 0 || ch !== 0x25/* % */) {\n      break;\n    }\n\n    hasDirectives = true;\n    ch = state.input.charCodeAt(++state.position);\n    _position = state.position;\n\n    while (ch !== 0 && !is_WS_OR_EOL(ch)) {\n      ch = state.input.charCodeAt(++state.position);\n    }\n\n    directiveName = state.input.slice(_position, state.position);\n    directiveArgs = [];\n\n    if (directiveName.length < 1) {\n      throwError(state, 'directive name must not be less than one character in length');\n    }\n\n    while (ch !== 0) {\n      while (is_WHITE_SPACE(ch)) {\n        ch = state.input.charCodeAt(++state.position);\n      }\n\n      if (ch === 0x23/* # */) {\n        do { ch = state.input.charCodeAt(++state.position); }\n        while (ch !== 0 && !is_EOL(ch));\n        break;\n      }\n\n      if (is_EOL(ch)) break;\n\n      _position = state.position;\n\n      while (ch !== 0 && !is_WS_OR_EOL(ch)) {\n        ch = state.input.charCodeAt(++state.position);\n      }\n\n      directiveArgs.push(state.input.slice(_position, state.position));\n    }\n\n    if (ch !== 0) readLineBreak(state);\n\n    if (_hasOwnProperty$1.call(directiveHandlers, directiveName)) {\n      directiveHandlers[directiveName](state, directiveName, directiveArgs);\n    } else {\n      throwWarning(state, 'unknown document directive \"' + directiveName + '\"');\n    }\n  }\n\n  skipSeparationSpace(state, true, -1);\n\n  if (state.lineIndent === 0 &&\n      state.input.charCodeAt(state.position)     === 
0x2D/* - */ &&\n      state.input.charCodeAt(state.position + 1) === 0x2D/* - */ &&\n      state.input.charCodeAt(state.position + 2) === 0x2D/* - */) {\n    state.position += 3;\n    skipSeparationSpace(state, true, -1);\n\n  } else if (hasDirectives) {\n    throwError(state, 'directives end mark is expected');\n  }\n\n  composeNode(state, state.lineIndent - 1, CONTEXT_BLOCK_OUT, false, true);\n  skipSeparationSpace(state, true, -1);\n\n  if (state.checkLineBreaks &&\n      PATTERN_NON_ASCII_LINE_BREAKS.test(state.input.slice(documentStart, state.position))) {\n    throwWarning(state, 'non-ASCII line breaks are interpreted as content');\n  }\n\n  state.documents.push(state.result);\n\n  if (state.position === state.lineStart && testDocumentSeparator(state)) {\n\n    if (state.input.charCodeAt(state.position) === 0x2E/* . */) {\n      state.position += 3;\n      skipSeparationSpace(state, true, -1);\n    }\n    return;\n  }\n\n  if (state.position < (state.length - 1)) {\n    throwError(state, 'end of the stream or a document separator is expected');\n  } else {\n    return;\n  }\n}\n\n\nfunction loadDocuments(input, options) {\n  input = String(input);\n  options = options || {};\n\n  if (input.length !== 0) {\n\n    // Add tailing `\\n` if not exists\n    if (input.charCodeAt(input.length - 1) !== 0x0A/* LF */ &&\n        input.charCodeAt(input.length - 1) !== 0x0D/* CR */) {\n      input += '\\n';\n    }\n\n    // Strip BOM\n    if (input.charCodeAt(0) === 0xFEFF) {\n      input = input.slice(1);\n    }\n  }\n\n  var state = new State$1(input, options);\n\n  var nullpos = input.indexOf('\\0');\n\n  if (nullpos !== -1) {\n    state.position = nullpos;\n    throwError(state, 'null byte is not allowed in input');\n  }\n\n  // Use 0 as string terminator. 
That significantly simplifies bounds check.\n  state.input += '\\0';\n\n  while (state.input.charCodeAt(state.position) === 0x20/* Space */) {\n    state.lineIndent += 1;\n    state.position += 1;\n  }\n\n  while (state.position < (state.length - 1)) {\n    readDocument(state);\n  }\n\n  return state.documents;\n}\n\n\nfunction loadAll$1(input, iterator, options) {\n  if (iterator !== null && typeof iterator === 'object' && typeof options === 'undefined') {\n    options = iterator;\n    iterator = null;\n  }\n\n  var documents = loadDocuments(input, options);\n\n  if (typeof iterator !== 'function') {\n    return documents;\n  }\n\n  for (var index = 0, length = documents.length; index < length; index += 1) {\n    iterator(documents[index]);\n  }\n}\n\n\nfunction load$1(input, options) {\n  var documents = loadDocuments(input, options);\n\n  if (documents.length === 0) {\n    /*eslint-disable no-undefined*/\n    return undefined;\n  } else if (documents.length === 1) {\n    return documents[0];\n  }\n  throw new exception('expected a single document in the stream, but found more');\n}\n\n\nvar loadAll_1 = loadAll$1;\nvar load_1    = load$1;\n\nvar loader = {\n\tloadAll: loadAll_1,\n\tload: load_1\n};\n\n/*eslint-disable no-use-before-define*/\n\n\n\n\n\nvar _toString       = Object.prototype.toString;\nvar _hasOwnProperty = Object.prototype.hasOwnProperty;\n\nvar CHAR_BOM                  = 0xFEFF;\nvar CHAR_TAB                  = 0x09; /* Tab */\nvar CHAR_LINE_FEED            = 0x0A; /* LF */\nvar CHAR_CARRIAGE_RETURN      = 0x0D; /* CR */\nvar CHAR_SPACE                = 0x20; /* Space */\nvar CHAR_EXCLAMATION          = 0x21; /* ! 
*/\nvar CHAR_DOUBLE_QUOTE         = 0x22; /* \" */\nvar CHAR_SHARP                = 0x23; /* # */\nvar CHAR_PERCENT              = 0x25; /* % */\nvar CHAR_AMPERSAND            = 0x26; /* & */\nvar CHAR_SINGLE_QUOTE         = 0x27; /* ' */\nvar CHAR_ASTERISK             = 0x2A; /* * */\nvar CHAR_COMMA                = 0x2C; /* , */\nvar CHAR_MINUS                = 0x2D; /* - */\nvar CHAR_COLON                = 0x3A; /* : */\nvar CHAR_EQUALS               = 0x3D; /* = */\nvar CHAR_GREATER_THAN         = 0x3E; /* > */\nvar CHAR_QUESTION             = 0x3F; /* ? */\nvar CHAR_COMMERCIAL_AT        = 0x40; /* @ */\nvar CHAR_LEFT_SQUARE_BRACKET  = 0x5B; /* [ */\nvar CHAR_RIGHT_SQUARE_BRACKET = 0x5D; /* ] */\nvar CHAR_GRAVE_ACCENT         = 0x60; /* ` */\nvar CHAR_LEFT_CURLY_BRACKET   = 0x7B; /* { */\nvar CHAR_VERTICAL_LINE        = 0x7C; /* | */\nvar CHAR_RIGHT_CURLY_BRACKET  = 0x7D; /* } */\n\nvar ESCAPE_SEQUENCES = {};\n\nESCAPE_SEQUENCES[0x00]   = '\\\\0';\nESCAPE_SEQUENCES[0x07]   = '\\\\a';\nESCAPE_SEQUENCES[0x08]   = '\\\\b';\nESCAPE_SEQUENCES[0x09]   = '\\\\t';\nESCAPE_SEQUENCES[0x0A]   = '\\\\n';\nESCAPE_SEQUENCES[0x0B]   = '\\\\v';\nESCAPE_SEQUENCES[0x0C]   = '\\\\f';\nESCAPE_SEQUENCES[0x0D]   = '\\\\r';\nESCAPE_SEQUENCES[0x1B]   = '\\\\e';\nESCAPE_SEQUENCES[0x22]   = '\\\\\"';\nESCAPE_SEQUENCES[0x5C]   = '\\\\\\\\';\nESCAPE_SEQUENCES[0x85]   = '\\\\N';\nESCAPE_SEQUENCES[0xA0]   = '\\\\_';\nESCAPE_SEQUENCES[0x2028] = '\\\\L';\nESCAPE_SEQUENCES[0x2029] = '\\\\P';\n\nvar DEPRECATED_BOOLEANS_SYNTAX = [\n  'y', 'Y', 'yes', 'Yes', 'YES', 'on', 'On', 'ON',\n  'n', 'N', 'no', 'No', 'NO', 'off', 'Off', 'OFF'\n];\n\nvar DEPRECATED_BASE60_SYNTAX = /^[-+]?[0-9_]+(?::[0-9_]+)+(?:\\.[0-9_]*)?$/;\n\nfunction compileStyleMap(schema, map) {\n  var result, keys, index, length, tag, style, type;\n\n  if (map === null) return {};\n\n  result = {};\n  keys = Object.keys(map);\n\n  for (index = 0, length = keys.length; index < length; index += 1) {\n    tag = keys[index];\n    style = 
String(map[tag]);\n\n    if (tag.slice(0, 2) === '!!') {\n      tag = 'tag:yaml.org,2002:' + tag.slice(2);\n    }\n    type = schema.compiledTypeMap['fallback'][tag];\n\n    if (type && _hasOwnProperty.call(type.styleAliases, style)) {\n      style = type.styleAliases[style];\n    }\n\n    result[tag] = style;\n  }\n\n  return result;\n}\n\nfunction encodeHex(character) {\n  var string, handle, length;\n\n  string = character.toString(16).toUpperCase();\n\n  if (character <= 0xFF) {\n    handle = 'x';\n    length = 2;\n  } else if (character <= 0xFFFF) {\n    handle = 'u';\n    length = 4;\n  } else if (character <= 0xFFFFFFFF) {\n    handle = 'U';\n    length = 8;\n  } else {\n    throw new exception('code point within a string may not be greater than 0xFFFFFFFF');\n  }\n\n  return '\\\\' + handle + common.repeat('0', length - string.length) + string;\n}\n\n\nvar QUOTING_TYPE_SINGLE = 1,\n    QUOTING_TYPE_DOUBLE = 2;\n\nfunction State(options) {\n  this.schema        = options['schema'] || _default;\n  this.indent        = Math.max(1, (options['indent'] || 2));\n  this.noArrayIndent = options['noArrayIndent'] || false;\n  this.skipInvalid   = options['skipInvalid'] || false;\n  this.flowLevel     = (common.isNothing(options['flowLevel']) ? -1 : options['flowLevel']);\n  this.styleMap      = compileStyleMap(this.schema, options['styles'] || null);\n  this.sortKeys      = options['sortKeys'] || false;\n  this.lineWidth     = options['lineWidth'] || 80;\n  this.noRefs        = options['noRefs'] || false;\n  this.noCompatMode  = options['noCompatMode'] || false;\n  this.condenseFlow  = options['condenseFlow'] || false;\n  this.quotingType   = options['quotingType'] === '\"' ? QUOTING_TYPE_DOUBLE : QUOTING_TYPE_SINGLE;\n  this.forceQuotes   = options['forceQuotes'] || false;\n  this.replacer      = typeof options['replacer'] === 'function' ? 
options['replacer'] : null;\n\n  this.implicitTypes = this.schema.compiledImplicit;\n  this.explicitTypes = this.schema.compiledExplicit;\n\n  this.tag = null;\n  this.result = '';\n\n  this.duplicates = [];\n  this.usedDuplicates = null;\n}\n\n// Indents every line in a string. Empty lines (\\n only) are not indented.\nfunction indentString(string, spaces) {\n  var ind = common.repeat(' ', spaces),\n      position = 0,\n      next = -1,\n      result = '',\n      line,\n      length = string.length;\n\n  while (position < length) {\n    next = string.indexOf('\\n', position);\n    if (next === -1) {\n      line = string.slice(position);\n      position = length;\n    } else {\n      line = string.slice(position, next + 1);\n      position = next + 1;\n    }\n\n    if (line.length && line !== '\\n') result += ind;\n\n    result += line;\n  }\n\n  return result;\n}\n\nfunction generateNextLine(state, level) {\n  return '\\n' + common.repeat(' ', state.indent * level);\n}\n\nfunction testImplicitResolving(state, str) {\n  var index, length, type;\n\n  for (index = 0, length = state.implicitTypes.length; index < length; index += 1) {\n    type = state.implicitTypes[index];\n\n    if (type.resolve(str)) {\n      return true;\n    }\n  }\n\n  return false;\n}\n\n// [33] s-white ::= s-space | s-tab\nfunction isWhitespace(c) {\n  return c === CHAR_SPACE || c === CHAR_TAB;\n}\n\n// Returns true if the character can be printed without escaping.\n// From YAML 1.2: \"any allowed characters known to be non-printable\n// should also be escaped. 
[However,] This isn\u2019t mandatory\"\n// Derived from nb-char - \\t - #x85 - #xA0 - #x2028 - #x2029.\nfunction isPrintable(c) {\n  return  (0x00020 <= c && c <= 0x00007E)\n      || ((0x000A1 <= c && c <= 0x00D7FF) && c !== 0x2028 && c !== 0x2029)\n      || ((0x0E000 <= c && c <= 0x00FFFD) && c !== CHAR_BOM)\n      ||  (0x10000 <= c && c <= 0x10FFFF);\n}\n\n// [34] ns-char ::= nb-char - s-white\n// [27] nb-char ::= c-printable - b-char - c-byte-order-mark\n// [26] b-char  ::= b-line-feed | b-carriage-return\n// Including s-white (for some reason, examples doesn't match specs in this aspect)\n// ns-char ::= c-printable - b-line-feed - b-carriage-return - c-byte-order-mark\nfunction isNsCharOrWhitespace(c) {\n  return isPrintable(c)\n    && c !== CHAR_BOM\n    // - b-char\n    && c !== CHAR_CARRIAGE_RETURN\n    && c !== CHAR_LINE_FEED;\n}\n\n// [127]  ns-plain-safe(c) ::= c = flow-out  \u21D2 ns-plain-safe-out\n//                             c = flow-in   \u21D2 ns-plain-safe-in\n//                             c = block-key \u21D2 ns-plain-safe-out\n//                             c = flow-key  \u21D2 ns-plain-safe-in\n// [128] ns-plain-safe-out ::= ns-char\n// [129]  ns-plain-safe-in ::= ns-char - c-flow-indicator\n// [130]  ns-plain-char(c) ::=  ( ns-plain-safe(c) - \u201C:\u201D - \u201C#\u201D )\n//                            | ( /* An ns-char preceding */ \u201C#\u201D )\n//                            | ( \u201C:\u201D /* Followed by an ns-plain-safe(c) */ )\nfunction isPlainSafe(c, prev, inblock) {\n  var cIsNsCharOrWhitespace = isNsCharOrWhitespace(c);\n  var cIsNsChar = cIsNsCharOrWhitespace && !isWhitespace(c);\n  return (\n    // ns-plain-safe\n    inblock ? 
// c = flow-in\n      cIsNsCharOrWhitespace\n      : cIsNsCharOrWhitespace\n        // - c-flow-indicator\n        && c !== CHAR_COMMA\n        && c !== CHAR_LEFT_SQUARE_BRACKET\n        && c !== CHAR_RIGHT_SQUARE_BRACKET\n        && c !== CHAR_LEFT_CURLY_BRACKET\n        && c !== CHAR_RIGHT_CURLY_BRACKET\n  )\n    // ns-plain-char\n    && c !== CHAR_SHARP // false on '#'\n    && !(prev === CHAR_COLON && !cIsNsChar) // false on ': '\n    || (isNsCharOrWhitespace(prev) && !isWhitespace(prev) && c === CHAR_SHARP) // change to true on '[^ ]#'\n    || (prev === CHAR_COLON && cIsNsChar); // change to true on ':[^ ]'\n}\n\n// Simplified test for values allowed as the first character in plain style.\nfunction isPlainSafeFirst(c) {\n  // Uses a subset of ns-char - c-indicator\n  // where ns-char = nb-char - s-white.\n  // No support of ( ( \u201C?\u201D | \u201C:\u201D | \u201C-\u201D ) /* Followed by an ns-plain-safe(c)) */ ) part\n  return isPrintable(c) && c !== CHAR_BOM\n    && !isWhitespace(c) // - s-white\n    // - (c-indicator ::=\n    // \u201C-\u201D | \u201C?\u201D | \u201C:\u201D | \u201C,\u201D | \u201C[\u201D | \u201C]\u201D | \u201C{\u201D | \u201C}\u201D\n    && c !== CHAR_MINUS\n    && c !== CHAR_QUESTION\n    && c !== CHAR_COLON\n    && c !== CHAR_COMMA\n    && c !== CHAR_LEFT_SQUARE_BRACKET\n    && c !== CHAR_RIGHT_SQUARE_BRACKET\n    && c !== CHAR_LEFT_CURLY_BRACKET\n    && c !== CHAR_RIGHT_CURLY_BRACKET\n    // | \u201C#\u201D | \u201C&\u201D | \u201C*\u201D | \u201C!\u201D | \u201C|\u201D | \u201C=\u201D | \u201C>\u201D | \u201C'\u201D | \u201C\"\u201D\n    && c !== CHAR_SHARP\n    && c !== CHAR_AMPERSAND\n    && c !== CHAR_ASTERISK\n    && c !== CHAR_EXCLAMATION\n    && c !== CHAR_VERTICAL_LINE\n    && c !== CHAR_EQUALS\n    && c !== CHAR_GREATER_THAN\n    && c !== CHAR_SINGLE_QUOTE\n    && c !== CHAR_DOUBLE_QUOTE\n    // | \u201C%\u201D | \u201C@\u201D | \u201C`\u201D)\n    && c !== CHAR_PERCENT\n    && c !== CHAR_COMMERCIAL_AT\n    && c !== 
CHAR_GRAVE_ACCENT;\n}\n\n// Simplified test for values allowed as the last character in plain style.\nfunction isPlainSafeLast(c) {\n  // just not whitespace or colon, it will be checked to be plain character later\n  return !isWhitespace(c) && c !== CHAR_COLON;\n}\n\n// Same as 'string'.codePointAt(pos), but works in older browsers.\nfunction codePointAt(string, pos) {\n  var first = string.charCodeAt(pos), second;\n  if (first >= 0xD800 && first <= 0xDBFF && pos + 1 < string.length) {\n    second = string.charCodeAt(pos + 1);\n    if (second >= 0xDC00 && second <= 0xDFFF) {\n      // https://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae\n      return (first - 0xD800) * 0x400 + second - 0xDC00 + 0x10000;\n    }\n  }\n  return first;\n}\n\n// Determines whether block indentation indicator is required.\nfunction needIndentIndicator(string) {\n  var leadingSpaceRe = /^\\n* /;\n  return leadingSpaceRe.test(string);\n}\n\nvar STYLE_PLAIN   = 1,\n    STYLE_SINGLE  = 2,\n    STYLE_LITERAL = 3,\n    STYLE_FOLDED  = 4,\n    STYLE_DOUBLE  = 5;\n\n// Determines which scalar styles are possible and returns the preferred style.\n// lineWidth = -1 => no limit.\n// Pre-conditions: str.length > 0.\n// Post-conditions:\n//    STYLE_PLAIN or STYLE_SINGLE => no \\n are in the string.\n//    STYLE_LITERAL => no lines are suitable for folding (or lineWidth is -1).\n//    STYLE_FOLDED => a line > lineWidth and can be folded (and lineWidth != -1).\nfunction chooseScalarStyle(string, singleLineOnly, indentPerLevel, lineWidth,\n  testAmbiguousType, quotingType, forceQuotes, inblock) {\n\n  var i;\n  var char = 0;\n  var prevChar = null;\n  var hasLineBreak = false;\n  var hasFoldableLine = false; // only checked if shouldTrackWidth\n  var shouldTrackWidth = lineWidth !== -1;\n  var previousLineBreak = -1; // count the first line correctly\n  var plain = isPlainSafeFirst(codePointAt(string, 0))\n          && isPlainSafeLast(codePointAt(string, string.length - 1));\n\n  if 
(singleLineOnly || forceQuotes) {\n    // Case: no block styles.\n    // Check for disallowed characters to rule out plain and single.\n    for (i = 0; i < string.length; char >= 0x10000 ? i += 2 : i++) {\n      char = codePointAt(string, i);\n      if (!isPrintable(char)) {\n        return STYLE_DOUBLE;\n      }\n      plain = plain && isPlainSafe(char, prevChar, inblock);\n      prevChar = char;\n    }\n  } else {\n    // Case: block styles permitted.\n    for (i = 0; i < string.length; char >= 0x10000 ? i += 2 : i++) {\n      char = codePointAt(string, i);\n      if (char === CHAR_LINE_FEED) {\n        hasLineBreak = true;\n        // Check if any line can be folded.\n        if (shouldTrackWidth) {\n          hasFoldableLine = hasFoldableLine ||\n            // Foldable line = too long, and not more-indented.\n            (i - previousLineBreak - 1 > lineWidth &&\n             string[previousLineBreak + 1] !== ' ');\n          previousLineBreak = i;\n        }\n      } else if (!isPrintable(char)) {\n        return STYLE_DOUBLE;\n      }\n      plain = plain && isPlainSafe(char, prevChar, inblock);\n      prevChar = char;\n    }\n    // in case the end is missing a \\n\n    hasFoldableLine = hasFoldableLine || (shouldTrackWidth &&\n      (i - previousLineBreak - 1 > lineWidth &&\n       string[previousLineBreak + 1] !== ' '));\n  }\n  // Although every style can represent \\n without escaping, prefer block styles\n  // for multiline, since they're more readable and they don't add empty lines.\n  // Also prefer folding a super-long line.\n  if (!hasLineBreak && !hasFoldableLine) {\n    // Strings interpretable as another type have to be quoted;\n    // e.g. the string 'true' vs. the boolean true.\n    if (plain && !forceQuotes && !testAmbiguousType(string)) {\n      return STYLE_PLAIN;\n    }\n    return quotingType === QUOTING_TYPE_DOUBLE ? 
STYLE_DOUBLE : STYLE_SINGLE;\n  }\n  // Edge case: block indentation indicator can only have one digit.\n  if (indentPerLevel > 9 && needIndentIndicator(string)) {\n    return STYLE_DOUBLE;\n  }\n  // At this point we know block styles are valid.\n  // Prefer literal style unless we want to fold.\n  if (!forceQuotes) {\n    return hasFoldableLine ? STYLE_FOLDED : STYLE_LITERAL;\n  }\n  return quotingType === QUOTING_TYPE_DOUBLE ? STYLE_DOUBLE : STYLE_SINGLE;\n}\n\n// Note: line breaking/folding is implemented for only the folded style.\n// NB. We drop the last trailing newline (if any) of a returned block scalar\n//  since the dumper adds its own newline. This always works:\n//    \u2022 No ending newline => unaffected; already using strip \"-\" chomping.\n//    \u2022 Ending newline    => removed then restored.\n//  Importantly, this keeps the \"+\" chomp indicator from gaining an extra line.\nfunction writeScalar(state, string, level, iskey, inblock) {\n  state.dump = (function () {\n    if (string.length === 0) {\n      return state.quotingType === QUOTING_TYPE_DOUBLE ? '\"\"' : \"''\";\n    }\n    if (!state.noCompatMode) {\n      if (DEPRECATED_BOOLEANS_SYNTAX.indexOf(string) !== -1 || DEPRECATED_BASE60_SYNTAX.test(string)) {\n        return state.quotingType === QUOTING_TYPE_DOUBLE ? 
('\"' + string + '\"') : (\"'\" + string + \"'\");\n      }\n    }\n\n    var indent = state.indent * Math.max(1, level); // no 0-indent scalars\n    // As indentation gets deeper, let the width decrease monotonically\n    // to the lower bound min(state.lineWidth, 40).\n    // Note that this implies\n    //  state.lineWidth \u2264 40 + state.indent: width is fixed at the lower bound.\n    //  state.lineWidth > 40 + state.indent: width decreases until the lower bound.\n    // This behaves better than a constant minimum width which disallows narrower options,\n    // or an indent threshold which causes the width to suddenly increase.\n    var lineWidth = state.lineWidth === -1\n      ? -1 : Math.max(Math.min(state.lineWidth, 40), state.lineWidth - indent);\n\n    // Without knowing if keys are implicit/explicit, assume implicit for safety.\n    var singleLineOnly = iskey\n      // No block styles in flow mode.\n      || (state.flowLevel > -1 && level >= state.flowLevel);\n    function testAmbiguity(string) {\n      return testImplicitResolving(state, string);\n    }\n\n    switch (chooseScalarStyle(string, singleLineOnly, state.indent, lineWidth,\n      testAmbiguity, state.quotingType, state.forceQuotes && !iskey, inblock)) {\n\n      case STYLE_PLAIN:\n        return string;\n      case STYLE_SINGLE:\n        return \"'\" + string.replace(/'/g, \"''\") + \"'\";\n      case STYLE_LITERAL:\n        return '|' + blockHeader(string, state.indent)\n          + dropEndingNewline(indentString(string, indent));\n      case STYLE_FOLDED:\n        return '>' + blockHeader(string, state.indent)\n          + dropEndingNewline(indentString(foldString(string, lineWidth), indent));\n      case STYLE_DOUBLE:\n        return '\"' + escapeString(string) + '\"';\n      default:\n        throw new exception('impossible error: invalid scalar style');\n    }\n  }());\n}\n\n// Pre-conditions: string is valid for a block scalar, 1 <= indentPerLevel <= 9.\nfunction blockHeader(string, 
indentPerLevel) {\n  var indentIndicator = needIndentIndicator(string) ? String(indentPerLevel) : '';\n\n  // note the special case: the string '\\n' counts as a \"trailing\" empty line.\n  var clip =          string[string.length - 1] === '\\n';\n  var keep = clip && (string[string.length - 2] === '\\n' || string === '\\n');\n  var chomp = keep ? '+' : (clip ? '' : '-');\n\n  return indentIndicator + chomp + '\\n';\n}\n\n// (See the note for writeScalar.)\nfunction dropEndingNewline(string) {\n  return string[string.length - 1] === '\\n' ? string.slice(0, -1) : string;\n}\n\n// Note: a long line without a suitable break point will exceed the width limit.\n// Pre-conditions: every char in str isPrintable, str.length > 0, width > 0.\nfunction foldString(string, width) {\n  // In folded style, $k$ consecutive newlines output as $k+1$ newlines\u2014\n  // unless they're before or after a more-indented line, or at the very\n  // beginning or end, in which case $k$ maps to $k$.\n  // Therefore, parse each chunk as newline(s) followed by a content line.\n  var lineRe = /(\\n+)([^\\n]*)/g;\n\n  // first line (possibly an empty line)\n  var result = (function () {\n    var nextLF = string.indexOf('\\n');\n    nextLF = nextLF !== -1 ? nextLF : string.length;\n    lineRe.lastIndex = nextLF;\n    return foldLine(string.slice(0, nextLF), width);\n  }());\n  // If we haven't reached the first content line yet, don't add an extra \\n.\n  var prevMoreIndented = string[0] === '\\n' || string[0] === ' ';\n  var moreIndented;\n\n  // rest of the lines\n  var match;\n  while ((match = lineRe.exec(string))) {\n    var prefix = match[1], line = match[2];\n    moreIndented = (line[0] === ' ');\n    result += prefix\n      + (!prevMoreIndented && !moreIndented && line !== ''\n        ? 
'\\n' : '')\n      + foldLine(line, width);\n    prevMoreIndented = moreIndented;\n  }\n\n  return result;\n}\n\n// Greedy line breaking.\n// Picks the longest line under the limit each time,\n// otherwise settles for the shortest line over the limit.\n// NB. More-indented lines *cannot* be folded, as that would add an extra \\n.\nfunction foldLine(line, width) {\n  if (line === '' || line[0] === ' ') return line;\n\n  // Since a more-indented line adds a \\n, breaks can't be followed by a space.\n  var breakRe = / [^ ]/g; // note: the match index will always be <= length-2.\n  var match;\n  // start is an inclusive index. end, curr, and next are exclusive.\n  var start = 0, end, curr = 0, next = 0;\n  var result = '';\n\n  // Invariants: 0 <= start <= length-1.\n  //   0 <= curr <= next <= max(0, length-2). curr - start <= width.\n  // Inside the loop:\n  //   A match implies length >= 2, so curr and next are <= length-2.\n  while ((match = breakRe.exec(line))) {\n    next = match.index;\n    // maintain invariant: curr - start <= width\n    if (next - start > width) {\n      end = (curr > start) ? 
curr : next; // derive end <= length-2\n      result += '\\n' + line.slice(start, end);\n      // skip the space that was output as \\n\n      start = end + 1;                    // derive start <= length-1\n    }\n    curr = next;\n  }\n\n  // By the invariants, start <= length-1, so there is something left over.\n  // It is either the whole string or a part starting from non-whitespace.\n  result += '\\n';\n  // Insert a break if the remainder is too long and there is a break available.\n  if (line.length - start > width && curr > start) {\n    result += line.slice(start, curr) + '\\n' + line.slice(curr + 1);\n  } else {\n    result += line.slice(start);\n  }\n\n  return result.slice(1); // drop extra \\n joiner\n}\n\n// Escapes a double-quoted string.\nfunction escapeString(string) {\n  var result = '';\n  var char = 0;\n  var escapeSeq;\n\n  for (var i = 0; i < string.length; char >= 0x10000 ? i += 2 : i++) {\n    char = codePointAt(string, i);\n    escapeSeq = ESCAPE_SEQUENCES[char];\n\n    if (!escapeSeq && isPrintable(char)) {\n      result += string[i];\n      if (char >= 0x10000) result += string[i + 1];\n    } else {\n      result += escapeSeq || encodeHex(char);\n    }\n  }\n\n  return result;\n}\n\nfunction writeFlowSequence(state, level, object) {\n  var _result = '',\n      _tag    = state.tag,\n      index,\n      length,\n      value;\n\n  for (index = 0, length = object.length; index < length; index += 1) {\n    value = object[index];\n\n    if (state.replacer) {\n      value = state.replacer.call(object, String(index), value);\n    }\n\n    // Write only valid elements, put null instead of invalid elements.\n    if (writeNode(state, level, value, false, false) ||\n        (typeof value === 'undefined' &&\n         writeNode(state, level, null, false, false))) {\n\n      if (_result !== '') _result += ',' + (!state.condenseFlow ? 
' ' : '');\n      _result += state.dump;\n    }\n  }\n\n  state.tag = _tag;\n  state.dump = '[' + _result + ']';\n}\n\nfunction writeBlockSequence(state, level, object, compact) {\n  var _result = '',\n      _tag    = state.tag,\n      index,\n      length,\n      value;\n\n  for (index = 0, length = object.length; index < length; index += 1) {\n    value = object[index];\n\n    if (state.replacer) {\n      value = state.replacer.call(object, String(index), value);\n    }\n\n    // Write only valid elements, put null instead of invalid elements.\n    if (writeNode(state, level + 1, value, true, true, false, true) ||\n        (typeof value === 'undefined' &&\n         writeNode(state, level + 1, null, true, true, false, true))) {\n\n      if (!compact || _result !== '') {\n        _result += generateNextLine(state, level);\n      }\n\n      if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) {\n        _result += '-';\n      } else {\n        _result += '- ';\n      }\n\n      _result += state.dump;\n    }\n  }\n\n  state.tag = _tag;\n  state.dump = _result || '[]'; // Empty sequence if no valid values.\n}\n\nfunction writeFlowMapping(state, level, object) {\n  var _result       = '',\n      _tag          = state.tag,\n      objectKeyList = Object.keys(object),\n      index,\n      length,\n      objectKey,\n      objectValue,\n      pairBuffer;\n\n  for (index = 0, length = objectKeyList.length; index < length; index += 1) {\n\n    pairBuffer = '';\n    if (_result !== '') pairBuffer += ', ';\n\n    if (state.condenseFlow) pairBuffer += '\"';\n\n    objectKey = objectKeyList[index];\n    objectValue = object[objectKey];\n\n    if (state.replacer) {\n      objectValue = state.replacer.call(object, objectKey, objectValue);\n    }\n\n    if (!writeNode(state, level, objectKey, false, false)) {\n      continue; // Skip this pair because of invalid key;\n    }\n\n    if (state.dump.length > 1024) pairBuffer += '? 
';\n\n    pairBuffer += state.dump + (state.condenseFlow ? '\"' : '') + ':' + (state.condenseFlow ? '' : ' ');\n\n    if (!writeNode(state, level, objectValue, false, false)) {\n      continue; // Skip this pair because of invalid value.\n    }\n\n    pairBuffer += state.dump;\n\n    // Both key and value are valid.\n    _result += pairBuffer;\n  }\n\n  state.tag = _tag;\n  state.dump = '{' + _result + '}';\n}\n\nfunction writeBlockMapping(state, level, object, compact) {\n  var _result       = '',\n      _tag          = state.tag,\n      objectKeyList = Object.keys(object),\n      index,\n      length,\n      objectKey,\n      objectValue,\n      explicitPair,\n      pairBuffer;\n\n  // Allow sorting keys so that the output file is deterministic\n  if (state.sortKeys === true) {\n    // Default sorting\n    objectKeyList.sort();\n  } else if (typeof state.sortKeys === 'function') {\n    // Custom sort function\n    objectKeyList.sort(state.sortKeys);\n  } else if (state.sortKeys) {\n    // Something is wrong\n    throw new exception('sortKeys must be a boolean or a function');\n  }\n\n  for (index = 0, length = objectKeyList.length; index < length; index += 1) {\n    pairBuffer = '';\n\n    if (!compact || _result !== '') {\n      pairBuffer += generateNextLine(state, level);\n    }\n\n    objectKey = objectKeyList[index];\n    objectValue = object[objectKey];\n\n    if (state.replacer) {\n      objectValue = state.replacer.call(object, objectKey, objectValue);\n    }\n\n    if (!writeNode(state, level + 1, objectKey, true, true, true)) {\n      continue; // Skip this pair because of invalid key.\n    }\n\n    explicitPair = (state.tag !== null && state.tag !== '?') ||\n                   (state.dump && state.dump.length > 1024);\n\n    if (explicitPair) {\n      if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) {\n        pairBuffer += '?';\n      } else {\n        pairBuffer += '? 
';\n      }\n    }\n\n    pairBuffer += state.dump;\n\n    if (explicitPair) {\n      pairBuffer += generateNextLine(state, level);\n    }\n\n    if (!writeNode(state, level + 1, objectValue, true, explicitPair)) {\n      continue; // Skip this pair because of invalid value.\n    }\n\n    if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) {\n      pairBuffer += ':';\n    } else {\n      pairBuffer += ': ';\n    }\n\n    pairBuffer += state.dump;\n\n    // Both key and value are valid.\n    _result += pairBuffer;\n  }\n\n  state.tag = _tag;\n  state.dump = _result || '{}'; // Empty mapping if no valid pairs.\n}\n\nfunction detectType(state, object, explicit) {\n  var _result, typeList, index, length, type, style;\n\n  typeList = explicit ? state.explicitTypes : state.implicitTypes;\n\n  for (index = 0, length = typeList.length; index < length; index += 1) {\n    type = typeList[index];\n\n    if ((type.instanceOf  || type.predicate) &&\n        (!type.instanceOf || ((typeof object === 'object') && (object instanceof type.instanceOf))) &&\n        (!type.predicate  || type.predicate(object))) {\n\n      if (explicit) {\n        if (type.multi && type.representName) {\n          state.tag = type.representName(object);\n        } else {\n          state.tag = type.tag;\n        }\n      } else {\n        state.tag = '?';\n      }\n\n      if (type.represent) {\n        style = state.styleMap[type.tag] || type.defaultStyle;\n\n        if (_toString.call(type.represent) === '[object Function]') {\n          _result = type.represent(object, style);\n        } else if (_hasOwnProperty.call(type.represent, style)) {\n          _result = type.represent[style](object, style);\n        } else {\n          throw new exception('!<' + type.tag + '> tag resolver accepts not \"' + style + '\" style');\n        }\n\n        state.dump = _result;\n      }\n\n      return true;\n    }\n  }\n\n  return false;\n}\n\n// Serializes `object` and writes it to global 
`result`.\n// Returns true on success, or false on invalid object.\n//\nfunction writeNode(state, level, object, block, compact, iskey, isblockseq) {\n  state.tag = null;\n  state.dump = object;\n\n  if (!detectType(state, object, false)) {\n    detectType(state, object, true);\n  }\n\n  var type = _toString.call(state.dump);\n  var inblock = block;\n  var tagStr;\n\n  if (block) {\n    block = (state.flowLevel < 0 || state.flowLevel > level);\n  }\n\n  var objectOrArray = type === '[object Object]' || type === '[object Array]',\n      duplicateIndex,\n      duplicate;\n\n  if (objectOrArray) {\n    duplicateIndex = state.duplicates.indexOf(object);\n    duplicate = duplicateIndex !== -1;\n  }\n\n  if ((state.tag !== null && state.tag !== '?') || duplicate || (state.indent !== 2 && level > 0)) {\n    compact = false;\n  }\n\n  if (duplicate && state.usedDuplicates[duplicateIndex]) {\n    state.dump = '*ref_' + duplicateIndex;\n  } else {\n    if (objectOrArray && duplicate && !state.usedDuplicates[duplicateIndex]) {\n      state.usedDuplicates[duplicateIndex] = true;\n    }\n    if (type === '[object Object]') {\n      if (block && (Object.keys(state.dump).length !== 0)) {\n        writeBlockMapping(state, level, state.dump, compact);\n        if (duplicate) {\n          state.dump = '&ref_' + duplicateIndex + state.dump;\n        }\n      } else {\n        writeFlowMapping(state, level, state.dump);\n        if (duplicate) {\n          state.dump = '&ref_' + duplicateIndex + ' ' + state.dump;\n        }\n      }\n    } else if (type === '[object Array]') {\n      if (block && (state.dump.length !== 0)) {\n        if (state.noArrayIndent && !isblockseq && level > 0) {\n          writeBlockSequence(state, level - 1, state.dump, compact);\n        } else {\n          writeBlockSequence(state, level, state.dump, compact);\n        }\n        if (duplicate) {\n          state.dump = '&ref_' + duplicateIndex + state.dump;\n        }\n      } else {\n        
writeFlowSequence(state, level, state.dump);\n        if (duplicate) {\n          state.dump = '&ref_' + duplicateIndex + ' ' + state.dump;\n        }\n      }\n    } else if (type === '[object String]') {\n      if (state.tag !== '?') {\n        writeScalar(state, state.dump, level, iskey, inblock);\n      }\n    } else if (type === '[object Undefined]') {\n      return false;\n    } else {\n      if (state.skipInvalid) return false;\n      throw new exception('unacceptable kind of an object to dump ' + type);\n    }\n\n    if (state.tag !== null && state.tag !== '?') {\n      // Need to encode all characters except those allowed by the spec:\n      //\n      // [35] ns-dec-digit    ::=  [#x30-#x39] /* 0-9 */\n      // [36] ns-hex-digit    ::=  ns-dec-digit\n      //                         | [#x41-#x46] /* A-F */ | [#x61-#x66] /* a-f */\n      // [37] ns-ascii-letter ::=  [#x41-#x5A] /* A-Z */ | [#x61-#x7A] /* a-z */\n      // [38] ns-word-char    ::=  ns-dec-digit | ns-ascii-letter | \u201C-\u201D\n      // [39] ns-uri-char     ::=  \u201C%\u201D ns-hex-digit ns-hex-digit | ns-word-char | \u201C#\u201D\n      //                         | \u201C;\u201D | \u201C/\u201D | \u201C?\u201D | \u201C:\u201D | \u201C@\u201D | \u201C&\u201D | \u201C=\u201D | \u201C+\u201D | \u201C$\u201D | \u201C,\u201D\n      //                         | \u201C_\u201D | \u201C.\u201D | \u201C!\u201D | \u201C~\u201D | \u201C*\u201D | \u201C'\u201D | \u201C(\u201D | \u201C)\u201D | \u201C[\u201D | \u201C]\u201D\n      //\n      // Also need to encode '!' because it has special meaning (end of tag prefix).\n      //\n      tagStr = encodeURI(\n        state.tag[0] === '!' ? state.tag.slice(1) : state.tag\n      ).replace(/!/g, '%21');\n\n      if (state.tag[0] === '!') {\n        tagStr = '!' + tagStr;\n      } else if (tagStr.slice(0, 18) === 'tag:yaml.org,2002:') {\n        tagStr = '!!' 
+ tagStr.slice(18);\n      } else {\n        tagStr = '!<' + tagStr + '>';\n      }\n\n      state.dump = tagStr + ' ' + state.dump;\n    }\n  }\n\n  return true;\n}\n\nfunction getDuplicateReferences(object, state) {\n  var objects = [],\n      duplicatesIndexes = [],\n      index,\n      length;\n\n  inspectNode(object, objects, duplicatesIndexes);\n\n  for (index = 0, length = duplicatesIndexes.length; index < length; index += 1) {\n    state.duplicates.push(objects[duplicatesIndexes[index]]);\n  }\n  state.usedDuplicates = new Array(length);\n}\n\nfunction inspectNode(object, objects, duplicatesIndexes) {\n  var objectKeyList,\n      index,\n      length;\n\n  if (object !== null && typeof object === 'object') {\n    index = objects.indexOf(object);\n    if (index !== -1) {\n      if (duplicatesIndexes.indexOf(index) === -1) {\n        duplicatesIndexes.push(index);\n      }\n    } else {\n      objects.push(object);\n\n      if (Array.isArray(object)) {\n        for (index = 0, length = object.length; index < length; index += 1) {\n          inspectNode(object[index], objects, duplicatesIndexes);\n        }\n      } else {\n        objectKeyList = Object.keys(object);\n\n        for (index = 0, length = objectKeyList.length; index < length; index += 1) {\n          inspectNode(object[objectKeyList[index]], objects, duplicatesIndexes);\n        }\n      }\n    }\n  }\n}\n\nfunction dump$1(input, options) {\n  options = options || {};\n\n  var state = new State(options);\n\n  if (!state.noRefs) getDuplicateReferences(input, state);\n\n  var value = input;\n\n  if (state.replacer) {\n    value = state.replacer.call({ '': value }, '', value);\n  }\n\n  if (writeNode(state, 0, value, true, true)) return state.dump + '\\n';\n\n  return '';\n}\n\nvar dump_1 = dump$1;\n\nvar dumper = {\n\tdump: dump_1\n};\n\nfunction renamed(from, to) {\n  return function () {\n    throw new Error('Function yaml.' + from + ' is removed in js-yaml 4. ' +\n      'Use yaml.' 
+ to + ' instead, which is now safe by default.');\n  };\n}\n\n\nvar Type                = type;\nvar Schema              = schema;\nvar FAILSAFE_SCHEMA     = failsafe;\nvar JSON_SCHEMA         = json;\nvar CORE_SCHEMA         = core;\nvar DEFAULT_SCHEMA      = _default;\nvar load                = loader.load;\nvar loadAll             = loader.loadAll;\nvar dump                = dumper.dump;\nvar YAMLException       = exception;\n\n// Re-export all types in case user wants to create custom schema\nvar types = {\n  binary:    binary,\n  float:     float,\n  map:       map,\n  null:      _null,\n  pairs:     pairs,\n  set:       set,\n  timestamp: timestamp,\n  bool:      bool,\n  int:       int,\n  merge:     merge,\n  omap:      omap,\n  seq:       seq,\n  str:       str\n};\n\n// Removed functions from JS-YAML 3.0.x\nvar safeLoad            = renamed('safeLoad', 'load');\nvar safeLoadAll         = renamed('safeLoadAll', 'loadAll');\nvar safeDump            = renamed('safeDump', 'dump');\n\nvar jsYaml = {\n\tType: Type,\n\tSchema: Schema,\n\tFAILSAFE_SCHEMA: FAILSAFE_SCHEMA,\n\tJSON_SCHEMA: JSON_SCHEMA,\n\tCORE_SCHEMA: CORE_SCHEMA,\n\tDEFAULT_SCHEMA: DEFAULT_SCHEMA,\n\tload: load,\n\tloadAll: loadAll,\n\tdump: dump,\n\tYAMLException: YAMLException,\n\ttypes: types,\n\tsafeLoad: safeLoad,\n\tsafeLoadAll: safeLoadAll,\n\tsafeDump: safeDump\n};\n\nexport { CORE_SCHEMA, DEFAULT_SCHEMA, FAILSAFE_SCHEMA, JSON_SCHEMA, Schema, Type, YAMLException, jsYaml as default, dump, load, loadAll, safeDump, safeLoad, safeLoadAll, types };\n", "export const balanced = (\n  a: string | RegExp,\n  b: string | RegExp,\n  str: string,\n) => {\n  const ma = a instanceof RegExp ? maybeMatch(a, str) : a\n  const mb = b instanceof RegExp ? 
maybeMatch(b, str) : b\n\n  const r = ma !== null && mb != null && range(ma, mb, str)\n\n  return (\n    r && {\n      start: r[0],\n      end: r[1],\n      pre: str.slice(0, r[0]),\n      body: str.slice(r[0] + ma.length, r[1]),\n      post: str.slice(r[1] + mb.length),\n    }\n  )\n}\n\nconst maybeMatch = (reg: RegExp, str: string) => {\n  const m = str.match(reg)\n  return m ? m[0] : null\n}\n\nexport const range = (\n  a: string,\n  b: string,\n  str: string,\n): undefined | [number, number] => {\n  let begs: number[],\n    beg: number | undefined,\n    left: number,\n    right: number | undefined = undefined,\n    result: undefined | [number, number]\n  let ai = str.indexOf(a)\n  let bi = str.indexOf(b, ai + 1)\n  let i = ai\n\n  if (ai >= 0 && bi > 0) {\n    if (a === b) {\n      return [ai, bi]\n    }\n    begs = []\n    left = str.length\n\n    while (i >= 0 && !result) {\n      if (i === ai) {\n        begs.push(i)\n        ai = str.indexOf(a, i + 1)\n      } else if (begs.length === 1) {\n        const r = begs.pop()\n        if (r !== undefined) result = [r, bi]\n      } else {\n        beg = begs.pop()\n        if (beg !== undefined && beg < left) {\n          left = beg\n          right = bi\n        }\n\n        bi = str.indexOf(b, i + 1)\n      }\n\n      i = ai < bi && ai >= 0 ? 
ai : bi\n    }\n\n    if (begs.length && right !== undefined) {\n      result = [left, right]\n    }\n  }\n\n  return result\n}\n", "import { balanced } from 'balanced-match'\n\nconst escSlash = '\\0SLASH' + Math.random() + '\\0'\nconst escOpen = '\\0OPEN' + Math.random() + '\\0'\nconst escClose = '\\0CLOSE' + Math.random() + '\\0'\nconst escComma = '\\0COMMA' + Math.random() + '\\0'\nconst escPeriod = '\\0PERIOD' + Math.random() + '\\0'\nconst escSlashPattern = new RegExp(escSlash, 'g')\nconst escOpenPattern = new RegExp(escOpen, 'g')\nconst escClosePattern = new RegExp(escClose, 'g')\nconst escCommaPattern = new RegExp(escComma, 'g')\nconst escPeriodPattern = new RegExp(escPeriod, 'g')\nconst slashPattern = /\\\\\\\\/g\nconst openPattern = /\\\\{/g\nconst closePattern = /\\\\}/g\nconst commaPattern = /\\\\,/g\nconst periodPattern = /\\\\\\./g\n\nexport const EXPANSION_MAX = 100_000\n\nfunction numeric(str: string) {\n  return !isNaN(str as any) ? parseInt(str, 10) : str.charCodeAt(0)\n}\n\nfunction escapeBraces(str: string) {\n  return str\n    .replace(slashPattern, escSlash)\n    .replace(openPattern, escOpen)\n    .replace(closePattern, escClose)\n    .replace(commaPattern, escComma)\n    .replace(periodPattern, escPeriod)\n}\n\nfunction unescapeBraces(str: string) {\n  return str\n    .replace(escSlashPattern, '\\\\')\n    .replace(escOpenPattern, '{')\n    .replace(escClosePattern, '}')\n    .replace(escCommaPattern, ',')\n    .replace(escPeriodPattern, '.')\n}\n\n/**\n * Basically just str.split(\",\"), but handling cases\n * where we have nested braced sections, which should be\n * treated as individual members, like {a,{b,c},d}\n */\nfunction parseCommaParts(str: string) {\n  if (!str) {\n    return ['']\n  }\n\n  const parts: string[] = []\n  const m = balanced('{', '}', str)\n\n  if (!m) {\n    return str.split(',')\n  }\n\n  const { pre, body, post } = m\n  const p = pre.split(',')\n\n  p[p.length - 1] += '{' + body + '}'\n  const postParts = 
parseCommaParts(post)\n  if (post.length) {\n    ;(p[p.length - 1] as string) += postParts.shift()\n    p.push.apply(p, postParts)\n  }\n\n  parts.push.apply(parts, p)\n\n  return parts\n}\n\nexport type BraceExpansionOptions = {\n  max?: number\n}\n\nexport function expand(str: string, options: BraceExpansionOptions = {}) {\n  if (!str) {\n    return []\n  }\n\n  const { max = EXPANSION_MAX } = options\n\n  // I don't know why Bash 4.3 does this, but it does.\n  // Anything starting with {} will have the first two bytes preserved\n  // but *only* at the top level, so {},a}b will not expand to anything,\n  // but a{},b}c will be expanded to [a}c,abc].\n  // One could argue that this is a bug in Bash, but since the goal of\n  // this module is to match Bash's rules, we escape a leading {}\n  if (str.slice(0, 2) === '{}') {\n    str = '\\\\{\\\\}' + str.slice(2)\n  }\n\n  return expand_(escapeBraces(str), max, true).map(unescapeBraces)\n}\n\nfunction embrace(str: string) {\n  return '{' + str + '}'\n}\n\nfunction isPadded(el: string) {\n  return /^-?0\\d/.test(el)\n}\n\nfunction lte(i: number, y: number) {\n  return i <= y\n}\n\nfunction gte(i: number, y: number) {\n  return i >= y\n}\n\nfunction expand_(str: string, max: number, isTop: boolean): string[] {\n  /** @type {string[]} */\n  const expansions: string[] = []\n\n  const m = balanced('{', '}', str)\n  if (!m) return [str]\n\n  // no need to expand pre, since it is guaranteed to be free of brace-sets\n  const pre = m.pre\n  const post: string[] = m.post.length ? 
expand_(m.post, max, false) : ['']\n\n  if (/\\$$/.test(m.pre)) {\n    for (let k = 0; k < post.length && k < max; k++) {\n      const expansion = pre + '{' + m.body + '}' + post[k]\n      expansions.push(expansion)\n    }\n  } else {\n    const isNumericSequence = /^-?\\d+\\.\\.-?\\d+(?:\\.\\.-?\\d+)?$/.test(m.body)\n    const isAlphaSequence = /^[a-zA-Z]\\.\\.[a-zA-Z](?:\\.\\.-?\\d+)?$/.test(\n      m.body,\n    )\n    const isSequence = isNumericSequence || isAlphaSequence\n    const isOptions = m.body.indexOf(',') >= 0\n    if (!isSequence && !isOptions) {\n      // {a},b}\n      if (m.post.match(/,(?!,).*\\}/)) {\n        str = m.pre + '{' + m.body + escClose + m.post\n        return expand_(str, max, true)\n      }\n      return [str]\n    }\n\n    let n: string[]\n    if (isSequence) {\n      n = m.body.split(/\\.\\./)\n    } else {\n      n = parseCommaParts(m.body)\n      if (n.length === 1 && n[0] !== undefined) {\n        // x{{a,b}}y ==> x{a}y x{b}y\n        n = expand_(n[0], max, false).map(embrace)\n        //XXX is this necessary? 
Can't seem to hit it in tests.\n        /* c8 ignore start */\n        if (n.length === 1) {\n          return post.map(p => m.pre + n[0] + p)\n        }\n        /* c8 ignore stop */\n      }\n    }\n\n    // at this point, n is the parts, and we know it's not a comma set\n    // with a single entry.\n    let N: string[]\n\n    if (isSequence && n[0] !== undefined && n[1] !== undefined) {\n      const x = numeric(n[0])\n      const y = numeric(n[1])\n      const width = Math.max(n[0].length, n[1].length)\n      let incr =\n        n.length === 3 && n[2] !== undefined ?\n          Math.max(Math.abs(numeric(n[2])), 1)\n        : 1\n      let test = lte\n      const reverse = y < x\n      if (reverse) {\n        incr *= -1\n        test = gte\n      }\n      const pad = n.some(isPadded)\n\n      N = []\n\n      for (let i = x; test(i, y); i += incr) {\n        let c\n        if (isAlphaSequence) {\n          c = String.fromCharCode(i)\n          if (c === '\\\\') {\n            c = ''\n          }\n        } else {\n          c = String(i)\n          if (pad) {\n            const need = width - c.length\n            if (need > 0) {\n              const z = new Array(need + 1).join('0')\n              if (i < 0) {\n                c = '-' + z + c.slice(1)\n              } else {\n                c = z + c\n              }\n            }\n          }\n        }\n        N.push(c)\n      }\n    } else {\n      N = []\n\n      for (let j = 0; j < n.length; j++) {\n        N.push.apply(N, expand_(n[j] as string, max, false))\n      }\n    }\n\n    for (let j = 0; j < N.length; j++) {\n      for (let k = 0; k < post.length && expansions.length < max; k++) {\n        const expansion = pre + N[j] + post[k]\n        if (!isTop || isSequence || expansion) {\n          expansions.push(expansion)\n        }\n      }\n    }\n  }\n\n  return expansions\n}\n", "const MAX_PATTERN_LENGTH = 1024 * 64\nexport const assertValidPattern: (pattern: unknown) => void = (\n  pattern: 
unknown,\n): asserts pattern is string => {\n  if (typeof pattern !== 'string') {\n    throw new TypeError('invalid pattern')\n  }\n\n  if (pattern.length > MAX_PATTERN_LENGTH) {\n    throw new TypeError('pattern is too long')\n  }\n}\n", "// translate the various posix character classes into unicode properties\n// this works across all unicode locales\n\n// { : [, /u flag required, negated]\nconst posixClasses: { [k: string]: [e: string, u: boolean, n?: boolean] } =\n  {\n    '[:alnum:]': ['\\\\p{L}\\\\p{Nl}\\\\p{Nd}', true],\n    '[:alpha:]': ['\\\\p{L}\\\\p{Nl}', true],\n    '[:ascii:]': ['\\\\x' + '00-\\\\x' + '7f', false],\n    '[:blank:]': ['\\\\p{Zs}\\\\t', true],\n    '[:cntrl:]': ['\\\\p{Cc}', true],\n    '[:digit:]': ['\\\\p{Nd}', true],\n    '[:graph:]': ['\\\\p{Z}\\\\p{C}', true, true],\n    '[:lower:]': ['\\\\p{Ll}', true],\n    '[:print:]': ['\\\\p{C}', true],\n    '[:punct:]': ['\\\\p{P}', true],\n    '[:space:]': ['\\\\p{Z}\\\\t\\\\r\\\\n\\\\v\\\\f', true],\n    '[:upper:]': ['\\\\p{Lu}', true],\n    '[:word:]': ['\\\\p{L}\\\\p{Nl}\\\\p{Nd}\\\\p{Pc}', true],\n    '[:xdigit:]': ['A-Fa-f0-9', false],\n  }\n\n// only need to escape a few things inside of brace expressions\n// escapes: [ \\ ] -\nconst braceEscape = (s: string) => s.replace(/[[\\]\\\\-]/g, '\\\\$&')\n// escape all regexp magic characters\nconst regexpEscape = (s: string) =>\n  s.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g, '\\\\$&')\n\n// everything has already been escaped, we just have to join\nconst rangesToString = (ranges: string[]): string => ranges.join('')\n\nexport type ParseClassResult = [\n  src: string,\n  uFlag: boolean,\n  consumed: number,\n  hasMagic: boolean,\n]\n\n// takes a glob string at a posix brace expression, and returns\n// an equivalent regular expression source, and boolean indicating\n// whether the /u flag needs to be applied, and the number of chars\n// consumed to parse the character class.\n// This also removes out of order ranges, and returns ($.) 
if the\n// entire class just no good.\nexport const parseClass = (\n  glob: string,\n  position: number,\n): ParseClassResult => {\n  const pos = position\n  /* c8 ignore start */\n  if (glob.charAt(pos) !== '[') {\n    throw new Error('not in a brace expression')\n  }\n  /* c8 ignore stop */\n  const ranges: string[] = []\n  const negs: string[] = []\n\n  let i = pos + 1\n  let sawStart = false\n  let uflag = false\n  let escaping = false\n  let negate = false\n  let endPos = pos\n  let rangeStart = ''\n  WHILE: while (i < glob.length) {\n    const c = glob.charAt(i)\n    if ((c === '!' || c === '^') && i === pos + 1) {\n      negate = true\n      i++\n      continue\n    }\n\n    if (c === ']' && sawStart && !escaping) {\n      endPos = i + 1\n      break\n    }\n\n    sawStart = true\n    if (c === '\\\\') {\n      if (!escaping) {\n        escaping = true\n        i++\n        continue\n      }\n      // escaped \\ char, fall through and treat like normal char\n    }\n    if (c === '[' && !escaping) {\n      // either a posix class, a collation equivalent, or just a [\n      for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {\n        if (glob.startsWith(cls, i)) {\n          // invalid, [a-[] is fine, but not [a-[:alpha]]\n          if (rangeStart) {\n            return ['$.', false, glob.length - pos, true]\n          }\n          i += cls.length\n          if (neg) negs.push(unip)\n          else ranges.push(unip)\n          uflag = uflag || u\n          continue WHILE\n        }\n      }\n    }\n\n    // now it's just a normal character, effectively\n    escaping = false\n    if (rangeStart) {\n      // throw this range away if it's not valid, but others\n      // can still match.\n      if (c > rangeStart) {\n        ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c))\n      } else if (c === rangeStart) {\n        ranges.push(braceEscape(c))\n      }\n      rangeStart = ''\n      i++\n      continue\n    }\n\n    // now might be the 
start of a range.\n    // can be either c-d or c-] or c] or c] at this point\n    if (glob.startsWith('-]', i + 1)) {\n      ranges.push(braceEscape(c + '-'))\n      i += 2\n      continue\n    }\n    if (glob.startsWith('-', i + 1)) {\n      rangeStart = c\n      i += 2\n      continue\n    }\n\n    // not the start of a range, just a single character\n    ranges.push(braceEscape(c))\n    i++\n  }\n\n  if (endPos < i) {\n    // didn't see the end of the class, not a valid class,\n    // but might still be valid as a literal match.\n    return ['', false, 0, false]\n  }\n\n  // if we got no ranges and no negates, then we have a range that\n  // cannot possibly match anything, and that poisons the whole glob\n  if (!ranges.length && !negs.length) {\n    return ['$.', false, glob.length - pos, true]\n  }\n\n  // if we got one positive range, and it's a single character, then that's\n  // not actually a magic pattern, it's just that one literal character.\n  // we should not treat that as \"magic\", we should just return the literal\n  // character. [_] is a perfectly valid way to escape glob magic chars.\n  if (\n    negs.length === 0 &&\n    ranges.length === 1 &&\n    /^\\\\?.$/.test(ranges[0]) &&\n    !negate\n  ) {\n    const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0]\n    return [regexpEscape(r), false, endPos - pos, false]\n  }\n\n  const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']'\n  const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']'\n  const comb =\n    ranges.length && negs.length ? '(' + sranges + '|' + snegs + ')'\n    : ranges.length ? 
sranges\n    : snegs\n\n  return [comb, uflag, endPos - pos, true]\n}\n", "import { MinimatchOptions } from './index.js'\n\n/**\n * Un-escape a string that has been escaped with {@link escape}.\n *\n * If the {@link MinimatchOptions.windowsPathsNoEscape} option is used, then\n * square-bracket escapes are removed, but not backslash escapes.\n *\n * For example, it will turn the string `'[*]'` into `*`, but it will not\n * turn `'\\\\*'` into `'*'`, because `\\` is a path separator in\n * `windowsPathsNoEscape` mode.\n *\n * When `windowsPathsNoEscape` is not set, then both square-bracket escapes and\n * backslash escapes are removed.\n *\n * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped\n * or unescaped.\n *\n * When `magicalBraces` is not set, escapes of braces (`{` and `}`) will not be\n * unescaped.\n */\n\nexport const unescape = (\n  s: string,\n  {\n    windowsPathsNoEscape = false,\n    magicalBraces = true,\n  }: Pick = {},\n) => {\n  if (magicalBraces) {\n    return windowsPathsNoEscape ?\n        s.replace(/\\[([^\\/\\\\])\\]/g, '$1')\n      : s\n          .replace(/((?!\\\\).|^)\\[([^\\/\\\\])\\]/g, '$1$2')\n          .replace(/\\\\([^\\/])/g, '$1')\n  }\n  return windowsPathsNoEscape ?\n      s.replace(/\\[([^\\/\\\\{}])\\]/g, '$1')\n    : s\n        .replace(/((?!\\\\).|^)\\[([^\\/\\\\{}])\\]/g, '$1$2')\n        .replace(/\\\\([^\\/{}])/g, '$1')\n}\n", "// parse a single path portion\n\nimport { parseClass } from './brace-expressions.js'\nimport { MinimatchOptions, MMRegExp } from './index.js'\nimport { unescape } from './unescape.js'\n\n// classes [] are handled by the parseClass method\n// for positive extglobs, we sub-parse the contents, and combine,\n// with the appropriate regexp close.\n// for negative extglobs, we sub-parse the contents, but then\n// have to include the rest of the pattern, then the parent, etc.,\n// as the thing that cannot be because RegExp negative lookaheads\n// are different from globs.\n//\n// 
So for example:\n// a@(i|w!(x|y)z|j)b => ^a(i|w((!?(x|y)zb).*)z|j)b$\n//   1   2 3   4 5 6      1   2    3   46      5 6\n//\n// Assembling the extglob requires not just the negated patterns themselves,\n// but also anything following the negative patterns up to the boundary\n// of the current pattern, plus anything following in the parent pattern.\n//\n//\n// So, first, we parse the string into an AST of extglobs, without turning\n// anything into regexps yet.\n//\n// ['a', {@ [['i'], ['w', {!['x', 'y']}, 'z'], ['j']]}, 'b']\n//\n// Then, for all the negative extglobs, we append whatever comes after in\n// each parent as their tail\n//\n// ['a', {@ [['i'], ['w', {!['x', 'y'], 'z', 'b'}, 'z'], ['j']]}, 'b']\n//\n// Lastly, we turn each of these pieces into a regexp, and join\n//\n//                                 v----- .* because there's more following,\n//                                 v    v  otherwise, .+ because it must be\n//                                 v    v  *something* there.\n// ['^a', {@ ['i', 'w(?:(!?(?:x|y).*zb$).*)z', 'j' ]}, 'b$']\n//   copy what follows into here--^^^^^\n// ['^a', '(?:i|w(?:(?!(?:x|y).*zb$).*)z|j)', 'b$']\n// ['^a(?:i|w(?:(?!(?:x|y).*zb$).*)z|j)b$']\n\nexport type ExtglobType = '!' | '?' | '+' | '*' | '@'\nconst types = new Set(['!', '?', '+', '*', '@'])\nconst isExtglobType = (c: string | null): c is ExtglobType =>\n  types.has(c as ExtglobType)\nconst isExtglobAST = (c: AST): c is AST & { type: ExtglobType } =>\n  isExtglobType(c.type)\n\n// Map of which extglob types can adopt the children of a nested extglob\n//\n// anything but ! 
can adopt a matching type:\n// +(a|+(b|c)|d) => +(a|b|c|d)\n// *(a|*(b|c)|d) => *(a|b|c|d)\n// @(a|@(b|c)|d) => @(a|b|c|d)\n// ?(a|?(b|c)|d) => ?(a|b|c|d)\n//\n// * can adopt anything, because 0 or repetition is allowed\n// *(a|?(b|c)|d) => *(a|b|c|d)\n// *(a|+(b|c)|d) => *(a|b|c|d)\n// *(a|@(b|c)|d) => *(a|b|c|d)\n//\n// + can adopt @, because 1 or repetition is allowed\n// +(a|@(b|c)|d) => +(a|b|c|d)\n//\n// + and @ CANNOT adopt *, because 0 would be allowed\n// +(a|*(b|c)|d) => would match \"\", on *(b|c)\n// @(a|*(b|c)|d) => would match \"\", on *(b|c)\n//\n// + and @ CANNOT adopt ?, because 0 would be allowed\n// +(a|?(b|c)|d) => would match \"\", on ?(b|c)\n// @(a|?(b|c)|d) => would match \"\", on ?(b|c)\n//\n// ? can adopt @, because 0 or 1 is allowed\n// ?(a|@(b|c)|d) => ?(a|b|c|d)\n//\n// ? and @ CANNOT adopt * or +, because >1 would be allowed\n// ?(a|*(b|c)|d) => would match bbb on *(b|c)\n// @(a|*(b|c)|d) => would match bbb on *(b|c)\n// ?(a|+(b|c)|d) => would match bbb on +(b|c)\n// @(a|+(b|c)|d) => would match bbb on +(b|c)\n//\n// ! CANNOT adopt ! (nothing else can either)\n// !(a|!(b|c)|d) => !(a|b|c|d) would fail to match on b (not not b|c)\n//\n// ! can adopt @\n// !(a|@(b|c)|d) => !(a|b|c|d)\n//\n// ! CANNOT adopt *\n// !(a|*(b|c)|d) => !(a|b|c|d) would match on bbb, not allowed\n//\n// ! CANNOT adopt +\n// !(a|+(b|c)|d) => !(a|b|c|d) would match on bbb, not allowed\n//\n// ! 
CANNOT adopt ?\n// x!(a|?(b|c)|d) => x!(a|b|c|d) would fail to match \"x\"\nconst adoptionMap = new Map([\n  ['!', ['@']],\n  ['?', ['?', '@']],\n  ['@', ['@']],\n  ['*', ['*', '+', '?', '@']],\n  ['+', ['+', '@']],\n])\n\n// nested extglobs that can be adopted in, but with the addition of\n// a blank '' element.\nconst adoptionWithSpaceMap = new Map([\n  ['!', ['?']],\n  ['@', ['?']],\n  ['+', ['?', '*']],\n])\n\n// union of the previous two maps\nconst adoptionAnyMap = new Map([\n  ['!', ['?', '@']],\n  ['?', ['?', '@']],\n  ['@', ['?', '@']],\n  ['*', ['*', '+', '?', '@']],\n  ['+', ['+', '@', '?', '*']],\n])\n\n// Extglobs that can take over their parent if they are the only child\n// the key is parent, value maps child to resulting extglob parent type\n// '@' is omitted because it's a special case. An `@` extglob with a single\n// member can always be usurped by that subpattern.\nconst usurpMap = new Map>([\n  ['!', new Map([['!', '@']])],\n  [\n    '?',\n    new Map([\n      ['*', '*'],\n      ['+', '*'],\n    ]),\n  ],\n  [\n    '@',\n    new Map([\n      ['!', '!'],\n      ['?', '?'],\n      ['@', '@'],\n      ['*', '*'],\n      ['+', '+'],\n    ]),\n  ],\n  [\n    '+',\n    new Map([\n      ['?', '*'],\n      ['*', '*'],\n    ]),\n  ],\n])\n\n// Patterns that get prepended to bind to the start of either the\n// entire string, or just a single path portion, to prevent dots\n// and/or traversal patterns, when needed.\n// Exts don't need the ^ or / bit, because the root binds that already.\nconst startNoTraversal = '(?!(?:^|/)\\\\.\\\\.?(?:$|/))'\nconst startNoDot = '(?!\\\\.)'\n\n// characters that indicate a start of pattern needs the \"no dots\" bit,\n// because a dot *might* be matched. 
( is not in the list, because in\n// the case of a child extglob, it will handle the prevention itself.\nconst addPatternStart = new Set(['[', '.'])\n// cases where traversal is A-OK, no dot prevention needed\nconst justDots = new Set(['..', '.'])\nconst reSpecials = new Set('().*{}+?[]^$\\\\!')\nconst regExpEscape = (s: string) =>\n  s.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g, '\\\\$&')\n\n// any single thing other than /\nconst qmark = '[^/]'\n\n// * => any number of characters\nconst star = qmark + '*?'\n// use + when we need to ensure that *something* matches, because the * is\n// the only thing in the path portion.\nconst starNoEmpty = qmark + '+?'\n\n// remove the \\ chars that we added if we end up doing a nonmagic compare\n// const deslash = (s: string) => s.replace(/\\\\(.)/g, '$1')\n\nlet ID = 0\nexport class AST {\n  type: ExtglobType | null\n  readonly #root: AST\n\n  #hasMagic?: boolean\n  #uflag: boolean = false\n  #parts: (string | AST)[] = []\n  #parent?: AST\n  #parentIndex: number\n  #negs: AST[]\n  #filledNegs: boolean = false\n  #options: MinimatchOptions\n  #toString?: string\n  // set to true if it's an extglob with no children\n  // (which really means one child of '')\n  #emptyExt: boolean = false\n  id = ++ID\n\n  get depth(): number {\n    return (this.#parent?.depth ?? -1) + 1\n  }\n\n  [Symbol.for('nodejs.util.inspect.custom')]() {\n    return {\n      '@@type': 'AST',\n      id: this.id,\n      type: this.type,\n      root: this.#root.id,\n      parent: this.#parent?.id,\n      depth: this.depth,\n      partsLength: this.#parts.length,\n      parts: this.#parts,\n    }\n  }\n\n  constructor(\n    type: ExtglobType | null,\n    parent?: AST,\n    options: MinimatchOptions = {},\n  ) {\n    this.type = type\n    // extglobs are inherently magical\n    if (type) this.#hasMagic = true\n    this.#parent = parent\n    this.#root = this.#parent ? this.#parent.#root : this\n    this.#options = this.#root === this ? 
options : this.#root.#options\n    this.#negs = this.#root === this ? [] : this.#root.#negs\n    if (type === '!' && !this.#root.#filledNegs) this.#negs.push(this)\n    this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0\n  }\n\n  get hasMagic(): boolean | undefined {\n    /* c8 ignore start */\n    if (this.#hasMagic !== undefined) return this.#hasMagic\n    /* c8 ignore stop */\n    for (const p of this.#parts) {\n      if (typeof p === 'string') continue\n      if (p.type || p.hasMagic) return (this.#hasMagic = true)\n    }\n    // note: will be undefined until we generate the regexp src and find out\n    return this.#hasMagic\n  }\n\n  // reconstructs the pattern\n  toString(): string {\n    if (this.#toString !== undefined) return this.#toString\n    if (!this.type) {\n      return (this.#toString = this.#parts.map(p => String(p)).join(''))\n    } else {\n      return (this.#toString =\n        this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')')\n    }\n  }\n\n  #fillNegs() {\n    /* c8 ignore start */\n    if (this !== this.#root) throw new Error('should only call on root')\n    if (this.#filledNegs) return this\n    /* c8 ignore stop */\n\n    // call toString() once to fill this out\n    this.toString()\n    this.#filledNegs = true\n    let n: AST | undefined\n    while ((n = this.#negs.pop())) {\n      if (n.type !== '!') continue\n      // walk up the tree, appending everthing that comes AFTER parentIndex\n      let p: AST | undefined = n\n      let pp = p.#parent\n      while (pp) {\n        for (\n          let i = p.#parentIndex + 1;\n          !pp.type && i < pp.#parts.length;\n          i++\n        ) {\n          for (const part of n.#parts) {\n            /* c8 ignore start */\n            if (typeof part === 'string') {\n              throw new Error('string part in extglob AST??')\n            }\n            /* c8 ignore stop */\n            part.copyIn(pp.#parts[i])\n          }\n        }\n        p = pp\n        pp 
= p.#parent\n      }\n    }\n    return this\n  }\n\n  push(...parts: (string | AST)[]) {\n    for (const p of parts) {\n      if (p === '') continue\n      /* c8 ignore start */\n      if (\n        typeof p !== 'string' &&\n        !(p instanceof AST && p.#parent === this)\n      ) {\n        throw new Error('invalid part: ' + p)\n      }\n      /* c8 ignore stop */\n      this.#parts.push(p)\n    }\n  }\n\n  toJSON() {\n    const ret: any[] =\n      this.type === null ?\n        this.#parts\n          .slice()\n          .map(p => (typeof p === 'string' ? p : p.toJSON()))\n      : [this.type, ...this.#parts.map(p => (p as AST).toJSON())]\n    if (this.isStart() && !this.type) ret.unshift([])\n    if (\n      this.isEnd() &&\n      (this === this.#root ||\n        (this.#root.#filledNegs && this.#parent?.type === '!'))\n    ) {\n      ret.push({})\n    }\n    return ret\n  }\n\n  isStart(): boolean {\n    if (this.#root === this) return true\n    // if (this.type) return !!this.#parent?.isStart()\n    if (!this.#parent?.isStart()) return false\n    if (this.#parentIndex === 0) return true\n    // if everything AHEAD of this is a negation, then it's still the \"start\"\n    const p = this.#parent\n    for (let i = 0; i < this.#parentIndex; i++) {\n      const pp = p.#parts[i]\n      if (!(pp instanceof AST && pp.type === '!')) {\n        return false\n      }\n    }\n    return true\n  }\n\n  isEnd(): boolean {\n    if (this.#root === this) return true\n    if (this.#parent?.type === '!') return true\n    if (!this.#parent?.isEnd()) return false\n    if (!this.type) return this.#parent?.isEnd()\n    // if not root, it'll always have a parent\n    /* c8 ignore start */\n    const pl = this.#parent ? 
this.#parent.#parts.length : 0\n    /* c8 ignore stop */\n    return this.#parentIndex === pl - 1\n  }\n\n  copyIn(part: AST | string) {\n    if (typeof part === 'string') this.push(part)\n    else this.push(part.clone(this))\n  }\n\n  clone(parent: AST) {\n    const c = new AST(this.type, parent)\n    for (const p of this.#parts) {\n      c.copyIn(p)\n    }\n    return c\n  }\n\n  static #parseAST(\n    str: string,\n    ast: AST,\n    pos: number,\n    opt: MinimatchOptions,\n    extDepth: number,\n  ): number {\n    const maxDepth = opt.maxExtglobRecursion ?? 2\n    let escaping = false\n    let inBrace = false\n    let braceStart = -1\n    let braceNeg = false\n    if (ast.type === null) {\n      // outside of a extglob, append until we find a start\n      let i = pos\n      let acc = ''\n      while (i < str.length) {\n        const c = str.charAt(i++)\n        // still accumulate escapes at this point, but we do ignore\n        // starts that are escaped\n        if (escaping || c === '\\\\') {\n          escaping = !escaping\n          acc += c\n          continue\n        }\n\n        if (inBrace) {\n          if (i === braceStart + 1) {\n            if (c === '^' || c === '!') {\n              braceNeg = true\n            }\n          } else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {\n            inBrace = false\n          }\n          acc += c\n          continue\n        } else if (c === '[') {\n          inBrace = true\n          braceStart = i\n          braceNeg = false\n          acc += c\n          continue\n        }\n\n        // we don't have to check for adoption here, because that's\n        // done at the other recursion point.\n        const doRecurse =\n          !opt.noext &&\n          isExtglobType(c) &&\n          str.charAt(i) === '(' &&\n          extDepth <= maxDepth\n        if (doRecurse) {\n          ast.push(acc)\n          acc = ''\n          const ext = new AST(c, ast)\n          i = AST.#parseAST(str, ext, i, opt, 
extDepth + 1)\n          ast.push(ext)\n          continue\n        }\n        acc += c\n      }\n      ast.push(acc)\n      return i\n    }\n\n    // some kind of extglob, pos is at the (\n    // find the next | or )\n    let i = pos + 1\n    let part = new AST(null, ast)\n    const parts: AST[] = []\n    let acc = ''\n    while (i < str.length) {\n      const c = str.charAt(i++)\n      // still accumulate escapes at this point, but we do ignore\n      // starts that are escaped\n      if (escaping || c === '\\\\') {\n        escaping = !escaping\n        acc += c\n        continue\n      }\n\n      if (inBrace) {\n        if (i === braceStart + 1) {\n          if (c === '^' || c === '!') {\n            braceNeg = true\n          }\n        } else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {\n          inBrace = false\n        }\n        acc += c\n        continue\n      } else if (c === '[') {\n        inBrace = true\n        braceStart = i\n        braceNeg = false\n        acc += c\n        continue\n      }\n\n      const doRecurse =\n        !opt.noext &&\n        isExtglobType(c) &&\n        str.charAt(i) === '(' &&\n        /* c8 ignore start - the maxDepth is sufficient here */\n        (extDepth <= maxDepth || (ast && ast.#canAdoptType(c)))\n      /* c8 ignore stop */\n      if (doRecurse) {\n        const depthAdd = ast && ast.#canAdoptType(c) ? 
0 : 1\n        part.push(acc)\n        acc = ''\n        const ext = new AST(c, part)\n        part.push(ext)\n        i = AST.#parseAST(str, ext, i, opt, extDepth + depthAdd)\n        continue\n      }\n      if (c === '|') {\n        part.push(acc)\n        acc = ''\n        parts.push(part)\n        part = new AST(null, ast)\n        continue\n      }\n      if (c === ')') {\n        if (acc === '' && ast.#parts.length === 0) {\n          ast.#emptyExt = true\n        }\n        part.push(acc)\n        acc = ''\n        ast.push(...parts, part)\n        return i\n      }\n      acc += c\n    }\n\n    // unfinished extglob\n    // if we got here, it was a malformed extglob! not an extglob, but\n    // maybe something else in there.\n    ast.type = null\n    ast.#hasMagic = undefined\n    ast.#parts = [str.substring(pos - 1)]\n    return i\n  }\n\n  #canAdoptWithSpace(child?: AST | string): child is AST & {\n    type: null\n    parts: [AST & { type: ExtglobType }]\n  } {\n    return this.#canAdopt(child, adoptionWithSpaceMap)\n  }\n\n  #canAdopt(\n    child?: AST | string,\n    map: Map = adoptionMap,\n  ): child is AST & {\n    type: null\n    parts: [AST & { type: ExtglobType }]\n  } {\n    if (\n      !child ||\n      typeof child !== 'object' ||\n      child.type !== null ||\n      child.#parts.length !== 1 ||\n      this.type === null\n    ) {\n      return false\n    }\n    const gc = child.#parts[0]\n    if (!gc || typeof gc !== 'object' || gc.type === null) {\n      return false\n    }\n    return (this as AST & { type: ExtglobType }).#canAdoptType(\n      gc.type,\n      map,\n    )\n  }\n  #canAdoptType(\n    c: string,\n    map: Map = adoptionAnyMap,\n  ): c is ExtglobType {\n    return !!map.get(this.type as ExtglobType)?.includes(c as ExtglobType)\n  }\n\n  #adoptWithSpace(\n    this: AST & { type: ExtglobType },\n    child: AST & {\n      type: null\n    },\n    index: number,\n  ) {\n    const gc = child.#parts[0] as AST & { type: ExtglobType }\n    
const blank = new AST(null, gc, this.options)\n    blank.#parts.push('')\n    gc.push(blank)\n    this.#adopt(child, index)\n  }\n\n  #adopt(\n    child: AST & {\n      type: null\n    },\n    index: number,\n  ) {\n    const gc = child.#parts[0] as AST & { type: ExtglobType }\n    this.#parts.splice(index, 1, ...gc.#parts)\n    for (const p of gc.#parts) {\n      if (typeof p === 'object') p.#parent = this\n    }\n    this.#toString = undefined\n  }\n\n  #canUsurpType(c: string): boolean {\n    const m = usurpMap.get(this.type as ExtglobType)\n    return !!(m?.has(c as ExtglobType))\n  }\n\n  #canUsurp (\n    child?: AST | string,\n  ): child is AST & {\n    type: null\n    parts: [AST & { type: ExtglobType }]\n  } {\n    if (\n      !child ||\n      typeof child !== 'object' ||\n      child.type !== null ||\n      child.#parts.length !== 1 ||\n      this.type === null ||\n      this.#parts.length !== 1\n    ) {\n      return false\n    }\n    const gc = child.#parts[0]\n    if (!gc || typeof gc !== 'object' || gc.type === null) {\n      return false\n    }\n    return (this as AST & { type: ExtglobType }).#canUsurpType(gc.type)\n  }\n\n  #usurp(this: AST & { type: ExtglobType }, child: AST & { type: null }) {\n    const m = usurpMap.get(this.type as ExtglobType)\n    const gc = child.#parts[0] as AST & { type: ExtglobType }\n    const nt = m?.get(gc.type)\n    /* c8 ignore start - impossible */\n    if (!nt) return false\n    /* c8 ignore stop */\n    this.#parts = gc.#parts\n    for (const p of this.#parts) {\n      if (typeof p === 'object') {\n        p.#parent = this\n      }\n    }\n    this.type = nt\n    this.#toString = undefined\n    this.#emptyExt = false\n  }\n\n  static fromGlob(pattern: string, options: MinimatchOptions = {}) {\n    const ast = new AST(null, undefined, options)\n    AST.#parseAST(pattern, ast, 0, options, 0)\n    return ast\n  }\n\n  // returns the regular expression if there's magic, or the unescaped\n  // string if not.\n  
toMMPattern(): MMRegExp | string {\n    // should only be called on root\n    /* c8 ignore start */\n    if (this !== this.#root) return this.#root.toMMPattern()\n    /* c8 ignore stop */\n    const glob = this.toString()\n    const [re, body, hasMagic, uflag] = this.toRegExpSource()\n    // if we're in nocase mode, and not nocaseMagicOnly, then we do\n    // still need a regular expression if we have to case-insensitively\n    // match capital/lowercase characters.\n    const anyMagic =\n      hasMagic ||\n      this.#hasMagic ||\n      (this.#options.nocase &&\n        !this.#options.nocaseMagicOnly &&\n        glob.toUpperCase() !== glob.toLowerCase())\n    if (!anyMagic) {\n      return body\n    }\n\n    const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '')\n    return Object.assign(new RegExp(`^${re}$`, flags), {\n      _src: re,\n      _glob: glob,\n    })\n  }\n\n  get options() {\n    return this.#options\n  }\n\n  // returns the string match, the regexp source, whether there's magic\n  // in the regexp (so a regular expression is required) and whether or\n  // not the uflag is needed for the regular expression (for posix classes)\n  // TODO: instead of injecting the start/end at this point, just return\n  // the BODY of the regexp, along with the start/end portions suitable\n  // for binding the start/end in either a joined full-path makeRe context\n  // (where we bind to (^|/), or a standalone matchPart context (where\n  // we bind to ^, and not /).  
Otherwise slashes get duped!\n  //\n  // In part-matching mode, the start is:\n  // - if not isStart: nothing\n  // - if traversal possible, but not allowed: ^(?!\\.\\.?$)\n  // - if dots allowed or not possible: ^\n  // - if dots possible and not allowed: ^(?!\\.)\n  // end is:\n  // - if not isEnd(): nothing\n  // - else: $\n  //\n  // In full-path matching mode, we put the slash at the START of the\n  // pattern, so start is:\n  // - if first pattern: same as part-matching mode\n  // - if not isStart(): nothing\n  // - if traversal possible, but not allowed: /(?!\\.\\.?(?:$|/))\n  // - if dots allowed or not possible: /\n  // - if dots possible and not allowed: /(?!\\.)\n  // end is:\n  // - if last pattern, same as part-matching mode\n  // - else nothing\n  //\n  // Always put the (?:$|/) on negated tails, though, because that has to be\n  // there to bind the end of the negated pattern portion, and it's easier to\n  // just stick it in now rather than try to inject it later in the middle of\n  // the pattern.\n  //\n  // We can just always return the same end, and leave it up to the caller\n  // to know whether it's going to be used joined or in parts.\n  // And, if the start is adjusted slightly, can do the same there:\n  // - if not isStart: nothing\n  // - if traversal possible, but not allowed: (?:/|^)(?!\\.\\.?$)\n  // - if dots allowed or not possible: (?:/|^)\n  // - if dots possible and not allowed: (?:/|^)(?!\\.)\n  //\n  // But it's better to have a simpler binding without a conditional, for\n  // performance, so probably better to return both start options.\n  //\n  // Then the caller just ignores the end if it's not the first pattern,\n  // and the start always gets applied.\n  //\n  // But that's always going to be $ if it's the ending pattern, or nothing,\n  // so the caller can just attach $ at the end of the pattern when building.\n  //\n  // So the todo is:\n  // - better detect what kind of start is needed\n  // - return both flavors of 
starting pattern\n  // - attach $ at the end of the pattern when creating the actual RegExp\n  //\n  // Ah, but wait, no, that all only applies to the root when the first pattern\n  // is not an extglob. If the first pattern IS an extglob, then we need all\n  // that dot prevention biz to live in the extglob portions, because eg\n  // +(*|.x*) can match .xy but not .yx.\n  //\n  // So, return the two flavors if it's #root and the first child is not an\n  // AST, otherwise leave it to the child AST to handle it, and there,\n  // use the (?:^|/) style of start binding.\n  //\n  // Even simplified further:\n  // - Since the start for a join is eg /(?!\\.) and the start for a part\n  // is ^(?!\\.), we can just prepend (?!\\.) to the pattern (either root\n  // or start or whatever) and prepend ^ or / at the Regexp construction.\n  toRegExpSource(\n    allowDot?: boolean,\n  ): [re: string, body: string, hasMagic: boolean, uflag: boolean] {\n    const dot = allowDot ?? !!this.#options.dot\n    if (this.#root === this) {\n      this.#flatten()\n      this.#fillNegs()\n    }\n    if (!isExtglobAST(this)) {\n      const noEmpty =\n        this.isStart() &&\n        this.isEnd() &&\n        !this.#parts.some(s => typeof s !== 'string')\n      const src = this.#parts\n        .map(p => {\n          const [re, _, hasMagic, uflag] =\n            typeof p === 'string' ?\n              AST.#parseGlob(p, this.#hasMagic, noEmpty)\n            : p.toRegExpSource(allowDot)\n          this.#hasMagic = this.#hasMagic || hasMagic\n          this.#uflag = this.#uflag || uflag\n          return re\n        })\n        .join('')\n\n      let start = ''\n      if (this.isStart()) {\n        if (typeof this.#parts[0] === 'string') {\n          // this is the string that will match the start of the pattern,\n          // so we need to protect against dots and such.\n\n          // '.' and '..' cannot match unless the pattern is that exactly,\n          // even if it starts with . 
or dot:true is set.\n          const dotTravAllowed =\n            this.#parts.length === 1 && justDots.has(this.#parts[0])\n          if (!dotTravAllowed) {\n            const aps = addPatternStart\n            // check if we have a possibility of matching . or ..,\n            // and prevent that.\n            const needNoTrav =\n              // dots are allowed, and the pattern starts with [ or .\n              (dot && aps.has(src.charAt(0))) ||\n              // the pattern starts with \\., and then [ or .\n              (src.startsWith('\\\\.') && aps.has(src.charAt(2))) ||\n              // the pattern starts with \\.\\., and then [ or .\n              (src.startsWith('\\\\.\\\\.') && aps.has(src.charAt(4)))\n            // no need to prevent dots if it can't match a dot, or if a\n            // sub-pattern will be preventing it anyway.\n            const needNoDot = !dot && !allowDot && aps.has(src.charAt(0))\n\n            start =\n              needNoTrav ? startNoTraversal\n              : needNoDot ? startNoDot\n              : ''\n          }\n        }\n      }\n\n      // append the \"end of path portion\" pattern to negation tails\n      let end = ''\n      if (\n        this.isEnd() &&\n        this.#root.#filledNegs &&\n        this.#parent?.type === '!'\n      ) {\n        end = '(?:$|\\\\/)'\n      }\n      const final = start + src + end\n      return [\n        final,\n        unescape(src),\n        (this.#hasMagic = !!this.#hasMagic),\n        this.#uflag,\n      ]\n    }\n\n    // We need to calculate the body *twice* if it's a repeat pattern\n    // at the start, once in nodot mode, then again in dot mode, so a\n    // pattern like *(?) can match 'x.y'\n\n    const repeated = this.type === '*' || this.type === '+'\n    // some kind of extglob\n    const start = this.type === '!' ? 
'(?:(?!(?:' : '(?:'\n    let body = this.#partsToRegExp(dot)\n\n    if (this.isStart() && this.isEnd() && !body && this.type !== '!') {\n      // invalid extglob, has to at least be *something* present, if it's\n      // the entire path portion.\n      const s = this.toString()\n      const me = this as AST\n      me.#parts = [s]\n      me.type = null\n      me.#hasMagic = undefined\n      return [s, unescape(this.toString()), false, false]\n    }\n\n    let bodyDotAllowed =\n      !repeated || allowDot || dot || !startNoDot ?\n        ''\n      : this.#partsToRegExp(true)\n    if (bodyDotAllowed === body) {\n      bodyDotAllowed = ''\n    }\n    if (bodyDotAllowed) {\n      body = `(?:${body})(?:${bodyDotAllowed})*?`\n    }\n\n    // an empty !() is exactly equivalent to a starNoEmpty\n    let final = ''\n    if (this.type === '!' && this.#emptyExt) {\n      final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty\n    } else {\n      const close =\n        this.type === '!' ?\n          // !() must match something,but !(x) can match ''\n          '))' +\n          (this.isStart() && !dot && !allowDot ? startNoDot : '') +\n          star +\n          ')'\n        : this.type === '@' ? ')'\n        : this.type === '?' ? ')?'\n        : this.type === '+' && bodyDotAllowed ? ')'\n        : this.type === '*' && bodyDotAllowed ? 
`)?`\n        : `)${this.type}`\n      final = start + body + close\n    }\n    return [\n      final,\n      unescape(body),\n      (this.#hasMagic = !!this.#hasMagic),\n      this.#uflag,\n    ]\n  }\n\n  #flatten() {\n    if (!isExtglobAST(this)) {\n      for (const p of this.#parts) {\n        if (typeof p === 'object') {\n          p.#flatten()\n        }\n      }\n    } else {\n      // do up to 10 passes to flatten as much as possible\n      let iterations = 0\n      let done = false\n      do {\n        done = true\n        for (let i = 0; i < this.#parts.length; i++) {\n          const c = this.#parts[i]\n          if (typeof c === 'object') {\n            c.#flatten()\n            if (this.#canAdopt(c)) {\n              done = false\n              this.#adopt(c, i)\n            } else if (this.#canAdoptWithSpace(c)) {\n              done = false\n              this.#adoptWithSpace(c, i)\n            } else if (this.#canUsurp(c)) {\n              done = false\n              this.#usurp(c)\n            }\n          }\n        }\n      } while (!done && ++iterations < 10)\n    }\n    this.#toString = undefined\n  }\n\n  #partsToRegExp(this: AST & { type: ExtglobType }, dot: boolean) {\n    return this.#parts\n      .map(p => {\n        // extglob ASTs should only contain parent ASTs\n        /* c8 ignore start */\n        if (typeof p === 'string') {\n          throw new Error('string type in extglob ast??')\n        }\n        /* c8 ignore stop */\n        // can ignore hasMagic, because extglobs are already always magic\n        const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot)\n        this.#uflag = this.#uflag || uflag\n        return re\n      })\n      .filter(p => !(this.isStart() && this.isEnd()) || !!p)\n      .join('|')\n  }\n\n  static #parseGlob(\n    glob: string,\n    hasMagic: boolean | undefined,\n    noEmpty: boolean = false,\n  ): [re: string, body: string, hasMagic: boolean, uflag: boolean] {\n    let escaping = false\n    let re = 
''\n    let uflag = false\n    // multiple stars that aren't globstars coalesce into one *\n    let inStar = false\n    for (let i = 0; i < glob.length; i++) {\n      const c = glob.charAt(i)\n      if (escaping) {\n        escaping = false\n        re += (reSpecials.has(c) ? '\\\\' : '') + c\n        continue\n      }\n      if (c === '*') {\n        if (inStar) continue\n        inStar = true\n        re += noEmpty && /^[*]+$/.test(glob) ? starNoEmpty : star\n        hasMagic = true\n        continue\n      } else {\n        inStar = false\n      }\n      if (c === '\\\\') {\n        if (i === glob.length - 1) {\n          re += '\\\\\\\\'\n        } else {\n          escaping = true\n        }\n        continue\n      }\n      if (c === '[') {\n        const [src, needUflag, consumed, magic] = parseClass(glob, i)\n        if (consumed) {\n          re += src\n          uflag = uflag || needUflag\n          i += consumed - 1\n          hasMagic = hasMagic || magic\n          continue\n        }\n      }\n      if (c === '?') {\n        re += qmark\n        hasMagic = true\n        continue\n      }\n      re += regExpEscape(c)\n    }\n    return [re, unescape(glob), !!hasMagic, uflag]\n  }\n}\n", "import { MinimatchOptions } from './index.js'\n\n/**\n * Escape all magic characters in a glob pattern.\n *\n * If the {@link MinimatchOptions.windowsPathsNoEscape}\n * option is used, then characters are escaped by wrapping in `[]`, because\n * a magic character wrapped in a character class can only be satisfied by\n * that exact character.  In this mode, `\\` is _not_ escaped, because it is\n * not interpreted as a magic character, but instead as a path separator.\n *\n * If the {@link MinimatchOptions.magicalBraces} option is used,\n * then braces (`{` and `}`) will be escaped.\n */\nexport const escape = (\n  s: string,\n  {\n    windowsPathsNoEscape = false,\n    magicalBraces = false,\n  }: Pick = {},\n) => {\n  // don't need to escape +@! 
because we escape the parens\n  // that make those magic, and escaping ! as [!] isn't valid,\n  // because [!]] is a valid glob class meaning not ']'.\n  if (magicalBraces) {\n    return windowsPathsNoEscape ?\n        s.replace(/[?*()[\\]{}]/g, '[$&]')\n      : s.replace(/[?*()[\\]\\\\{}]/g, '\\\\$&')\n  }\n  return windowsPathsNoEscape ?\n      s.replace(/[?*()[\\]]/g, '[$&]')\n    : s.replace(/[?*()[\\]\\\\]/g, '\\\\$&')\n}\n", "import { expand } from 'brace-expansion'\nimport { assertValidPattern } from './assert-valid-pattern.js'\nimport { AST, ExtglobType } from './ast.js'\nimport { escape } from './escape.js'\nimport { unescape } from './unescape.js'\n\nexport type Platform =\n  | 'aix'\n  | 'android'\n  | 'darwin'\n  | 'freebsd'\n  | 'haiku'\n  | 'linux'\n  | 'openbsd'\n  | 'sunos'\n  | 'win32'\n  | 'cygwin'\n  | 'netbsd'\n\nexport interface MinimatchOptions {\n  /** do not expand `{x,y}` style braces */\n  nobrace?: boolean\n  /** do not treat patterns starting with `#` as a comment */\n  nocomment?: boolean\n  /** do not treat patterns starting with `!` as a negation */\n  nonegate?: boolean\n  /** print LOTS of debugging output */\n  debug?: boolean\n  /** treat `**` the same as `*` */\n  noglobstar?: boolean\n  /** do not expand extglobs like `+(a|b)` */\n  noext?: boolean\n  /** return the pattern if nothing matches */\n  nonull?: boolean\n  /** treat `\\\\` as a path separator, not an escape character */\n  windowsPathsNoEscape?: boolean\n  /**\n   * inverse of {@link MinimatchOptions.windowsPathsNoEscape}\n   * @deprecated\n   */\n  allowWindowsEscape?: boolean\n  /**\n   * Compare a partial path to a pattern. As long as the parts\n   * of the path that are present are not contradicted by the\n   * pattern, it will be treated as a match. 
This is useful in\n   * applications where you're walking through a folder structure,\n   * and don't yet have the full path, but want to ensure that you\n   * do not walk down paths that can never be a match.\n   */\n  partial?: boolean\n  /** allow matches that start with `.` even if the pattern does not */\n  dot?: boolean\n  /** ignore case */\n  nocase?: boolean\n  /** ignore case only in wildcard patterns */\n  nocaseMagicOnly?: boolean\n  /** consider braces to be \"magic\" for the purpose of `hasMagic` */\n  magicalBraces?: boolean\n  /**\n   * If set, then patterns without slashes will be matched\n   * against the basename of the path if it contains slashes.\n   * For example, `a?b` would match the path `/xyz/123/acb`, but\n   * not `/xyz/acb/123`.\n   */\n  matchBase?: boolean\n  /** invert the results of negated matches */\n  flipNegate?: boolean\n  /** do not collapse multiple `/` into a single `/` */\n  preserveMultipleSlashes?: boolean\n  /**\n   * A number indicating the level of optimization that should be done\n   * to the pattern prior to parsing and using it for matches.\n   */\n  optimizationLevel?: number\n  /** operating system platform */\n  platform?: Platform\n  /**\n   * When a pattern starts with a UNC path or drive letter, and in\n   * `nocase:true` mode, do not convert the root portions of the\n   * pattern into a case-insensitive regular expression, and instead\n   * leave them as strings.\n   *\n   * This is the default when the platform is `win32` and\n   * `nocase:true` is set.\n   */\n  windowsNoMagicRoot?: boolean\n  /**\n   * max number of `{...}` patterns to expand. 
Default 100_000.\n   */\n  braceExpandMax?: number\n  /**\n   * Max number of non-adjacent `**` patterns to recursively walk down.\n   *\n   * The default of 200 is almost certainly high enough for most purposes,\n   * and can handle absurdly excessive patterns.\n   */\n  maxGlobstarRecursion?: number\n\n  /**\n   * Max depth to traverse for nested extglobs like `*(a|b|c)`\n   *\n   * Default is 2, which is quite low, but any higher value\n   * swiftly results in punishing performance impacts. Note\n   * that this is *not*  relevant when the globstar types can\n   * be safely coalesced into a single set.\n   *\n   * For example, `*(a|@(b|c)|d)` would be flattened into\n   * `*(a|b|c|d)`. Thus, many common extglobs will retain good\n   * performance and  never hit this limit, even if they are\n   * excessively deep and complicated.\n   *\n   * If the limit is hit, then the extglob characters are simply\n   * not parsed, and the pattern effectively switches into\n   * `noextglob: true` mode for the contents of that nested\n   * sub-pattern. 
This will typically _not_ result in a match,\n   * but is considered a valid trade-off for security and\n   * performance.\n   */\n  maxExtglobRecursion?: number\n}\n\nexport const minimatch = (\n  p: string,\n  pattern: string,\n  options: MinimatchOptions = {},\n) => {\n  assertValidPattern(pattern)\n\n  // shortcut: comments match nothing.\n  if (!options.nocomment && pattern.charAt(0) === '#') {\n    return false\n  }\n\n  return new Minimatch(pattern, options).match(p)\n}\n\n// Optimized checking for the most common glob patterns.\nconst starDotExtRE = /^\\*+([^+@!?\\*\\[\\(]*)$/\nconst starDotExtTest = (ext: string) => (f: string) =>\n  !f.startsWith('.') && f.endsWith(ext)\nconst starDotExtTestDot = (ext: string) => (f: string) => f.endsWith(ext)\nconst starDotExtTestNocase = (ext: string) => {\n  ext = ext.toLowerCase()\n  return (f: string) => !f.startsWith('.') && f.toLowerCase().endsWith(ext)\n}\nconst starDotExtTestNocaseDot = (ext: string) => {\n  ext = ext.toLowerCase()\n  return (f: string) => f.toLowerCase().endsWith(ext)\n}\nconst starDotStarRE = /^\\*+\\.\\*+$/\nconst starDotStarTest = (f: string) =>\n  !f.startsWith('.') && f.includes('.')\nconst starDotStarTestDot = (f: string) =>\n  f !== '.' && f !== '..' && f.includes('.')\nconst dotStarRE = /^\\.\\*+$/\nconst dotStarTest = (f: string) =>\n  f !== '.' && f !== '..' && f.startsWith('.')\nconst starRE = /^\\*+$/\nconst starTest = (f: string) => f.length !== 0 && !f.startsWith('.')\nconst starTestDot = (f: string) =>\n  f.length !== 0 && f !== '.' 
&& f !== '..'\nconst qmarksRE = /^\\?+([^+@!?\\*\\[\\(]*)?$/\nconst qmarksTestNocase = ([$0, ext = '']: RegExpMatchArray) => {\n  const noext = qmarksTestNoExt([$0])\n  if (!ext) return noext\n  ext = ext.toLowerCase()\n  return (f: string) => noext(f) && f.toLowerCase().endsWith(ext)\n}\nconst qmarksTestNocaseDot = ([$0, ext = '']: RegExpMatchArray) => {\n  const noext = qmarksTestNoExtDot([$0])\n  if (!ext) return noext\n  ext = ext.toLowerCase()\n  return (f: string) => noext(f) && f.toLowerCase().endsWith(ext)\n}\nconst qmarksTestDot = ([$0, ext = '']: RegExpMatchArray) => {\n  const noext = qmarksTestNoExtDot([$0])\n  return !ext ? noext : (f: string) => noext(f) && f.endsWith(ext)\n}\nconst qmarksTest = ([$0, ext = '']: RegExpMatchArray) => {\n  const noext = qmarksTestNoExt([$0])\n  return !ext ? noext : (f: string) => noext(f) && f.endsWith(ext)\n}\nconst qmarksTestNoExt = ([$0]: RegExpMatchArray) => {\n  const len = $0.length\n  return (f: string) => f.length === len && !f.startsWith('.')\n}\nconst qmarksTestNoExtDot = ([$0]: RegExpMatchArray) => {\n  const len = $0.length\n  return (f: string) => f.length === len && f !== '.' && f !== '..'\n}\n\n/* c8 ignore start */\nconst defaultPlatform: Platform = (\n  typeof process === 'object' && process ?\n    (typeof process.env === 'object' &&\n      process.env &&\n      process.env.__MINIMATCH_TESTING_PLATFORM__) ||\n    process.platform\n  : 'posix') as Platform\n\nexport type Sep = '\\\\' | '/'\n\nconst path: { [k: string]: { sep: Sep } } = {\n  win32: { sep: '\\\\' },\n  posix: { sep: '/' },\n}\n/* c8 ignore stop */\n\nexport const sep =\n  defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep\nminimatch.sep = sep\n\nexport const GLOBSTAR = Symbol('globstar **')\nminimatch.GLOBSTAR = GLOBSTAR\n\n// any single thing other than /\n// don't need to escape / when using new RegExp()\nconst qmark = '[^/]'\n\n// * => any number of characters\nconst star = qmark + '*?'\n\n// ** when dots are allowed.  
Anything goes, except .. and .\n// not (^ or / followed by one or two dots followed by $ or /),\n// followed by anything, any number of times.\nconst twoStarDot = '(?:(?!(?:\\\\/|^)(?:\\\\.{1,2})($|\\\\/)).)*?'\n\n// not a ^ or / followed by a dot,\n// followed by anything, any number of times.\nconst twoStarNoDot = '(?:(?!(?:\\\\/|^)\\\\.).)*?'\n\nexport const filter =\n  (pattern: string, options: MinimatchOptions = {}) =>\n  (p: string) =>\n    minimatch(p, pattern, options)\nminimatch.filter = filter\n\nconst ext = (a: MinimatchOptions, b: MinimatchOptions = {}) =>\n  Object.assign({}, a, b)\n\nexport const defaults = (def: MinimatchOptions): typeof minimatch => {\n  if (!def || typeof def !== 'object' || !Object.keys(def).length) {\n    return minimatch\n  }\n\n  const orig = minimatch\n\n  const m = (p: string, pattern: string, options: MinimatchOptions = {}) =>\n    orig(p, pattern, ext(def, options))\n\n  return Object.assign(m, {\n    Minimatch: class Minimatch extends orig.Minimatch {\n      constructor(pattern: string, options: MinimatchOptions = {}) {\n        super(pattern, ext(def, options))\n      }\n      static defaults(options: MinimatchOptions) {\n        return orig.defaults(ext(def, options)).Minimatch\n      }\n    },\n\n    AST: class AST extends orig.AST {\n      /* c8 ignore start */\n      constructor(\n        type: ExtglobType | null,\n        parent?: AST,\n        options: MinimatchOptions = {},\n      ) {\n        super(type, parent, ext(def, options))\n      }\n      /* c8 ignore stop */\n\n      static fromGlob(pattern: string, options: MinimatchOptions = {}) {\n        return orig.AST.fromGlob(pattern, ext(def, options))\n      }\n    },\n\n    unescape: (\n      s: string,\n      options: Pick<\n        MinimatchOptions,\n        'windowsPathsNoEscape' | 'magicalBraces'\n      > = {},\n    ) => orig.unescape(s, ext(def, options)),\n\n    escape: (\n      s: string,\n      options: Pick<\n        MinimatchOptions,\n        
'windowsPathsNoEscape' | 'magicalBraces'\n      > = {},\n    ) => orig.escape(s, ext(def, options)),\n\n    filter: (pattern: string, options: MinimatchOptions = {}) =>\n      orig.filter(pattern, ext(def, options)),\n\n    defaults: (options: MinimatchOptions) =>\n      orig.defaults(ext(def, options)),\n\n    makeRe: (pattern: string, options: MinimatchOptions = {}) =>\n      orig.makeRe(pattern, ext(def, options)),\n\n    braceExpand: (pattern: string, options: MinimatchOptions = {}) =>\n      orig.braceExpand(pattern, ext(def, options)),\n\n    match: (\n      list: string[],\n      pattern: string,\n      options: MinimatchOptions = {},\n    ) => orig.match(list, pattern, ext(def, options)),\n\n    sep: orig.sep,\n    GLOBSTAR: GLOBSTAR as typeof GLOBSTAR,\n  })\n}\nminimatch.defaults = defaults\n\n// Brace expansion:\n// a{b,c}d -> abd acd\n// a{b,}c -> abc ac\n// a{0..3}d -> a0d a1d a2d a3d\n// a{b,c{d,e}f}g -> abg acdfg acefg\n// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg\n//\n// Invalid sets are not expanded.\n// a{2..}b -> a{2..}b\n// a{b}c -> a{b}c\nexport const braceExpand = (\n  pattern: string,\n  options: MinimatchOptions = {},\n) => {\n  assertValidPattern(pattern)\n\n  // Thanks to Yeting Li  for\n  // improving this regexp to avoid a ReDOS vulnerability.\n  if (options.nobrace || !/\\{(?:(?!\\{).)*\\}/.test(pattern)) {\n    // shortcut. 
no need to expand.\n    return [pattern]\n  }\n\n  return expand(pattern, { max: options.braceExpandMax })\n}\nminimatch.braceExpand = braceExpand\n\n// parse a component of the expanded set.\n// At this point, no pattern may contain \"/\" in it\n// so we're going to return a 2d array, where each entry is the full\n// pattern, split on '/', and then turned into a regular expression.\n// A regexp is made at the end which joins each array with an\n// escaped /, and another full one which joins each regexp with |.\n//\n// Following the lead of Bash 4.1, note that \"**\" only has special meaning\n// when it is the *only* thing in a path portion.  Otherwise, any series\n// of * is equivalent to a single *.  Globstar behavior is enabled by\n// default, and can be disabled by setting options.noglobstar.\n\nexport const makeRe = (pattern: string, options: MinimatchOptions = {}) =>\n  new Minimatch(pattern, options).makeRe()\nminimatch.makeRe = makeRe\n\nexport const match = (\n  list: string[],\n  pattern: string,\n  options: MinimatchOptions = {},\n) => {\n  const mm = new Minimatch(pattern, options)\n  list = list.filter(f => mm.match(f))\n  if (mm.options.nonull && !list.length) {\n    list.push(pattern)\n  }\n  return list\n}\nminimatch.match = match\n\n// replace stuff like \\* with *\nconst globMagic = /[?*]|[+@!]\\(.*?\\)|\\[|\\]/\nconst regExpEscape = (s: string) =>\n  s.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g, '\\\\$&')\n\nexport type MMRegExp = RegExp & {\n  _src?: string\n  _glob?: string\n}\n\nexport type ParseReturnFiltered = string | MMRegExp | typeof GLOBSTAR\nexport type ParseReturn = ParseReturnFiltered | false\n\nexport class Minimatch {\n  options: MinimatchOptions\n  set: ParseReturnFiltered[][]\n  pattern: string\n\n  windowsPathsNoEscape: boolean\n  nonegate: boolean\n  negate: boolean\n  comment: boolean\n  empty: boolean\n  preserveMultipleSlashes: boolean\n  partial: boolean\n  globSet: string[]\n  globParts: string[][]\n  nocase: boolean\n\n  
isWindows: boolean\n  platform: Platform\n  windowsNoMagicRoot: boolean\n  maxGlobstarRecursion: number\n\n  regexp: false | null | MMRegExp\n  constructor(pattern: string, options: MinimatchOptions = {}) {\n    assertValidPattern(pattern)\n\n    options = options || {}\n    this.options = options\n    this.maxGlobstarRecursion = options.maxGlobstarRecursion ?? 200\n    this.pattern = pattern\n    this.platform = options.platform || defaultPlatform\n    this.isWindows = this.platform === 'win32'\n    // avoid the annoying deprecation flag lol\n    const awe = ('allowWindow' + 'sEscape') as keyof MinimatchOptions\n    this.windowsPathsNoEscape =\n      !!options.windowsPathsNoEscape || options[awe] === false\n    if (this.windowsPathsNoEscape) {\n      this.pattern = this.pattern.replace(/\\\\/g, '/')\n    }\n    this.preserveMultipleSlashes = !!options.preserveMultipleSlashes\n    this.regexp = null\n    this.negate = false\n    this.nonegate = !!options.nonegate\n    this.comment = false\n    this.empty = false\n    this.partial = !!options.partial\n    this.nocase = !!this.options.nocase\n    this.windowsNoMagicRoot =\n      options.windowsNoMagicRoot !== undefined ?\n        options.windowsNoMagicRoot\n      : !!(this.isWindows && this.nocase)\n\n    this.globSet = []\n    this.globParts = []\n    this.set = []\n\n    // make the set of regexps etc.\n    this.make()\n  }\n\n  hasMagic(): boolean {\n    if (this.options.magicalBraces && this.set.length > 1) {\n      return true\n    }\n    for (const pattern of this.set) {\n      for (const part of pattern) {\n        if (typeof part !== 'string') return true\n      }\n    }\n    return false\n  }\n\n  debug(..._: any[]) {}\n\n  make() {\n    const pattern = this.pattern\n    const options = this.options\n\n    // empty patterns and comments match nothing.\n    if (!options.nocomment && pattern.charAt(0) === '#') {\n      this.comment = true\n      return\n    }\n\n    if (!pattern) {\n      this.empty = true\n   
   return\n    }\n\n    // step 1: figure out negation, etc.\n    this.parseNegate()\n\n    // step 2: expand braces\n    this.globSet = [...new Set(this.braceExpand())]\n\n    if (options.debug) {\n      this.debug = (...args: any[]) => console.error(...args)\n    }\n\n    this.debug(this.pattern, this.globSet)\n\n    // step 3: now we have a set, so turn each one into a series of\n    // path-portion matching patterns.\n    // These will be regexps, except in the case of \"**\", which is\n    // set to the GLOBSTAR object for globstar behavior,\n    // and will not contain any / characters\n    //\n    // First, we preprocess to make the glob pattern sets a bit simpler\n    // and deduped.  There are some perf-killing patterns that can cause\n    // problems with a glob walk, but we can simplify them down a bit.\n    const rawGlobParts = this.globSet.map(s => this.slashSplit(s))\n    this.globParts = this.preprocess(rawGlobParts)\n    this.debug(this.pattern, this.globParts)\n\n    // glob --> regexps\n    let set = this.globParts.map((s, _, __) => {\n      if (this.isWindows && this.windowsNoMagicRoot) {\n        // check if it's a drive or unc path.\n        const isUNC =\n          s[0] === '' &&\n          s[1] === '' &&\n          (s[2] === '?' || !globMagic.test(s[2])) &&\n          !globMagic.test(s[3])\n        const isDrive = /^[a-z]:/i.test(s[0])\n        if (isUNC) {\n          return [\n            ...s.slice(0, 4),\n            ...s.slice(4).map(ss => this.parse(ss)),\n          ]\n        } else if (isDrive) {\n          return [s[0], ...s.slice(1).map(ss => this.parse(ss))]\n        }\n      }\n      return s.map(ss => this.parse(ss))\n    })\n\n    this.debug(this.pattern, set)\n\n    // filter out everything that didn't compile properly.\n    this.set = set.filter(\n      s => s.indexOf(false) === -1,\n    ) as ParseReturnFiltered[][]\n\n    // do not treat the ? 
in UNC paths as magic\n    if (this.isWindows) {\n      for (let i = 0; i < this.set.length; i++) {\n        const p = this.set[i]\n        if (\n          p[0] === '' &&\n          p[1] === '' &&\n          this.globParts[i][2] === '?' &&\n          typeof p[3] === 'string' &&\n          /^[a-z]:$/i.test(p[3])\n        ) {\n          p[2] = '?'\n        }\n      }\n    }\n\n    this.debug(this.pattern, this.set)\n  }\n\n  // various transforms to equivalent pattern sets that are\n  // faster to process in a filesystem walk.  The goal is to\n  // eliminate what we can, and push all ** patterns as far\n  // to the right as possible, even if it increases the number\n  // of patterns that we have to process.\n  preprocess(globParts: string[][]) {\n    // if we're not in globstar mode, then turn ** into *\n    if (this.options.noglobstar) {\n      for (let i = 0; i < globParts.length; i++) {\n        for (let j = 0; j < globParts[i].length; j++) {\n          if (globParts[i][j] === '**') {\n            globParts[i][j] = '*'\n          }\n        }\n      }\n    }\n\n    const { optimizationLevel = 1 } = this.options\n\n    if (optimizationLevel >= 2) {\n      // aggressive optimization for the purpose of fs walking\n      globParts = this.firstPhasePreProcess(globParts)\n      globParts = this.secondPhasePreProcess(globParts)\n    } else if (optimizationLevel >= 1) {\n      // just basic optimizations to remove some .. 
parts\n      globParts = this.levelOneOptimize(globParts)\n    } else {\n      // just collapse multiple ** portions into one\n      globParts = this.adjascentGlobstarOptimize(globParts)\n    }\n\n    return globParts\n  }\n\n  // just get rid of adjascent ** portions\n  adjascentGlobstarOptimize(globParts: string[][]) {\n    return globParts.map(parts => {\n      let gs: number = -1\n      while (-1 !== (gs = parts.indexOf('**', gs + 1))) {\n        let i = gs\n        while (parts[i + 1] === '**') {\n          i++\n        }\n        if (i !== gs) {\n          parts.splice(gs, i - gs)\n        }\n      }\n      return parts\n    })\n  }\n\n  // get rid of adjascent ** and resolve .. portions\n  levelOneOptimize(globParts: string[][]) {\n    return globParts.map(parts => {\n      parts = parts.reduce((set: string[], part) => {\n        const prev = set[set.length - 1]\n        if (part === '**' && prev === '**') {\n          return set\n        }\n        if (part === '..') {\n          if (prev && prev !== '..' && prev !== '.' && prev !== '**') {\n            set.pop()\n            return set\n          }\n        }\n        set.push(part)\n        return set\n      }, [])\n      return parts.length === 0 ? [''] : parts\n    })\n  }\n\n  levelTwoFileOptimize(parts: string | string[]) {\n    if (!Array.isArray(parts)) {\n      parts = this.slashSplit(parts)\n    }\n    let didSomething: boolean = false\n    do {\n      didSomething = false\n      // 
// -> 
/\n      if (!this.preserveMultipleSlashes) {\n        for (let i = 1; i < parts.length - 1; i++) {\n          const p = parts[i]\n          // don't squeeze out UNC patterns\n          if (i === 1 && p === '' && parts[0] === '') continue\n          if (p === '.' || p === '') {\n            didSomething = true\n            parts.splice(i, 1)\n            i--\n          }\n        }\n        if (\n          parts[0] === '.' &&\n          parts.length === 2 &&\n          (parts[1] === '.' || parts[1] === '')\n        ) {\n          didSomething = true\n          parts.pop()\n        }\n      }\n\n      // 
/

/../ ->

/\n      let dd: number = 0\n      while (-1 !== (dd = parts.indexOf('..', dd + 1))) {\n        const p = parts[dd - 1]\n        if (p && p !== '.' && p !== '..' && p !== '**') {\n          didSomething = true\n          parts.splice(dd - 1, 2)\n          dd -= 2\n        }\n      }\n    } while (didSomething)\n    return parts.length === 0 ? [''] : parts\n  }\n\n  // First phase: single-pattern processing\n  // 
 is 1 or more portions\n  //  is 1 or more portions\n  // 

is any portion other than ., .., '', or **\n // is . or ''\n //\n // **/.. is *brutal* for filesystem walking performance, because\n // it effectively resets the recursive walk each time it occurs,\n // and ** cannot be reduced out by a .. pattern part like a regexp\n // or most strings (other than .., ., and '') can be.\n //\n //

/**/../

/

/ -> {

/../

/

/,

/**/

/

/}\n //

// -> 
/\n  // 
/

/../ ->

/\n  // **/**/ -> **/\n  //\n  // **/*/ -> */**/ <== not valid because ** doesn't follow\n  // this WOULD be allowed if ** did follow symlinks, or * didn't\n  firstPhasePreProcess(globParts: string[][]) {\n    let didSomething = false\n    do {\n      didSomething = false\n      // 
/**/../

/

/ -> {

/../

/

/,

/**/

/

/}\n for (let parts of globParts) {\n let gs: number = -1\n while (-1 !== (gs = parts.indexOf('**', gs + 1))) {\n let gss: number = gs\n while (parts[gss + 1] === '**') {\n //

/**/**/ -> 
/**/\n            gss++\n          }\n          // eg, if gs is 2 and gss is 4, that means we have 3 **\n          // parts, and can remove 2 of them.\n          if (gss > gs) {\n            parts.splice(gs + 1, gss - gs)\n          }\n\n          let next = parts[gs + 1]\n          const p = parts[gs + 2]\n          const p2 = parts[gs + 3]\n          if (next !== '..') continue\n          if (\n            !p ||\n            p === '.' ||\n            p === '..' ||\n            !p2 ||\n            p2 === '.' ||\n            p2 === '..'\n          ) {\n            continue\n          }\n          didSomething = true\n          // edit parts in place, and push the new one\n          parts.splice(gs, 1)\n          const other = parts.slice(0)\n          other[gs] = '**'\n          globParts.push(other)\n          gs--\n        }\n\n        // 
// -> 
/\n        if (!this.preserveMultipleSlashes) {\n          for (let i = 1; i < parts.length - 1; i++) {\n            const p = parts[i]\n            // don't squeeze out UNC patterns\n            if (i === 1 && p === '' && parts[0] === '') continue\n            if (p === '.' || p === '') {\n              didSomething = true\n              parts.splice(i, 1)\n              i--\n            }\n          }\n          if (\n            parts[0] === '.' &&\n            parts.length === 2 &&\n            (parts[1] === '.' || parts[1] === '')\n          ) {\n            didSomething = true\n            parts.pop()\n          }\n        }\n\n        // 
/

/../ ->

/\n        let dd: number = 0\n        while (-1 !== (dd = parts.indexOf('..', dd + 1))) {\n          const p = parts[dd - 1]\n          if (p && p !== '.' && p !== '..' && p !== '**') {\n            didSomething = true\n            const needDot = dd === 1 && parts[dd + 1] === '**'\n            const splin = needDot ? ['.'] : []\n            parts.splice(dd - 1, 2, ...splin)\n            if (parts.length === 0) parts.push('')\n            dd -= 2\n          }\n        }\n      }\n    } while (didSomething)\n\n    return globParts\n  }\n\n  // second phase: multi-pattern dedupes\n  // {
/*/,
/

/} ->

/*/\n  // {
/,
/} -> 
/\n  // {
/**/,
/} -> 
/**/\n  //\n  // {
/**/,
/**/

/} ->

/**/\n  // ^-- not valid because ** doens't follow symlinks\n  secondPhasePreProcess(globParts: string[][]): string[][] {\n    for (let i = 0; i < globParts.length - 1; i++) {\n      for (let j = i + 1; j < globParts.length; j++) {\n        const matched = this.partsMatch(\n          globParts[i],\n          globParts[j],\n          !this.preserveMultipleSlashes,\n        )\n        if (matched) {\n          globParts[i] = []\n          globParts[j] = matched\n          break\n        }\n      }\n    }\n    return globParts.filter(gs => gs.length)\n  }\n\n  partsMatch(\n    a: string[],\n    b: string[],\n    emptyGSMatch: boolean = false,\n  ): false | string[] {\n    let ai = 0\n    let bi = 0\n    let result: string[] = []\n    let which: string = ''\n    while (ai < a.length && bi < b.length) {\n      if (a[ai] === b[bi]) {\n        result.push(which === 'b' ? b[bi] : a[ai])\n        ai++\n        bi++\n      } else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {\n        result.push(a[ai])\n        ai++\n      } else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {\n        result.push(b[bi])\n        bi++\n      } else if (\n        a[ai] === '*' &&\n        b[bi] &&\n        (this.options.dot || !b[bi].startsWith('.')) &&\n        b[bi] !== '**'\n      ) {\n        if (which === 'b') return false\n        which = 'a'\n        result.push(a[ai])\n        ai++\n        bi++\n      } else if (\n        b[bi] === '*' &&\n        a[ai] &&\n        (this.options.dot || !a[ai].startsWith('.')) &&\n        a[ai] !== '**'\n      ) {\n        if (which === 'a') return false\n        which = 'b'\n        result.push(b[bi])\n        ai++\n        bi++\n      } else {\n        return false\n      }\n    }\n    // if we fall out of the loop, it means they two are identical\n    // as long as their lengths match\n    return a.length === b.length && result\n  }\n\n  parseNegate() {\n    if (this.nonegate) return\n\n    const pattern = this.pattern\n 
   let negate = false\n    let negateOffset = 0\n\n    for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {\n      negate = !negate\n      negateOffset++\n    }\n\n    if (negateOffset) this.pattern = pattern.slice(negateOffset)\n    this.negate = negate\n  }\n\n  // set partial to true to test if, for example,\n  // \"/a/b\" matches the start of \"/*/b/*/d\"\n  // Partial means, if you run out of file before you run\n  // out of pattern, then that's fine, as long as all\n  // the parts match.\n  matchOne(\n    file: string[],\n    pattern: ParseReturn[],\n    partial: boolean = false,\n  ) {\n    let fileStartIndex = 0\n    let patternStartIndex = 0\n\n    // UNC paths like //?/X:/... can match X:/... and vice versa\n    // Drive letters in absolute drive or unc paths are always compared\n    // case-insensitively.\n    if (this.isWindows) {\n      const fileDrive =\n        typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0])\n      const fileUNC =\n        !fileDrive &&\n        file[0] === '' &&\n        file[1] === '' &&\n        file[2] === '?' &&\n        /^[a-z]:$/i.test(file[3])\n\n      const patternDrive =\n        typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0])\n      const patternUNC =\n        !patternDrive &&\n        pattern[0] === '' &&\n        pattern[1] === '' &&\n        pattern[2] === '?' &&\n        typeof pattern[3] === 'string' &&\n        /^[a-z]:$/i.test(pattern[3])\n\n      const fdi =\n        fileUNC ? 3\n        : fileDrive ? 0\n        : undefined\n      const pdi =\n        patternUNC ? 3\n        : patternDrive ? 
0\n        : undefined\n      if (typeof fdi === 'number' && typeof pdi === 'number') {\n        const [fd, pd]: [string, string] = [\n          file[fdi],\n          pattern[pdi] as string,\n        ]\n        // start matching at the drive letter index of each\n        if (fd.toLowerCase() === pd.toLowerCase()) {\n          pattern[pdi] = fd\n          patternStartIndex = pdi\n          fileStartIndex = fdi\n        }\n      }\n    }\n\n    // resolve and reduce . and .. portions in the file as well.\n    // don't need to do the second phase, because it's only one string[]\n    const { optimizationLevel = 1 } = this.options\n    if (optimizationLevel >= 2) {\n      file = this.levelTwoFileOptimize(file)\n    }\n\n    if (pattern.includes(GLOBSTAR)) {\n      return this.#matchGlobstar(\n        file,\n        pattern,\n        partial,\n        fileStartIndex,\n        patternStartIndex,\n      )\n    }\n\n    return this.#matchOne(\n      file,\n      pattern,\n      partial,\n      fileStartIndex,\n      patternStartIndex,\n    )\n  }\n\n  #matchGlobstar(\n    file: string[],\n    pattern: ParseReturn[],\n    partial: boolean,\n    fileIndex: number,\n    patternIndex: number,\n  ) {\n    // split the pattern into head, tail, and middle of ** delimited parts\n    const firstgs = pattern.indexOf(GLOBSTAR, patternIndex)\n    const lastgs = pattern.lastIndexOf(GLOBSTAR)\n\n    // split the pattern up into globstar-delimited sections\n    // the tail has to be at the end, and the others just have\n    // to be found in order from the head.\n    const [head, body, tail] = partial ? 
[\n      pattern.slice(patternIndex, firstgs),\n      pattern.slice(firstgs + 1),\n      [],\n    ] : [\n      pattern.slice(patternIndex, firstgs),\n      pattern.slice(firstgs + 1, lastgs),\n      pattern.slice(lastgs + 1),\n    ]\n\n    // check the head, from the current file/pattern index.\n    if (head.length) {\n      const fileHead = file.slice(fileIndex, fileIndex + head.length)\n      if (!this.#matchOne(fileHead, head, partial, 0, 0)) {\n        return false\n      }\n      fileIndex += head.length\n      patternIndex += head.length\n    }\n    // now we know the head matches!\n\n    // if the last portion is not empty, it MUST match the end\n    // check the tail\n    let fileTailMatch: number = 0\n    if (tail.length) {\n      // if head + tail > file, then we cannot possibly match\n      if (tail.length + fileIndex > file.length) return false\n\n      // try to match the tail\n      let tailStart = file.length - tail.length\n      if (this.#matchOne(file, tail, partial, tailStart, 0)) {\n        fileTailMatch = tail.length\n      } else {\n        // affordance for stuff like a/**/* matching a/b/\n        // if the last file portion is '', and there's more to the pattern\n        // then try without the '' bit.\n        if (\n          file[file.length - 1] !== '' ||\n          fileIndex + tail.length === file.length\n        ) {\n          return false\n        }\n        tailStart--\n        if (!this.#matchOne(file, tail, partial, tailStart, 0)) {\n          return false\n        }\n        fileTailMatch = tail.length + 1\n      }\n    }\n\n    // now we know the tail matches!\n\n    // the middle is zero or more portions wrapped in **, possibly\n    // containing more ** sections.\n    // so a/**/b/**/c/**/d has become **/b/**/c/**\n    // if it's empty, it means a/**/b, just verify we have no bad dots\n    // if there's no tail, so it ends on /**, then we must have *something*\n    // after the head, or it's not a matc\n    if (!body.length) {\n  
    let sawSome = !!fileTailMatch\n      for (let i = fileIndex; i < file.length - fileTailMatch; i++) {\n        const f = String(file[i])\n        sawSome = true\n        if (\n          f === '.' ||\n          f === '..' ||\n          (!this.options.dot && f.startsWith('.'))\n        ) {\n          return false\n        }\n      }\n      // in partial mode, we just need to get past all file parts\n      return partial || sawSome\n    }\n\n    // now we know that there's one or more body sections, which can\n    // be matched anywhere from the 0 index (because the head was pruned)\n    // through to the length-fileTailMatch index.\n    // split the body up into sections, and note the minimum index it can\n    // be found at (start with the length of all previous segments)\n    // [section, before, after]\n    const bodySegments: [ParseReturn[], number][] = [[[], 0]]\n    let currentBody: [ParseReturn[], number] = bodySegments[0]\n    let nonGsParts = 0\n    const nonGsPartsSums: number[] = [0]\n    for (const b of body) {\n      if (b === GLOBSTAR) {\n        nonGsPartsSums.push(nonGsParts)\n        currentBody = [[], 0]\n        bodySegments.push(currentBody)\n      } else {\n        currentBody[0].push(b)\n        nonGsParts++\n      }\n    }\n    let i = bodySegments.length - 1\n    const fileLength = file.length - fileTailMatch\n    for (const b of bodySegments) {\n      b[1] = fileLength - ((nonGsPartsSums[i--] as number) + b[0].length)\n    }\n\n    return !!this.#matchGlobStarBodySections(\n      file,\n      bodySegments,\n      fileIndex,\n      0,\n      partial,\n      0,\n      !!fileTailMatch,\n    )\n  }\n\n  // return false for \"nope, not matching\"\n  // return null for \"not matching, cannot keep trying\"\n  #matchGlobStarBodySections(\n    file: string[],\n    // pattern section, last possible position for it\n    bodySegments: [ParseReturn[], number][],\n    fileIndex: number,\n    bodyIndex: number,\n    partial: boolean,\n    globStarDepth: 
number,\n    sawTail: boolean,\n  ): boolean | null {\n    // take the first body segment, and walk from fileIndex to its \"after\"\n    // value at the end\n    // If it doesn't match at that position, we increment, until we hit\n    // that final possible position, and give up.\n    // If it does match, then advance and try to rest.\n    // If any of them fail we keep walking forward.\n    // this is still a bit recursively painful, but it's more constrained\n    // than previous implementations, because we never test something that\n    // can't possibly be a valid matching condition.\n    const bs = bodySegments[bodyIndex]\n    if (!bs) {\n      // just make sure that there's no bad dots\n      for (let i = fileIndex; i < file.length; i++) {\n        sawTail = true\n        const f = file[i]\n        if (\n          f === '.' ||\n          f === '..' ||\n          (!this.options.dot && f.startsWith('.'))\n        ) {\n          return false\n        }\n      }\n      return sawTail\n    }\n\n    // have a non-globstar body section to test\n    const [body, after] = bs\n    while (fileIndex <= after) {\n      const m = this.#matchOne(\n        file.slice(0, fileIndex + body.length),\n        body,\n        partial,\n        fileIndex,\n        0,\n      )\n      // if limit exceeded, no match. intentional false negative,\n      // acceptable break in correctness for security.\n      if (m && globStarDepth < this.maxGlobstarRecursion) {\n        // match! see if the rest match. if so, we're done!\n        const sub = this.#matchGlobStarBodySections(\n          file,\n          bodySegments,\n          fileIndex + body.length,\n          bodyIndex + 1,\n          partial,\n          globStarDepth + 1,\n          sawTail,\n        )\n        if (sub !== false) {\n          return sub\n        }\n      }\n      const f = file[fileIndex]\n      if (\n        f === '.' ||\n        f === '..' 
||\n        (!this.options.dot && f.startsWith('.'))\n      ) {\n        return false\n      }\n\n      fileIndex++\n    }\n    // walked off. no point continuing\n    return partial || null\n  }\n\n  #matchOne(\n    file: string[],\n    pattern: ParseReturn[],\n    partial: boolean,\n    fileIndex: number,\n    patternIndex: number,\n  ) {\n    let fi: number\n    let pi: number\n    let pl: number\n    let fl: number\n    for (\n      fi = fileIndex,\n        pi = patternIndex,\n        fl = file.length,\n        pl = pattern.length;\n      fi < fl && pi < pl;\n      fi++, pi++\n    ) {\n      this.debug('matchOne loop')\n      let p = pattern[pi]\n      let f = file[fi]\n\n      this.debug(pattern, p, f)\n\n      // should be impossible.\n      // some invalid regexp stuff in the set.\n      /* c8 ignore start */\n      if (p === false || p === GLOBSTAR) {\n        return false\n      }\n      /* c8 ignore stop */\n\n      // something other than **\n      // non-magic patterns just have to match exactly\n      // patterns with magic have been turned into regexps.\n      let hit: boolean\n      if (typeof p === 'string') {\n        hit = f === p\n        this.debug('string match', p, f, hit)\n      } else {\n        hit = p.test(f)\n        this.debug('pattern match', p, f, hit)\n      }\n\n      if (!hit) return false\n    }\n\n    // Note: ending in / means that we'll get a final \"\"\n    // at the end of the pattern.  This can only match a\n    // corresponding \"\" at the end of the file.\n    // If the file ends in /, then it can only match a\n    // a pattern that ends in /, unless the pattern just\n    // doesn't have any more for it. But, a/b/ should *not*\n    // match \"a/b/*\", even though \"\" matches against the\n    // [^/]*? 
pattern, except in partial mode, where it might\n    // simply not be reached yet.\n    // However, a/b/ should still satisfy a/*\n\n    // now either we fell off the end of the pattern, or we're done.\n    if (fi === fl && pi === pl) {\n      // ran out of pattern and filename at the same time.\n      // an exact hit!\n      return true\n    } else if (fi === fl) {\n      // ran out of file, but still had pattern left.\n      // this is ok if we're doing the match as part of\n      // a glob fs traversal.\n      return partial\n    } else if (pi === pl) {\n      // ran out of pattern, still have file left.\n      // this is only acceptable if we're on the very last\n      // empty segment of a file with a trailing slash.\n      // a/* should match a/b/\n      return fi === fl - 1 && file[fi] === ''\n\n      /* c8 ignore start */\n    } else {\n      // should be unreachable.\n      throw new Error('wtf?')\n    }\n    /* c8 ignore stop */\n  }\n\n  braceExpand() {\n    return braceExpand(this.pattern, this.options)\n  }\n\n  parse(pattern: string): ParseReturn {\n    assertValidPattern(pattern)\n\n    const options = this.options\n\n    // shortcuts\n    if (pattern === '**') return GLOBSTAR\n    if (pattern === '') return ''\n\n    // far and away, the most common glob pattern parts are\n    // *, *.*, and *.  Add a fast check method for those.\n    let m: RegExpMatchArray | null\n    let fastTest: null | ((f: string) => boolean) = null\n    if ((m = pattern.match(starRE))) {\n      fastTest = options.dot ? starTestDot : starTest\n    } else if ((m = pattern.match(starDotExtRE))) {\n      fastTest = (\n        options.nocase ?\n          options.dot ?\n            starDotExtTestNocaseDot\n          : starDotExtTestNocase\n        : options.dot ? 
starDotExtTestDot\n        : starDotExtTest)(m[1])\n    } else if ((m = pattern.match(qmarksRE))) {\n      fastTest = (\n        options.nocase ?\n          options.dot ?\n            qmarksTestNocaseDot\n          : qmarksTestNocase\n        : options.dot ? qmarksTestDot\n        : qmarksTest)(m)\n    } else if ((m = pattern.match(starDotStarRE))) {\n      fastTest = options.dot ? starDotStarTestDot : starDotStarTest\n    } else if ((m = pattern.match(dotStarRE))) {\n      fastTest = dotStarTest\n    }\n\n    const re = AST.fromGlob(pattern, this.options).toMMPattern()\n    if (fastTest && typeof re === 'object') {\n      // Avoids overriding in frozen environments\n      Reflect.defineProperty(re, 'test', { value: fastTest })\n    }\n    return re\n  }\n\n  makeRe() {\n    if (this.regexp || this.regexp === false) return this.regexp\n\n    // at this point, this.set is a 2d array of partial\n    // pattern strings, or \"**\".\n    //\n    // It's better to use .match().  This function shouldn't\n    // be used, really, but it's pretty convenient sometimes,\n    // when you just want to work with a regex.\n    const set = this.set\n\n    if (!set.length) {\n      this.regexp = false\n      return this.regexp\n    }\n    const options = this.options\n\n    const twoStar =\n      options.noglobstar ? star\n      : options.dot ? twoStarDot\n      : twoStarNoDot\n    const flags = new Set(options.nocase ? ['i'] : [])\n\n    // regexpify non-globstar patterns\n    // if ** is only item, then we just do one twoStar\n    // if ** is first, and there are more, prepend (\\/|twoStar\\/)? 
to next\n    // if ** is last, append (\\/twoStar|) to previous\n    // if ** is in the middle, append (\\/|\\/twoStar\\/) to previous\n    // then filter out GLOBSTAR symbols\n    let re = set\n      .map(pattern => {\n        const pp: (string | typeof GLOBSTAR)[] = pattern.map(p => {\n          if (p instanceof RegExp) {\n            for (const f of p.flags.split('')) flags.add(f)\n          }\n          return (\n            typeof p === 'string' ? regExpEscape(p)\n            : p === GLOBSTAR ? GLOBSTAR\n            : p._src\n          )\n        }) as (string | typeof GLOBSTAR)[]\n        pp.forEach((p, i) => {\n          const next = pp[i + 1]\n          const prev = pp[i - 1]\n          if (p !== GLOBSTAR || prev === GLOBSTAR) {\n            return\n          }\n          if (prev === undefined) {\n            if (next !== undefined && next !== GLOBSTAR) {\n              pp[i + 1] = '(?:\\\\/|' + twoStar + '\\\\/)?' + next\n            } else {\n              pp[i] = twoStar\n            }\n          } else if (next === undefined) {\n            pp[i - 1] = prev + '(?:\\\\/|\\\\/' + twoStar + ')?'\n          } else if (next !== GLOBSTAR) {\n            pp[i - 1] = prev + '(?:\\\\/|\\\\/' + twoStar + '\\\\/)' + next\n            pp[i + 1] = GLOBSTAR\n          }\n        })\n        const filtered = pp.filter(p => p !== GLOBSTAR)\n\n        // For partial matches, we need to make the pattern match\n        // any prefix of the full path. 
We do this by generating\n        // alternative patterns that match progressively longer prefixes.\n        if (this.partial && filtered.length >= 1) {\n          const prefixes: string[] = []\n          for (let i = 1; i <= filtered.length; i++) {\n            prefixes.push(filtered.slice(0, i).join('/'))\n          }\n          return '(?:' + prefixes.join('|') + ')'\n        }\n\n        return filtered.join('/')\n      })\n      .join('|')\n\n    // need to wrap in parens if we had more than one thing with |,\n    // otherwise only the first will be anchored to ^ and the last to $\n    const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', '']\n    // must match entire pattern\n    // ending in a * or ** will make it less strict.\n    re = '^' + open + re + close + '$'\n\n    // In partial mode, '/' should always match as it's a valid prefix for any pattern\n    if (this.partial) {\n      re = '^(?:\\\\/|' + open + re.slice(1, -1) + close + ')$'\n    }\n\n    // can match anything, as long as it's not this.\n    if (this.negate) re = '^(?!' + re + ').+$'\n\n    try {\n      this.regexp = new RegExp(re, [...flags].join(''))\n      /* c8 ignore start */\n    } catch (ex) {\n      // should be impossible\n      this.regexp = false\n    }\n    /* c8 ignore stop */\n    return this.regexp\n  }\n\n  slashSplit(p: string) {\n    // if p starts with // on windows, we preserve that\n    // so that UNC paths aren't broken.  
Otherwise, any number of\n    // / characters are coalesced into one, unless\n    // preserveMultipleSlashes is set to true.\n    if (this.preserveMultipleSlashes) {\n      return p.split('/')\n    } else if (this.isWindows && /^\\/\\/[^\\/]+/.test(p)) {\n      // add an extra '' for the one we lose\n      return ['', ...p.split(/\\/+/)]\n    } else {\n      return p.split(/\\/+/)\n    }\n  }\n\n  match(f: string, partial = this.partial) {\n    this.debug('match', f, this.pattern)\n    // short-circuit in the case of busted things.\n    // comments, etc.\n    if (this.comment) {\n      return false\n    }\n    if (this.empty) {\n      return f === ''\n    }\n\n    if (f === '/' && partial) {\n      return true\n    }\n\n    const options = this.options\n\n    // windows: need to use /, not \\\n    if (this.isWindows) {\n      f = f.split('\\\\').join('/')\n    }\n\n    // treat the test path as a set of pathparts.\n    const ff = this.slashSplit(f)\n    this.debug(this.pattern, 'split', ff)\n\n    // just ONE of the pattern sets in this.set needs to match\n    // in order for it to be valid.  If negating, then just one\n    // match means that we have failed.\n    // Either way, return on the first hit.\n\n    const set = this.set\n    this.debug(this.pattern, 'set', set)\n\n    // Find the basename of the path by looking for the last non-empty segment\n    let filename: string = ff[ff.length - 1]\n    if (!filename) {\n      for (let i = ff.length - 2; !filename && i >= 0; i--) {\n        filename = ff[i]\n      }\n    }\n\n    for (let i = 0; i < set.length; i++) {\n      const pattern = set[i]\n      let file = ff\n      if (options.matchBase && pattern.length === 1) {\n        file = [filename]\n      }\n      const hit = this.matchOne(file, pattern, partial)\n      if (hit) {\n        if (options.flipNegate) {\n          return true\n        }\n        return !this.negate\n      }\n    }\n\n    // didn't get any hits.  
this is success if it's a negative\n    // pattern, failure otherwise.\n    if (options.flipNegate) {\n      return false\n    }\n    return this.negate\n  }\n\n  static defaults(def: MinimatchOptions) {\n    return minimatch.defaults(def).Minimatch\n  }\n}\n/* c8 ignore start */\nexport { AST } from './ast.js'\nexport { escape } from './escape.js'\nexport { unescape } from './unescape.js'\n/* c8 ignore stop */\nminimatch.AST = AST\nminimatch.Minimatch = Minimatch\nminimatch.escape = escape\nminimatch.unescape = unescape\n", "import { spawnSync, SpawnSyncReturns } from 'child_process';\n\nimport type { CdsDependencyGraph } from './cds/parser';\nimport { addJavaScriptExtractorDiagnostic } from './diagnostics';\nimport { applyPathsIgnoreToLgtmFilters, configureLgtmIndexFilters } from './environment';\nimport { createMarkerFile, removeMarkerFile } from './filesystem';\nimport {\n  cdsExtractorLog,\n  logExtractorStop,\n  logPerformanceTrackingStart,\n  logPerformanceTrackingStop,\n} from './logging';\n\n/**\n * Run the JavaScript extractor autobuild script\n * @param sourceRoot The source root directory\n * @param autobuildScriptPath Path to the autobuild script\n * @param codeqlExePath Path to the CodeQL executable (optional)\n * @returns Success status and any error message\n */\nexport function runJavaScriptExtractor(\n  sourceRoot: string,\n  autobuildScriptPath: string,\n  codeqlExePath?: string,\n): { success: boolean; error?: string } {\n  cdsExtractorLog(\n    'info',\n    `Extracting the .cds.json files by running the 'javascript' extractor autobuild script:\n        ${autobuildScriptPath}`,\n  );\n\n  /**\n   * Invoke the javascript autobuilder to index the .cds.json files only.\n   *\n   * Environment variables must be passed from this script's process to the\n   * process that invokes the autobuild script, otherwise the CDS autobuild.sh\n   * script will not be invoked by the autobuild script built into the\n   * 'javascript' extractor.\n   *\n   * 
IMPORTANT: The JavaScript extractor autobuild script must be invoked with\n   * the current working directory set to the project (source) root directory\n   * because it assumes it is running from there.\n   */\n  const result: SpawnSyncReturns = spawnSync(autobuildScriptPath, {\n    cwd: sourceRoot,\n    env: process.env,\n    shell: true,\n    stdio: 'inherit',\n  });\n\n  if (result.error) {\n    const errorMessage = `Error running JavaScript extractor: ${result.error.message}`;\n    if (codeqlExePath) {\n      addJavaScriptExtractorDiagnostic(sourceRoot, errorMessage, codeqlExePath, sourceRoot);\n    }\n    return {\n      success: false,\n      error: errorMessage,\n    };\n  }\n\n  if (result.status !== 0) {\n    const errorMessage = `JavaScript extractor failed with exit code ${String(result.status)}`;\n    if (codeqlExePath) {\n      addJavaScriptExtractorDiagnostic(sourceRoot, errorMessage, codeqlExePath, sourceRoot);\n    }\n    return {\n      success: false,\n      error: errorMessage,\n    };\n  }\n\n  return { success: true };\n}\n\n/**\n * Runs JavaScript extraction with marker file handling and optional dependency graph updates.\n * Encapsulates the common pattern used in multiple places throughout the extractor.\n *\n * @param sourceRoot - The root directory of the source code\n * @param autobuildScriptPath - Path to the autobuild script\n * @param codeqlExePath - Path to the CodeQL executable\n * @param dependencyGraph - Optional dependency graph to update with performance metrics\n * @returns True if extraction was successful, false otherwise\n */\nexport function runJavaScriptExtractionWithMarker(\n  sourceRoot: string,\n  autobuildScriptPath: string,\n  codeqlExePath: string,\n  dependencyGraph?: CdsDependencyGraph,\n): boolean {\n  // Configure LGTM index filters\n  configureLgtmIndexFilters();\n\n  // Apply paths-ignore patterns from CodeQL config to LGTM_INDEX_FILTERS\n  applyPathsIgnoreToLgtmFilters(sourceRoot);\n\n  // Create marker file\n 
 const markerFilePath = createMarkerFile(sourceRoot);\n\n  try {\n    logPerformanceTrackingStart('JavaScript Extraction');\n    const extractionStartTime = Date.now();\n    const extractorResult = runJavaScriptExtractor(sourceRoot, autobuildScriptPath, codeqlExePath);\n    const extractionEndTime = Date.now();\n    logPerformanceTrackingStop('JavaScript Extraction');\n\n    // Update dependency graph metrics if provided\n    if (dependencyGraph) {\n      dependencyGraph.statusSummary.performance.extractionDurationMs =\n        extractionEndTime - extractionStartTime;\n      dependencyGraph.statusSummary.performance.totalDurationMs =\n        dependencyGraph.statusSummary.performance.parsingDurationMs +\n        dependencyGraph.statusSummary.performance.compilationDurationMs +\n        dependencyGraph.statusSummary.performance.extractionDurationMs;\n    }\n\n    // Handle extraction failure\n    if (!extractorResult.success && extractorResult.error) {\n      cdsExtractorLog('error', `Error running JavaScript extractor: ${extractorResult.error}`);\n\n      if (codeqlExePath) {\n        let representativeFile = sourceRoot;\n        if (dependencyGraph && dependencyGraph.projects.size > 0) {\n          const firstProject = Array.from(dependencyGraph.projects.values())[0];\n          representativeFile = firstProject.cdsFiles[0] ?? 
sourceRoot;\n        }\n        addJavaScriptExtractorDiagnostic(\n          representativeFile,\n          extractorResult.error,\n          codeqlExePath,\n          sourceRoot,\n        );\n      }\n      return false;\n    }\n\n    return true;\n  } finally {\n    // Always clean up marker file\n    removeMarkerFile(markerFilePath);\n  }\n}\n\n/**\n * Handles early exit scenarios by running JavaScript extraction and exiting gracefully.\n * This function never returns - it always exits the process with code 0.\n *\n * @param sourceRoot - The root directory of the source code\n * @param autobuildScriptPath - Path to the autobuild script\n * @param codeqlExePath - Path to the CodeQL executable\n * @param skipMessage - Message to log when exiting early\n */\nexport function handleEarlyExit(\n  sourceRoot: string,\n  autobuildScriptPath: string,\n  codeqlExePath: string,\n  skipMessage: string,\n): never {\n  const success = runJavaScriptExtractionWithMarker(sourceRoot, autobuildScriptPath, codeqlExePath);\n  logExtractorStop(success, success ? 
skipMessage : 'JavaScript extractor failed');\n  console.log(`Completed run of the cds-extractor.js script for the CDS extractor.`);\n  process.exit(0);\n}\n", "import { execFileSync } from 'child_process';\nimport { existsSync } from 'fs';\nimport { arch, platform } from 'os';\nimport { join, resolve } from 'path';\n\nimport { cdsExtractorMarkerFileName } from './constants';\nimport { dirExists } from './filesystem';\nimport { cdsExtractorLog } from './logging';\nimport { getPathsIgnorePatterns } from './paths-ignore';\n\n/**\n * Interface for platform information\n */\nexport interface PlatformInfo {\n  platform: string;\n  arch: string;\n  isWindows: boolean;\n  exeExtension: string;\n}\n\n/**\n * Interface for environment validation results\n */\nexport interface EnvironmentSetupResult {\n  success: boolean;\n  errorMessages: string[];\n  codeqlExePath: string;\n  jsExtractorRoot: string;\n  autobuildScriptPath: string;\n  platformInfo: PlatformInfo;\n}\n\n/**\n * Get platform information\n * @returns Platform information including OS platform, architecture, and whether it's Windows\n */\nexport function getPlatformInfo(): PlatformInfo {\n  const osPlatform: string = platform();\n  const osPlatformArch: string = arch();\n  const isWindows = osPlatform === 'win32';\n  const exeExtension = isWindows ? '.exe' : '';\n\n  return {\n    platform: osPlatform,\n    arch: osPlatformArch,\n    isWindows,\n    exeExtension,\n  };\n}\n\n/**\n * Get the path to the CodeQL executable.\n * Prioritizes CODEQL_DIST if set and valid. Otherwise, tries to find CodeQL via system PATH.\n * @returns The resolved path to the CodeQL executable, or an empty string if not found.\n */\nexport function getCodeQLExePath(): string {\n  const platformInfo = getPlatformInfo();\n  const codeqlExeName: string = platformInfo.isWindows ? 
'codeql.exe' : 'codeql';\n\n  // First, check if CODEQL_DIST is set and valid\n  const codeqlDist = process.env.CODEQL_DIST;\n  if (codeqlDist) {\n    const codeqlPathFromDist = resolve(join(codeqlDist, codeqlExeName));\n    if (existsSync(codeqlPathFromDist)) {\n      cdsExtractorLog('info', `Using CodeQL executable from CODEQL_DIST: ${codeqlPathFromDist}`);\n      return codeqlPathFromDist;\n    } else {\n      cdsExtractorLog(\n        'error',\n        `CODEQL_DIST is set to '${codeqlDist}', but CodeQL executable was not found at '${codeqlPathFromDist}'. Please ensure this path is correct. Falling back to PATH-based discovery.`,\n      );\n      // Fall through to PATH-based discovery\n    }\n  }\n\n  // CODEQL_DIST is not set or was invalid, attempt to find CodeQL via system PATH using 'codeql version --format=json'\n  cdsExtractorLog(\n    'info',\n    'CODEQL_DIST environment variable not set or invalid. Attempting to find CodeQL executable via system PATH using \"codeql version --format=json\".',\n  );\n  try {\n    const versionOutput = execFileSync(codeqlExeName, ['version', '--format=json'], {\n      encoding: 'utf8',\n      timeout: 5000, // 5 seconds timeout\n      stdio: 'pipe', // Suppress output to console\n    });\n\n    interface CodeQLVersionInfo {\n      unpackedLocation?: string;\n      cliVersion?: string; // For potential future use or richer logging\n    }\n\n    try {\n      const versionInfo = JSON.parse(versionOutput) as CodeQLVersionInfo;\n\n      if (\n        versionInfo &&\n        typeof versionInfo.unpackedLocation === 'string' &&\n        versionInfo.unpackedLocation\n      ) {\n        const resolvedPathFromVersion = resolve(join(versionInfo.unpackedLocation, codeqlExeName));\n        if (existsSync(resolvedPathFromVersion)) {\n          cdsExtractorLog(\n            'info',\n            `CodeQL executable found via 'codeql version --format=json' at: ${resolvedPathFromVersion}`,\n          );\n          return 
resolvedPathFromVersion;\n        }\n        cdsExtractorLog(\n          'warn',\n          `'codeql version --format=json' provided unpackedLocation '${versionInfo.unpackedLocation}', but executable not found at '${resolvedPathFromVersion}'.`,\n        );\n      } else {\n        cdsExtractorLog(\n          'warn',\n          \"Could not determine CodeQL executable path from 'codeql version --format=json' output. 'unpackedLocation' field missing, empty, or invalid.\",\n        );\n      }\n    } catch (parseError) {\n      cdsExtractorLog(\n        'warn',\n        `Failed to parse 'codeql version --format=json' output: ${String(parseError)}. Output was: ${versionOutput}`,\n      );\n    }\n  } catch (error) {\n    let errorMessage = `INFO: Failed to find CodeQL executable via 'codeql version --format=json'. Error: ${String(error)}`;\n    if (error && typeof error === 'object' && 'code' in error && error.code === 'ENOENT') {\n      errorMessage += `\\nINFO: The command '${codeqlExeName}' was not found in your system PATH.`;\n    }\n    cdsExtractorLog('info', errorMessage);\n  }\n\n  cdsExtractorLog(\n    'error',\n    'Failed to determine CodeQL executable path. Please ensure the CODEQL_DIST environment variable is set and points to a valid CodeQL distribution, or that the CodeQL CLI (codeql) is available in your system PATH and \"codeql version --format=json\" can provide its location.',\n  );\n  return ''; // Return empty string if all attempts fail\n}\n\n/**\n * Get the JavaScript extractor root path.\n * @param codeqlExePath The path to the CodeQL executable. If empty, resolution will be skipped.\n * @returns The JavaScript extractor root path, or an empty string if not found or if codeqlExePath is empty.\n */\nexport function getJavaScriptExtractorRoot(codeqlExePath: string): string {\n  let jsExtractorRoot = process.env.CODEQL_EXTRACTOR_JAVASCRIPT_ROOT ?? 
'';\n\n  if (jsExtractorRoot) {\n    cdsExtractorLog(\n      'info',\n      `Using JavaScript extractor root from environment variable CODEQL_EXTRACTOR_JAVASCRIPT_ROOT: ${jsExtractorRoot}`,\n    );\n    return jsExtractorRoot;\n  }\n\n  if (!codeqlExePath) {\n    cdsExtractorLog(\n      'warn',\n      'Cannot resolve JavaScript extractor root because the CodeQL executable path was not provided or found.',\n    );\n    return '';\n  }\n\n  try {\n    jsExtractorRoot = execFileSync(\n      codeqlExePath,\n      ['resolve', 'extractor', '--language=javascript'],\n      { stdio: 'pipe' }, // Suppress output from the command itself\n    )\n      .toString()\n      .trim();\n    if (jsExtractorRoot) {\n      cdsExtractorLog('info', `JavaScript extractor root resolved to: ${jsExtractorRoot}`);\n    } else {\n      cdsExtractorLog(\n        'warn',\n        `'codeql resolve extractor --language=javascript' using '${codeqlExePath}' returned an empty path.`,\n      );\n    }\n  } catch (error) {\n    cdsExtractorLog(\n      'error',\n      `Error resolving JavaScript extractor root using '${codeqlExePath}': ${String(error)}`,\n    );\n    jsExtractorRoot = ''; // Ensure it's empty on error\n  }\n  return jsExtractorRoot;\n}\n\n/**\n * Set JavaScript extractor environment variables using CDS extractor variables\n */\nexport function setupJavaScriptExtractorEnv(): void {\n  process.env.CODEQL_EXTRACTOR_JAVASCRIPT_WIP_DATABASE =\n    process.env.CODEQL_EXTRACTOR_CDS_WIP_DATABASE;\n  process.env.CODEQL_EXTRACTOR_JAVASCRIPT_DIAGNOSTIC_DIR =\n    process.env.CODEQL_EXTRACTOR_CDS_DIAGNOSTIC_DIR;\n  process.env.CODEQL_EXTRACTOR_JAVASCRIPT_LOG_DIR = process.env.CODEQL_EXTRACTOR_CDS_LOG_DIR;\n  process.env.CODEQL_EXTRACTOR_JAVASCRIPT_SCRATCH_DIR =\n    process.env.CODEQL_EXTRACTOR_CDS_SCRATCH_DIR;\n  process.env.CODEQL_EXTRACTOR_JAVASCRIPT_TRAP_DIR = process.env.CODEQL_EXTRACTOR_CDS_TRAP_DIR;\n  process.env.CODEQL_EXTRACTOR_JAVASCRIPT_SOURCE_ARCHIVE_DIR =\n    
process.env.CODEQL_EXTRACTOR_CDS_SOURCE_ARCHIVE_DIR;\n}\n\n/**\n * Get the path to the autobuild script\n * @param jsExtractorRoot The JavaScript extractor root path\n * @returns The path to the autobuild script, or an empty string if jsExtractorRoot is empty.\n */\nexport function getAutobuildScriptPath(jsExtractorRoot: string): string {\n  if (!jsExtractorRoot) return '';\n  const platformInfo = getPlatformInfo();\n  const autobuildScriptName: string = platformInfo.isWindows ? 'autobuild.cmd' : 'autobuild.sh';\n  return resolve(join(jsExtractorRoot, 'tools', autobuildScriptName));\n}\n\n/**\n * Configure LGTM index filters for CDS files\n */\nexport function configureLgtmIndexFilters(): void {\n  let excludeFilters = '';\n\n  if (process.env.LGTM_INDEX_FILTERS) {\n    cdsExtractorLog(\n      'info',\n      `Found $LGTM_INDEX_FILTERS already set to:\n${process.env.LGTM_INDEX_FILTERS}`,\n    );\n    const allowedExcludePatterns = [join('exclude:**', '*'), join('exclude:**', '*.*')];\n\n    excludeFilters =\n      '\\n' +\n      process.env.LGTM_INDEX_FILTERS.split('\\n')\n        .filter(\n          line =>\n            line.startsWith('exclude') &&\n            !allowedExcludePatterns.some(pattern => line.includes(pattern)),\n        )\n        .join('\\n');\n  }\n\n  // Enable extraction of the .cds.json files only.\n  //\n  // The cdsExtractorMarkerFileName file is auto-created by the CDS extractor in order\n  // to force the underlying JS extractor to see at least one .js file, which became a\n  // requirement starting with v2.23.5 of the CodeQL CLI.\n  const lgtmIndexFiltersPatterns = [\n    join('exclude:**', '*.*'),\n    join('include:**', '*.cds.json'),\n    join('include:**', '*.cds'),\n    join('include:**', cdsExtractorMarkerFileName),\n    join('exclude:**', 'node_modules', '**', '*.*'),\n  ].join('\\n');\n\n  process.env.LGTM_INDEX_FILTERS = lgtmIndexFiltersPatterns + excludeFilters;\n  process.env.LGTM_INDEX_TYPESCRIPT = 'NONE';\n  // Configure to 
copy over the .cds files as well, by pretending they are JSON.\n  process.env.LGTM_INDEX_FILETYPES = '.cds:JSON';\n}\n\n/**\n * Applies paths-ignore patterns from the CodeQL configuration to the\n * LGTM_INDEX_FILTERS environment variable. This ensures the JavaScript\n * extractor also respects the user's paths-ignore configuration for\n * compiled .cds.json output files.\n *\n * @param sourceRoot - The source root directory used to locate the config file\n */\nexport function applyPathsIgnoreToLgtmFilters(sourceRoot: string): void {\n  const patterns = getPathsIgnorePatterns(sourceRoot);\n  if (patterns.length === 0) {\n    return;\n  }\n\n  const excludeLines = patterns.map(p => `exclude:${p}`).join('\\n');\n  const current = process.env.LGTM_INDEX_FILTERS ?? '';\n  process.env.LGTM_INDEX_FILTERS = current + '\\n' + excludeLines;\n\n  cdsExtractorLog(\n    'info',\n    `Applied ${patterns.length} paths-ignore pattern(s) to LGTM_INDEX_FILTERS`,\n  );\n}\n\n/**\n * Sets up the environment and validates key components for running the CDS extractor.\n * This includes checking for the CodeQL executable, validating the source root directory,\n * and setting up environment variables for the JavaScript extractor.\n *\n * @param sourceRoot The source root directory.\n *\n * @returns The {@link EnvironmentSetupResult} containing success status, error messages,\n *          CodeQL executable path, JavaScript extractor root, autobuild script path,\n *          and platform information.\n *\n * @throws Will throw an error if the environment setup fails.\n */\nexport function setupAndValidateEnvironment(sourceRoot: string): EnvironmentSetupResult {\n  const errorMessages: string[] = [];\n  const platformInfo = getPlatformInfo();\n\n  // Get the CodeQL executable path\n  const codeqlExePath = getCodeQLExePath();\n  if (!codeqlExePath) {\n    errorMessages.push(\n      'Failed to find CodeQL executable. 
Ensure CODEQL_DIST is set and valid, or CodeQL CLI is in PATH.',\n    );\n  }\n\n  // Validate that the required source root directory exists\n  if (!dirExists(sourceRoot)) {\n    errorMessages.push(`Project root directory '${sourceRoot}' does not exist.`);\n  }\n\n  // Get JavaScript extractor root\n  const jsExtractorRoot = getJavaScriptExtractorRoot(codeqlExePath);\n  if (!jsExtractorRoot) {\n    if (codeqlExePath) {\n      // Only add this error if codeqlExePath was found but JS extractor root wasn't\n      errorMessages.push(\n        'Failed to determine JavaScript extractor root using the found CodeQL executable.',\n      );\n    } else {\n      // If codeqlExePath is empty, the error from getCodeQLExePath is usually sufficient.\n      // However, we can add a more specific one if needed.\n      errorMessages.push(\n        'Cannot determine JavaScript extractor root because CodeQL executable was not found.',\n      );\n    }\n  }\n\n  // Set environment variables for JavaScript extractor only if jsExtractorRoot is valid\n  if (jsExtractorRoot) {\n    process.env.CODEQL_EXTRACTOR_JAVASCRIPT_ROOT = jsExtractorRoot;\n    setupJavaScriptExtractorEnv();\n  }\n\n  // Get autobuild script path\n  const autobuildScriptPath = jsExtractorRoot ? getAutobuildScriptPath(jsExtractorRoot) : '';\n  // Not having an autobuild script path might be an error depending on the run mode,\n  // but for now, the function just returns what it found.\n\n  return {\n    success: errorMessages.length === 0,\n    errorMessages,\n    codeqlExePath, // Will be '' if not found\n    jsExtractorRoot, // Will be '' if not found\n    autobuildScriptPath,\n    platformInfo,\n  };\n}\n", "import { resolve } from 'path';\n\nconst USAGE_MESSAGE = `\\tUsage: node