blank project

This commit is contained in:
root
2025-10-17 20:17:33 +00:00
commit 14b2d53e8e
9366 changed files with 1515019 additions and 0 deletions


@@ -0,0 +1,20 @@
export declare function isFrontmatterValid(frontmatter: Record<string, any>): boolean;
export declare function extractFrontmatter(code: string): string | undefined;
export interface ParseFrontmatterOptions {
/**
* How the frontmatter should be handled in the returned `content` string.
* - `preserve`: Keep the frontmatter.
* - `remove`: Remove the frontmatter.
* - `empty-with-spaces`: Replace the frontmatter with spaces. (preserves sourcemap line/col/offset)
* - `empty-with-lines`: Replace the frontmatter with blank lines. (preserves sourcemap line/col)
*
* @default 'remove'
*/
frontmatter: 'preserve' | 'remove' | 'empty-with-spaces' | 'empty-with-lines';
}
export interface ParseFrontmatterResult {
frontmatter: Record<string, any>;
rawFrontmatter: string;
content: string;
}
export declare function parseFrontmatter(code: string, options?: ParseFrontmatterOptions): ParseFrontmatterResult;
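
A minimal usage sketch of parseFrontmatter; the import specifier "@astrojs/markdown-remark" assumes the package entry point re-exports this module, and the sample document is made up.

import { isFrontmatterValid, parseFrontmatter } from "@astrojs/markdown-remark";

const doc = `---
title: Hello
draft: false
---
# Heading`;

// The default behaviour removes the frontmatter block from `content`.
const { frontmatter, content } = parseFrontmatter(doc);
console.log(frontmatter);                     // { title: "Hello", draft: false }
console.log(isFrontmatterValid(frontmatter)); // true (plain, JSON-serializable object)
console.log(content.trim());                  // "# Heading"

// `empty-with-lines` keeps the original line count, which preserves sourcemap lines.
const blanked = parseFrontmatter(doc, { frontmatter: "empty-with-lines" });
console.log(blanked.content.split("\n").length === doc.split("\n").length); // true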


@@ -0,0 +1,58 @@
import yaml from "js-yaml";
import * as toml from "smol-toml";
function isFrontmatterValid(frontmatter) {
try {
JSON.stringify(frontmatter);
} catch {
return false;
}
return typeof frontmatter === "object" && frontmatter !== null;
}
const frontmatterRE = /(?:^\uFEFF?|^\s*\n)(?:---|\+\+\+)([\s\S]*?\n)(?:---|\+\+\+)/;
const frontmatterTypeRE = /(?:^\uFEFF?|^\s*\n)(---|\+\+\+)/;
function extractFrontmatter(code) {
return frontmatterRE.exec(code)?.[1];
}
function getFrontmatterParser(code) {
return frontmatterTypeRE.exec(code)?.[1] === "+++" ? ["+++", toml.parse] : ["---", yaml.load];
}
function parseFrontmatter(code, options) {
const rawFrontmatter = extractFrontmatter(code);
if (rawFrontmatter == null) {
return { frontmatter: {}, rawFrontmatter: "", content: code };
}
const [delims, parser] = getFrontmatterParser(code);
const parsed = parser(rawFrontmatter);
const frontmatter = parsed && typeof parsed === "object" ? parsed : {};
let content;
switch (options?.frontmatter ?? "remove") {
case "preserve":
content = code;
break;
case "remove":
content = code.replace(`${delims}${rawFrontmatter}${delims}`, "");
break;
case "empty-with-spaces":
content = code.replace(
`${delims}${rawFrontmatter}${delims}`,
` ${rawFrontmatter.replace(/[^\r\n]/g, " ")} `
);
break;
case "empty-with-lines":
content = code.replace(
`${delims}${rawFrontmatter}${delims}`,
rawFrontmatter.replace(/[^\r\n]/g, "")
);
break;
}
return {
frontmatter,
rawFrontmatter,
content
};
}
export {
extractFrontmatter,
isFrontmatterValid,
parseFrontmatter
};
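
The opening delimiter selects the parser, so `+++` blocks go through smol-toml rather than js-yaml. A small sketch under the same entry-point assumption; the sample TOML is made up.

import { extractFrontmatter, parseFrontmatter } from "@astrojs/markdown-remark";

const doc = `+++
title = "Hello"
weight = 3
+++
Body text`;

console.log(extractFrontmatter(doc));           // '\ntitle = "Hello"\nweight = 3\n'
console.log(parseFrontmatter(doc).frontmatter); // { title: "Hello", weight: 3 }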


@@ -0,0 +1,16 @@
import type { Root } from 'hast';
type Highlighter = (code: string, language: string, options?: {
meta?: string;
}) => Promise<Root | string>;
export declare const defaultExcludeLanguages: string[];
/**
* A hast utility to syntax highlight code blocks with a given syntax highlighter.
*
* @param tree
* The hast tree in which to syntax highlight code blocks.
* @param highlighter
* A function which receives the code, language, and optional meta string, and returns the
* syntax highlighted `<pre>` element as an HTML string or hast tree.
*/
export declare function highlightCodeBlocks(tree: Root, highlighter: Highlighter, excludeLanguages?: string[]): Promise<void>;
export {};
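
A sketch of the contract described above, using a deliberately trivial highlighter. The deep import path dist/highlight.js and the use of hast-util-to-html for printing are assumptions; highlightCodeBlocks is not re-exported from the package index.

import { fromHtml } from "hast-util-from-html";
import { toHtml } from "hast-util-to-html";
// Assumed deep import; adjust if the package exports this module elsewhere.
import { highlightCodeBlocks } from "@astrojs/markdown-remark/dist/highlight.js";

const tree = fromHtml(
  '<pre><code class="language-js">const x = 1;</code></pre>',
  { fragment: true }
);

// A trivial highlighter: it only demonstrates the contract of receiving
// (code, language, { meta }) and returning the HTML of a <pre> element.
await highlightCodeBlocks(tree, async (code, language) => {
  return `<pre data-language="${language}"><code>${code}</code></pre>`;
});

console.log(toHtml(tree)); // <pre data-language="js"><code>const x = 1;</code></pre>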


@@ -0,0 +1,61 @@
import { fromHtml } from "hast-util-from-html";
import { toText } from "hast-util-to-text";
import { removePosition } from "unist-util-remove-position";
import { visitParents } from "unist-util-visit-parents";
const languagePattern = /\blanguage-(\S+)\b/;
const defaultExcludeLanguages = ["math"];
async function highlightCodeBlocks(tree, highlighter, excludeLanguages = []) {
const nodes = [];
visitParents(tree, { type: "element", tagName: "code" }, (node, ancestors) => {
const parent = ancestors.at(-1);
if (parent?.type !== "element" || parent.tagName !== "pre") {
return;
}
if (parent.children.length !== 1) {
return;
}
let languageMatch;
let { className } = node.properties;
if (typeof className === "string") {
languageMatch = languagePattern.exec(className);
} else if (Array.isArray(className)) {
for (const cls of className) {
if (typeof cls !== "string") {
continue;
}
languageMatch = languagePattern.exec(cls);
if (languageMatch) {
break;
}
}
}
const language = languageMatch?.[1] || "plaintext";
if (excludeLanguages.includes(language) || defaultExcludeLanguages.includes(language)) {
return;
}
nodes.push({
node,
language,
parent,
grandParent: ancestors.at(-2)
});
});
for (const { node, language, grandParent, parent } of nodes) {
const meta = node.data?.meta ?? node.properties.metastring ?? void 0;
const code = toText(node, { whitespace: "pre" });
const result = await highlighter(code, language, { meta });
let replacement;
if (typeof result === "string") {
replacement = fromHtml(result, { fragment: true }).children[0];
removePosition(replacement);
} else {
replacement = result.children[0];
}
const index = grandParent.children.indexOf(parent);
grandParent.children[index] = replacement;
}
}
export {
defaultExcludeLanguages,
highlightCodeBlocks
};


@@ -0,0 +1,2 @@
import type * as unified from 'unified';
export declare function importPlugin(p: string): Promise<unified.Plugin>;


@@ -0,0 +1,7 @@
async function importPlugin(p) {
const importResult = await import(p);
return importResult.default;
}
export {
importPlugin
};


@@ -0,0 +1,2 @@
import type * as unified from 'unified';
export declare function importPlugin(p: string): Promise<unified.Plugin>;


@@ -0,0 +1,24 @@
import path from "node:path";
import { pathToFileURL } from "node:url";
import { resolve as importMetaResolve } from "import-meta-resolve";
let cwdUrlStr;
async function importPlugin(p) {
try {
const importResult2 = await import(
/* @vite-ignore */
p
);
return importResult2.default;
} catch {
}
cwdUrlStr ??= pathToFileURL(path.join(process.cwd(), "package.json")).toString();
const resolved = importMetaResolve(p, cwdUrlStr);
const importResult = await import(
/* @vite-ignore */
resolved
);
return importResult.default;
}
export {
importPlugin
};

node_modules/@astrojs/markdown-remark/dist/index.d.ts

@@ -0,0 +1,14 @@
import type { AstroMarkdownOptions, AstroMarkdownProcessorOptions, MarkdownProcessor, SyntaxHighlightConfig } from './types.js';
export { extractFrontmatter, isFrontmatterValid, type ParseFrontmatterOptions, type ParseFrontmatterResult, parseFrontmatter, } from './frontmatter.js';
export { rehypeHeadingIds } from './rehype-collect-headings.js';
export { rehypePrism } from './rehype-prism.js';
export { rehypeShiki } from './rehype-shiki.js';
export { remarkCollectImages } from './remark-collect-images.js';
export { type CreateShikiHighlighterOptions, createShikiHighlighter, type ShikiHighlighter, type ShikiHighlighterHighlightOptions, } from './shiki.js';
export * from './types.js';
export declare const syntaxHighlightDefaults: Required<SyntaxHighlightConfig>;
export declare const markdownConfigDefaults: Required<AstroMarkdownOptions>;
/**
* Create a markdown preprocessor to render multiple markdown files
*/
export declare function createMarkdownProcessor(opts?: AstroMarkdownProcessorOptions): Promise<MarkdownProcessor>;
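
A minimal rendering sketch of the processor; the top-level import path is assumed to resolve to this dist build, and the output shown in the comments is approximate.

import { createMarkdownProcessor } from "@astrojs/markdown-remark";

const processor = await createMarkdownProcessor({
  syntaxHighlight: "shiki",
  shikiConfig: { theme: "github-dark", wrap: true },
  gfm: true,
  smartypants: true,
});

const result = await processor.render("# Hello *world*\n\nSome `inline` code.");
console.log(result.code);                 // rendered HTML string
console.log(result.metadata.headings);    // [{ depth: 1, slug: "hello-world", text: "Hello world" }]
console.log(result.metadata.frontmatter); // {} unless injected via the render options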

node_modules/@astrojs/markdown-remark/dist/index.js

@@ -0,0 +1,157 @@
import rehypeRaw from "rehype-raw";
import rehypeStringify from "rehype-stringify";
import remarkGfm from "remark-gfm";
import remarkParse from "remark-parse";
import remarkRehype from "remark-rehype";
import remarkSmartypants from "remark-smartypants";
import { unified } from "unified";
import { VFile } from "vfile";
import { defaultExcludeLanguages } from "./highlight.js";
import { loadPlugins } from "./load-plugins.js";
import { rehypeHeadingIds } from "./rehype-collect-headings.js";
import { rehypeImages } from "./rehype-images.js";
import { rehypePrism } from "./rehype-prism.js";
import { rehypeShiki } from "./rehype-shiki.js";
import { remarkCollectImages } from "./remark-collect-images.js";
import {
extractFrontmatter,
isFrontmatterValid,
parseFrontmatter
} from "./frontmatter.js";
import { rehypeHeadingIds as rehypeHeadingIds2 } from "./rehype-collect-headings.js";
import { rehypePrism as rehypePrism2 } from "./rehype-prism.js";
import { rehypeShiki as rehypeShiki2 } from "./rehype-shiki.js";
import { remarkCollectImages as remarkCollectImages2 } from "./remark-collect-images.js";
import {
createShikiHighlighter
} from "./shiki.js";
export * from "./types.js";
const syntaxHighlightDefaults = {
type: "shiki",
excludeLangs: defaultExcludeLanguages
};
const markdownConfigDefaults = {
syntaxHighlight: syntaxHighlightDefaults,
shikiConfig: {
langs: [],
theme: "github-dark",
themes: {},
wrap: false,
transformers: [],
langAlias: {}
},
remarkPlugins: [],
rehypePlugins: [],
remarkRehype: {},
gfm: true,
smartypants: true
};
const isPerformanceBenchmark = Boolean(process.env.ASTRO_PERFORMANCE_BENCHMARK);
async function createMarkdownProcessor(opts) {
const {
syntaxHighlight = markdownConfigDefaults.syntaxHighlight,
shikiConfig = markdownConfigDefaults.shikiConfig,
remarkPlugins = markdownConfigDefaults.remarkPlugins,
rehypePlugins = markdownConfigDefaults.rehypePlugins,
remarkRehype: remarkRehypeOptions = markdownConfigDefaults.remarkRehype,
gfm = markdownConfigDefaults.gfm,
smartypants = markdownConfigDefaults.smartypants,
experimentalHeadingIdCompat = false
} = opts ?? {};
const loadedRemarkPlugins = await Promise.all(loadPlugins(remarkPlugins));
const loadedRehypePlugins = await Promise.all(loadPlugins(rehypePlugins));
const parser = unified().use(remarkParse);
if (!isPerformanceBenchmark) {
if (gfm) {
parser.use(remarkGfm);
}
if (smartypants) {
parser.use(remarkSmartypants);
}
}
for (const [plugin, pluginOpts] of loadedRemarkPlugins) {
parser.use(plugin, pluginOpts);
}
if (!isPerformanceBenchmark) {
parser.use(remarkCollectImages, opts?.image);
}
parser.use(remarkRehype, {
allowDangerousHtml: true,
passThrough: [],
...remarkRehypeOptions
});
if (syntaxHighlight && !isPerformanceBenchmark) {
const syntaxHighlightType = typeof syntaxHighlight === "string" ? syntaxHighlight : syntaxHighlight?.type;
const excludeLangs = typeof syntaxHighlight === "object" ? syntaxHighlight?.excludeLangs : void 0;
if (syntaxHighlightType === "shiki") {
parser.use(rehypeShiki, shikiConfig, excludeLangs);
} else if (syntaxHighlightType === "prism") {
parser.use(rehypePrism, excludeLangs);
}
}
for (const [plugin, pluginOpts] of loadedRehypePlugins) {
parser.use(plugin, pluginOpts);
}
parser.use(rehypeImages);
if (!isPerformanceBenchmark) {
parser.use(rehypeHeadingIds, { experimentalHeadingIdCompat });
}
parser.use(rehypeRaw).use(rehypeStringify, { allowDangerousHtml: true });
return {
async render(content, renderOpts) {
const vfile = new VFile({
value: content,
path: renderOpts?.fileURL,
data: {
astro: {
frontmatter: renderOpts?.frontmatter ?? {}
}
}
});
const result = await parser.process(vfile).catch((err) => {
err = prefixError(err, `Failed to parse Markdown file "${vfile.path}"`);
console.error(err);
throw err;
});
return {
code: String(result.value),
metadata: {
headings: result.data.astro?.headings ?? [],
localImagePaths: result.data.astro?.localImagePaths ?? [],
remoteImagePaths: result.data.astro?.remoteImagePaths ?? [],
frontmatter: result.data.astro?.frontmatter ?? {}
}
};
}
};
}
function prefixError(err, prefix) {
if (err?.message) {
try {
err.message = `${prefix}:
${err.message}`;
return err;
} catch {
}
}
const wrappedError = new Error(`${prefix}${err ? `: ${err}` : ""}`);
try {
wrappedError.stack = err.stack;
wrappedError.cause = err;
} catch {
}
return wrappedError;
}
export {
createMarkdownProcessor,
createShikiHighlighter,
extractFrontmatter,
isFrontmatterValid,
markdownConfigDefaults,
parseFrontmatter,
rehypeHeadingIds2 as rehypeHeadingIds,
rehypePrism2 as rehypePrism,
rehypeShiki2 as rehypeShiki,
remarkCollectImages2 as remarkCollectImages,
syntaxHighlightDefaults
};


@@ -0,0 +1,2 @@
import type * as unified from 'unified';
export declare function loadPlugins(items: (string | [string, any] | unified.Plugin<any[], any> | [unified.Plugin<any[], any>, any])[]): Promise<[unified.Plugin, any?]>[];


@@ -0,0 +1,22 @@
import { importPlugin as _importPlugin } from "#import-plugin";
async function importPlugin(p) {
if (typeof p === "string") {
return await _importPlugin(p);
} else {
return p;
}
}
function loadPlugins(items) {
return items.map((p) => {
return new Promise((resolve, reject) => {
if (Array.isArray(p)) {
const [plugin, opts] = p;
return importPlugin(plugin).then((m) => resolve([m, opts])).catch((e) => reject(e));
}
return importPlugin(p).then((m) => resolve([m])).catch((e) => reject(e));
});
});
}
export {
loadPlugins
};


@@ -0,0 +1,11 @@
import type { RehypePlugin } from './types.js';
/**
* Rehype plugin that adds `id` attributes to headings based on their text content.
*
* @param options Optional configuration object for the plugin.
*
* @see https://docs.astro.build/en/guides/markdown-content/#heading-ids-and-plugins
*/
export declare function rehypeHeadingIds({ experimentalHeadingIdCompat, }?: {
experimentalHeadingIdCompat?: boolean;
}): ReturnType<RehypePlugin>;
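
A sketch of running the plugin in a plain unified pipeline (outside createMarkdownProcessor, which already wires it in); all packages used here appear as dependencies of this module, and the sample heading is made up.

import { unified } from "unified";
import remarkParse from "remark-parse";
import remarkRehype from "remark-rehype";
import rehypeStringify from "rehype-stringify";
import { rehypeHeadingIds } from "@astrojs/markdown-remark";

// The plugin writes ids onto the <h*> elements and collects them
// on file.data.astro.headings.
const file = await unified()
  .use(remarkParse)
  .use(remarkRehype)
  .use(rehypeHeadingIds)
  .use(rehypeStringify)
  .process("## Getting started");

console.log(String(file));              // <h2 id="getting-started">Getting started</h2>
console.log(file.data.astro?.headings); // [{ depth: 2, slug: "getting-started", text: "Getting started" }]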


@@ -0,0 +1,97 @@
import Slugger from "github-slugger";
import { visit } from "unist-util-visit";
const rawNodeTypes = /* @__PURE__ */ new Set(["text", "raw", "mdxTextExpression"]);
const codeTagNames = /* @__PURE__ */ new Set(["code", "pre"]);
function rehypeHeadingIds({
experimentalHeadingIdCompat
} = {}) {
return function(tree, file) {
const headings = [];
const frontmatter = file.data.astro?.frontmatter;
const slugger = new Slugger();
const isMDX = isMDXFile(file);
visit(tree, (node) => {
if (node.type !== "element") return;
const { tagName } = node;
if (tagName[0] !== "h") return;
const [, level] = /h([0-6])/.exec(tagName) ?? [];
if (!level) return;
const depth = Number.parseInt(level);
let text = "";
visit(node, (child, __, parent) => {
if (child.type === "element" || parent == null) {
return;
}
if (child.type === "raw") {
if (/^\n?<.*>\n?$/.test(child.value)) {
return;
}
}
if (rawNodeTypes.has(child.type)) {
if (isMDX || codeTagNames.has(parent.tagName)) {
let value = child.value;
if (isMdxTextExpression(child) && frontmatter) {
const frontmatterPath = getMdxFrontmatterVariablePath(child);
if (Array.isArray(frontmatterPath) && frontmatterPath.length > 0) {
const frontmatterValue = getMdxFrontmatterVariableValue(
frontmatter,
frontmatterPath
);
if (typeof frontmatterValue === "string") {
value = frontmatterValue;
}
}
}
text += value;
} else {
text += child.value.replace(/\{/g, "${");
}
}
});
node.properties = node.properties || {};
if (typeof node.properties.id !== "string") {
let slug = slugger.slug(text);
if (!experimentalHeadingIdCompat) {
if (slug.endsWith("-")) slug = slug.slice(0, -1);
}
node.properties.id = slug;
}
headings.push({ depth, slug: node.properties.id, text });
});
file.data.astro ??= {};
file.data.astro.headings = headings;
};
}
function isMDXFile(file) {
return Boolean(file.history[0]?.endsWith(".mdx"));
}
function getMdxFrontmatterVariablePath(node) {
if (!node.data?.estree || node.data.estree.body.length !== 1) return new Error();
const statement = node.data.estree.body[0];
if (statement?.type !== "ExpressionStatement" || statement.expression.type !== "MemberExpression")
return new Error();
let expression = statement.expression;
const expressionPath = [];
while (expression.type === "MemberExpression" && expression.property.type === (expression.computed ? "Literal" : "Identifier")) {
expressionPath.push(
expression.property.type === "Literal" ? String(expression.property.value) : expression.property.name
);
expression = expression.object;
}
if (expression.type !== "Identifier" || expression.name !== "frontmatter") return new Error();
return expressionPath.reverse();
}
function getMdxFrontmatterVariableValue(frontmatter, path) {
let value = frontmatter;
for (const key of path) {
if (!value[key]) return void 0;
value = value[key];
}
return value;
}
function isMdxTextExpression(node) {
return node.type === "mdxTextExpression";
}
export {
rehypeHeadingIds
};


@@ -0,0 +1,3 @@
import type { Root } from 'hast';
import type { VFile } from 'vfile';
export declare function rehypeImages(): (tree: Root, file: VFile) => void;


@@ -0,0 +1,33 @@
import { visit } from "unist-util-visit";
function rehypeImages() {
return function(tree, file) {
if (!file.data.astro?.localImagePaths?.length && !file.data.astro?.remoteImagePaths?.length) {
return;
}
const imageOccurrenceMap = /* @__PURE__ */ new Map();
visit(tree, "element", (node) => {
if (node.tagName !== "img") return;
if (typeof node.properties?.src !== "string") return;
const src = decodeURI(node.properties.src);
let newProperties;
if (file.data.astro?.localImagePaths?.includes(src)) {
newProperties = { ...node.properties, src };
} else if (file.data.astro?.remoteImagePaths?.includes(src)) {
newProperties = {
// By default, markdown images won't have width and height set. However, just in case another user plugin does set these, we should respect them.
inferSize: "width" in node.properties && "height" in node.properties ? void 0 : true,
...node.properties,
src
};
} else {
return;
}
const index = imageOccurrenceMap.get(node.properties.src) || 0;
imageOccurrenceMap.set(node.properties.src, index + 1);
node.properties = { __ASTRO_IMAGE_: JSON.stringify({ ...newProperties, index }) };
});
};
}
export {
rehypeImages
};


@@ -0,0 +1,3 @@
import type { Root } from 'hast';
import type { Plugin } from 'unified';
export declare const rehypePrism: Plugin<[string[]?], Root>;


@@ -0,0 +1,19 @@
import { runHighlighterWithAstro } from "@astrojs/prism/dist/highlighter";
import { highlightCodeBlocks } from "./highlight.js";
const rehypePrism = (excludeLangs) => {
return async (tree) => {
await highlightCodeBlocks(
tree,
(code, language) => {
let { html, classLanguage } = runHighlighterWithAstro(language, code);
return Promise.resolve(
`<pre class="${classLanguage}" data-language="${language}"><code is:raw class="${classLanguage}">${html}</code></pre>`
);
},
excludeLangs
);
};
};
export {
rehypePrism
};


@@ -0,0 +1,4 @@
import type { Root } from 'hast';
import type { Plugin } from 'unified';
import type { ShikiConfig } from './types.js';
export declare const rehypeShiki: Plugin<[ShikiConfig, string[]?], Root>;


@@ -0,0 +1,29 @@
import { highlightCodeBlocks } from "./highlight.js";
import { createShikiHighlighter } from "./shiki.js";
const rehypeShiki = (config, excludeLangs) => {
let highlighterAsync;
return async (tree) => {
highlighterAsync ??= createShikiHighlighter({
langs: config?.langs,
theme: config?.theme,
themes: config?.themes,
langAlias: config?.langAlias
});
const highlighter = await highlighterAsync;
await highlightCodeBlocks(
tree,
(code, language, options) => {
return highlighter.codeToHast(code, language, {
meta: options?.meta,
wrap: config?.wrap,
defaultColor: config?.defaultColor,
transformers: config?.transformers
});
},
excludeLangs
);
};
};
export {
rehypeShiki
};


@@ -0,0 +1,4 @@
import type { Root } from 'mdast';
import type { VFile } from 'vfile';
import type { AstroMarkdownProcessorOptions } from './types.js';
export declare function remarkCollectImages(opts: AstroMarkdownProcessorOptions['image']): (tree: Root, vfile: VFile) => void;


@@ -0,0 +1,38 @@
import { isRemoteAllowed } from "@astrojs/internal-helpers/remote";
import { definitions } from "mdast-util-definitions";
import { visit } from "unist-util-visit";
function remarkCollectImages(opts) {
const domains = opts?.domains ?? [];
const remotePatterns = opts?.remotePatterns ?? [];
return function(tree, vfile) {
if (typeof vfile?.path !== "string") return;
const definition = definitions(tree);
const localImagePaths = /* @__PURE__ */ new Set();
const remoteImagePaths = /* @__PURE__ */ new Set();
visit(tree, (node) => {
let url;
if (node.type === "image") {
url = decodeURI(node.url);
} else if (node.type === "imageReference") {
const imageDefinition = definition(node.identifier);
if (imageDefinition) {
url = decodeURI(imageDefinition.url);
}
}
if (!url) return;
if (URL.canParse(url)) {
if (isRemoteAllowed(url, { domains, remotePatterns })) {
remoteImagePaths.add(url);
}
} else if (!url.startsWith("/")) {
localImagePaths.add(url);
}
});
vfile.data.astro ??= {};
vfile.data.astro.localImagePaths = Array.from(localImagePaths);
vfile.data.astro.remoteImagePaths = Array.from(remoteImagePaths);
};
}
export {
remarkCollectImages
};

node_modules/@astrojs/markdown-remark/dist/shiki.d.ts

@@ -0,0 +1,43 @@
import type { Root } from 'hast';
import { type HighlighterCoreOptions, type LanguageRegistration, type ShikiTransformer, type ThemeRegistration, type ThemeRegistrationRaw } from 'shiki';
import type { ThemePresets } from './types.js';
export interface ShikiHighlighter {
codeToHast(code: string, lang?: string, options?: ShikiHighlighterHighlightOptions): Promise<Root>;
codeToHtml(code: string, lang?: string, options?: ShikiHighlighterHighlightOptions): Promise<string>;
}
export interface CreateShikiHighlighterOptions {
langs?: LanguageRegistration[];
theme?: ThemePresets | ThemeRegistration | ThemeRegistrationRaw;
themes?: Record<string, ThemePresets | ThemeRegistration | ThemeRegistrationRaw>;
langAlias?: HighlighterCoreOptions['langAlias'];
}
export interface ShikiHighlighterHighlightOptions {
/**
* Generate inline code element only, without the pre element wrapper.
*/
inline?: boolean;
/**
* Enable word wrapping.
* - true: enabled.
* - false: disabled.
* - null: All overflow styling removed. Code will overflow the element by default.
*/
wrap?: boolean | null;
/**
* Chooses which of the themes defined in the `themes` option is used as the default styling theme.
*/
defaultColor?: 'light' | 'dark' | string | false;
/**
* Shiki transformers to customize the generated HTML by manipulating the hast tree.
*/
transformers?: ShikiTransformer[];
/**
* Additional attributes to be added to the root code block element.
*/
attributes?: Record<string, string>;
/**
* Raw `meta` information to be used by Shiki transformers.
*/
meta?: string;
}
export declare function createShikiHighlighter({ langs, theme, themes, langAlias, }?: CreateShikiHighlighterOptions): Promise<ShikiHighlighter>;
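
A sketch of using the highlighter directly; the theme, langAlias, attributes, and meta values are made-up illustrations, and the rendered output noted in the comment is approximate.

import { createShikiHighlighter } from "@astrojs/markdown-remark";

const highlighter = await createShikiHighlighter({
  theme: "css-variables",            // colors come from --astro-code-* CSS variables
  langAlias: { mjs: "javascript" },
});

const html = await highlighter.codeToHtml("const x = 1;", "mjs", {
  wrap: true,                        // adds overflow-x + pre-wrap styles to the <pre>
  attributes: { "data-example": "yes" },
  meta: "{1}",                       // raw meta string, forwarded to transformers as `__raw`
});
console.log(html); // roughly: <pre class="astro-code ..." data-language="mjs" data-example="yes" ...>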

node_modules/@astrojs/markdown-remark/dist/shiki.js

@@ -0,0 +1,109 @@
import {
createCssVariablesTheme,
createHighlighter,
isSpecialLang
} from "shiki";
let _cssVariablesTheme;
const cssVariablesTheme = () => _cssVariablesTheme ?? (_cssVariablesTheme = createCssVariablesTheme({
variablePrefix: "--astro-code-"
}));
async function createShikiHighlighter({
langs = [],
theme = "github-dark",
themes = {},
langAlias = {}
} = {}) {
theme = theme === "css-variables" ? cssVariablesTheme() : theme;
const highlighter = await createHighlighter({
langs: ["plaintext", ...langs],
langAlias,
themes: Object.values(themes).length ? Object.values(themes) : [theme]
});
async function highlight(code, lang = "plaintext", options, to) {
const resolvedLang = langAlias[lang] ?? lang;
const loadedLanguages = highlighter.getLoadedLanguages();
if (!isSpecialLang(lang) && !loadedLanguages.includes(resolvedLang)) {
try {
await highlighter.loadLanguage(resolvedLang);
} catch (_err) {
const langStr = lang === resolvedLang ? `"${lang}"` : `"${lang}" (aliased to "${resolvedLang}")`;
console.warn(`[Shiki] The language ${langStr} doesn't exist, falling back to "plaintext".`);
lang = "plaintext";
}
}
code = code.replace(/(?:\r\n|\r|\n)$/, "");
const themeOptions = Object.values(themes).length ? { themes } : { theme };
const inline = options?.inline ?? false;
return highlighter[to === "html" ? "codeToHtml" : "codeToHast"](code, {
...themeOptions,
defaultColor: options.defaultColor,
lang,
// NOTE: while we can spread `options.attributes` here so that Shiki can auto-serialize this as rendered
// attributes on the top-level tag, it's not clear whether it is fine to pass all attributes as meta, as
// they're technically not meta, nor parsed from Shiki's `parseMetaString` API.
meta: options?.meta ? { __raw: options?.meta } : void 0,
transformers: [
{
pre(node) {
if (inline) {
node.tagName = "code";
}
const {
class: attributesClass,
style: attributesStyle,
...rest
} = options?.attributes ?? {};
Object.assign(node.properties, rest);
const classValue = (normalizePropAsString(node.properties.class) ?? "") + (attributesClass ? ` ${attributesClass}` : "");
const styleValue = (normalizePropAsString(node.properties.style) ?? "") + (attributesStyle ? `; ${attributesStyle}` : "");
node.properties.class = classValue.replace(/shiki/g, "astro-code");
node.properties.dataLanguage = lang;
if (options.wrap === false || options.wrap === void 0) {
node.properties.style = styleValue + "; overflow-x: auto;";
} else if (options.wrap === true) {
node.properties.style = styleValue + "; overflow-x: auto; white-space: pre-wrap; word-wrap: break-word;";
}
},
line(node) {
if (resolvedLang === "diff") {
const innerSpanNode = node.children[0];
const innerSpanTextNode = innerSpanNode?.type === "element" && innerSpanNode.children?.[0];
if (innerSpanTextNode && innerSpanTextNode.type === "text") {
const start = innerSpanTextNode.value[0];
if (start === "+" || start === "-") {
innerSpanTextNode.value = innerSpanTextNode.value.slice(1);
innerSpanNode.children.unshift({
type: "element",
tagName: "span",
properties: { style: "user-select: none;" },
children: [{ type: "text", value: start }]
});
}
}
}
},
code(node) {
if (inline) {
return node.children[0];
}
}
},
...options.transformers ?? []
]
});
}
return {
codeToHast(code, lang, options = {}) {
return highlight(code, lang, options, "hast");
},
codeToHtml(code, lang, options = {}) {
return highlight(code, lang, options, "html");
}
};
}
function normalizePropAsString(value) {
return Array.isArray(value) ? value.join(" ") : value;
}
export {
createShikiHighlighter
};

node_modules/@astrojs/markdown-remark/dist/types.d.ts

@@ -0,0 +1,74 @@
import type { RemotePattern } from '@astrojs/internal-helpers/remote';
import type * as hast from 'hast';
import type * as mdast from 'mdast';
import type { Options as RemarkRehypeOptions } from 'remark-rehype';
import type { BuiltinTheme } from 'shiki';
import type * as unified from 'unified';
import type { CreateShikiHighlighterOptions, ShikiHighlighterHighlightOptions } from './shiki.js';
export type { Node } from 'unist';
declare module 'vfile' {
interface DataMap {
astro: {
headings?: MarkdownHeading[];
localImagePaths?: string[];
remoteImagePaths?: string[];
frontmatter?: Record<string, any>;
};
}
}
export type RemarkPlugin<PluginParameters extends any[] = any[]> = unified.Plugin<PluginParameters, mdast.Root>;
export type RemarkPlugins = (string | [string, any] | RemarkPlugin | [RemarkPlugin, any])[];
export type RehypePlugin<PluginParameters extends any[] = any[]> = unified.Plugin<PluginParameters, hast.Root>;
export type RehypePlugins = (string | [string, any] | RehypePlugin | [RehypePlugin, any])[];
export type RemarkRehype = RemarkRehypeOptions;
export type ThemePresets = BuiltinTheme | 'css-variables';
export type SyntaxHighlightConfigType = 'shiki' | 'prism';
export interface SyntaxHighlightConfig {
type: SyntaxHighlightConfigType;
excludeLangs?: string[];
}
export interface ShikiConfig extends Pick<CreateShikiHighlighterOptions, 'langs' | 'theme' | 'themes' | 'langAlias'>, Pick<ShikiHighlighterHighlightOptions, 'defaultColor' | 'wrap' | 'transformers'> {
}
/**
* Configuration options that end up in the markdown section of AstroConfig
*/
export interface AstroMarkdownOptions {
syntaxHighlight?: SyntaxHighlightConfig | SyntaxHighlightConfigType | false;
shikiConfig?: ShikiConfig;
remarkPlugins?: RemarkPlugins;
rehypePlugins?: RehypePlugins;
remarkRehype?: RemarkRehype;
gfm?: boolean;
smartypants?: boolean;
}
/**
* Extra configuration options from other parts of AstroConfig that get injected into this plugin
*/
export interface AstroMarkdownProcessorOptions extends AstroMarkdownOptions {
image?: {
domains?: string[];
remotePatterns?: RemotePattern[];
};
experimentalHeadingIdCompat?: boolean;
}
export interface MarkdownProcessor {
render: (content: string, opts?: MarkdownProcessorRenderOptions) => Promise<MarkdownProcessorRenderResult>;
}
export interface MarkdownProcessorRenderOptions {
/** Used for frontmatter injection plugins */
frontmatter?: Record<string, any>;
}
export interface MarkdownProcessorRenderResult {
code: string;
metadata: {
headings: MarkdownHeading[];
localImagePaths: string[];
remoteImagePaths: string[];
frontmatter: Record<string, any>;
};
}
export interface MarkdownHeading {
depth: number;
slug: string;
text: string;
}
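
A sketch of an options object for these types; the plugin names, excluded languages, domains, and option values are hypothetical, and RemarkPlugin is re-exported through ./types.js.

import type { AstroMarkdownProcessorOptions, RemarkPlugin } from "@astrojs/markdown-remark";

// A no-op remark plugin, only to illustrate the [plugin, options] tuple form.
const noopPlugin: RemarkPlugin = () => (tree) => tree;

const options: AstroMarkdownProcessorOptions = {
  syntaxHighlight: { type: "shiki", excludeLangs: ["math", "mermaid"] },
  shikiConfig: { theme: "github-dark", wrap: false, langAlias: { mjs: "js" } },
  remarkPlugins: ["remark-toc", [noopPlugin, { depth: 3 }]],
  rehypePlugins: [],
  remarkRehype: { footnoteLabel: "Footnotes" },
  gfm: true,
  smartypants: true,
  image: { domains: ["images.example.com"], remotePatterns: [] },
  experimentalHeadingIdCompat: false,
};
// `options` could then be passed to createMarkdownProcessor(options).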

node_modules/@astrojs/markdown-remark/dist/types.js